From 616c406ba86e5c7f4fca8246ac4b25f30d210abd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 13:46:33 +0000 Subject: [PATCH 001/279] move rocketnode into plugin --- compose.yaml | 30 --- .../node_task/node_task.py} | 180 ++++++++---------- 2 files changed, 80 insertions(+), 130 deletions(-) rename rocketwatch/{rocketnode.py => plugins/node_task/node_task.py} (84%) diff --git a/compose.yaml b/compose.yaml index bc81a34d..2edd5fcd 100644 --- a/compose.yaml +++ b/compose.yaml @@ -13,29 +13,6 @@ services: options: max-size: "100m" max-file: "10" - # required to access my node containers - networks: - - ethereum - labels: - com.centurylinklabs.watchtower.enable: true - - rocketnode: - image: invisiblesymbol/rocketwatch - build: ./rocketwatch - volumes: - - ./rocketwatch/main.cfg:/app/main.cfg - restart: unless-stopped - command: ["python", "rocketnode.py"] - depends_on: - - mongodb - logging: - driver: "json-file" - options: - max-size: "100m" - max-file: "10" - # required to access node containers - networks: - - ethereum labels: com.centurylinklabs.watchtower.enable: true @@ -44,15 +21,12 @@ services: volumes: - ./mongodb:/data/db restart: unless-stopped - # below prevents unnecessary data from being collected command: --setParameter diagnosticDataCollectionEnabled=false logging: driver: "json-file" options: max-size: "100m" max-file: "10" - networks: - - ethereum ports: - "127.0.0.1:27017:27017" @@ -62,7 +36,3 @@ services: - /var/run/docker.sock:/var/run/docker.sock command: --interval 30 --label-enable - -networks: - ethereum: - name: ethereum diff --git a/rocketwatch/rocketnode.py b/rocketwatch/plugins/node_task/node_task.py similarity index 84% rename from rocketwatch/rocketnode.py rename to rocketwatch/plugins/node_task/node_task.py index f5e008fa..0d95c4cb 100644 --- a/rocketwatch/rocketnode.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -1,23 +1,27 @@ -import asyncio import logging import time import 
pymongo -import schedule from multicall import Call from cronitor import Monitor from pymongo import UpdateOne, UpdateMany +from discord.ext import tasks, commands + +from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block from utils.rocketpool import rp from utils.shared_w3 import bacon -from utils.time_debug import timerun +from utils.time_debug import timerun, timerun_async +from utils.event_logs import get_logs + -log = logging.getLogger("rocketnode") +log = logging.getLogger("node_task") log.setLevel(cfg["log_level"]) + def safe_to_float(_, num: int): try: return solidity.to_float(num) @@ -43,15 +47,45 @@ def is_true(_, b): return b is True -class Task: - def __init__(self): - self.event_loop = None +class NodeTask(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch - self.monitor = Monitor('rocketnode-task', api_key=cfg["other.secrets.cronitor"]) - self.batch_size = 10_000 + self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) + self.batch_size = 1000 + self.loop.start() + + def cog_unload(self): + self.loop.cancel() + + @tasks.loop(seconds=solidity.BEACON_EPOCH_LENGTH) + async def loop(self): + p_id = time.time() + self.monitor.ping(state="run", series=p_id) + try: + log.debug("starting node task") + self.check_indexes() + await self.add_untracked_minipools() + await self.add_static_data_to_minipools() + await self.update_dynamic_minipool_metadata() + self.add_static_deposit_data_to_minipools() + self.add_static_beacon_data_to_minipools() + self.update_dynamic_minipool_beacon_metadata() + await self.add_untracked_node_operators() + await self.add_static_data_to_node_operators() + await self.update_dynamic_node_operator_metadata() + log.debug("node task finished") + self.monitor.ping(state="complete", series=p_id) + except 
Exception as err: + await self.bot.report_error(err) + self.monitor.ping(state="fail", series=p_id) + + @loop.before_loop + async def on_ready(self): + await self.bot.wait_until_ready() - @timerun - def add_untracked_minipools(self): + @timerun_async + async def add_untracked_minipools(self): # rocketMinipoolManager.getMinipoolAt(i) returns the address of the minipool at index i mm = rp.get_contract_by_name("rocketMinipoolManager") latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 @@ -65,11 +99,11 @@ def add_untracked_minipools(self): log.debug("No new minipools") return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") - # batch into 10k minipools at a time, between latest_id and minipool_count + # batch into self.batch_size minipools at a time, between latest_id and minipool_count for i in range(latest_db + 1, latest_rp + 1, self.batch_size): i_end = min(i + self.batch_size, latest_rp + 1) log.debug(f"Getting untracked minipools ({i} to {i_end})") - data |= rp.multicall2_do_call([ + data |= await rp.multicall2([ Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) for i in range(i, i_end) ]) @@ -80,8 +114,8 @@ def add_untracked_minipools(self): ]) log.debug("New minipools inserted") - @timerun - def add_static_data_to_minipools(self): + @timerun_async + async def add_static_data_to_minipools(self): m = rp.assemble_contract("rocketMinipool") mm = rp.get_contract_by_name("rocketMinipoolManager") lambs = [ @@ -100,7 +134,7 @@ def add_static_data_to_minipools(self): for i in range(0, len(minipool_addresses), batch_size): i_end = min(i + batch_size, len(minipool_addresses)) log.debug(f"Getting minipool static data ({i} to {i_end})") - res = rp.multicall2_do_call([ + res = await rp.multicall2([ Call(*lamb(a)) for a in minipool_addresses[i:i_end] for lamb in lambs @@ -121,26 +155,21 @@ def add_static_data_to_minipools(self): self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools 
updated with static data") - @timerun - def update_dynamic_minipool_metadata(self): + @timerun_async + async def update_dynamic_minipool_metadata(self): m = rp.assemble_contract("rocketMinipool") mc = rp.get_contract_by_name("multicall3") lambs = [ lambda a: (a, rp.seth_sig(m.abi, "getStatus"), [((a, "status"), safe_state_to_str)]), lambda a: (a, rp.seth_sig(m.abi, "getStatusTime"), [((a, "status_time"), None)]), lambda a: (a, rp.seth_sig(m.abi, "getVacant"), [((a, "vacant"), is_true)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeDepositBalance"), - [((a, "node_deposit_balance"), safe_to_float)]), - lambda a: ( - a, rp.seth_sig(m.abi, "getNodeRefundBalance"), - [((a, "node_refund_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getPreMigrationBalance"), - [((a, "pre_migration_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getNodeDepositBalance"), [((a, "node_deposit_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getNodeRefundBalance"), [((a, "node_refund_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getPreMigrationBalance"), [((a, "pre_migration_balance"), safe_to_float)]), lambda a: (a, rp.seth_sig(m.abi, "getNodeFee"), [((a, "node_fee"), safe_to_float)]), lambda a: (a, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((a, "effective_delegate"), None)]), lambda a: (a, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((a, "use_latest_delegate"), None)]), - lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], - [((a, "execution_balance"), safe_to_float)]) + lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) ] # get all minipool addresses from db minipool_addresses = self.db.minipools_new.distinct("address") @@ -150,7 +179,7 @@ def update_dynamic_minipool_metadata(self): for i in range(0, len(minipool_addresses), batch_size): i_end = min(i + batch_size, len(minipool_addresses)) log.debug(f"Getting minipool metadata ({i} to {i_end})") 
- res = rp.multicall2_do_call([ + res = await rp.multicall2([ Call(*lamb(a)) for a in minipool_addresses[i:i_end] for lamb in lambs @@ -190,18 +219,18 @@ def add_static_deposit_data_to_minipools(self): nd = rp.get_contract_by_name("rocketNodeDeposit") mm = rp.get_contract_by_name("rocketMinipoolManager") data = {} - batch_size = 1000 - for i in range(0, len(minipools), batch_size): - i_end = min(i + batch_size, len(minipools)) + for i in range(0, len(minipools), self.batch_size): + i_end = min(i + self.batch_size, len(minipools)) # turn status time of first and last minipool into blocks - block_start = get_block_by_timestamp(minipools[i]["status_time"])[0] - 1 - block_end = get_block_by_timestamp(minipools[i_end - 1]["status_time"])[0] + block_start = ts_to_block(minipools[i]["status_time"]) - 1 + block_end = ts_to_block(minipools[i_end - 1]["status_time"]) + 1 a = [m["address"] for m in minipools[i:i_end]] log.debug(f"Getting minipool deposit data ({i} to {i_end})") - f_deposits = nd.events.DepositReceived.createFilter(fromBlock=block_start, toBlock=block_end) - events = f_deposits.get_all_entries() - f_creations = mm.events.MinipoolCreated.createFilter(fromBlock=block_start, toBlock=block_end) - events.extend(f_creations.get_all_entries()) + + f_deposits = get_logs(nd.events.DepositReceived, block_start, block_end) + f_creations = get_logs(mm.events.MinipoolCreated, block_start, block_end) + events = f_deposits + f_creations + events = sorted(events, key=lambda x: (x['blockNumber'], x['transactionIndex'], x['logIndex'] *1e-8), reverse=True) # map to pairs of 2 prepared_events = [] @@ -243,7 +272,6 @@ def add_static_deposit_data_to_minipools(self): log.debug("Minipools updated with static deposit data") - @timerun def add_static_beacon_data_to_minipools(self): # get all public keys from db where no validator_index is set @@ -253,11 +281,10 @@ def add_static_beacon_data_to_minipools(self): log.debug("No minipools need to be updated with static beacon data") return 
# we need to do smaller bulks as the pubkey is qutie long and we dont want to make the query url too long - batch_size = 1000 data = {} # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] - for i in range(0, len(public_keys), batch_size): - i_end = min(i + batch_size, len(public_keys)) + for i in range(0, len(public_keys), self.batch_size): + i_end = min(i + self.batch_size, len(public_keys)) log.debug(f"Getting beacon data for minipools ({i} to {i_end})") # get beacon data for public keys beacon_data = bacon.get_validators("head", ids=public_keys[i:i_end])["data"] @@ -332,8 +359,8 @@ def check_indexes(self): self.db.proposals.create_index("slot", unique=True) log.debug("indexes checked") - @timerun - def add_untracked_node_operators(self): + @timerun_async + async def add_untracked_node_operators(self): # rocketNodeManager.getNodeCount(i) returns the address of the node at index i nm = rp.get_contract_by_name("rocketNodeManager") latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 @@ -350,7 +377,7 @@ def add_untracked_node_operators(self): for i in range(latest_db + 1, latest_rp + 1, self.batch_size): i_end = min(i + self.batch_size, latest_rp + 1) log.debug(f"Getting untracked node ({i} to {i_end})") - data |= rp.multicall2_do_call([ + data |= await rp.multicall2([ Call(nm.address, [rp.seth_sig(nm.abi, "getNodeAt"), i], [(i, None)]) for i in range(i, i_end) ]) @@ -361,8 +388,8 @@ def add_untracked_node_operators(self): ]) log.debug("New nodes inserted") - @timerun - def add_static_data_to_node_operators(self): + @timerun_async + async def add_static_data_to_node_operators(self): ndf = rp.get_contract_by_name("rocketNodeDistributorFactory") lambs = [ lambda a: (ndf.address, [rp.seth_sig(ndf.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), @@ -379,7 +406,7 @@ def add_static_data_to_node_operators(self): for i in range(0, len(node_addresses), batch_size): i_end = min(i + batch_size, len(node_addresses)) 
log.debug(f"Getting node operators static data ({i} to {i_end})") - res = rp.multicall2_do_call([ + res = await rp.multicall2([ Call(*lamb(a)) for a in node_addresses[i:i_end] for lamb in lambs @@ -400,8 +427,8 @@ def add_static_data_to_node_operators(self): self.db.node_operators_new.bulk_write(bulk, ordered=False) log.debug("Node operators updated with static data") - @timerun - def update_dynamic_node_operator_metadata(self): + @timerun_async + async def update_dynamic_node_operator_metadata(self): ndf = rp.get_contract_by_name("rocketNodeDistributorFactory") nd = rp.get_contract_by_name("rocketNodeDeposit") nm = rp.get_contract_by_name("rocketNodeManager") @@ -445,7 +472,7 @@ def update_dynamic_node_operator_metadata(self): for i in range(0, len(nodes), batch_size): i_end = min(i + batch_size, len(nodes)) log.debug(f"Getting node operator metadata ({i} to {i_end})") - res = rp.multicall2_do_call([ + res = await rp.multicall2([ Call(*lamb(n)) for n in nodes[i:i_end] for lamb in lambs @@ -466,53 +493,6 @@ def update_dynamic_node_operator_metadata(self): ] self.db.node_operators_new.bulk_write(bulk, ordered=False) log.debug("Node operators updated with metadata") - return - - def ensure_event_loop(self): - # the bellow prevents multicall from breaking - if not self.event_loop: - self.event_loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.event_loop) - - @timerun - def task(self): - p_id = time.time() - self.monitor.ping(state='run', series=p_id) - try: - self._run() - self.monitor.ping(state='complete', series=p_id) - except Exception as err: - log.exception(err) - self.monitor.ping(state='fail', series=p_id) - - @timerun - def _run(self): - log.debug("starting rocketnode task") - self.check_indexes() - self.ensure_event_loop() - self.add_untracked_minipools() - self.add_static_data_to_minipools() - self.update_dynamic_minipool_metadata() - self.add_static_deposit_data_to_minipools() - self.add_static_beacon_data_to_minipools() - 
self.update_dynamic_minipool_beacon_metadata() - self.add_untracked_node_operators() - self.add_static_data_to_node_operators() - self.update_dynamic_node_operator_metadata() - log.debug("rocketnode task finished") - - -logging.basicConfig(format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s") -log = logging.getLogger("rocketnode") -log.setLevel(cfg["log_level"]) -logging.getLogger().setLevel("INFO") - -t = Task() - -schedule.every(6.4).minutes.do(t.task) -# run once on startup -t.task() -while True: - schedule.run_pending() - time.sleep(1) +async def setup(self): + await self.add_cog(NodeTask(self)) From 946ac97cbcec3f0b10e54808d2c6b4f2d5b6a803 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 13:47:22 +0000 Subject: [PATCH 002/279] event logs utility --- rocketwatch/main.cfg.sample | 11 +++-- .../plugins/constellation/constellation.py | 45 +++++++------------ rocketwatch/utils/event_logs.py | 40 +++++++++++++++++ 3 files changed, 63 insertions(+), 33 deletions(-) create mode 100644 rocketwatch/utils/event_logs.py diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index 8cfe1522..e65da708 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -34,10 +34,13 @@ mongodb: { rocketpool: { chain: "mainnet" support: { - role_ids: [708268619867619338] - server_id: 544295541849456651 - channel_id: 912434217118498876 - moderator_id: 118923557265735680 + role_ids: [] + server_id: -1 + channel_id: -1 + moderator_id: -1 + } + dm_warning: { + channels: [] } dao_multisigs: [ "0x778c08fC151D7AB10042334B6A0929D4fa2983cA", diff --git a/rocketwatch/plugins/constellation/constellation.py b/rocketwatch/plugins/constellation/constellation.py index b239c412..761b2e1d 100644 --- a/rocketwatch/plugins/constellation/constellation.py +++ b/rocketwatch/plugins/constellation/constellation.py @@ -1,7 +1,9 @@ import logging import math -from discord.ext.commands 
import Cog, Context, hybrid_command +from discord import Interaction +from discord.app_commands import command +from discord.ext.commands import Cog from motor.motor_asyncio import AsyncIOMotorClient from rocketwatch import RocketWatch @@ -11,6 +13,7 @@ from utils.rocketpool import rp from utils.visibility import is_hidden_weak from utils.embeds import Embed, el_explorer_url +from utils.event_logs import get_logs cog_id = "constellation" @@ -24,7 +27,6 @@ def __init__(self, bot: RocketWatch): self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch async def _fetch_num_operators(self) -> int: - current_block = w3.eth.get_block_number() whitelist_contract = rp.get_contract_by_name("Constellation.Whitelist") if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): @@ -34,45 +36,30 @@ async def _fetch_num_operators(self) -> int: last_checked_block = 20946650 # contract deployment num_operators = 0 - def _fetch_interval(_from: int, _to: int) -> int: - _operators = 0 - - _operators += len(whitelist_contract.events.OperatorAdded().get_logs(fromBlock=_from, toBlock=_to)) - _operators -= len(whitelist_contract.events.OperatorRemoved().get_logs(fromBlock=_from, toBlock=_to)) - for event_log in whitelist_contract.events.OperatorsAdded().get_logs(fromBlock=_from, toBlock=_to): - _operators += len(event_log.args.operators) - for event_log in whitelist_contract.events.OperatorsRemoved().get_logs(fromBlock=_from, toBlock=_to): - _operators -= len(event_log.args.operators) - - return _operators - - request_block_limit = 50_000 b_from = last_checked_block + 1 - b_to = b_from + request_block_limit - - # catch up to current block with chunked requests - while b_to < current_block: - num_operators += _fetch_interval(b_from, b_to) - b_from = b_to + 1 - b_to = b_from + request_block_limit + b_to = w3.eth.get_block_number() - num_operators += _fetch_interval(b_from, current_block) - last_checked_block = current_block + num_operators += 
len(get_logs(whitelist_contract.events.OperatorAdded, b_from, b_to)) + num_operators -= len(get_logs(whitelist_contract.events.OperatorRemoved, b_from, b_to)) + for event_log in get_logs(whitelist_contract.events.OperatorsAdded, b_from, b_to): + num_operators += len(event_log.args.operators) + for event_log in get_logs(whitelist_contract.events.OperatorsRemoved, b_from, b_to): + num_operators -= len(event_log.args.operators) await self.db.last_checked_block.replace_one( {"_id": cog_id}, - {"_id": cog_id, "block": last_checked_block, "operators": num_operators}, + {"_id": cog_id, "block": b_to, "operators": num_operators}, upsert=True ) return num_operators - @hybrid_command() - async def constellation(self, ctx: Context): + @command() + async def constellation(self, interaction: Interaction): """ Summary of Gravita Constellation protocol stats. """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) supernode_contract = rp.get_contract_by_name("Constellation.SuperNodeAccount") distributor_contract = rp.get_contract_by_name("Constellation.OperatorDistributor") @@ -184,7 +171,7 @@ async def constellation(self, ctx: Context): inline=False ) - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) async def setup(bot): diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py new file mode 100644 index 00000000..12aca6cb --- /dev/null +++ b/rocketwatch/utils/event_logs.py @@ -0,0 +1,40 @@ +import logging +from typing import Optional, Any + +from eth_typing import BlockNumber +from web3.contract import ContractEvent, LogReceipt + +from utils.cfg import cfg + +log = logging.getLogger("event_logs") +log.setLevel(cfg["log_level"]) + + +def get_logs( + event: ContractEvent, + from_block: BlockNumber, + to_block: BlockNumber, + arg_filters: Optional[dict[str, Any]] = None +) -> list[LogReceipt]: + start_block = from_block + end_block = to_block + + 
log.debug(f"Fetching vote receipts in [{start_block}, {end_block}]") + + chunk_size = 50_000 + from_block = start_block + to_block = from_block + chunk_size + + logs = [] + + while from_block <= end_block: + logs += event.create_filter( + fromBlock=from_block, + toBlock=min(to_block, end_block), + argument_filters=arg_filters + ).get_all_entries() + + from_block = to_block + 1 + to_block = from_block + chunk_size + + return logs From a639cad07f655c158e7dd35f19479794b00f7fdb Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 13:47:35 +0000 Subject: [PATCH 003/279] async multicall --- .../plugins/minipool_task/minipool_task.py | 100 +++++++++--------- .../minipools_upkeep_task.py | 61 ++++------- rocketwatch/utils/rocketpool.py | 9 +- 3 files changed, 79 insertions(+), 91 deletions(-) diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py index f64937bb..0fd4fd32 100644 --- a/rocketwatch/plugins/minipool_task/minipool_task.py +++ b/rocketwatch/plugins/minipool_task/minipool_task.py @@ -6,8 +6,11 @@ from cronitor import Monitor from pymongo import MongoClient -from discord.ext import commands, tasks from requests.exceptions import HTTPError +from eth_typing import ChecksumAddress + +from discord.ext import commands, tasks +from discord.utils import as_chunks from rocketwatch import RocketWatch from utils.cfg import cfg @@ -19,24 +22,21 @@ log = logging.getLogger("minipool_task") log.setLevel(cfg["log_level"]) + class MinipoolTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = MongoClient(cfg["mongodb.uri"]).rocketwatch self.minipool_manager = rp.get_contract_by_name("rocketMinipoolManager") self.monitor = Monitor('gather-minipools', api_key=cfg["other.secrets.cronitor"]) - - if not self.run_loop.is_running() and bot.is_ready(): - self.run_loop.start() - - @commands.Cog.listener() - async def on_ready(self): - if 
self.run_loop.is_running(): - return - self.run_loop.start() + self.batch_size = 1000 + self.loop.start() + + def cog_unload(self): + self.loop.cancel() @tasks.loop(seconds=60 ** 2) - async def run_loop(self): + async def loop(self): p_id = time.time() self.monitor.ping(state='run', series=p_id) executor = ThreadPoolExecutor() @@ -44,33 +44,37 @@ async def run_loop(self): futures = [loop.run_in_executor(executor, self.task)] try: await asyncio.gather(*futures) - self.monitor.ping(state='complete', series=p_id) + self.monitor.ping(state="complete", series=p_id) except Exception as err: await self.bot.report_error(err) - self.monitor.ping(state='fail', series=p_id) + self.monitor.ping(state="fail", series=p_id) + + @loop.before_loop + async def before_loop(self): + await self.bot.wait_until_ready() @timerun - def get_untracked_minipools(self): + def get_untracked_minipools(self) -> set[ChecksumAddress]: minipool_count = rp.call("rocketMinipoolManager.getMinipoolCount") minipool_addresses = [] - for i in range(0, minipool_count, 10000): + for i in range(0, minipool_count, self.batch_size): log.debug(f"getting minipool addresses for {i}/{minipool_count}") - i_end = min(i + 10000, minipool_count) + i_end = min(i + self.batch_size, minipool_count) minipool_addresses += [ w3.toChecksumAddress(r.results[0]) for r in rp.multicall.aggregate( self.minipool_manager.functions.getMinipoolAt(i) for i in range(i, i_end)).results] # remove address that are already in the minipool collection tracked_addresses = self.db.minipools.distinct("address") - return [a for a in minipool_addresses if a not in tracked_addresses] + return set(minipool_addresses) - set(tracked_addresses) @timerun def get_public_keys(self, addresses): # optimizing this doesn't seem to help much, so keep it simple for readability # batch the same way as get_untracked_minipools minipool_pubkeys = [] - for i in range(0, len(addresses), 10000): + for i in range(0, len(addresses), self.batch_size): log.debug(f"getting 
minipool pubkeys for {i}/{len(addresses)}") - i_end = min(i + 10000, len(addresses)) + i_end = min(i + self.batch_size, len(addresses)) minipool_pubkeys += [ f"0x{r.results[0].hex()}" for r in rp.multicall.aggregate( self.minipool_manager.functions.getMinipoolPubkey(a) for a in addresses[i:i_end]).results] @@ -138,38 +142,38 @@ def check_indexes(self): def task(self): self.check_indexes() - log.debug("Gathering all untracked Minipools...") - minipool_addresses = self.get_untracked_minipools() - if not minipool_addresses: - log.debug("No untracked Minipools found.") + log.debug("Gathering all untracked minipools...") + all_minipool_addresses = self.get_untracked_minipools() + if not all_minipool_addresses: + log.debug("No untracked minipools found.") return - log.debug(f"Found {len(minipool_addresses)} untracked Minipools.") - log.debug("Gathering all Minipool public keys...") - minipool_pubkeys = self.get_public_keys(minipool_addresses) - log.debug("Gathering all Minipool node operators...") - node_addresses = self.get_node_operator(minipool_addresses) - log.debug("Gathering all Minipool commission rates...") - node_fees = self.get_node_fee(minipool_addresses) - log.debug("Gathering all Minipool validator indexes...") - validator_data = self.get_validator_data(minipool_pubkeys) - data = [{ - "address" : a, - "pubkey" : p, - "node_operator" : n, - "node_fee" : f, - "validator" : validator_data[p]["validator_id"], - "activation_epoch": validator_data[p]["activation_epoch"] - } for a, p, n, f in zip(minipool_addresses, minipool_pubkeys, node_addresses, node_fees) if p in validator_data] - if data: - log.debug(f"Inserting {len(data)} Minipools into the database...") - self.db.minipools.insert_many(data) - else: - log.debug("No new Minipools with data found.") + + log.debug(f"Found {len(all_minipool_addresses)} untracked minipools.") + for minipool_addresses in as_chunks(all_minipool_addresses, self.batch_size): + log.debug("Gathering minipool public keys...") + 
minipool_pubkeys = self.get_public_keys(minipool_addresses) + log.debug("Gathering minipool node operators...") + node_addresses = self.get_node_operator(minipool_addresses) + log.debug("Gathering minipool commission rates...") + node_fees = self.get_node_fee(minipool_addresses) + log.debug("Gathering minipool validator indexes...") + validator_data = self.get_validator_data(minipool_pubkeys) + data = [{ + "address" : a, + "pubkey" : p, + "node_operator" : n, + "node_fee" : f, + "validator" : validator_data[p]["validator_id"], + "activation_epoch": validator_data[p]["activation_epoch"] + } for a, p, n, f in zip(minipool_addresses, minipool_pubkeys, node_addresses, node_fees) if p in validator_data] + if data: + log.debug(f"Inserting {len(data)} minipools into the database...") + self.db.minipools.insert_many(data) + else: + log.debug("No new minipools with data found.") + log.debug("Finished!") - def cog_unload(self): - self.run_loop.cancel() - async def setup(bot): await bot.add_cog(MinipoolTask(bot)) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 64460f78..246177e5 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -16,7 +16,7 @@ from utils.visibility import is_hidden from utils.cfg import cfg from utils.rocketpool import rp -from utils.time_debug import timerun +from utils.time_debug import timerun_async log = logging.getLogger("minipools_upkeep_task") log.setLevel(cfg["log_level"]) @@ -28,26 +28,26 @@ def div_32(i: int): class MinipoolsUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") - self.sync_db = pymongo.MongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - self.event_loop = None + self.db = 
AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.loop.start() + + def cog_unload(self): + self.loop.cancel() - if not self.run_loop.is_running() and bot.is_ready(): - self.run_loop.start() - - @commands.Cog.listener() + # every 6.4 minutes + @tasks.loop(seconds=solidity.BEACON_EPOCH_LENGTH) + async def loop(self): + try: + await self.upkeep_minipools() + except Exception as err: + await self.bot.report_error(err) + + @loop.before_loop async def on_ready(self): - if self.run_loop.is_running(): - return - self.run_loop.start() - - @timerun - def get_minipools_from_db(self): - # get all minipools from db - return self.sync_db.minipools.find().distinct("address") + await self.bot.wait_until_ready() - @timerun - def get_minipool_stats(self, minipools): + @timerun_async + async def get_minipool_stats(self, minipools): m_d = rp.get_contract_by_name("rocketMinipoolDelegate") m = rp.assemble_contract("rocketMinipool", address=minipools[0]) mc = rp.get_contract_by_name("multicall3") @@ -71,7 +71,7 @@ def get_minipool_stats(self, minipools): for a in addresses for lamb in lambs ] - res = rp.multicall2_do_call(calls) + res = await rp.multicall2(calls) # add data to mini pool stats dict (address => {func_name: value}) # strip get from function name for (address, variable_name), value in res.items(): @@ -80,25 +80,10 @@ def get_minipool_stats(self, minipools): minipool_stats[address][variable_name] = value return minipool_stats - # every 6.4 minutes - @tasks.loop(seconds=solidity.BEACON_EPOCH_LENGTH) - async def run_loop(self): - executor = ThreadPoolExecutor() - loop = asyncio.get_event_loop() - futures = [loop.run_in_executor(executor, self.upkeep_minipools)] - try: - await asyncio.gather(*futures) - except Exception as err: - await self.bot.report_error(err) - - def upkeep_minipools(self): + async def upkeep_minipools(self): logging.info("Updating minipool states") - # the bellow fixes multicall from breaking - if not self.event_loop: - self.event_loop = 
asyncio.new_event_loop() - asyncio.set_event_loop(self.event_loop) - a = self.get_minipools_from_db() - b = self.get_minipool_stats(a) + a = await self.db.minipools.find().distinct("address") + b = await self.get_minipool_stats(a) # update data in db using unordered bulk write # note: this data is kept in the "meta" field of each minipool bulk = [ @@ -109,7 +94,7 @@ def upkeep_minipools(self): ) for address, stats in b.items() ] - self.sync_db.minipools.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) logging.info("Updated minipool states") @hybrid_command() diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 108a6e93..8a47c614 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -15,7 +15,7 @@ from utils.cfg import cfg from utils.readable import decode_abi from utils.shared_w3 import w3, mainnet_w3, historical_w3 -from utils.time_debug import timerun +from utils.time_debug import timerun_async log = logging.getLogger("rocketpool") log.setLevel(cfg["log_level"]) @@ -90,10 +90,9 @@ def seth_sig(abi, function_name): return f"{function_name}({inputs})({outputs})" raise Exception(f"Function {function_name} not found in ABI") - @timerun - def multicall2_do_call(self, calls: list[Call], require_success=True): - multicall = Multicall(calls, _w3=w3, gas_limit=500_000_000, require_success=require_success) - return multicall() + @timerun_async + async def multicall2(self, calls: list[Call], require_success=True): + return await Multicall(calls, _w3=w3, gas_limit=50_000_000, require_success=require_success) @cached(cache=ADDRESS_CACHE) def get_address_by_name(self, name): From cc130e1d313dae78c0b9805537a5b614b951b305 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 13:49:49 +0000 Subject: [PATCH 004/279] views utility --- rocketwatch/plugins/dao/dao.py | 113 ++++++++++++++++++++++++++++- rocketwatch/plugins/queue/queue.py | 
75 ++++++------------- rocketwatch/utils/views.py | 55 ++++++++++++++ 3 files changed, 188 insertions(+), 55 deletions(-) create mode 100644 rocketwatch/utils/views.py diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 9c654676..6f81834b 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -1,23 +1,34 @@ import logging +from dataclasses import dataclass from typing import Literal +from operator import attrgetter + +from eth_typing import ChecksumAddress +from tabulate import tabulate from discord import Interaction -from discord.app_commands import command, describe +from discord.app_commands import Choice, command, describe, autocomplete from discord.ext.commands import Cog from rocketwatch import RocketWatch +from utils import solidity from utils.cfg import cfg from utils.embeds import Embed from utils.visibility import is_hidden, is_hidden_weak from utils.dao import DefaultDAO, OracleDAO, SecurityCouncil, ProtocolDAO +from utils.views import PageView +from utils.embeds import el_explorer_url +from utils.event_logs import get_logs +from utils.block_time import ts_to_block +from utils.rocketpool import rp log = logging.getLogger("dao") log.setLevel(cfg["log_level"]) -class DAOCommand(Cog): +class OnchainDAO(Cog): def __init__(self, bot: RocketWatch): self.bot = bot @@ -128,8 +139,102 @@ async def dao_votes( case _: raise ValueError(f"Invalid DAO name: {dao_name}") - await interaction.followup.send(embed=embed) + await interaction.followup.send(embed=embed) + + @dataclass(slots=True) + class Vote: + voter: ChecksumAddress + direction: int + voting_power: float + time: int + + class VoterPageView(PageView): + def __init__(self, proposal: ProtocolDAO.Proposal): + super().__init__(page_size=25) + self.proposal = proposal + self._voter_list = self._get_voter_list(proposal) + + def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['OnchainDAO.Vote']: + voters: dict[ChecksumAddress, 
OnchainDAO.Vote] = {} + dao = ProtocolDAO() + + for vote_log in get_logs( + dao.proposal_contract.events.ProposalVoted, + ts_to_block(proposal.start) - 1, + ts_to_block(proposal.end_phase_2) + 1, + {"proposalID": proposal.id} + ): + vote = OnchainDAO.Vote( + vote_log.args.voter, + vote_log.args.direction, + solidity.to_float(vote_log.args.votingPower), + vote_log.args.time + ) + voters[vote.voter] = vote + + for override_log in get_logs( + dao.proposal_contract.events.ProposalVoteOverridden, + ts_to_block(proposal.end_phase_1) - 1, + ts_to_block(proposal.end_phase_2) + 1, + {"proposalID": proposal.id} + ): + voting_power = solidity.to_float(override_log.args.votingPower) + voters[override_log.args.delegate].voting_power -= voting_power + + return sorted(voters.values(), key=attrgetter("voting_power"), reverse=True) + + @property + def _title(self) -> str: + return f"pDAO Proposal #{self.proposal.id} - Voter List" + + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: + headers = ["#", "Voter", "Choice", "Weight"] + data = [] + for i, voter in enumerate(self._voter_list[from_idx:(to_idx + 1)], start=from_idx): + name = el_explorer_url(voter.voter, prefix=-1).split("[")[1].split("]")[0] + vote = ["", "Abstain", "For", "Against", "Veto"][voter.direction] + voting_power = f"{voter.voting_power:,.2f}" + data.append([i+1, name, vote, voting_power]) + + if not data: + return 0, "" + + table = tabulate(data, headers, colalign=("right", "left", "left", "right")) + return len(self._voter_list), f"```{table}```" + + async def _get_recent_proposals(self, interaction: Interaction, current: str) -> list[Choice[int]]: + dao = ProtocolDAO() + num_proposals = dao.proposal_contract.functions.getTotal().call() + + if current: + try: + suggestions = [int(current)] + assert 1 <= int(current) <= num_proposals + except (ValueError, AssertionError): + return [] + else: + suggestions = list(range(1, num_proposals + 1))[:-26:-1] + + titles: list[str] = [ + 
res.results[0] for res in rp.multicall.aggregate([ + dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions + ]).results + ] + return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] + + @command() + @describe(proposal="proposal to show voters for") + @autocomplete(proposal=_get_recent_proposals) + async def voter_list(self, interaction: Interaction, proposal: int) -> None: + """Show the list of voters for a pDAO proposal""" + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + if not (proposal := ProtocolDAO().fetch_proposal(proposal)): + return await interaction.followup.send("Invalid proposal ID.") + + view = OnchainDAO.VoterPageView(proposal) + embed = await view.load() + await interaction.followup.send(embed=embed, view=view) async def setup(bot): - await bot.add_cog(DAOCommand(bot)) + await bot.add_cog(OnchainDAO(bot)) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index f087ed6f..18bd3151 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -2,10 +2,9 @@ import logging from cachetools.func import ttl_cache -from discord import ui, ButtonStyle, Interaction -from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command +from discord.ext.commands import Cog from eth_typing import ChecksumAddress from rocketwatch import RocketWatch @@ -16,55 +15,29 @@ from utils.rocketpool import rp from utils.visibility import is_hidden_weak from utils.shared_w3 import w3 +from utils.views import PageView log = logging.getLogger("queue") log.setLevel(cfg["log_level"]) -class Queue(commands.Cog): +class Queue(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - class PageView(ui.View): - PAGE_SIZE = 15 - + class MinipoolPageView(PageView): def __init__(self): - 
super().__init__(timeout=None) - self.page_index = 0 - - async def load(self) -> Embed: + super().__init__(page_size=15) + + @property + def _title(self) -> str: + return "Minipool Queue" + + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: queue_length, queue_content = Queue.get_minipool_queue( - limit=self.PAGE_SIZE, start=(self.page_index * self.PAGE_SIZE) + limit=(to_idx - from_idx + 1), start=from_idx ) - max_page_index = int(math.floor(queue_length / self.PAGE_SIZE)) - - if self.page_index > max_page_index: - # if the queue changed and this is out of bounds, try again - self.page_index = max_page_index - return await self.load() - - embed = Embed(title="Minipool Queue") - if queue_length > 0: - embed.description = queue_content - self.prev_page.disabled = (self.page_index <= 0) - self.next_page.disabled = (self.page_index >= max_page_index) - else: - embed.set_image(url="https://c.tenor.com/1rQLxWiCtiIAAAAd/tenor.gif") - self.clear_items() # remove buttons - - return embed - - @ui.button(emoji="⬅", label="Prev", style=ButtonStyle.gray) - async def prev_page(self, interaction: Interaction, _) -> None: - self.page_index -= 1 - embed = await self.load() - await interaction.response.edit_message(embed=embed, view=self) - - @ui.button(emoji="➡", label="Next", style=ButtonStyle.gray) - async def next_page(self, interaction: Interaction, _) -> None: - self.page_index += 1 - embed = await self.load() - await interaction.response.edit_message(embed=embed, view=self) + return queue_length, queue_content @staticmethod @ttl_cache(ttl=600) @@ -110,18 +83,18 @@ def get_minipool_queue(limit: int, start: int = 0) -> tuple[int, str]: return q_len, content - @hybrid_command() - async def queue(self, ctx: Context): + @command() + async def queue(self, interaction: Interaction): """Show the minipool queue""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - view = Queue.PageView() + await 
interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + view = Queue.MinipoolPageView() embed = await view.load() - await ctx.send(embed=embed, view=view) + await interaction.followup.send(embed=embed, view=view) - @hybrid_command() - async def clear_queue(self, ctx: Context): + @command() + async def clear_queue(self, interaction: Interaction): """Show gas price for clearing the queue using the rocketDepositPoolQueue contract""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed(title="Gas Prices for Dequeuing Minipools") e.set_author( @@ -167,7 +140,7 @@ async def clear_queue(self, ctx: Context): inline=False ) - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py new file mode 100644 index 00000000..7fc391db --- /dev/null +++ b/rocketwatch/utils/views.py @@ -0,0 +1,55 @@ +import math +from abc import abstractmethod + +from discord import ui, ButtonStyle, Interaction +from utils.embeds import Embed + +class PageView(ui.View): + def __init__(self, page_size: int): + super().__init__(timeout=None) + self.page_index = 0 + self.page_size = page_size + + @property + @abstractmethod + def _title(self) -> str: + pass + + @abstractmethod + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: + pass + + async def load(self) -> Embed: + num_items, content = await self._load_content( + (self.page_index * self.page_size), + ((self.page_index + 1) * self.page_size - 1) + ) + + embed = Embed(title=self._title) + if num_items <= 0: + embed.set_image(url="https://c.tenor.com/1rQLxWiCtiIAAAAd/tenor.gif") + self.clear_items() # remove buttons + return embed + + max_page_index = int(math.ceil(num_items / self.page_size)) - 1 + if self.page_index > max_page_index: + # if the content changed and this is out of bounds, try again + self.page_index = max_page_index 
+ return await self.load() + + embed.description = content + self.prev_page.disabled = (self.page_index <= 0) + self.next_page.disabled = (self.page_index >= max_page_index) + return embed + + @ui.button(emoji="⬅", label="Prev", style=ButtonStyle.gray) + async def prev_page(self, interaction: Interaction, _) -> None: + self.page_index -= 1 + embed = await self.load() + await interaction.response.edit_message(embed=embed, view=self) + + @ui.button(emoji="➡", label="Next", style=ButtonStyle.gray) + async def next_page(self, interaction: Interaction, _) -> None: + self.page_index += 1 + embed = await self.load() + await interaction.response.edit_message(embed=embed, view=self) From 6afd12a5c34f490f38cf86a3b71766de775d4d6c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 13:50:10 +0000 Subject: [PATCH 005/279] refactored block time utility --- rocketwatch/plugins/apr/apr.py | 4 - .../plugins/beacon_events/beacon_events.py | 8 +- rocketwatch/plugins/debug/debug.py | 29 +++++--- rocketwatch/plugins/defi/defi.py | 2 - .../plugins/detect_scam/detect_scam.py | 22 +++--- rocketwatch/plugins/event_core/event_core.py | 6 +- rocketwatch/plugins/events/events.py | 7 +- rocketwatch/plugins/governance/governance.py | 6 +- rocketwatch/plugins/rewards/rewards.py | 25 ++++--- rocketwatch/plugins/rpl/rpl.py | 4 +- rocketwatch/plugins/snapshot/snapshot.py | 8 +- rocketwatch/utils/block_time.py | 45 ++++++++++++ rocketwatch/utils/dao.py | 17 +++-- rocketwatch/utils/embeds.py | 3 +- rocketwatch/utils/get_nearest_block.py | 73 ------------------- 15 files changed, 120 insertions(+), 139 deletions(-) create mode 100644 rocketwatch/utils/block_time.py delete mode 100644 rocketwatch/utils/get_nearest_block.py diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 21516acf..72a94d80 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -84,10 +84,6 @@ async def before_loop(self): async def 
on_error(self, err: Exception): await self.bot.report_error(err) - def get_time_of_block(self, block_number): - block = w3.eth.getBlock(block_number) - return datetime.fromtimestamp(block["timestamp"]) - @hybrid_command() async def reth_apr(self, ctx: Context): """Show the current rETH APR""" diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 768ea0ac..e5005e72 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -16,7 +16,7 @@ from utils.shared_w3 import bacon, w3 from utils.solidity import date_to_beacon_block, beacon_block_to_date from utils.event import EventPlugin, Event -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block from utils.retry import retry log = logging.getLogger("beacon_events") @@ -114,7 +114,7 @@ def _get_slashings(self, beacon_block: dict) -> list[Event]: embed=embed, event_name=slash["event_name"], unique_id=unique_id, - block_number=get_block_by_timestamp(timestamp)[0], + block_number=ts_to_block(timestamp), )) return events @@ -236,7 +236,7 @@ def _check_finality(self, beacon_block: dict) -> Optional[Event]: embed=embed, event_name=event_name, unique_id=f"finality_delay_recover:{epoch_number}", - block_number=get_block_by_timestamp(timestamp)[0] + block_number=ts_to_block(timestamp) ) return event @@ -258,7 +258,7 @@ def _check_finality(self, beacon_block: dict) -> Optional[Event]: embed=embed, event_name=event_name, unique_id=f"{epoch_number}:finality_delay", - block_number=get_block_by_timestamp(timestamp)[0] + block_number=ts_to_block(timestamp) ) return None diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 0dad24de..02b6b00c 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -16,7 +16,7 @@ from utils import solidity from utils.cfg import cfg from utils.embeds import 
el_explorer_url, Embed -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block, block_to_ts from utils.readable import prettify_json_string from utils.rocketpool import rp from utils.shared_w3 import w3 @@ -276,18 +276,23 @@ async def get_block_by_timestamp(self, ctx: Context, timestamp: int): Get a block using a timestamp. Useful for contracts that track blocktime instead of blocknumber. """ await ctx.defer(ephemeral=is_hidden(ctx)) - block, steps = get_block_by_timestamp(timestamp) - found_timestamp = w3.eth.get_block(block).timestamp - if found_timestamp == timestamp: - text = f"```Found perfect match for timestamp: {timestamp}\n" \ - f"Block: {block}\n" \ - f"Steps taken: {steps}```" + + block = ts_to_block(timestamp) + found_ts = block_to_ts(block) + + if found_ts == timestamp: + text = ( + f"Found perfect match for timestamp {timestamp}:\n" + f"Block: {block}" + ) else: - text = f"```Found closest match for timestamp: {timestamp}\n" \ - f"Timestamp: {found_timestamp}\n" \ - f"Block: {block}\n" \ - f"Steps taken: {steps}```" - await ctx.send(content=text) + text = ( + f"Found close match for timestamp {timestamp}:\n" + f"Timestamp: {found_ts}\n" + f"Block: {block}" + ) + + await ctx.send(content=f"```{text}```") @hybrid_command() async def get_abi_of_contract(self, ctx: Context, contract: str): diff --git a/rocketwatch/plugins/defi/defi.py b/rocketwatch/plugins/defi/defi.py index f0adc81c..497f2a2e 100644 --- a/rocketwatch/plugins/defi/defi.py +++ b/rocketwatch/plugins/defi/defi.py @@ -1,7 +1,5 @@ import logging -import aiohttp -from discord import AllowedMentions from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 4bb91210..48adbf32 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ 
b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -47,6 +47,14 @@ class Color: WARN = Color.from_rgb(255, 165, 0) OK = Color.from_rgb(0, 255, 0) + @staticmethod + def is_reputable(user: Member) -> bool: + return any(( + user.id == cfg["discord.owner.user_id"], + {role.id for role in user.roles} & set(cfg["rocketpool.support.role_ids"]), + user.guild_permissions.moderate_members + )) + class RemovalVoteView(ui.View): THRESHOLD = 5 @@ -55,14 +63,6 @@ def __init__(self, plugin: 'DetectScam', reportable: Message | Thread): self.plugin = plugin self.reportable = reportable self.safu_votes = set() - - @staticmethod - def is_admin(user: Member) -> bool: - return any(( - user.id == cfg["discord.owner.user_id"], - {role.id for role in user.roles} & set(cfg["rocketpool.support.role_ids"]), - user.guild_permissions.administrator - )) @ui.button(label="Mark Safu", style=ButtonStyle.blurple) async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: @@ -96,7 +96,7 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: self.safu_votes.add(interaction.user.id) - if self.is_admin(interaction.user): + if DetectScam.is_reputable(interaction.user): user_repr = interaction.user.mention elif len(self.safu_votes) >= self.THRESHOLD: user_repr = "the community" @@ -403,8 +403,8 @@ async def on_message(self, message: Message) -> None: log.warning("Ignoring message sent by bot") return - if message.author.guild_permissions.administrator: - log.warning("Ignoring message sent by server admin") + if self.is_reputable(message.author): + log.warning(f"Ignoring message sent by trusted user ({message.author})") return if message.guild is None: diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 06744281..3c29887f 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -7,7 +7,7 @@ from datetime import datetime, timedelta from enum 
import Enum from functools import partial -from typing import Optional, cast, Any +from typing import Optional, Any import pymongo from cronitor import Monitor @@ -120,7 +120,7 @@ async def gather_new_events(self) -> None: target_block = self.head_block + self.block_batch_size to_block = target_block - from_block = cast(BlockNumber, self.head_block + 1) + from_block: BlockNumber = self.head_block + 1 if to_block < from_block: log.warning(f"Skipping empty block range [{from_block}, {to_block}]") return @@ -258,7 +258,7 @@ async def _update_status_message(self, channel_name: str, config: dict) -> None: if not (embed := await generate_template_embed(self.db, "announcement")): try: - plugin = cast(StatusPlugin, self.bot.cogs.get(config["plugin"])) + plugin: StatusPlugin = self.bot.cogs.get(config["plugin"]) embed = await plugin.get_status() except Exception as err: await self.bot.report_error(err) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index e4b33f3e..81077bdf 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -23,6 +23,7 @@ from utils.rocketpool import rp, NoAddressFound from utils.shared_w3 import w3, bacon from utils.solidity import SUBMISSION_KEYS +from utils.block_time import block_to_ts log = logging.getLogger("events") log.setLevel(cfg["log_level"]) @@ -458,7 +459,7 @@ def handle_event(event_name: str, event: aDict) -> Optional[Embed]: args.rewardPeriodEnd = next_period update_rate = rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) # in seconds # get timestamp of event block - ts = w3.eth.getBlock(event.blockNumber).timestamp + ts = block_to_ts(event.blockNumber) # check if the next update is after the next period ts earliest_next_update = ts + update_rate # if it will update before the next period, skip @@ -834,7 +835,7 @@ def share_repr(percentage: float) -> str: block=args.blockNumber - 1) minipool_creation = 
rp.call("rocketMinipoolDelegate.getStatusTime", address=args.minipool, block=args.blockNumber - 1) - block_time = w3.eth.getBlock(args.blockNumber - 1)["timestamp"] + block_time = block_to_ts(args.blockNumber - 1) if block_time - minipool_creation > scrub_period // 2: reason = "taking too long to migrate their withdrawal credentials on the beacon chain" args.scrub_reason = reason @@ -843,7 +844,7 @@ def share_repr(percentage: float) -> str: if solidity.to_float(args.amountOfStETH) < 10_000: return None if receipt: - args.timestamp = w3.eth.getBlock(receipt["blockNumber"])["timestamp"] + args.timestamp = block_to_ts(receipt["blockNumber"]) args.event_name = event_name args = prepare_args(args) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 800c2f4d..753ff351 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -15,7 +15,7 @@ from utils.dao import DAO, DefaultDAO, OracleDAO, SecurityCouncil, ProtocolDAO from utils.embeds import Embed from utils.visibility import is_hidden_weak -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block log = logging.getLogger("governance") log.setLevel(cfg["log_level"]) @@ -38,8 +38,8 @@ def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal]: @staticmethod def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: - from_block = get_block_by_timestamp(proposal.created)[0] - 1 - to_block = get_block_by_timestamp(proposal.created)[0] + 1 + from_block = ts_to_block(proposal.created) - 1 + to_block = ts_to_block(proposal.created) + 1 log.info(f"Looking for proposal {proposal} in [{from_block},{to_block}]") for receipt in dao.proposal_contract.events.ProposalAdded().get_logs(fromBlock=from_block, toBlock=to_block): diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 76da5253..33bbbb65 100644 
--- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -19,8 +19,7 @@ from utils.embeds import Embed, resolve_ens from utils.rocketpool import rp from utils.retry import retry_async -from utils.get_nearest_block import get_block_by_timestamp - +from utils.block_time import ts_to_block log = logging.getLogger("rewards") log.setLevel(cfg["log_level"]) @@ -30,7 +29,7 @@ class Rewards(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @dataclass + @dataclass(frozen=True, slots=True) class RewardEstimate: address: str interval: int @@ -60,7 +59,7 @@ async def get_estimated_rewards(self, ctx: Context, address: str) -> Optional[Re await ctx.send("Error fetching node data from Sprocket Pool API. Blame Patches.") return None - data_block, _ = get_block_by_timestamp(patches_res["time"]) + data_block = ts_to_block(patches_res["time"]) rpl_rewards: int = patches_res[address].get("collateralRpl", 0) eth_rewards: int = patches_res[address].get("smoothingPoolEth", 0) interval_time = rp.call("rocketDAOProtocolSettingsRewards.getRewardsClaimIntervalTime", block=data_block) @@ -118,10 +117,12 @@ async def upcoming_rewards(self, ctx: Context, node_address: str, extrapolate: b await ctx.send(embed=embed) @hybrid_command() - @describe(node_address="address of node to simulate rewards for") - @describe(rpl_stake="amount of staked RPL to simulate") - @describe(num_leb8="number of 8 ETH minipools to simulate") - @describe(num_eb16="number of 16 ETH minipools to simulate") + @describe( + node_address="address of node to simulate rewards for", + rpl_stake="amount of staked RPL to simulate", + num_leb8="number of 8 ETH minipools to simulate", + num_eb16="number of 16 ETH minipools to simulate" + ) async def simulate_rewards( self, ctx: Context, @@ -131,7 +132,7 @@ async def simulate_rewards( num_eb16: int = 0 ): """ - Simulate RPL rewards for this period. 
+ Simulate RPL rewards for this period """ await ctx.defer(ephemeral=True) display_name, address = await resolve_ens(ctx, node_address) @@ -148,7 +149,7 @@ async def simulate_rewards( borrowed_eth = (24 * num_leb8) + (16 * num_eb16) data_block: int = rewards.data_block - reward_start_block, _ = get_block_by_timestamp(rewards.start_time) + reward_start_block = ts_to_block(rewards.start_time) rpl_min: float = solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMinimumPerMinipoolStake", block=data_block)) rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) @@ -266,9 +267,9 @@ def formatter(_x, _pos) -> str: title = f"Simulated RPL Rewards for {display_name} {sim_info_txt}".strip() embed = self.create_embed(title, rewards) - embed.set_image(url="attachment://graph.png") + embed.set_image(url="attachment://rewards.png") - f = File(img, filename="graph.png") + f = File(img, filename="rewards.png") await ctx.send(embed=embed, files=[f]) img.close() diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 143d04b2..cb7cdb55 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -14,7 +14,7 @@ from utils import solidity from utils.cfg import cfg from utils.embeds import Embed -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.visibility import is_hidden @@ -51,7 +51,7 @@ async def rpl_apr(self, ctx: Context): # track down the rewards for node operators from the last reward period contract = rp.get_contract_by_name("rocketVault") - m = get_block_by_timestamp(rp.call("rocketRewardsPool.getClaimIntervalTimeStart"))[0] + m = ts_to_block(rp.call("rocketRewardsPool.getClaimIntervalTimeStart")) events = contract.events["TokenDeposited"].getLogs(argument_filters={ "by": w3.soliditySha3( ["string", "address"], diff --git 
a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index b9181213..0bf66eda 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -22,7 +22,7 @@ from utils.rocketpool import rp from utils.event import EventPlugin, Event from utils.visibility import is_hidden_weak -from utils.get_nearest_block import get_block_by_timestamp +from utils.block_time import ts_to_block from utils.retry import retry log = logging.getLogger("snapshot") @@ -245,7 +245,7 @@ def create_start_event(self) -> Event: return Event( embed=embed, topic="snapshot", - block_number=get_block_by_timestamp(self.start)[0], + block_number=ts_to_block(self.start), event_name="pdao_snapshot_vote_start", unique_id=f"snapshot_vote_start:{self.id}", image=self.create_image(include_title=True) @@ -280,7 +280,7 @@ def create_end_event(self) -> Event: return Event( embed=embed, topic="snapshot", - block_number=get_block_by_timestamp(self.end)[0], + block_number=ts_to_block(self.end), event_name="pdao_snapshot_vote_end", unique_id=f"snapshot_vote_end:{self.id}", image=self.create_image(include_title=True) @@ -407,7 +407,7 @@ def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: return Event( embed=embed, topic="snapshot", - block_number=get_block_by_timestamp(self.created)[0], + block_number=ts_to_block(self.created), unique_id=f"snapshot_vote:{self.proposal.id}:{self.voter}:{self.created}", **conditional_args ) diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py new file mode 100644 index 00000000..680879a5 --- /dev/null +++ b/rocketwatch/utils/block_time.py @@ -0,0 +1,45 @@ +import math +import logging +from functools import cache + +from utils.cfg import cfg +from utils.shared_w3 import w3 + +log = logging.getLogger("block_time") +log.setLevel(cfg["log_level"]) + + +@cache +def block_to_ts(block_number: int) -> int: + return w3.eth.get_block(block_number).timestamp + +@cache 
+def ts_to_block(target_ts: int) -> int: + log.debug(f"Looking for block at timestamp {target_ts}") + if target_ts < block_to_ts(1): + # genesis block doesn't have a timestamp + return 0, 0 + + lo = 1 + hi = w3.eth.block_number - 1 + + # simple binary search over block numbers + while lo < hi: + mid = math.ceil((lo + hi) / 2) + ts = block_to_ts(mid) + + if ts < target_ts: + lo = mid + elif ts > target_ts: + hi = mid - 1 + elif ts == target_ts: + log.debug(f"Exact match: block {mid} @ {ts}") + return mid + + # l == r, highest block number below target + block = hi + if abs(block_to_ts(block + 1) - target_ts) < abs(block_to_ts(block) - target_ts): + block += 1 + + log.debug(f"Closest match: block {block} @ {block_to_ts(block)}") + return block diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index b11c2ef6..7c550ac0 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -4,7 +4,7 @@ from enum import IntEnum from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Literal, cast +from typing import Optional, Literal, cast import termplotlib as tpl from eth_typing import ChecksumAddress @@ -33,7 +33,7 @@ class Proposal(ABC): @staticmethod @abstractmethod - def fetch_proposal(self, proposal_id: int) -> Proposal: + def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: pass @abstractmethod @@ -127,7 +127,11 @@ def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: return proposals - def fetch_proposal(self, proposal_id: int) -> Proposal: + def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: + num_proposals = self.proposal_contract.functions.getTotal().call() + if not (1 <= proposal_id <= num_proposals): + return None + # map results of functions calls to function name multicall: dict[str, str | bytes | int] = { res.function_name: res.results[0] for res in rp.multicall.aggregate([ @@ -232,8 +236,11 @@ def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: 
return proposals - - def fetch_proposal(self, proposal_id: int) -> Proposal: + def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: + num_proposals = self.proposal_contract.functions.getTotal().call() + if not (1 <= proposal_id <= num_proposals): + return None + # map results of functions calls to function name multicall: dict[str, str | bytes | int] = { res.function_name: res.results[0] for res in rp.multicall.aggregate([ diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 720e3c52..a2347be4 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -21,6 +21,7 @@ from utils.sea_creatures import get_sea_creature_for_address from utils.shared_w3 import w3 from utils.retry import retry +from utils.block_time import block_to_ts ens = CachedEns() @@ -488,7 +489,7 @@ def assemble(args) -> Embed: times = [value for key, value in args.items() if "time" in key.lower()] if block := args.get("blockNumber"): - times += [w3.eth.get_block(block).timestamp] + times += [block_to_ts(block)] time = times[0] if times else int(datetime.datetime.now().timestamp()) e.add_field(name="Timestamp", diff --git a/rocketwatch/utils/get_nearest_block.py b/rocketwatch/utils/get_nearest_block.py deleted file mode 100644 index c962deda..00000000 --- a/rocketwatch/utils/get_nearest_block.py +++ /dev/null @@ -1,73 +0,0 @@ -import logging -import math - -from utils.cfg import cfg -from utils.shared_w3 import w3 - -log = logging.getLogger("get_nearest_block") -log.setLevel(cfg["log_level"]) - - -def _get_timestamp(block): - return w3.eth.getBlock(block)['timestamp'] - - -def get_block_by_timestamp(timestamp): - history = [] - error_map = {} - i_pre = 1 - i_latest = w3.eth.get_block('latest')['number'] - i_post = i_latest - log.debug(f"Looking for block with timestamp {timestamp}") - while (i_pre, i_post) not in history: - log.debug(f'Searching between blocks {i_pre} and {i_post}') - if i_post == i_pre: - break - t0, t1 = _get_timestamp(i_pre), 
_get_timestamp(i_post) - av_block_time = (t1 - t0) / (i_post - i_pre) - - # if block-times were evenly-spaced, get expected block number - k = (timestamp - t0) / (t1 - t0) - i_expected = round(i_pre + k * (i_post - i_pre)) - # sanitize expected block number - i_expected = max(1, min(i_expected, i_latest)) - if i_expected in history: - break - - # get the ACTUAL time for that block - t_expected = _get_timestamp(i_expected) - - error = (timestamp - t_expected) / av_block_time - error_map[i_expected] = error - log.debug( - f"Estimated Block {i_expected} with timestamp {t_expected} is off {timestamp - t_expected}s ({error:=.3f} Blocks)") - - # if the block before this one has a positive error, and we currently have a negative error, - # then we know we have overshot the target - if i_expected - 1 in error_map and error_map[i_expected - 1] > 0 and error < 0: - log.debug(f'Overshot target, previous Block is behind target, current Block is ahead') - break - - # if the block after this one has a negative error, and we currently have a positive error, - # then we know we have undershot the target - if i_expected + 1 in error_map and error_map[i_expected + 1] < 0 and error > 0: - log.debug(f'Undershot target, next Block is ahead of target, current Block is behind') - break - - if error == 0: - log.debug(f"Block {i_expected} matches timestamp {timestamp}") - return i_expected, len(history) - - if i_expected not in history: - history.append(i_expected) - - i_expected_adj = i_expected + error - - r = abs(error) - - i_pre = max(1, math.floor(i_expected_adj - r)) - i_post = min(math.ceil(i_expected_adj + r), i_latest) - # find the block with the smallest error in the error_map - best_guess = min(error_map.items(), key=lambda x: abs(x[1])) - log.debug(f"Closest Block is {best_guess[0]} with error {best_guess[1]:=.3f}") - return best_guess[0], len(history) From 89fd61a61bbc749f064449e805811a640282ae9f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: 
Wed, 23 Apr 2025 16:29:35 +0000 Subject: [PATCH 006/279] fix ts_to_block for (pre) genesis --- rocketwatch/utils/block_time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 680879a5..21d9aaeb 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -18,7 +18,7 @@ def ts_to_block(target_ts: int) -> int: log.debug(f"Looking for block at timestamp {target_ts}") if target_ts < block_to_ts(1): # genesis block doesn't have a timestamp - return 0, 0 + return 0 lo = 1 hi = w3.eth.block_number - 1 From 9932d7cbdd9abaf82a1c5be3d124c0a9bb424e67 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 16:44:58 +0000 Subject: [PATCH 007/279] update :nodeset: emote ID --- rocketwatch/strings/embeds.en.json | 40 +++++++++++++++--------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index ecfbf6e2..25d8b57e 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -501,71 +501,71 @@ "description": "Finality has been recovered on the Beacon Chain!" }, "cs_max_validator_increase_event": { - "title": "<:nodeset:1297235812278341653> Constellation Validator Limit Increased", + "title": "<:nodeset:1351406340056285266> Constellation Validator Limit Increased", "description": "Constellation operators can now run **%{newLimit}** minipools!\n This is up from the previous limit of **%{oldLimit}**." }, "cs_max_validator_decrease_event": { - "title": "<:nodeset:1297235812278341653> Constellation Validator Limit Decreased", + "title": "<:nodeset:1351406340056285266> Constellation Validator Limit Decreased", "description": "Constellation operators are now limited to **%{newLimit}** minipools!\n This is down from the previous limit of **%{oldLimit}**." 
}, "cs_eth_treasury_fee_change_event": { - "title": "<:nodeset:1297235812278341653> xrETH Treasury Fee Changed", + "title": "<:nodeset:1351406340056285266> xrETH Treasury Fee Changed", "description": "The xrETH treasury fee has changed from **%{oldFee}%** to **%{newFee}%**!" }, "cs_eth_no_fee_change_event": { - "title": "<:nodeset:1297235812278341653> xrETH Operator Fee Changed", + "title": "<:nodeset:1351406340056285266> xrETH Operator Fee Changed", "description": "The xrETH node operator fee has changed from **%{oldFee}%** to **%{newFee}%**!" }, "cs_eth_mint_fee_change_event": { - "title": "<:nodeset:1297235812278341653> xrETH Mint Fee Changed", + "title": "<:nodeset:1351406340056285266> xrETH Mint Fee Changed", "description": "The xrETH mint fee has changed from **%{oldFee}%** to **%{newFee}%**!" }, "cs_deposit_eth_event": { - "title": "<:nodeset:1297235812278341653> xrETH Deposit", + "title": "<:nodeset:1351406340056285266> xrETH Deposit", "description": "**%{shares} xrETH** minted from **%{assets} ETH**!", - "description_small": "<:nodeset:1297235812278341653> %{sender} minted **%{shares} xrETH** from **%{assets} ETH**!" + "description_small": "<:nodeset:1351406340056285266> %{sender} minted **%{shares} xrETH** from **%{assets} ETH**!" }, "cs_withdraw_eth_event": { - "title": "<:nodeset:1297235812278341653> xrETH Withdrawal", + "title": "<:nodeset:1351406340056285266> xrETH Withdrawal", "description": "**%{shares} xrETH** burned for **%{assets} ETH**!", - "description_small": "<:nodeset:1297235812278341653> %{sender} burned **%{shares} xrETH** for **%{assets} ETH**!" + "description_small": "<:nodeset:1351406340056285266> %{sender} burned **%{shares} xrETH** for **%{assets} ETH**!" }, "cs_rpl_treasury_fee_change_event": { - "title": "<:nodeset:1297235812278341653> xRPL Treasury Fee Changed", + "title": "<:nodeset:1351406340056285266> xRPL Treasury Fee Changed", "description": "The xRPL treasury fee has been changed from **%{oldFee}%** to **%{newFee}%**!" 
}, "cs_deposit_rpl_event": { - "title": "<:nodeset:1297235812278341653> xRPL Deposit", + "title": "<:nodeset:1351406340056285266> xRPL Deposit", "description": "**%{shares} xRPL** minted from **%{assets} RPL**!", - "description_small": "<:nodeset:1297235812278341653> %{sender} minted **%{shares} xRPL** from **%{assets} RPL**!" + "description_small": "<:nodeset:1351406340056285266> %{sender} minted **%{shares} xRPL** from **%{assets} RPL**!" }, "cs_withdraw_rpl_event": { - "title": "<:nodeset:1297235812278341653> xRPL Withdrawal", + "title": "<:nodeset:1351406340056285266> xRPL Withdrawal", "description": "**%{shares} xRPL** burned for **%{assets} RPL**!", - "description_small": "<:nodeset:1297235812278341653> %{sender} burned **%{shares} xRPL** for **%{assets} RPL**!" + "description_small": "<:nodeset:1351406340056285266> %{sender} burned **%{shares} xRPL** for **%{assets} RPL**!" }, "cs_operator_added_event": { - "title": "<:nodeset:1297235812278341653> New Constellation Operator", + "title": "<:nodeset:1351406340056285266> New Constellation Operator", "description": "%{address} registered as a node operator for Constellation!" }, "cs_operators_added_event": { - "title": "<:nodeset:1297235812278341653> New Constellation Operators", + "title": "<:nodeset:1351406340056285266> New Constellation Operators", "description": "%{operatorList}" }, "cs_operator_removed_event": { - "title": "<:nodeset:1297235812278341653> Constellation Operator Removed", + "title": "<:nodeset:1351406340056285266> Constellation Operator Removed", "description": "%{address} has been removed as a node operator!" 
}, "cs_operators_removed_event": { - "title": "<:nodeset:1297235812278341653> Constellation Operators Removed", + "title": "<:nodeset:1351406340056285266> Constellation Operators Removed", "description": "%{operatorList}" }, "cs_rpl_min_ratio_change_event": { - "title": "<:nodeset:1297235812278341653> Constellation RPL Minimum Changed", + "title": "<:nodeset:1351406340056285266> Constellation RPL Minimum Changed", "description": "The supernode's RPL bond minimum changed from **%{oldRatio}%** to **%{newRatio}%**!" }, "cs_rpl_target_ratio_change_event": { - "title": "<:nodeset:1297235812278341653> Constellation RPL Target Changed", + "title": "<:nodeset:1351406340056285266> Constellation RPL Target Changed", "description": "The supernode's RPL bond target changed from **%{oldRatio}%** to **%{newRatio}%**!" }, "exit_arbitrage_event": { From 53ea30753dc11b1356f76e769e99243a0a6ef816 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 23 Apr 2025 19:49:59 +0000 Subject: [PATCH 008/279] error handling for app commands --- rocketwatch/rocketwatch.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 21059caa..82afd2e4 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -6,15 +6,17 @@ from discord import ( app_commands, + Interaction, Intents, Thread, File, Object, - User + User, ) from discord.abc import GuildChannel, PrivateChannel -from discord.ext.commands import Bot, Context from discord.ext import commands +from discord.ext.commands import Bot, Context +from discord.app_commands import CommandTree, AppCommandError from utils.cfg import cfg from utils.retry import retry_async @@ -24,8 +26,14 @@ class RocketWatch(Bot): + class RWCommandTree(CommandTree): + async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: + bot: RocketWatch = self.client + ctx = await 
Context.from_interaction(interaction) + await bot.on_command_error(ctx, error) + def __init__(self, intents: Intents) -> None: - super().__init__(command_prefix=(), intents=intents) + super().__init__(command_prefix=(), tree_cls=self.RWCommandTree, intents=intents) async def _load_plugins(self): chain = cfg["rocketpool.chain"] @@ -92,17 +100,17 @@ async def on_ready(self): await self.sync_commands() - async def on_command_error(self, ctx: Context, exception: Exception) -> None: - log.info(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) failed") - if isinstance(exception, commands.errors.MaxConcurrencyReached): + async def on_command_error(self, ctx: Context, error: Exception) -> None: + log.error(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) failed") + if isinstance(error, commands.errors.MaxConcurrencyReached): msg = "Someone else is already using this command. Please try again later" - elif isinstance(exception, app_commands.errors.CommandOnCooldown): - msg = f"Slow down! You are using this command too fast. Please try again in {exception.retry_after:.0f} seconds" + elif isinstance(error, app_commands.errors.CommandOnCooldown): + msg = f"Slow down! You are using this command too fast. Please try again in {error.retry_after:.0f} seconds" else: msg = "An unexpected error occurred and has been reported to the developer. 
Please try again later" try: - await self.report_error(exception, ctx) + await self.report_error(error, ctx) await ctx.send(content=msg, ephemeral=True) except Exception: log.exception("Failed to alert user") @@ -137,7 +145,7 @@ async def report_error(self, exception: Exception, ctx: Optional[Context] = None try: channel = await self.get_or_fetch_channel(cfg["discord.channels.errors"]) - file = File(io.BytesIO(err_trace.encode()), "exception.txt") + file = File(io.StringIO(err_trace), "exception.txt") await retry_async(tries=5, delay=5)(channel.send)(err_description, file=file) except Exception: log.exception("Failed to send message. Max retries reached.") From af8e6cd52a670ec386a7a43937277e3e1566a252 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 24 Apr 2025 09:31:50 +0000 Subject: [PATCH 009/279] support template restore function --- rocketwatch/plugins/debug/debug.py | 256 +++++++++++------- rocketwatch/plugins/event_core/event_core.py | 13 +- rocketwatch/plugins/random/random.py | 2 +- .../plugins/support_utils/support_utils.py | 71 ++--- 4 files changed, 196 insertions(+), 146 deletions(-) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 02b6b00c..d09b4c94 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -7,15 +7,15 @@ import humanize import requests from colorama import Fore, Style -from discord import File, Object -from discord.app_commands import Choice, guilds, describe -from discord.ext.commands import is_owner, Cog, hybrid_command, Context +from discord import File, Object, Interaction +from discord.app_commands import Choice, command, guilds, describe +from discord.ext.commands import Cog, is_owner from motor.motor_asyncio import AsyncIOMotorClient from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg -from utils.embeds import el_explorer_url, Embed +from utils.embeds import Embed, el_explorer_url 
from utils.block_time import ts_to_block, block_to_ts from utils.readable import prettify_json_string from utils.rocketpool import rp @@ -50,22 +50,22 @@ async def on_ready(self): # --------- PRIVATE OWNER COMMANDS --------- # - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def raise_exception(self, _: Context): + async def raise_exception(self, interaction: Interaction): """ Raise an exception for testing purposes. """ with open(str(random.random()), "rb"): raise Exception("this should never happen wtf is your filesystem") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def get_members_of_role(self, ctx: Context, guild_id: str, role_id: str): + async def get_members_of_role(self, interaction: Interaction, guild_id: str, role_id: str): """Get members of a role""" - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) try: guild = self.bot.get_guild(int(guild_id)) log.debug(guild) @@ -75,18 +75,18 @@ async def get_members_of_role(self, ctx: Context, guild_id: str, role_id: str): members = [f"{member.name}#{member.discriminator}, ({member.id})" for member in role.members] # generate a file with a header that mentions what role and guild the members are from content = f"Members of {role.name} ({role.id}) in {guild.name} ({guild.id})\n\n" + "\n".join(members) - file = File(io.BytesIO(content.encode()), "members.txt") - await ctx.send(file=file) + file = File(io.StringIO(content), "members.txt") + await interaction.followup.send(file=file) except Exception as err: - await ctx.send(content=f"```{repr(err)}```") + await interaction.followup.send(content=f"```{repr(err)}```") # list all roles of a guild with name and id - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def get_roles(self, ctx: Context, guild_id: str): + async def get_roles(self, interaction: Interaction, guild_id: 
str): """Get roles of a guild""" - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) try: guild = self.bot.get_guild(int(guild_id)) log.debug(guild) @@ -94,129 +94,173 @@ async def get_roles(self, ctx: Context, guild_id: str): roles = [f"{role.name}, ({role.id})" for role in guild.roles] # generate a file with a header that mentions what role and guild the members are from content = f"Roles of {guild.name} ({guild.id})\n\n" + "\n".join(roles) - file = File(io.BytesIO(content.encode()), filename="roles.txt") - await ctx.send(file=file) + file = File(io.StringIO(content), filename="roles.txt") + await interaction.followup.send(file=file) except Exception as err: - await ctx.send(content=f"```{repr(err)}```") + await interaction.followup.send(content=f"```{repr(err)}```") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def delete(self, ctx: Context, message_url: str): + async def delete_msg(self, interaction: Interaction, message_url: str): """ Guess what. It deletes a message. 
""" - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) msg = await channel.fetch_message(int(message_id)) await msg.delete() - await ctx.send(content="Done") + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def decode_tnx(self, ctx: Context, tnx_hash: str, contract_name: str = None): + async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str = None): """ Decode transaction calldata """ - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) tnx = w3.eth.get_transaction(tnx_hash) if contract_name: contract = rp.get_contract_by_name(contract_name) else: contract = rp.get_contract_by_address(tnx.to) data = contract.decode_function_input(tnx.input) - await ctx.send(content=f"```Input:\n{data}```") + await interaction.followup.send(content=f"```Input:\n{data}```") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def debug_transaction(self, ctx: Context, tnx_hash: str): + async def debug_transaction(self, interaction: Interaction, tnx_hash: str): """ Try to return the revert reason of a transaction. 
""" - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) transaction_receipt = w3.eth.getTransaction(tnx_hash) if revert_reason := rp.get_revert_reason(transaction_receipt): - await ctx.send(content=f"```Revert reason: {revert_reason}```") + await interaction.followup.send(content=f"```Revert reason: {revert_reason}```") else: - await ctx.send(content="```No revert reason Available```") + await interaction.followup.send(content="```No revert reason Available```") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def purge_minipools(self, ctx: Context, confirm: bool = False): + async def purge_minipools(self, interaction: Interaction, confirm: bool = False): """ Purge minipool collection, so it can be resynced from scratch in the next update. """ - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) if not confirm: - await ctx.send("Not running. Set `confirm` to `true` to run.") + await interaction.followup.send("Not running. Set `confirm` to `true` to run.") return await self.db.minipools.drop() - await ctx.send(content="Done") + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def purge_minipools_new(self, ctx: Context, confirm: bool = False): + async def purge_minipools_new(self, interaction: Interaction, confirm: bool = False): """ Purge minipools_new collection, so it can be resynced from scratch in the next update. """ - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) if not confirm: - await ctx.send("Not running. Set `confirm` to `true` to run.") + await interaction.followup.send("Not running. 
Set `confirm` to `true` to run.") return await self.db.minipools_new.drop() - await ctx.send(content="Done") + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def sync_commands(self, ctx: Context): + async def sync_commands(self, interaction: Interaction): """ Full sync of the commands tree """ - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) await self.bot.sync_commands() - await ctx.send(content="Done") + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def talk(self, ctx: Context, channel: str, message: str): + async def talk(self, interaction: Interaction, channel: str, message: str): """ Send a message to a channel. """ - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) channel = await self.bot.get_or_fetch_channel(int(channel)) await channel.send(message) - await ctx.send(content="Done", ephemeral=True) + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def announce(self, ctx: Context, channel: str, message: str): + async def announce(self, interaction: Interaction, channel: str, message: str): """ Send a message to a channel. 
""" - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) channel = await self.bot.get_or_fetch_channel(int(channel)) e = Embed(title="Announcement", description=message) e.add_field(name="Timestamp", value=f" ()") await channel.send(embed=e) - await ctx.send(content="Done", ephemeral=True) + await interaction.followup.send(content="Done") - @hybrid_command() + @command() @guilds(Object(id=cfg["discord.owner.server_id"])) @is_owner() - async def restore_missed_events(self, ctx: Context, tx_hash: str): + async def restore_support_template(self, interaction: Interaction, template_name: str, message_url: str): + await interaction.response.defer(ephemeral=True) + channel_id, message_id = message_url.split("/")[-2:] + channel = await self.bot.get_or_fetch_channel(int(channel_id)) + + msg = await channel.fetch_message(int(message_id)) + template_embed = msg.embeds[0] + template_title = template_embed.title + template_description = "\n".join(template_embed.description.splitlines()[:-2]) + + import re + from datetime import datetime, timezone + + edit_line = template_embed.description.splitlines()[-1] + match = re.search(r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line) + user_id = int(match.group("user")) + ts = int(match.group("ts")) + + user = await self.bot.get_or_fetch_user(user_id) + + await self.db.support_bot_dumps.insert_one( + { + "ts" : datetime.fromtimestamp(ts, tz=timezone.utc), + "template": template_name, + "prev" : None, + "new" : { + "title" : template_title, + "description": template_description + }, + "author" : { + "id" : user.id, + "name": user.name + } + } + ) + await self.db.support_bot.insert_one( + {"_id": template_name, "title": template_title, "description": template_description} + ) + + await interaction.followup.send(content="Done") + + @command() + @guilds(Object(id=cfg["discord.owner.server_id"])) + @is_owner() + async def restore_missed_events(self, interaction: Interaction, tx_hash: str): import pickle 
from datetime import datetime from plugins.events.events import Events - await ctx.defer(ephemeral=True) + await interaction.response.defer(ephemeral=True) events_plugin: Events = self.bot.cogs["Events"] @@ -242,40 +286,40 @@ async def restore_missed_events(self, ctx: Context, tx_hash: str): "channel_id": channel_id, "message_id": None }) - await ctx.send(embed=event.embed, ephemeral=True) - await ctx.send(content="Done", ephemeral=True) + await interaction.followup.send(embed=event.embed) + await interaction.followup.send(content="Done") # --------- PUBLIC COMMANDS --------- # - @hybrid_command() - async def color_test(self, ctx: Context): + @command() + async def color_test(self, interaction: Interaction): """ Simple test to check ansi color support """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) payload = "```ansi" for fg_name, fg in Fore.__dict__.items(): if fg_name.endswith("_EX"): continue payload += f"\n{fg}Hello World" payload += f"{Style.RESET_ALL}```" - await ctx.reply(content=payload) + await interaction.followup.reply(content=payload) - @hybrid_command() - async def asian_restaurant_name(self, ctx: Context): + @command() + async def asian_restaurant_name(self, interaction: Interaction): """ - Randomly generated Asian Restaurant Names. + Randomly generated Asian restaurant names """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) a = requests.get("https://www.dotomator.com/api/random_name.json?type=asian").json()["name"] - await ctx.reply(a) + await interaction.followup.reply(a) - @hybrid_command() - async def get_block_by_timestamp(self, ctx: Context, timestamp: int): + @command() + async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): """ - Get a block using a timestamp. Useful for contracts that track blocktime instead of blocknumber. + Get a block using its timestamp. 
Useful for contracts that track block time instead of block number. """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) block = ts_to_block(timestamp) found_ts = block_to_ts(block) @@ -292,51 +336,53 @@ async def get_block_by_timestamp(self, ctx: Context, timestamp: int): f"Block: {block}" ) - await ctx.send(content=f"```{text}```") + await interaction.followup.send(content=f"```{text}```") - @hybrid_command() - async def get_abi_of_contract(self, ctx: Context, contract: str): - """retrieve the latest ABI for a contract""" - await ctx.defer(ephemeral=is_hidden_role_controlled(ctx.interaction)) + @command() + async def get_abi_of_contract(self, interaction: Interaction, contract: str): + """Retrieve the latest ABI for a contract""" + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) try: abi = prettify_json_string(rp.uncached_get_abi_by_name(contract)) - await ctx.send(file=File( - fp=io.BytesIO(abi.encode()), - filename=f"{contract}.{cfg['rocketpool.chain']}.abi.json") - ) + file = File(io.StringIO(abi), f"{contract}.{cfg['rocketpool.chain'].lower()}.abi.json") + await interaction.followup.send(file=file) except Exception as err: - await ctx.send(content=f"```Exception: {repr(err)}```") + await interaction.followup.send(content=f"```Exception: {repr(err)}```") - @hybrid_command() - async def get_address_of_contract(self, ctx: Context, contract: str): - """retrieve the latest address for a contract""" - await ctx.defer(ephemeral=is_hidden_role_controlled(ctx.interaction)) + @command() + async def get_address_of_contract(self, interaction: Interaction, contract: str): + """Retrieve the latest address for a contract""" + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) try: address = cfg["rocketpool.manual_addresses"].get(contract) if not address: address = rp.uncached_get_address_by_name(contract) - await 
ctx.send(content=el_explorer_url(address)) + await interaction.followup.send(content=el_explorer_url(address)) except Exception as err: - await ctx.send(content=f"Exception: ```{repr(err)}```") + await interaction.followup.send(content=f"Exception: ```{repr(err)}```") if "No address found for" in repr(err): # private response as a tip m = "It may be that you are requesting the address of a contract that does not get deployed (`rocketBase` for example), " \ " is deployed multiple times (i.e node operator related contracts, like `rocketNodeDistributor`)," \ " or is not yet deployed on the current chain.\n... Or you simply messed up the name :P" - await ctx.send(content=m, ephemeral=True) - - @hybrid_command() - @describe(json_args="json formatted arguments. example: `[1, \"World\"]`", - block="call against block state") - async def call(self, - ctx: Context, - function: str, - json_args: str = "[]", - block: str = "latest", - address: str = None, - raw_output: bool = False): + await interaction.followup.send(content=m) + + @command() + @describe( + json_args="json formatted arguments. 
example: `[1, \"World\"]`", + block="call against block state" + ) + async def call( + self, + interaction: Interaction, + function: str, + json_args: str = "[]", + block: str = "latest", + address: str = None, + raw_output: bool = False + ): """Call Function of Contract""" - await ctx.defer(ephemeral=is_hidden_role_controlled(ctx.interaction)) + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) # convert block to int if number if block.isnumeric(): block = int(block) @@ -346,7 +392,7 @@ async def call(self, args = [args] v = rp.call(function, *args, block=block, address=w3.toChecksumAddress(address) if address else None) except Exception as err: - await ctx.send(content=f"Exception: ```{repr(err)}```") + await interaction.followup.send(content=f"Exception: ```{repr(err)}```") return try: g = rp.estimate_gas_for_call(function, *args, block=block) @@ -361,21 +407,21 @@ async def call(self, text = f"`block: {block}`\n`gas estimate: {g}`\n`{function}({', '.join([repr(a) for a in args])}): " if len(text + str(v)) > 2000: text += "too long, attached as file`" - await ctx.send(text, file=File(io.BytesIO(str(v).encode()), "exception.txt")) + await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) else: text += f"{str(v)}`" - await ctx.send(content=text) + await interaction.followup.send(content=text) # --------- OTHERS --------- # @get_address_of_contract.autocomplete("contract") @get_abi_of_contract.autocomplete("contract") @decode_tnx.autocomplete("contract_name") - async def match_contract_names(self, ctx: Context, current: str) -> list[Choice[str]]: + async def match_contract_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: return [Choice(name=name, value=name) for name in self.contract_names if current.lower() in name.lower()][:25] @call.autocomplete("function") - async def match_function_name(self, ctx: Context, current: str) -> list[Choice[str]]: + async def 
match_function_name(self, interaction: Interaction, current: str) -> list[Choice[str]]: return [Choice(name=name, value=name) for name in self.function_names if current.lower() in name.lower()][:25] diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 3c29887f..97b0516f 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -136,15 +136,10 @@ async def gather_new_events(self) -> None: log.debug(f"{target_block = }") - try: - with ThreadPoolExecutor() as executor: - loop = asyncio.get_running_loop() - futures = [loop.run_in_executor(executor, gather_fn) for gather_fn in gather_fns] - results = await asyncio.gather(*futures) - except Exception as err: - log.exception("Failed to gather events") - await self.bot.report_error(err) - raise err + with ThreadPoolExecutor() as executor: + loop = asyncio.get_running_loop() + futures = [loop.run_in_executor(executor, gather_fn) for gather_fn in gather_fns] + results = await asyncio.gather(*futures) channels = cfg["discord.channels"] events: list[dict[str, Any]] = [] diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index a87da451..20907acb 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -52,7 +52,7 @@ async def dice(self, ctx: Context, dice_string: str = "1d6"): e.title = f"🎲 {dice_string}" if len(str(result)) >= 2000: e.description = "Result too long to display, attaching as file." 
- file = File(io.BytesIO(str(result).encode()), filename="dice_result.txt") + file = File(io.StringIO(str(result)), filename="dice_result.txt") await ctx.send(embed=e, file=file) else: e.description = f"Result: `{result}`" diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index d111c447..ec24fa56 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -3,7 +3,7 @@ from datetime import datetime, timezone from bson import CodecOptions -from discord import app_commands, Interaction, ui, TextStyle, ButtonStyle, File, User +from discord import app_commands, ui, Interaction, TextStyle, ButtonStyle, File, User from discord.app_commands import Group, Choice, choices from discord.ext.commands import Cog, GroupCog from motor.motor_asyncio import AsyncIOMotorClient @@ -19,7 +19,8 @@ async def generate_template_embed(db, template_name: str): # get the boiler message from the database template = await db.support_bot.find_one({'_id': template_name}) - if not template: return None + if not template: + return None # get the last log entry from the db dumps_col = db.support_bot_dumps.with_options(codec_options=CodecOptions(tz_aware=True)) last_edit = await dumps_col.find_one( @@ -42,9 +43,9 @@ def __init__(self, db: AsyncIOMotorClient, template_name: str): @ui.button(label='Edit', style=ButtonStyle.blurple) async def edit(self, interaction: Interaction, button: ui.Button): - boiler = await self.db.support_bot.find_one({'_id': self.template_name}) + template = await self.db.support_bot.find_one({'_id': self.template_name}) # Make sure to update the message with our update - await interaction.response.send_modal(AdminModal(boiler["title"], boiler["description"], self.db, self.template_name)) + await interaction.response.send_modal(AdminModal(template["title"], template["description"], self.db, self.template_name)) class DeletableView(ui.View): @@ 
-63,8 +64,7 @@ async def delete(self, interaction: Interaction, button: ui.Button): log.warning(f"Support Template Message deleted by {interaction.user} in {interaction.channel}") -class AdminModal(ui.Modal, - title="Change Template Message"): +class AdminModal(ui.Modal, title="Change Template Message"): def __init__(self, old_title, old_description, db, template_name): super().__init__() self.db = db @@ -74,13 +74,15 @@ def __init__(self, old_title, old_description, db, template_name): self.title_field = ui.TextInput( label="Title", placeholder="Enter a title", - default=old_title) + default=old_title + ) self.description_field = ui.TextInput( label="Description", placeholder="Enter a description", default=old_description, style=TextStyle.paragraph, - max_length=4000) + max_length=4000 + ) self.add_item(self.title_field) self.add_item(self.description_field) @@ -100,7 +102,7 @@ async def on_submit(self, interaction: Interaction) -> None: ) ) a = await interaction.original_response() - file = File(io.BytesIO(self.description_field.value.encode()), "pending_description_dump.txt") + file = File(io.StringIO(self.description_field.value), f"{self.title_field.value}.txt") await a.add_files(file) return @@ -124,12 +126,14 @@ async def on_submit(self, interaction: Interaction) -> None: await self.db.support_bot.update_one( {"_id": self.template_name}, - {"$set": {"title": self.title_field.value, "description": self.description_field.value}}) - embeds = [Embed(), await generate_template_embed(self.db, self.template_name)] - embeds[0].title = f"View & Edit {self.template_name} template" - embeds[0].description = f"The following is a preview of the {self.template_name} template.\n" \ - f"You can edit this template by clicking the 'Edit' button." 
- await interaction.response.edit_message(embeds=embeds, view=AdminView(self.db, self.template_name)) + {"$set": {"title": self.title_field.value, "description": self.description_field.value}} + ) + content = ( + f"This is a preview of the '{self.template_name}' template.\n" + f"You can change it using the 'Edit' button." + ) + embed = await generate_template_embed(self.db, self.template_name) + await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) def has_perms(interaction: Interaction, template_name): @@ -138,7 +142,7 @@ def has_perms(interaction: Interaction, template_name): return any([ any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), cfg["discord.owner.user_id"] == interaction.user.id, - interaction.user.guild_permissions.ban_members and interaction.guild.id == cfg["rocketpool.support.server_id"] + interaction.user.guild_permissions.moderate_members and interaction.guild.id == cfg["rocketpool.support.server_id"] ]) @@ -213,8 +217,11 @@ async def match_template(self, interaction: Interaction, current: str): class SupportUtils(GroupCog, name="support"): - subgroup = Group(name='template', description='various templates used by active support members', - guild_ids=[cfg["rocketpool.support.server_id"]]) + subgroup = Group( + name='template', + description='various templates used by active support members', + guild_ids=[cfg["rocketpool.support.server_id"]] + ) def __init__(self, bot: RocketWatch): self.bot = bot @@ -250,12 +257,14 @@ async def add(self, interaction: Interaction, name: str): return # create the template in the db await self.db.support_bot.insert_one( - {"_id": name, "title": "Insert Title here", "description": "Insert Description here"}) - embeds = [Embed(), await generate_template_embed(self.db, name)] - embeds[0].title = f"View & Edit {name} template" - embeds[0].description = f"The following is a preview of the {name} template.\n" \ - f"You can edit this 
template by clicking the 'Edit' button." - await interaction.edit_original_response(embeds=embeds, view=AdminView(self.db, name)) + {"_id": name, "title": "Insert Title here", "description": "Insert Description here"} + ) + content = ( + f"This is a preview of the '{name}' template.\n" + f"You can change it using the 'Edit' button." + ) + embed = await generate_template_embed(self.db, name) + await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @subgroup.command() async def edit(self, interaction: Interaction, name: str): @@ -275,12 +284,13 @@ async def edit(self, interaction: Interaction, name: str): ), ) return - embeds = [Embed(), await generate_template_embed(self.db, name)] - embeds[0].title = f"View & Edit {name} template" - embeds[0].description = f"The following is a preview of the {name} template.\n" \ - f"You can edit this template by clicking the 'Edit' button." - # respond with the edit view - await interaction.edit_original_response(embeds=embeds, view=AdminView(self.db, name)) + + content = ( + f"This is a preview of the '{name}' template.\n" + f"You can change it using the 'Edit' button." 
+ ) + embed = await generate_template_embed(self.db, name) + await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @subgroup.command() async def remove(self, interaction: Interaction, name: str): @@ -334,7 +344,6 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): "foreignField": "template", "as": "dump" } - }, { "$project": { From 04989e1bd3fc0a598ad87a4979b59a986e00224f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 24 Apr 2025 09:34:23 +0000 Subject: [PATCH 010/279] punctuation --- rocketwatch/rocketwatch.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 82afd2e4..976ce7c5 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -103,11 +103,11 @@ async def on_ready(self): async def on_command_error(self, ctx: Context, error: Exception) -> None: log.error(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) failed") if isinstance(error, commands.errors.MaxConcurrencyReached): - msg = "Someone else is already using this command. Please try again later" + msg = "Someone else is already using this command. Please try again later." elif isinstance(error, app_commands.errors.CommandOnCooldown): - msg = f"Slow down! You are using this command too fast. Please try again in {error.retry_after:.0f} seconds" + msg = f"Slow down! You are using this command too fast. Please try again in {error.retry_after:.0f} seconds." else: - msg = "An unexpected error occurred and has been reported to the developer. Please try again later" + msg = "An unexpected error occurred and has been reported to the developer. Please try again later." 
try: await self.report_error(error, ctx) From a1950b333ce8e314643db8f690371d8ce76d3c4f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 24 Apr 2025 13:35:39 +0000 Subject: [PATCH 011/279] update emotes --- rocketwatch/plugins/random/random.py | 12 ++++--- .../plugins/support_utils/support_utils.py | 32 ++++++++----------- rocketwatch/rocketwatch.py | 4 ++- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 20907acb..a56f55ac 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -70,14 +70,15 @@ async def burn_reason(self, ctx: Context): e = Embed() e.set_author(name="🔗 Data from ultrasound.money", url="https://ultrasound.money") - description = "**Eth Burned:**\n```" + description = "**ETH Burned:**\n```" feesburned = data["feesBurned"] for span in ["5m", "1h", "24h"]: k = f"feesBurned{span}" description += f"Last {span}: {solidity.to_float(feesburned[k]):,.2f} ETH ({feesburned[f'{k}Usd']:,.2f} USDC)\n" description += "```\n" - description += "**Burn Ranking (last 5 minutes):**\n" + description += "**Burn Ranking (last 5 minutes)**\n" ranking = data["leaderboards"]["leaderboard5m"][:5] + for i, entry in enumerate(ranking): # use a number emoji as rank (:one:, :two:, ...) 
# first of convert the number to a word @@ -92,9 +93,12 @@ async def burn_reason(self, ctx: Context): description += f" {target}" if entry.get("category"): description += f" `[{entry['category'].upper()}]`" - description += f"\n<:VOID:721787344138797116>`{solidity.to_float(entry['fees']):,.2f} ETH` :fire:\n" + + description += "\n\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0" + description += f"`{solidity.to_float(entry['fees']):,.2f} ETH` :fire:\n" + e.add_field( - name="Current Base Fee:", + name="Current Base Fee", value=f"`{solidity.to_float(data['latestBlockFees'][0]['baseFeePerGas'], 9):,.2f} GWEI`" ) e.description = description diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index ec24fa56..9bcacc73 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -49,19 +49,15 @@ async def edit(self, interaction: Interaction, button: ui.Button): class DeletableView(ui.View): - def __init__(self, template_name: str): - super().__init__() - self.template_name = template_name - - @ui.button(emoji='<:deletethis:1168673165489213551>', style=ButtonStyle.secondary) + def __init__(self, user: User): + super().__init__(timeout=None) + self.user = user + + @ui.button(emoji="<:delete:1364953621191721002>", style=ButtonStyle.gray) async def delete(self, interaction: Interaction, button: ui.Button): - # check if the user has perms - if not has_perms(interaction, self.template_name): - return - # delete the message - await interaction.message.delete() - # log deletion - log.warning(f"Support Template Message deleted by {interaction.user} in {interaction.channel}") + if (interaction.user == self.user) or has_perms(interaction): + await interaction.message.delete() + log.warning(f"Support template deleted by {interaction.user} in {interaction.channel}") class AdminModal(ui.Modal, title="Change Template Message"): @@ -136,9 +132,7 @@ async def 
on_submit(self, interaction: Interaction) -> None: await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) -def has_perms(interaction: Interaction, template_name): - if template_name == "announcement" and cfg["discord.owner.user_id"] != interaction.user.id: - return False +def has_perms(interaction: Interaction): return any([ any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), cfg["discord.owner.user_id"] == interaction.user.id, @@ -172,7 +166,7 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): await interaction.response.send_message( content=mention.mention if mention else "", embed=e, - view=DeletableView(name) + view=DeletableView(interaction.user) ) else: await interaction.response.send_message( @@ -241,7 +235,7 @@ async def on_ready(self): @subgroup.command() async def add(self, interaction: Interaction, name: str): - if not has_perms(interaction, name): + if not has_perms(interaction): await interaction.response.send_message( embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) return @@ -268,7 +262,7 @@ async def add(self, interaction: Interaction, name: str): @subgroup.command() async def edit(self, interaction: Interaction, name: str): - if not has_perms(interaction, name): + if not has_perms(interaction): await interaction.response.send_message( embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) return @@ -294,7 +288,7 @@ async def edit(self, interaction: Interaction, name: str): @subgroup.command() async def remove(self, interaction: Interaction, name: str): - if not has_perms(interaction, name): + if not has_perms(interaction): await interaction.response.send_message( embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) return diff --git a/rocketwatch/rocketwatch.py 
b/rocketwatch/rocketwatch.py index 976ce7c5..abd7f857 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -126,14 +126,16 @@ async def get_or_fetch_user(self, user_id: int) -> User: async def report_error(self, exception: Exception, ctx: Optional[Context] = None, *args) -> None: err_description = f"`{repr(exception)[:150]}`" + if args: args_fmt = "\n".join(f"args[{i}] = {arg}" for i, arg in enumerate(args)) err_description += f"\n```{args_fmt}```" + if ctx: err_description += ( f"\n```" f"{ctx.command.name = }\n" - f"{ctx.command.params = }\n" + f"ctx.command.params = {getattr(ctx.command, 'params')}\n" f"{ctx.channel = }\n" f"{ctx.author = }" f"```" From ccbb0f693463d500b2b4e6b874243abccfa20102 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 25 Apr 2025 17:44:51 +0000 Subject: [PATCH 012/279] add backoff to beaconcha.in API retry --- rocketwatch/plugins/beacon_events/beacon_events.py | 4 ++-- rocketwatch/utils/retry.py | 12 ++++++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index e5005e72..53879f43 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -119,7 +119,7 @@ def _get_slashings(self, beacon_block: dict) -> list[Event]: return events - @retry(tries=3, delay=10) + @retry(tries=5, delay=10, backoff=2, max_delay=30) def _get_proposal(self, beacon_block: dict) -> Optional[Event]: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block @@ -170,7 +170,7 @@ def _get_proposal(self, beacon_block: dict) -> Optional[Event]: "minipool": minipool["address"], "slot": int(beacon_block["slot"]), "reward_amount": block_reward_eth, - "timestamp": timestamp, + "timestamp": timestamp } if eth_utils.is_same_address(fee_recipient, rp.get_address_by_name("rocketSmoothingPool")): diff --git 
a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index ab9f31a6..7056ab9f 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -9,14 +9,18 @@ def retry( exceptions: EXCEPTIONS = Exception, *, tries: int = -1, - delay: float = 0 + delay: float = 0, + max_delay: float = None, + backoff: float = 1 ) -> Callable[..., Any]: - return __retry(exceptions, is_async=False, tries=tries, delay=delay) + return __retry(exceptions, is_async=False, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) def retry_async( exceptions: EXCEPTIONS = Exception, *, tries: int = -1, - delay: float = 0 + delay: float = 0, + max_delay: float = None, + backoff: float = 1 ) -> Callable[..., Any]: - return __retry(exceptions, is_async=True, tries=tries, delay=delay) + return __retry(exceptions, is_async=True, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) From 5c793d71f6bb32d05358b746ad9455f6a6ad9058 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:16:48 +0000 Subject: [PATCH 013/279] remove sleep module --- rocketwatch/main.cfg.sample | 2 +- rocketwatch/plugins/sleep/sleep.py | 438 ----------------------------- 2 files changed, 1 insertion(+), 439 deletions(-) delete mode 100644 rocketwatch/plugins/sleep/sleep.py diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index e65da708..add70996 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -66,7 +66,7 @@ rocketpool: { } modules: { include: [] - exclude: ["sleep"] + exclude: [] enable_commands: true } events: { diff --git a/rocketwatch/plugins/sleep/sleep.py b/rocketwatch/plugins/sleep/sleep.py deleted file mode 100644 index fcae7663..00000000 --- a/rocketwatch/plugins/sleep/sleep.py +++ /dev/null @@ -1,438 +0,0 @@ -import datetime -import logging -from io import BytesIO - -import matplotlib.colors as mcolors -import matplotlib.pyplot as plt -import pytz -import requests -from discord import 
File -from discord.ext import commands -from discord.ext.commands import Context, hybrid_command -from homeassistant_api import Client -from matplotlib import dates - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed -from utils.visibility import is_hidden - -log = logging.getLogger("sleep") -log.setLevel(cfg["log_level"]) - - -def get_color_hsv(value): - # Ensure the value is within [0, 1] - value -= 0.5 - value *= 2 - value = max(0, min(1, value)) - - # Map the value to the hue in HSV (red to green) - hue = value / 3 # Red is at 0, green is at 1/3 in HSV space - color_hsv = (hue, 1, 0.8) # Full saturation and value - - # Convert HSV to RGB - return mcolors.hsv_to_rgb(color_hsv) - -class Oura(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.calendar_url = cfg["oura.calendar_url"] - - @hybrid_command() - async def sleep_schedule(self, ctx: Context): - await ctx.defer(ephemeral=is_hidden(ctx)) - e = Embed(title="Invis's Sleep Schedule") - current_date = datetime.datetime.now() - tz = pytz.timezone("Europe/Vienna") - start_date = current_date - datetime.timedelta(days=150) - # make start date timezone aware - start_date = tz.localize(start_date) - end_date = current_date - # make end date timezone aware - end_date = tz.localize(end_date) - res = requests.get("https://api.ouraring.com/v2/usercollection/sleep", - params={"start_date": start_date.strftime("%Y-%m-%d"), - "end_date" : (end_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")}, - headers={"Authorization": f"Bearer {cfg['oura.secret']}"}) - if res.status_code != 200: - e.description = "Error fetching sleep data" - await ctx.send(embed=e) - return - data = res.json() - if len(data["data"]) == 0: - e.description = "No sleep data found" - await ctx.send(embed=e) - return - - res2 = requests.get("https://api.ouraring.com/v2/usercollection/daily_sleep", - params={"start_date": start_date.strftime("%Y-%m-%d"), - "end_date" : (end_date 
+ datetime.timedelta(days=1)).strftime("%Y-%m-%d")}, - headers={"Authorization": f"Bearer {cfg['oura.secret']}"}) - if res2.status_code != 200: - e.description = "Error fetching sleep data" - await ctx.send(embed=e) - return - data2 = res2.json() - score_mapping = dict() - if len(data2["data"]) != 0: - for kasd in data2["data"]: - score_mapping[kasd["day"]] = kasd["score"] - daily_sleep = { - (start_date + datetime.timedelta(days=i)).strftime("%Y-%m-%d"): [] - for i in range((end_date - start_date).days + 1)} - - for sleep in reversed(data["data"]): - if sleep["type"] == "rest": - continue - # skip if sleep_duration is less than 30 minutes. units are in seconds - if sleep["total_sleep_duration"] < 30 * 60: - continue - score = score_mapping.get(sleep["day"]) - hr = sleep["lowest_heart_rate"] - hrv = sleep["average_hrv"] - temperature = sleep["readiness"]["temperature_deviation"] - sd = datetime.datetime.fromisoformat(sleep["bedtime_start"]).astimezone(tz=tz) - log.info(f"start date: {sd}") - # the start day is the next day if we are past 12pm, otherwise it is the current day - start_day_r = sd + datetime.timedelta(days=1) if sd.hour >= 18 else sd - # format to string - start_day = start_day_r.strftime("%Y-%m-%d") - ed = datetime.datetime.fromisoformat(sleep["bedtime_end"]).astimezone(tz=tz) - log.info(f"end date: {ed}") - # the end day is the next day if we are past 12pm, otherwise it is the current day - ed_r = ed + datetime.timedelta(days=1) if ed.hour >= 18 else ed - # format to string - end_day = ed_r.strftime("%Y-%m-%d") - thresh = datetime.datetime(year=ed.year, month=ed.month, day=ed.day, hour=18, - tzinfo=ed.tzinfo) - # Define virtual day start at 18:00 - virtual_day_start = datetime.datetime.combine(sd.date(), datetime.time(18, 0)) - virtual_day_start = tz.localize(virtual_day_start) - if sd < virtual_day_start: - virtual_day_start -= datetime.timedelta(days=1) - virtual_day_end = virtual_day_start + datetime.timedelta(days=1) - start_day = 
virtual_day_start.strftime("%Y-%m-%d") - if start_day not in daily_sleep: - daily_sleep[start_day] = [] - if end_day not in daily_sleep: - daily_sleep[end_day] = [] - # weekday based on start date - weekday = datetime.datetime.fromisoformat(start_day).weekday() - stats = sleep["sleep_phase_5_min"] - # Initialize sleep segments - sleep_segments = [] - - total_duration = ed - sd - total_seconds = total_duration.total_seconds() - cumulative_seconds = 0 - - # Check for overflow into previous virtual day - if sd < virtual_day_start: - dur_prev = virtual_day_start - sd - if dur_prev.total_seconds() > 0: - stats_prev_len = int(len(stats) * (dur_prev.total_seconds() / total_seconds)) - stats_prev = stats[:stats_prev_len] - prev_day = (virtual_day_start - datetime.timedelta(days=1)).strftime("%Y-%m-%d") - if prev_day not in daily_sleep: - daily_sleep[prev_day] = [] - daily_sleep[prev_day].append({ - "relative_start": sd - (virtual_day_start - datetime.timedelta(days=1)), - "duration": dur_prev, - "weekday": weekday, - "sleep_stats": stats_prev, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - stats = stats[stats_prev_len:] # Remove used stats - cumulative_seconds += dur_prev.total_seconds() - sd = virtual_day_start # Adjust start time - - # Now compute duration in current virtual day - dur_current = min(ed, virtual_day_end) - sd - if dur_current.total_seconds() > 0: - stats_current_len = int(len(stats) * (dur_current.total_seconds() / (total_seconds - cumulative_seconds))) - stats_current = stats[:stats_current_len] - if start_day not in daily_sleep: - daily_sleep[start_day] = [] - daily_sleep[start_day].append({ - "relative_start": sd - virtual_day_start, - "duration": dur_current, - "weekday": weekday, - "sleep_stats": stats_current, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - stats = stats[stats_current_len:] - cumulative_seconds += dur_current.total_seconds() - sd = virtual_day_end # Adjust start 
time - -# Check for overflow into next virtual day - if ed > virtual_day_end: - dur_next = ed - virtual_day_end - if dur_next.total_seconds() > 0: - stats_next = stats # Remaining stats - next_day = virtual_day_end.strftime("%Y-%m-%d") - if next_day not in daily_sleep: - daily_sleep[next_day] = [] - daily_sleep[next_day].append({ - "relative_start": datetime.timedelta(), - "duration": dur_next, - "weekday": weekday, - "sleep_stats": stats_next, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - # sort by date - daily_sleep = dict(sorted(daily_sleep.items(), key=lambda x: x[0])) - # plot, one large plot that has the sleep data and a small thin plot that shows the hr&hrv data below - fig, ax = plt.subplots(3, 1, figsize=(15, 10), gridspec_kw={'height_ratios': [6, 1, 1]}, sharex=True) - # dark mode - # create horizontal dark gray line at midnight and noon - ax[0].axhline(y=18, color="#808080", linewidth=1) - ax[0].axhline(y=18 - 12, color="#808080", linewidth=1) - calendar_data = await self.get_calendar_data() - # render calendar data if they are within the last 180 days - if calendar_data is not None: - for day, data in calendar_data.items(): - for d in data: - bottom = ((24 * 60 * 60) - d[ - "relative_start"].total_seconds() - d["duration"].total_seconds()) / 3600 - width = d["duration"].total_seconds() / 3600 - try: - i = list(daily_sleep.keys()).index(day) - except ValueError: - continue - ax[0].bar(i, width, bottom=bottom, color="#AAAAAA", width=1, alpha=0.25) - for i, (day, sleeps) in reversed(list(enumerate(daily_sleep.items()))): - for sleep in sleeps: - color = "gray" - if sleep["readiness"] is not None: - color = get_color_hsv(sleep["readiness"] / 100) - bottom = ((24 * 60 * 60) - sleep[ - "relative_start"].total_seconds() - sleep["duration"].total_seconds()) / 3600 - width = sleep["duration"].total_seconds() / 3600 - current_bottom = bottom + width - for state in sleep["sleep_stats"]: - current_bottom -= (width / 
len(sleep["sleep_stats"])) - w = 0.9 - match int(state): - case 4: - w = 0.4 - case 3: - w = 0.4 - case 2: - w = 0.9 - case 1: - w = 0.9 - ax[0].bar(i, width / len(sleep["sleep_stats"]), bottom=current_bottom, color=color, - alpha=0.2 if state == "4" else 1, width=w) - # set x axis labels, only every 7th day - ax[0].set_xticks(range(len(daily_sleep) - 1, 0, -14)) - ax[0].set_xticklabels([day[2:] for i, (day, _) in enumerate(reversed(daily_sleep.items())) if i % 14 == 0]) - # set y axis labels - ax[0].set_yticks(range(0, 25, 2)) - ax[0].set_yticklabels([f"{i}:00" if i >= 0 else f"{24 + i}:00" for i in range(18, -7, -2)]) - # set y limit - ax[0].set_ylim(0, 24) - # set x limit - ax[0].set_xlim(0, len(daily_sleep)) - # grid - ax[0].grid(True) - ax[0].set_axisbelow(True) - # set title - ax[0].set_title("Invis's Sleep Schedule") - - x = [] - y_hr = [] - y_hrv = [] - y_temperature = [] - for i, (day, sleeps) in reversed(list(enumerate(daily_sleep.items()))): - if len(sleeps) == 0: - continue - min_hr = min(sleep["hr"] for sleep in sleeps) - min_hrv = min(sleep["hrv"] for sleep in sleeps) - s = list(sleep["temperature"] for sleep in sleeps if (sleep["temperature"] != [] and sleep["temperature"] is not None)) - max_temperature = max(s) if s else 0 - x.append(i) - y_hr.append(min_hr) - y_hrv.append(min_hrv) - y_temperature.append(max_temperature) - # fill the area between the the line and zero - #ax[1].plot(x, y_temperature, color="gray", alpha=0.7) - # draw as bars instead of line, red if positive, blue if negative - ax[1].bar(x, y_temperature, color=["red" if i >= 0 else "blue" for i in y_temperature], alpha=0.5) - # show x.5 ticks on y axis for axis 1 - min_t, max_t = min(y_temperature), max(y_temperature) - # we want 4 ticks evenly spaecd between min and max - ax[1].set_yticks([round(min_t + (max_t - min_t) / 3 * i,2) for i in range(4)]) - # blue area, negative - #ax[1].fill_between(x, y_temperature, color="blue", alpha=0.25, where=[i <= 0 for i in y_temperature], 
interpolate=True) - # red area, positive - #ax[1].fill_between(x, y_temperature, color="red", alpha=0.25, where=[i >= 0 for i in y_temperature], interpolate=True) - ax[2].plot(x, y_hr, color="black", alpha=0.7) - min_hr, max_hr = min(y_hr), max(y_hr) - ax[2].set_yticks([round(min_hr + (max_hr - min_hr) / 3 * i,0) for i in range(4)]) - # add y axis label - ax[2].set_ylabel("HR") - ax3 = ax[2].twinx() - # add y axis label - ax3.set_ylabel("HRV") - ax3.plot(x, y_hrv, color="green", alpha=0.7) - min_hrv, max_hrv = min(y_hrv), max(y_hrv) - ax3.set_yticks([round(min_hrv + (max_hrv - min_hrv) / 3 * i,0) for i in range(4)]) - ax[2].legend(["HR"], loc="lower left") - ax3.legend(["HRV"], loc="upper left") - ax[1].legend(["Temperature"], loc="upper left") - # make axis 2 right side axis color green with 0.7 alpha - # unit °C on y axis - - # reduce padding - plt.tight_layout() - - img = BytesIO() - fig.savefig(img, format='png', dpi=250) - img.seek(0) - plt.close() - - e.set_image(url="attachment://sleep.png") - buf = File(img, filename="sleep.png") - # send image - await ctx.send(file=buf, embed=e) - - @hybrid_command() - async def temperature(self, ctx: Context): - await ctx.defer(ephemeral=is_hidden(ctx)) - client = Client(cfg["homeassistant.url"], cfg["homeassistant.token"], use_async=True) - entity = await client.async_get_entity(entity_id="sensor.aranet_4_home_temperature") - temp = client.async_get_entity_histories( - entities=[entity], - start_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - datetime.timedelta(days=7), - end_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - ) - e = Embed(title="Indoor Temperature Chart") - # plot - with plt.rc_context({'font.size': 24}): - fig, ax = plt.subplots(figsize=(15, 10)) - x = [] - y = [] - async for entity in temp: - for state in entity.states: - try: - f = float(state.state) - except ValueError: - continue - 
x.append(state.last_updated.astimezone(pytz.timezone("Europe/Vienna"))) - y.append(f) - # make line thicker - ax.plot(x, y, linewidth=4) - #ax.set_ylabel("Temperature") - #ax.set_xlabel("Time") - # temp range 15-35°C - ax.set_ylim(15, 35) - ax.grid() - # set x_axis min to x[0] but leave max to None - ax.set_xlim(x[0], None) - # format x axis as DD.MM HH:MM - ax.xaxis.set_major_formatter( - dates.ConciseDateFormatter(ax.xaxis.get_major_locator())) - # format y axis as °C - ax.yaxis.set_major_formatter('{x:.0f}°C') - # get the color that was used to plot the line - color = ax.get_lines()[0].get_color() - # add a big hollow point at the latest point - ax.scatter(x[-1], y[-1], s=1000, facecolor='none', edgecolor="black", linewidth=4, alpha=0.3) - # add a vertical line at the latest point - ax.axvline(x[-1], color="black", linewidth=2, linestyle="--", alpha=0.3) - # reduce padding - plt.tight_layout() - img = BytesIO() - fig.savefig(img, format='png', dpi=100) - img.seek(0) - plt.close() - e.set_image(url="attachment://temperature.png") - buf = File(img, filename="temperature.png") - e.description = f"{y[-1]} °C (as of )" - # send image - await ctx.send(file=buf, embed=e) - - - # replace get_calendar_data with home assistant variant - async def get_calendar_data(self): - return None - client = Client(cfg["homeassistant.url"], cfg["homeassistant.token"], use_async=True) - work_periods = [] - last_state = None - async for zone in client.async_get_logbook_entries( - filter_entities="device_tracker.pixel_8_pro_2", - start_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - datetime.timedelta(days=150), - end_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - ): - state = zone.state - when = round_minute(zone.when, 10).astimezone(pytz.timezone("Europe/Vienna")) - if state == "work" and last_state != "work": - if work_periods and when - work_periods[-1]["end"] < datetime.timedelta(minutes=30): - 
work_periods[-1]["end"] = when - else: - work_periods.append({"start": when, "end": when}) - elif last_state == "work": - work_periods[-1]["end"] = when - last_state = state - print(f"zone changed to {last_state} at {when}") - # it has to have the same format as the old get_calendar_data - d = {} - for period in work_periods: - sd = period["start"].astimezone( - pytz.timezone("Europe/Vienna")) - sd_r = sd + datetime.timedelta(days=1) if sd > sd.replace(hour=18, minute=0, second=0) else sd - start_day = sd_r.strftime("%Y-%m-%d") - ed = period["end"].astimezone( - pytz.timezone("Europe/Vienna")) - ed_r = ed + datetime.timedelta(days=1) if ed > ed.replace(hour=18, minute=0, second=0) else ed - end_day = ed_r.strftime("%Y-%m-%d") - - if start_day not in d: - d[start_day] = [] - if end_day not in d: - d[end_day] = [] - thresh = datetime.datetime(year=period["end"].year, month=period["end"].month, day=period["end"].day, hour=18, - tzinfo=period["end"].tzinfo) - if start_day != end_day: - dur_first = thresh - period["start"] - if dur_first >= datetime.timedelta(hours=24): - dur_first -= datetime.timedelta(hours=24) - dur_second = period["end"] - thresh - if dur_second <= datetime.timedelta(hours=0): - dur_second += datetime.timedelta(hours=24) - total_dur = dur_first + dur_second - # split stats into two parts based on duration of each part - d[start_day].append( - {"relative_start": period["start"] - (thresh - datetime.timedelta(days=1)), "duration": dur_first}) - d[end_day].append( - {"relative_start": datetime.timedelta(), "duration": dur_second}) - else: - relative_start = period["start"] - (thresh - datetime.timedelta(days=1)) - if relative_start >= datetime.timedelta(hours=24): - relative_start -= datetime.timedelta(hours=24) - d[start_day].append( - {"relative_start": relative_start, "duration": period["end"] - period["start"]}) - return d - -def round_minute(date: datetime = None, round_to: int = 1): - """ - round datetime object to minutes - """ - if not date: - 
date = datetime.datetime.now() - minute = round(date.minute / round_to) * round_to - date = date.replace(minute=0, second=0, microsecond=0) - return date + datetime.timedelta(minutes=minute) - -async def setup(bot): - await bot.add_cog(Oura(bot)) From 3c77563b0c7e5ca3fada5c71bb2fb055f912e6f2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:17:45 +0000 Subject: [PATCH 014/279] remove boiler template --- .../plugins/support_utils/support_utils.py | 73 ++++--------------- 1 file changed, 15 insertions(+), 58 deletions(-) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 9bcacc73..13f3626d 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -17,8 +17,7 @@ async def generate_template_embed(db, template_name: str): - # get the boiler message from the database - template = await db.support_bot.find_one({'_id': template_name}) + template = await db.support_bot.find_one({"_id": template_name}) if not template: return None # get the last log entry from the db @@ -27,8 +26,7 @@ async def generate_template_embed(db, template_name: str): {"template": template_name}, sort=[("ts", -1)] ) - - e = Embed(title=template['title'], description=template['description']) + e = Embed(title=template["title"], description=template["description"]) if last_edit and template_name != "announcement": e.description += f"\n\n*Last Edited by <@{last_edit['author']['id']}> *" return e @@ -57,7 +55,7 @@ def __init__(self, user: User): async def delete(self, interaction: Interaction, button: ui.Button): if (interaction.user == self.user) or has_perms(interaction): await interaction.message.delete() - log.warning(f"Support template deleted by {interaction.user} in {interaction.channel}") + log.warning(f"Support template message deleted by {interaction.user} in {interaction.channel}") class AdminModal(ui.Modal, 
title="Change Template Message"): @@ -125,8 +123,8 @@ async def on_submit(self, interaction: Interaction) -> None: {"$set": {"title": self.title_field.value, "description": self.description_field.value}} ) content = ( - f"This is a preview of the '{self.template_name}' template.\n" - f"You can change it using the 'Edit' button." + f"This is a preview of the `{self.template_name}` template.\n" + f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.db, self.template_name) await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) @@ -152,15 +150,7 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): ephemeral=True ) return - if name == "boiler": - await interaction.response.send_message( - embed=Embed( - title="Error", - description=f"The template '{name}' cannot be used." - ), - ephemeral=True - ) - return + # respond with the template embed if e := (await generate_template_embed(db, name)): await interaction.response.send_message( @@ -184,28 +174,16 @@ def __init__(self, bot: RocketWatch): self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch @app_commands.command(name="use") - async def _use_1(self, interaction: Interaction, name: str, mention: User | None): - await _use(self.db, interaction, name, mention) - - @app_commands.command(name="template") - async def _use_2(self, interaction: Interaction, name: str, mention: User | None): + async def _use(self, interaction: Interaction, name: str, mention: User | None): await _use(self.db, interaction, name, mention) - @_use_1.autocomplete("name") - @_use_2.autocomplete("name") + @_use.autocomplete("name") async def match_template(self, interaction: Interaction, current: str): return [ Choice( - name=c["_id"], - value=c["_id"] + name=c["_id"], value=c["_id"] ) for c in await self.db.support_bot.find( - { - "_id": { - "$regex": current, - "$options": "i", - "$ne" : "boiler" if 
interaction.command.name != "edit" else None - } - } + {"_id": {"$regex": current, "$options": "i"}} ).to_list(25) ] @@ -221,18 +199,6 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch - @Cog.listener() - async def on_ready(self): - # insert the boiler message into the database, if it doesn't already exist - await self.db.support_bot.update_one( - {'_id': 'boiler'}, - {'$setOnInsert': { - 'title' : 'Support Message', - 'description': 'This is a support message.' - }}, - upsert=True - ) - @subgroup.command() async def add(self, interaction: Interaction, name: str): if not has_perms(interaction): @@ -254,8 +220,8 @@ async def add(self, interaction: Interaction, name: str): {"_id": name, "title": "Insert Title here", "description": "Insert Description here"} ) content = ( - f"This is a preview of the '{name}' template.\n" - f"You can change it using the 'Edit' button." + f"This is a preview of the `{name}` template.\n" + f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.db, name) await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @@ -280,8 +246,8 @@ async def edit(self, interaction: Interaction, name: str): return content = ( - f"This is a preview of the '{name}' template.\n" - f"You can change it using the 'Edit' button." + f"This is a preview of the `{name}` template.\n" + f"You can change it using the `Edit` button." 
) embed = await generate_template_embed(self.db, name) await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @@ -292,14 +258,6 @@ async def remove(self, interaction: Interaction, name: str): await interaction.response.send_message( embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) return - if name == "boiler": - await interaction.edit_original_response( - embed=Embed( - title="Error", - description=f"The template '{name}' cannot be removed." - ), - ) - return await interaction.response.defer(ephemeral=True) # check if the template exists in the db template = await self.db.support_bot.find_one({"_id": name}) @@ -379,8 +337,7 @@ async def match_template(self, interaction: Interaction, current: str): { "_id": { "$regex": current, - "$options": "i", - "$ne" : "boiler" if interaction.command.name != "edit" else None + "$options": "i" } } ).to_list(25) From 767bd6b3f1a82692ee2a3a1b042f48b9428756bc Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:18:02 +0000 Subject: [PATCH 015/279] clean up upkeep tasks --- .../plugins/minipool_task/minipool_task.py | 13 +-- .../minipools_upkeep_task.py | 31 +++--- rocketwatch/plugins/node_task/node_task.py | 94 +++++++------------ 3 files changed, 53 insertions(+), 85 deletions(-) diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py index 0fd4fd32..6c56161e 100644 --- a/rocketwatch/plugins/minipool_task/minipool_task.py +++ b/rocketwatch/plugins/minipool_task/minipool_task.py @@ -57,12 +57,11 @@ async def before_loop(self): def get_untracked_minipools(self) -> set[ChecksumAddress]: minipool_count = rp.call("rocketMinipoolManager.getMinipoolCount") minipool_addresses = [] - for i in range(0, minipool_count, self.batch_size): - log.debug(f"getting minipool addresses for {i}/{minipool_count}") - i_end = min(i + 
self.batch_size, minipool_count) + for index_batch in as_chunks(range(minipool_count), self.batch_size): minipool_addresses += [ w3.toChecksumAddress(r.results[0]) for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolAt(i) for i in range(i, i_end)).results] + self.minipool_manager.functions.getMinipoolAt(i) for i in index_batch).results + ] # remove address that are already in the minipool collection tracked_addresses = self.db.minipools.distinct("address") return set(minipool_addresses) - set(tracked_addresses) @@ -72,12 +71,10 @@ def get_public_keys(self, addresses): # optimizing this doesn't seem to help much, so keep it simple for readability # batch the same way as get_untracked_minipools minipool_pubkeys = [] - for i in range(0, len(addresses), self.batch_size): - log.debug(f"getting minipool pubkeys for {i}/{len(addresses)}") - i_end = min(i + self.batch_size, len(addresses)) + for address_batch in as_chunks(addresses, self.batch_size): minipool_pubkeys += [ f"0x{r.results[0].hex()}" for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolPubkey(a) for a in addresses[i:i_end]).results] + self.minipool_manager.functions.getMinipoolPubkey(a) for a in address_batch).results] return minipool_pubkeys @timerun diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 246177e5..169d2975 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -1,13 +1,15 @@ -import asyncio import logging -from concurrent.futures import ThreadPoolExecutor import pymongo -from discord.ext import commands, tasks -from discord.ext.commands import hybrid_command + from motor.motor_asyncio import AsyncIOMotorClient from multicall import Call +from discord import Interaction +from discord.ext import commands, tasks +from discord.app_commands import 
command +from discord.utils import as_chunks + from rocketwatch import RocketWatch from utils import solidity from utils.embeds import Embed, el_explorer_url @@ -29,6 +31,7 @@ class MinipoolsUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.batch_size = 1000 self.loop.start() def cog_unload(self): @@ -61,16 +64,8 @@ async def get_minipool_stats(self, minipools): lambda x: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), x], [((x, "EthBalance"), solidity.to_float)]) ] minipool_stats = {} - batch_size = 10_000 // len(lambs) - for i in range(0, len(minipools), batch_size): - i_end = min(i + batch_size, len(minipools)) - log.debug(f"getting minipool stats for {i}-{i_end}") - addresses = minipools[i:i_end] - calls = [ - Call(*lamb(a)) - for a in addresses - for lamb in lambs - ] + for minipool_batch in as_chunks(minipools, self.batch_size // len(lambs)): + calls = [Call(*lamb(a)) for a in minipool_batch for lamb in lambs] res = await rp.multicall2(calls) # add data to mini pool stats dict (address => {func_name: value}) # strip get from function name @@ -97,9 +92,9 @@ async def upkeep_minipools(self): await self.db.minipools.bulk_write(bulk, ordered=False) logging.info("Updated minipool states") - @hybrid_command() - async def delegate_stats(self, ctx): - await ctx.defer(ephemeral=is_hidden(ctx)) + @command() + async def delegate_stats(self, interaction: Interaction): + await interaction.response.defer(ephemeral=is_hidden(interaction)) # get stats about delegates # we want to show the distribution of minipools that are using each delegate distribution_stats = await self.db.minipools_new.aggregate([ @@ -135,7 +130,7 @@ async def delegate_stats(self, ctx): d['_id'] = "Yes" if d['_id'] else "No" desc += f"{s}**{d['_id']}**: {d['count']} ({d['count'] / c_sum * 100:.2f}%)\n" e.description = desc - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def 
setup(self): diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 0d95c4cb..7c1c1cc7 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -7,6 +7,7 @@ from pymongo import UpdateOne, UpdateMany from discord.ext import tasks, commands +from discord.utils import as_chunks from rocketwatch import RocketWatch from utils import solidity @@ -100,12 +101,10 @@ async def add_untracked_minipools(self): return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") # batch into self.batch_size minipools at a time, between latest_id and minipool_count - for i in range(latest_db + 1, latest_rp + 1, self.batch_size): - i_end = min(i + self.batch_size, latest_rp + 1) - log.debug(f"Getting untracked minipools ({i} to {i_end})") + for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): data |= await rp.multicall2([ Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) - for i in range(i, i_end) + for i in index_batch ]) log.debug(f"Inserting {len(data)} new minipools into db") self.db.minipools_new.insert_many([ @@ -130,15 +129,11 @@ async def add_static_data_to_minipools(self): log.debug("No minipools need to be updated with static data") return data = {} - batch_size = self.batch_size // len(lambs) - for i in range(0, len(minipool_addresses), batch_size): - i_end = min(i + batch_size, len(minipool_addresses)) - log.debug(f"Getting minipool static data ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for a in minipool_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for minipool_batch in as_chunks(minipool_addresses),self.batch_size // len(lambs): + res = await rp.multicall2( + [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not 
in data: @@ -175,15 +170,11 @@ async def update_dynamic_minipool_metadata(self): minipool_addresses = self.db.minipools_new.distinct("address") data = {} att_count = 0 - batch_size = self.batch_size // len(lambs) - for i in range(0, len(minipool_addresses), batch_size): - i_end = min(i + batch_size, len(minipool_addresses)) - log.debug(f"Getting minipool metadata ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for a in minipool_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: @@ -219,13 +210,12 @@ def add_static_deposit_data_to_minipools(self): nd = rp.get_contract_by_name("rocketNodeDeposit") mm = rp.get_contract_by_name("rocketMinipoolManager") data = {} - for i in range(0, len(minipools), self.batch_size): - i_end = min(i + self.batch_size, len(minipools)) + + for minipool_batch in as_chunks(minipools, self.batch_size): # turn status time of first and last minipool into blocks - block_start = ts_to_block(minipools[i]["status_time"]) - 1 - block_end = ts_to_block(minipools[i_end - 1]["status_time"]) + 1 - a = [m["address"] for m in minipools[i:i_end]] - log.debug(f"Getting minipool deposit data ({i} to {i_end})") + block_start = ts_to_block(minipool_batch[0]["status_time"]) - 1 + block_end = ts_to_block(minipool_batch[-1]["status_time"]) + 1 + a = [m["address"] for m in minipool_batch] f_deposits = get_logs(nd.events.DepositReceived, block_start, block_end) f_creations = get_logs(mm.events.MinipoolCreated, block_start, block_end) @@ -283,11 +273,9 @@ def add_static_beacon_data_to_minipools(self): # we need to do smaller bulks as the pubkey is qutie long and we dont want to make the query url too long data = {} # 
endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] - for i in range(0, len(public_keys), self.batch_size): - i_end = min(i + self.batch_size, len(public_keys)) - log.debug(f"Getting beacon data for minipools ({i} to {i_end})") + for pubkey_batch in as_chunks(public_keys, self.batch_size): # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=public_keys[i:i_end])["data"] + beacon_data = bacon.get_validators("head", ids=pubkey_batch)["data"] # update data dict with results for d in beacon_data: data[d["validator"]["pubkey"]] = int(d["index"]) @@ -314,11 +302,9 @@ def update_dynamic_minipool_beacon_metadata(self): validator_indexes = [i for i in validator_indexes if i is not None] data = {} # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] - for i in range(0, len(validator_indexes), self.batch_size): - i_end = min(i + self.batch_size, len(validator_indexes)) - log.debug(f"Getting beacon data for minipools ({i} to {i_end})") + for index_batch in as_chunks(validator_indexes, self.batch_size): # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=validator_indexes[i:i_end])["data"] + beacon_data = bacon.get_validators("head", ids=index_batch)["data"] # update data dict with results for d in beacon_data: data[int(d["index"])] = { @@ -373,13 +359,11 @@ async def add_untracked_node_operators(self): if latest_db == latest_rp: log.debug("No new nodes") return - # batch into 10k nodes at a time, between latest_id and latest_rp - for i in range(latest_db + 1, latest_rp + 1, self.batch_size): - i_end = min(i + self.batch_size, latest_rp + 1) - log.debug(f"Getting untracked node ({i} to {i_end})") + # batch into self.batch_size nodes at a time, between latest_id and latest_rp + for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): data |= await rp.multicall2([ Call(nm.address, [rp.seth_sig(nm.abi, "getNodeAt"), i], [(i, None)]) - for i in range(i, i_end) + for 
i in index_batch ]) log.debug(f"Inserting {len(data)} new nodes into db") self.db.node_operators_new.insert_many([ @@ -402,15 +386,11 @@ async def add_static_data_to_node_operators(self): log.debug("No node operators need to be updated with static data") return data = {} - batch_size = self.batch_size // len(lambs) - for i in range(0, len(node_addresses), batch_size): - i_end = min(i + batch_size, len(node_addresses)) - log.debug(f"Getting node operators static data ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for a in node_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for node_batch in as_chunks(node_addresses, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(a)) for a in node_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: @@ -468,15 +448,11 @@ async def update_dynamic_node_operator_metadata(self): nodes = list(self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1})) data = {} att_count = 0 - batch_size = self.batch_size // len(lambs) - for i in range(0, len(nodes), batch_size): - i_end = min(i + batch_size, len(nodes)) - log.debug(f"Getting node operator metadata ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(n)) - for n in nodes[i:i_end] - for lamb in lambs - ], require_success=False) + for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(n)) for n in node_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: From b02f62a5c3574956fbb9350d9023bcbe423ed93e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:19:09 +0000 Subject: [PATCH 016/279] more app commands --- rocketwatch/plugins/cow_orders/cow_orders.py | 24 ++++++++-------- 
rocketwatch/plugins/debug/debug.py | 28 +++++++++---------- rocketwatch/plugins/governance/governance.py | 9 +++--- .../pinned_messages/pinned_messages.py | 5 ++-- rocketwatch/plugins/rpips/rpips.py | 20 ++++++------- rocketwatch/plugins/tvl/tvl.py | 20 ++++++------- rocketwatch/utils/embeds.py | 2 +- rocketwatch/utils/rocketpool.py | 3 +- rocketwatch/utils/visibility.py | 2 +- 9 files changed, 56 insertions(+), 57 deletions(-) diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index ec1b980c..6bf00fbd 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -4,10 +4,11 @@ import pymongo import requests from datetime import timezone - -from discord.ext.commands import Context, hybrid_command from web3.datastructures import MutableAttributeDict as aDict +from discord import Interaction +from discord.app_commands import command + from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg @@ -23,7 +24,7 @@ class CowOrders(EventPlugin): def __init__(self, bot: RocketWatch): - super().__init__(bot, timedelta(seconds=60)) + super().__init__(bot, timedelta(minutes=5)) self.state = "OK" self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch # create the cow_orders collection if it doesn't exist @@ -39,16 +40,17 @@ def __init__(self, bot: RocketWatch): str(rp.get_address_by_name("rocketTokenRETH")).lower() ] - @hybrid_command() - async def cow(self, ctx: Context, tnx: str): + @command() + async def cow(self, interaction: Interaction, tnx: str): # https://etherscan.io/tx/0x47d96c6310f08b473f2c9948d6fbeef1084f0b393c2263d2fc8d5dc624f97fe3 if "etherscan.io/tx/" not in tnx: - await ctx.send("nop", ephemeral=True) - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - e = Embed() + await interaction.response.send_message("nop", ephemeral=True) + return + + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) url = 
tnx.replace("etherscan.io", "explorer.cow.fi") - e.description = f"[cow explorer]({url})" - await ctx.send(embed=e) + embed = Embed(description = f"[cow explorer]({url})") + await interaction.followup.send(embed=embed) def _get_new_events(self) -> list[Event]: if self.state == "RUNNING": @@ -71,7 +73,7 @@ def check_for_new_events(self): # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - response = requests.get("https://cow-proxy.invis.workers.dev/mainnet/api/v1/auction") + response = requests.get("https://api.cow.fi/mainnet/api/v1/auction") if response.status_code != 200: log.error("Cow API returned non-200 status code: %s", response.text) raise Exception("Cow API returned non-200 status code") diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index d09b4c94..2fb7a5ba 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -7,7 +7,7 @@ import humanize import requests from colorama import Fore, Style -from discord import File, Object, Interaction +from discord import File, Interaction from discord.app_commands import Choice, command, guilds, describe from discord.ext.commands import Cog, is_owner from motor.motor_asyncio import AsyncIOMotorClient @@ -51,7 +51,7 @@ async def on_ready(self): # --------- PRIVATE OWNER COMMANDS --------- # @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def raise_exception(self, interaction: Interaction): """ @@ -61,7 +61,7 @@ async def raise_exception(self, interaction: Interaction): raise Exception("this should never happen wtf is your filesystem") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def get_members_of_role(self, interaction: Interaction, guild_id: str, role_id: str): """Get members of a role""" @@ -82,7 +82,7 @@ async def get_members_of_role(self, interaction: 
Interaction, guild_id: str, rol # list all roles of a guild with name and id @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def get_roles(self, interaction: Interaction, guild_id: str): """Get roles of a guild""" @@ -100,7 +100,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): await interaction.followup.send(content=f"```{repr(err)}```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def delete_msg(self, interaction: Interaction, message_url: str): """ @@ -114,7 +114,7 @@ async def delete_msg(self, interaction: Interaction, message_url: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str = None): """ @@ -130,7 +130,7 @@ async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_nam await interaction.followup.send(content=f"```Input:\n{data}```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def debug_transaction(self, interaction: Interaction, tnx_hash: str): """ @@ -144,7 +144,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): await interaction.followup.send(content="```No revert reason Available```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def purge_minipools(self, interaction: Interaction, confirm: bool = False): """ @@ -158,7 +158,7 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() 
async def purge_minipools_new(self, interaction: Interaction, confirm: bool = False): """ @@ -172,7 +172,7 @@ async def purge_minipools_new(self, interaction: Interaction, confirm: bool = Fa await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def sync_commands(self, interaction: Interaction): """ @@ -183,7 +183,7 @@ async def sync_commands(self, interaction: Interaction): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def talk(self, interaction: Interaction, channel: str, message: str): """ @@ -195,7 +195,7 @@ async def talk(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def announce(self, interaction: Interaction, channel: str, message: str): """ @@ -209,7 +209,7 @@ async def announce(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def restore_support_template(self, interaction: Interaction, template_name: str, message_url: str): await interaction.response.defer(ephemeral=True) @@ -253,7 +253,7 @@ async def restore_support_template(self, interaction: Interaction, template_name await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def restore_missed_events(self, interaction: Interaction, tx_hash: str): import pickle diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 753ff351..5ce9bcd3 
100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -58,7 +58,8 @@ async def _get_active_snapshot_proposals(self) -> list[Snapshot.Proposal]: async def _get_draft_rpips(self) -> list[RPIPs.RPIP]: try: - return [rpip for rpip in RPIPs.get_all_rpips() if (rpip.status == "Draft")][::-1] + statuses = {"Draft", "Review"} + return [rpip for rpip in RPIPs.get_all_rpips() if (rpip.status in statuses)][::-1] except Exception as e: await self.bot.report_error(e) return [] @@ -100,7 +101,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: dao = SecurityCouncil() if proposals := self._get_active_dao_proposals(dao): embed.description += "### Security Council\n" - embed.description += "- **Active proposals**\n" + embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) # --------- ORACLE DAO --------- # @@ -108,7 +109,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: dao = OracleDAO() if proposals := self._get_active_dao_proposals(dao): embed.description += "### Oracle DAO\n" - embed.description += "- **Active proposals**\n" + embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) # --------- PROTOCOL DAO --------- # @@ -127,7 +128,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: section_content += f" {i}. [{title}]({proposal.url})\n" if draft_rpips := await self._get_draft_rpips(): - section_content += "- **RPIPs in draft status**\n" + section_content += "- **RPIPs in review or draft status**\n" for i, rpip in enumerate(draft_rpips, start=1): title = sanitize(rpip.title, 40) section_content += f" {i}. 
[{title}]({rpip.url}) (RPIP-{rpip.number})\n" diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index a7cad1fe..f73c7288 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta from motor.motor_asyncio import AsyncIOMotorClient -from discord import Object from discord.app_commands import guilds from discord.ext import commands, tasks from discord.ext.commands import hybrid_command, is_owner @@ -70,7 +69,7 @@ async def run_loop(self): await self.bot.report_error(err) @hybrid_command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def pin(self, ctx, channel_id, title, description): await ctx.defer() @@ -97,7 +96,7 @@ async def pin(self, ctx, channel_id, title, description): await ctx.send("Created pinned message") @hybrid_command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def unpin(self, ctx, channel_id): await ctx.defer() diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 870301f7..2b9604ba 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -3,12 +3,12 @@ from typing import Optional, Any from bs4 import BeautifulSoup -from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command -from discord.app_commands import Choice, describe from cachetools.func import ttl_cache +from discord import Interaction +from discord.ext.commands import Cog +from discord.app_commands import Choice, command, describe + from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed @@ -18,15 +18,15 @@ log.setLevel(cfg["log_level"]) -class RPIPs(commands.Cog): +class RPIPs(Cog): def 
__init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command() + @command() @describe(name="RPIP name") - async def rpip(self, ctx: Context, name: str): + async def rpip(self, interaction: Interaction, name: str): """Show information about a specific RPIP.""" - await ctx.defer() + await interaction.response.defer() embed = Embed() embed.set_author(name="🔗 Data from rpips.rocketpool.net", url="https://rpips.rocketpool.net") @@ -47,7 +47,7 @@ async def rpip(self, ctx: Context, name: str): else: embed.description = "No matching RPIPs." - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) class RPIP: __slots__ = ( @@ -107,7 +107,7 @@ def __getattr__(self, key: str) -> Any: raise AttributeError(f"RPIP has no attribute '{key}'") @rpip.autocomplete("name") - async def _get_rpip_names(self, ctx: Context, current: str) -> list[Choice[str]]: + async def _get_rpip_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: choices = [] for rpip in self.get_all_rpips(): if current.lower() in (name := rpip.full_title).lower(): diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index bdd08631..e6837c39 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -2,11 +2,12 @@ import humanize from colorama import Style -from discord.app_commands import describe -from discord.ext import commands -from discord.ext.commands import Context, hybrid_command from motor.motor_asyncio import AsyncIOMotorClient +from discord import Interaction +from discord.ext.commands import Cog +from discord.app_commands import command, describe + from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg @@ -48,20 +49,18 @@ def split_rewards_logic(balance, node_share, commission, force_base=False): return d -class TVL(commands.Cog): +class TVL(Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") - 
@hybrid_command() + @command() @describe(show_all="Also show entries with 0 value") - async def tvl(self, - ctx: Context, - show_all: bool = False): + async def tvl(self, interaction: Interaction, show_all: bool = False): """ Show the total value locked in the Protocol. """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) data = { "Total RPL Locked": { "Staked RPL" : { @@ -241,7 +240,6 @@ async def tvl(self, data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"][ "_val"] += beacon_balance beacon_balance = 0 - beacon_rewards = max(0, beacon_balance - 32) if beacon_balance > 0: d = split_rewards_logic(beacon_balance, node_share, commission, force_base=True) data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] @@ -454,7 +452,7 @@ def set_val_of_branch(branch, unit): closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust(max([len(line) for line in test.split("\n")])-1) e.description = f"```ansi\n{test}\n{closer}```" e.set_footer(text="\"that looks good to me\" - invis 2023") - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index a2347be4..67005e4b 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -279,7 +279,7 @@ def assemble(args) -> Embed: match args.event_name: case "pdao_set_delegate": - use_large = (args.votingPower >= 250) + use_large = (args.votingPower >= 200) case "eth_deposit_event": use_large = (amount >= 32) case "rpl_stake_event": diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 8a47c614..933c7e88 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -1,7 +1,6 @@ import logging import os from pathlib import Path -from typing import Optional from bidict 
import bidict from cachetools import cached, FIFOCache @@ -222,7 +221,7 @@ def get_eth_usdc_price(self) -> float: return 1 / UniswapV3.Pool(pool_address).get_normalized_price() @ttl_cache(ttl=60) - def get_reth_eth_price(self) -> Optional[float]: + def get_reth_eth_price(self) -> float: from utils.liquidity import UniswapV3 pool_address = self.get_address_by_name("UniV3_rETH_ETH") return UniswapV3.Pool(pool_address).get_normalized_price() diff --git a/rocketwatch/utils/visibility.py b/rocketwatch/utils/visibility.py index 0a5ffd84..661b5a43 100644 --- a/rocketwatch/utils/visibility.py +++ b/rocketwatch/utils/visibility.py @@ -14,4 +14,4 @@ def is_hidden_weak(interaction: Context | Interaction): def is_hidden_role_controlled(interaction: Interaction): # reuses the has_perms function from support_utils, but overrides it when is_hidden would return false - return not has_perms(interaction, "") if is_hidden(interaction) else False + return not has_perms(interaction) if is_hidden(interaction) else False From 4d8b78b65474885be436eebb84438ba73ffb95d1 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 1 May 2025 19:10:35 +0000 Subject: [PATCH 017/279] fix as_chunks call --- rocketwatch/plugins/node_task/node_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 7c1c1cc7..ece8bee0 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -129,7 +129,7 @@ async def add_static_data_to_minipools(self): log.debug("No minipools need to be updated with static data") return data = {} - for minipool_batch in as_chunks(minipool_addresses),self.batch_size // len(lambs): + for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], require_success=False From 
e6f407b3b5bee2d8103a55f0d9867b6569e4bb64 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:07:27 +0000 Subject: [PATCH 018/279] fix div by zero --- rocketwatch/plugins/constellation/constellation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/constellation/constellation.py b/rocketwatch/plugins/constellation/constellation.py index 761b2e1d..60d2fee3 100644 --- a/rocketwatch/plugins/constellation/constellation.py +++ b/rocketwatch/plugins/constellation/constellation.py @@ -97,7 +97,7 @@ async def constellation(self, interaction: Interaction): min_rpl_stake_ratio: float = solidity.to_float(info_calls["minimumStakeRatio"]) rpl_ratio: float = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - rpl_stake_perc: float = rpl_staked * rpl_ratio / eth_matched + rpl_stake_perc: float = (rpl_staked * rpl_ratio / eth_matched) if (eth_matched > 0) else 0.0 balance_eth: float = solidity.to_float(w3.eth.getBalance(distributor_contract.address)) balance_rpl: float = solidity.to_float(rp.call("rocketTokenRPL.balanceOf", distributor_contract.address)) From 16ab7f8531d9fe15c51751a7b3f24bb9e2dd3d81 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:07:43 +0000 Subject: [PATCH 019/279] update scam detection --- .../plugins/detect_scam/detect_scam.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 48adbf32..6aab1f58 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -120,8 +120,8 @@ def __init__(self, bot: RocketWatch): self._message_react_cache = TTLCache(maxsize=1000, ttl=300) self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") - self.basic_url_pattern = 
re.compile(r"https?:\/\/([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") - self.invite_pattern = re.compile(r"((discord(app)?\.com\/invite)|((dsc|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") + self.invite_pattern = re.compile(r"((discord(app)?\.com\/invite)|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") self.message_report_menu = ContextMenu( name="Report Message", @@ -313,6 +313,7 @@ def _discord_invite(self, message: Message) -> Optional[str]: def _ticket_system(self, message: Message) -> Optional[str]: # message contains one of the relevant keyword combinations and a link txt = self._get_message_content(message) + log.debug(f"message content: {txt}") if not self.basic_url_pattern.search(txt): return None @@ -327,7 +328,7 @@ def _ticket_system(self, message: Message) -> Optional[str]: ], ("support team", "supp0rt", "🎫", "🎟️", "m0d"), [ - ("ask", "seek", "request", "contact"), + ("get", "ask", "seek", "request", "contact"), ("help", "assistance", "service", "support") ], [ @@ -515,12 +516,18 @@ async def on_thread_create(self, thread: Thread) -> None: log.warning(f"Ignoring thread creation in {thread.guild.id}") return - keywords = ("support", "ticket", "assistance", "🎫", "🎟️") - if not any(kw in thread.name.lower() for kw in keywords): - log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") + keywords = ("support", "tick", "assistance", "error", "🎫", "🎟️") + if any(kw in thread.name.lower() for kw in keywords): + await self.report_thread(thread, "Illegitimate support thread") return + names = (".", "!", "///") + if thread.name.strip().lower() in names: + await self.report_thread(thread, "Illegitimate support thread") + return + + log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") + - await self.report_thread(thread, "Illegitimate support thread") @Cog.listener() async def on_raw_thread_update(self, event: 
RawThreadUpdateEvent) -> None: From 06da0927e389eea72bb733e90bcb9219eed15228 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:08:02 +0000 Subject: [PATCH 020/279] rate limit improvements --- rocketwatch/plugins/event_core/event_core.py | 2 +- rocketwatch/plugins/events/events.py | 37 +++++++------------- rocketwatch/plugins/forum/forum.py | 8 ++--- 3 files changed, 17 insertions(+), 30 deletions(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 97b0516f..315fb763 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -49,7 +49,7 @@ def __init__(self, bot: RocketWatch): def cog_unload(self) -> None: self.loop.cancel() - @tasks.loop(seconds=12) + @tasks.loop(seconds=30) async def loop(self) -> None: p_id = time.time() self.monitor.ping(state="run", series=p_id) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 81077bdf..c672110b 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -7,7 +7,7 @@ from discord import Interaction from discord.ext.commands import is_owner from discord.app_commands import command, guilds -from eth_typing import ChecksumAddress, BlockNumber +from eth_typing.evm import ChecksumAddress, BlockNumber from hexbytes import HexBytes from web3._utils.filters import Filter from web3.datastructures import MutableAttributeDict as aDict @@ -38,7 +38,6 @@ def __init__(self, bot: RocketWatch): self._partial_filters = partial_filters self.event_map = event_map self.topic_map = topic_map - self.active_filters: list[Filter] = [] def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: with open("./plugins/events/events.json") as f: @@ -65,7 +64,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: try: topic = contract.events[event_name].build_filter().topics[0] 
except ABIEventFunctionNotFound as e: - self.bot.report_error(e) + log.exception(e) log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") continue @@ -169,13 +168,16 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): await interaction.followup.send(content="No events found.") def _get_new_events(self) -> list[Event]: - if not self.active_filters: - from_block = self.last_served_block + 1 - self.lookback_distance - self.active_filters = [pf(from_block, "latest") for pf in self._partial_filters] + from_block = self.last_served_block + 1 - self.lookback_distance + return self.get_past_events(from_block, self._pending_block) + def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + log.debug(f"Fetching events in [{from_block}, {to_block}]") + log.debug(f"Using {len(self._partial_filters)} filters") + events = [] - for event_filter in self.active_filters: - events.extend(event_filter.get_new_entries()) + for pf in self._partial_filters: + events.extend(pf(from_block, to_block).get_all_entries()) messages, contract_upgrade_block = self.process_events(events) if not contract_upgrade_block: @@ -187,27 +189,12 @@ def _get_new_events(self) -> list[Event]: try: rp.flush() self.__init__(self.bot) - self.start_tracking(BlockNumber(contract_upgrade_block + 1)) - messages.extend(self._get_new_events()) - return messages + return messages + self.get_past_events(contract_upgrade_block + 1, to_block) except Exception as err: # rollback to pre upgrade config if this goes wrong self._partial_filters, self.event_map, self.topic_map = old_config - self.active_filters.clear() raise err - def start_tracking(self, block: BlockNumber) -> None: - super().start_tracking(block) - self.active_filters.clear() - - def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: - events = [] - for pf in self._partial_filters: - events.extend(pf(from_block, to_block).get_all_entries()) 
- - messages, _ = self.process_events(events) - return messages - def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], Optional[BlockNumber]]: events.sort(key=lambda e: (e.blockNumber, e.logIndex)) messages = [] @@ -253,7 +240,6 @@ def hash_args(_args: aDict) -> None: event_name = event.args.get("event_name", event_name) else: log.warning(f"Skipping unknown event {n}.{event.event}") - elif event.get("event") in self.event_map: event_name = self.event_map[event.event] if event_name in ["contract_upgraded", "contract_added"]: @@ -267,6 +253,7 @@ def hash_args(_args: aDict) -> None: event_name = event.args.get("event_name", event_name) if (event_name is None) or (embed is None): + log.debug(f"Skipping event {event}") continue # get the event offset based on the lowest event log index of events with the same txn hashes and block hashes diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 91a0043b..4e5fef1a 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -7,7 +7,7 @@ from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from discord.app_commands import Choice, choices +from discord.app_commands import Choice from rocketwatch import RocketWatch from utils.cfg import cfg @@ -82,7 +82,7 @@ def datetime_to_epoch(_dt: str) -> int: return topics @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_popular_topics(period: Period) -> list[Topic]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/top.json?period={period}") @@ -91,7 +91,7 @@ async def get_popular_topics(period: Period) -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_recent_topics() -> list[Topic]: async with 
aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/latest.json") @@ -100,7 +100,7 @@ async def get_recent_topics() -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_top_users(period: Period, order_by: UserMetric) -> list[User]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/directory_items.json?period={period}&order={order_by}") From 6a259d9cc47fcab6b9c8a3075799e955c45e055c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:16:48 +0000 Subject: [PATCH 021/279] remove sleep module --- rocketwatch/main.cfg.sample | 2 +- rocketwatch/plugins/sleep/sleep.py | 438 ----------------------------- 2 files changed, 1 insertion(+), 439 deletions(-) delete mode 100644 rocketwatch/plugins/sleep/sleep.py diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index e65da708..add70996 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -66,7 +66,7 @@ rocketpool: { } modules: { include: [] - exclude: ["sleep"] + exclude: [] enable_commands: true } events: { diff --git a/rocketwatch/plugins/sleep/sleep.py b/rocketwatch/plugins/sleep/sleep.py deleted file mode 100644 index fcae7663..00000000 --- a/rocketwatch/plugins/sleep/sleep.py +++ /dev/null @@ -1,438 +0,0 @@ -import datetime -import logging -from io import BytesIO - -import matplotlib.colors as mcolors -import matplotlib.pyplot as plt -import pytz -import requests -from discord import File -from discord.ext import commands -from discord.ext.commands import Context, hybrid_command -from homeassistant_api import Client -from matplotlib import dates - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed -from utils.visibility import is_hidden - -log = logging.getLogger("sleep") 
-log.setLevel(cfg["log_level"]) - - -def get_color_hsv(value): - # Ensure the value is within [0, 1] - value -= 0.5 - value *= 2 - value = max(0, min(1, value)) - - # Map the value to the hue in HSV (red to green) - hue = value / 3 # Red is at 0, green is at 1/3 in HSV space - color_hsv = (hue, 1, 0.8) # Full saturation and value - - # Convert HSV to RGB - return mcolors.hsv_to_rgb(color_hsv) - -class Oura(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.calendar_url = cfg["oura.calendar_url"] - - @hybrid_command() - async def sleep_schedule(self, ctx: Context): - await ctx.defer(ephemeral=is_hidden(ctx)) - e = Embed(title="Invis's Sleep Schedule") - current_date = datetime.datetime.now() - tz = pytz.timezone("Europe/Vienna") - start_date = current_date - datetime.timedelta(days=150) - # make start date timezone aware - start_date = tz.localize(start_date) - end_date = current_date - # make end date timezone aware - end_date = tz.localize(end_date) - res = requests.get("https://api.ouraring.com/v2/usercollection/sleep", - params={"start_date": start_date.strftime("%Y-%m-%d"), - "end_date" : (end_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")}, - headers={"Authorization": f"Bearer {cfg['oura.secret']}"}) - if res.status_code != 200: - e.description = "Error fetching sleep data" - await ctx.send(embed=e) - return - data = res.json() - if len(data["data"]) == 0: - e.description = "No sleep data found" - await ctx.send(embed=e) - return - - res2 = requests.get("https://api.ouraring.com/v2/usercollection/daily_sleep", - params={"start_date": start_date.strftime("%Y-%m-%d"), - "end_date" : (end_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")}, - headers={"Authorization": f"Bearer {cfg['oura.secret']}"}) - if res2.status_code != 200: - e.description = "Error fetching sleep data" - await ctx.send(embed=e) - return - data2 = res2.json() - score_mapping = dict() - if len(data2["data"]) != 0: - for kasd in data2["data"]: - 
score_mapping[kasd["day"]] = kasd["score"] - daily_sleep = { - (start_date + datetime.timedelta(days=i)).strftime("%Y-%m-%d"): [] - for i in range((end_date - start_date).days + 1)} - - for sleep in reversed(data["data"]): - if sleep["type"] == "rest": - continue - # skip if sleep_duration is less than 30 minutes. units are in seconds - if sleep["total_sleep_duration"] < 30 * 60: - continue - score = score_mapping.get(sleep["day"]) - hr = sleep["lowest_heart_rate"] - hrv = sleep["average_hrv"] - temperature = sleep["readiness"]["temperature_deviation"] - sd = datetime.datetime.fromisoformat(sleep["bedtime_start"]).astimezone(tz=tz) - log.info(f"start date: {sd}") - # the start day is the next day if we are past 12pm, otherwise it is the current day - start_day_r = sd + datetime.timedelta(days=1) if sd.hour >= 18 else sd - # format to string - start_day = start_day_r.strftime("%Y-%m-%d") - ed = datetime.datetime.fromisoformat(sleep["bedtime_end"]).astimezone(tz=tz) - log.info(f"end date: {ed}") - # the end day is the next day if we are past 12pm, otherwise it is the current day - ed_r = ed + datetime.timedelta(days=1) if ed.hour >= 18 else ed - # format to string - end_day = ed_r.strftime("%Y-%m-%d") - thresh = datetime.datetime(year=ed.year, month=ed.month, day=ed.day, hour=18, - tzinfo=ed.tzinfo) - # Define virtual day start at 18:00 - virtual_day_start = datetime.datetime.combine(sd.date(), datetime.time(18, 0)) - virtual_day_start = tz.localize(virtual_day_start) - if sd < virtual_day_start: - virtual_day_start -= datetime.timedelta(days=1) - virtual_day_end = virtual_day_start + datetime.timedelta(days=1) - start_day = virtual_day_start.strftime("%Y-%m-%d") - if start_day not in daily_sleep: - daily_sleep[start_day] = [] - if end_day not in daily_sleep: - daily_sleep[end_day] = [] - # weekday based on start date - weekday = datetime.datetime.fromisoformat(start_day).weekday() - stats = sleep["sleep_phase_5_min"] - # Initialize sleep segments - sleep_segments = 
[] - - total_duration = ed - sd - total_seconds = total_duration.total_seconds() - cumulative_seconds = 0 - - # Check for overflow into previous virtual day - if sd < virtual_day_start: - dur_prev = virtual_day_start - sd - if dur_prev.total_seconds() > 0: - stats_prev_len = int(len(stats) * (dur_prev.total_seconds() / total_seconds)) - stats_prev = stats[:stats_prev_len] - prev_day = (virtual_day_start - datetime.timedelta(days=1)).strftime("%Y-%m-%d") - if prev_day not in daily_sleep: - daily_sleep[prev_day] = [] - daily_sleep[prev_day].append({ - "relative_start": sd - (virtual_day_start - datetime.timedelta(days=1)), - "duration": dur_prev, - "weekday": weekday, - "sleep_stats": stats_prev, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - stats = stats[stats_prev_len:] # Remove used stats - cumulative_seconds += dur_prev.total_seconds() - sd = virtual_day_start # Adjust start time - - # Now compute duration in current virtual day - dur_current = min(ed, virtual_day_end) - sd - if dur_current.total_seconds() > 0: - stats_current_len = int(len(stats) * (dur_current.total_seconds() / (total_seconds - cumulative_seconds))) - stats_current = stats[:stats_current_len] - if start_day not in daily_sleep: - daily_sleep[start_day] = [] - daily_sleep[start_day].append({ - "relative_start": sd - virtual_day_start, - "duration": dur_current, - "weekday": weekday, - "sleep_stats": stats_current, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - stats = stats[stats_current_len:] - cumulative_seconds += dur_current.total_seconds() - sd = virtual_day_end # Adjust start time - -# Check for overflow into next virtual day - if ed > virtual_day_end: - dur_next = ed - virtual_day_end - if dur_next.total_seconds() > 0: - stats_next = stats # Remaining stats - next_day = virtual_day_end.strftime("%Y-%m-%d") - if next_day not in daily_sleep: - daily_sleep[next_day] = [] - daily_sleep[next_day].append({ - 
"relative_start": datetime.timedelta(), - "duration": dur_next, - "weekday": weekday, - "sleep_stats": stats_next, - "readiness": score, - "hr": hr, - "hrv": hrv, - "temperature": temperature - }) - # sort by date - daily_sleep = dict(sorted(daily_sleep.items(), key=lambda x: x[0])) - # plot, one large plot that has the sleep data and a small thin plot that shows the hr&hrv data below - fig, ax = plt.subplots(3, 1, figsize=(15, 10), gridspec_kw={'height_ratios': [6, 1, 1]}, sharex=True) - # dark mode - # create horizontal dark gray line at midnight and noon - ax[0].axhline(y=18, color="#808080", linewidth=1) - ax[0].axhline(y=18 - 12, color="#808080", linewidth=1) - calendar_data = await self.get_calendar_data() - # render calendar data if they are within the last 180 days - if calendar_data is not None: - for day, data in calendar_data.items(): - for d in data: - bottom = ((24 * 60 * 60) - d[ - "relative_start"].total_seconds() - d["duration"].total_seconds()) / 3600 - width = d["duration"].total_seconds() / 3600 - try: - i = list(daily_sleep.keys()).index(day) - except ValueError: - continue - ax[0].bar(i, width, bottom=bottom, color="#AAAAAA", width=1, alpha=0.25) - for i, (day, sleeps) in reversed(list(enumerate(daily_sleep.items()))): - for sleep in sleeps: - color = "gray" - if sleep["readiness"] is not None: - color = get_color_hsv(sleep["readiness"] / 100) - bottom = ((24 * 60 * 60) - sleep[ - "relative_start"].total_seconds() - sleep["duration"].total_seconds()) / 3600 - width = sleep["duration"].total_seconds() / 3600 - current_bottom = bottom + width - for state in sleep["sleep_stats"]: - current_bottom -= (width / len(sleep["sleep_stats"])) - w = 0.9 - match int(state): - case 4: - w = 0.4 - case 3: - w = 0.4 - case 2: - w = 0.9 - case 1: - w = 0.9 - ax[0].bar(i, width / len(sleep["sleep_stats"]), bottom=current_bottom, color=color, - alpha=0.2 if state == "4" else 1, width=w) - # set x axis labels, only every 7th day - 
ax[0].set_xticks(range(len(daily_sleep) - 1, 0, -14)) - ax[0].set_xticklabels([day[2:] for i, (day, _) in enumerate(reversed(daily_sleep.items())) if i % 14 == 0]) - # set y axis labels - ax[0].set_yticks(range(0, 25, 2)) - ax[0].set_yticklabels([f"{i}:00" if i >= 0 else f"{24 + i}:00" for i in range(18, -7, -2)]) - # set y limit - ax[0].set_ylim(0, 24) - # set x limit - ax[0].set_xlim(0, len(daily_sleep)) - # grid - ax[0].grid(True) - ax[0].set_axisbelow(True) - # set title - ax[0].set_title("Invis's Sleep Schedule") - - x = [] - y_hr = [] - y_hrv = [] - y_temperature = [] - for i, (day, sleeps) in reversed(list(enumerate(daily_sleep.items()))): - if len(sleeps) == 0: - continue - min_hr = min(sleep["hr"] for sleep in sleeps) - min_hrv = min(sleep["hrv"] for sleep in sleeps) - s = list(sleep["temperature"] for sleep in sleeps if (sleep["temperature"] != [] and sleep["temperature"] is not None)) - max_temperature = max(s) if s else 0 - x.append(i) - y_hr.append(min_hr) - y_hrv.append(min_hrv) - y_temperature.append(max_temperature) - # fill the area between the the line and zero - #ax[1].plot(x, y_temperature, color="gray", alpha=0.7) - # draw as bars instead of line, red if positive, blue if negative - ax[1].bar(x, y_temperature, color=["red" if i >= 0 else "blue" for i in y_temperature], alpha=0.5) - # show x.5 ticks on y axis for axis 1 - min_t, max_t = min(y_temperature), max(y_temperature) - # we want 4 ticks evenly spaecd between min and max - ax[1].set_yticks([round(min_t + (max_t - min_t) / 3 * i,2) for i in range(4)]) - # blue area, negative - #ax[1].fill_between(x, y_temperature, color="blue", alpha=0.25, where=[i <= 0 for i in y_temperature], interpolate=True) - # red area, positive - #ax[1].fill_between(x, y_temperature, color="red", alpha=0.25, where=[i >= 0 for i in y_temperature], interpolate=True) - ax[2].plot(x, y_hr, color="black", alpha=0.7) - min_hr, max_hr = min(y_hr), max(y_hr) - ax[2].set_yticks([round(min_hr + (max_hr - min_hr) / 3 * i,0) 
for i in range(4)]) - # add y axis label - ax[2].set_ylabel("HR") - ax3 = ax[2].twinx() - # add y axis label - ax3.set_ylabel("HRV") - ax3.plot(x, y_hrv, color="green", alpha=0.7) - min_hrv, max_hrv = min(y_hrv), max(y_hrv) - ax3.set_yticks([round(min_hrv + (max_hrv - min_hrv) / 3 * i,0) for i in range(4)]) - ax[2].legend(["HR"], loc="lower left") - ax3.legend(["HRV"], loc="upper left") - ax[1].legend(["Temperature"], loc="upper left") - # make axis 2 right side axis color green with 0.7 alpha - # unit °C on y axis - - # reduce padding - plt.tight_layout() - - img = BytesIO() - fig.savefig(img, format='png', dpi=250) - img.seek(0) - plt.close() - - e.set_image(url="attachment://sleep.png") - buf = File(img, filename="sleep.png") - # send image - await ctx.send(file=buf, embed=e) - - @hybrid_command() - async def temperature(self, ctx: Context): - await ctx.defer(ephemeral=is_hidden(ctx)) - client = Client(cfg["homeassistant.url"], cfg["homeassistant.token"], use_async=True) - entity = await client.async_get_entity(entity_id="sensor.aranet_4_home_temperature") - temp = client.async_get_entity_histories( - entities=[entity], - start_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - datetime.timedelta(days=7), - end_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - ) - e = Embed(title="Indoor Temperature Chart") - # plot - with plt.rc_context({'font.size': 24}): - fig, ax = plt.subplots(figsize=(15, 10)) - x = [] - y = [] - async for entity in temp: - for state in entity.states: - try: - f = float(state.state) - except ValueError: - continue - x.append(state.last_updated.astimezone(pytz.timezone("Europe/Vienna"))) - y.append(f) - # make line thicker - ax.plot(x, y, linewidth=4) - #ax.set_ylabel("Temperature") - #ax.set_xlabel("Time") - # temp range 15-35°C - ax.set_ylim(15, 35) - ax.grid() - # set x_axis min to x[0] but leave max to None - ax.set_xlim(x[0], None) - # format x axis as DD.MM 
HH:MM - ax.xaxis.set_major_formatter( - dates.ConciseDateFormatter(ax.xaxis.get_major_locator())) - # format y axis as °C - ax.yaxis.set_major_formatter('{x:.0f}°C') - # get the color that was used to plot the line - color = ax.get_lines()[0].get_color() - # add a big hollow point at the latest point - ax.scatter(x[-1], y[-1], s=1000, facecolor='none', edgecolor="black", linewidth=4, alpha=0.3) - # add a vertical line at the latest point - ax.axvline(x[-1], color="black", linewidth=2, linestyle="--", alpha=0.3) - # reduce padding - plt.tight_layout() - img = BytesIO() - fig.savefig(img, format='png', dpi=100) - img.seek(0) - plt.close() - e.set_image(url="attachment://temperature.png") - buf = File(img, filename="temperature.png") - e.description = f"{y[-1]} °C (as of )" - # send image - await ctx.send(file=buf, embed=e) - - - # replace get_calendar_data with home assistant variant - async def get_calendar_data(self): - return None - client = Client(cfg["homeassistant.url"], cfg["homeassistant.token"], use_async=True) - work_periods = [] - last_state = None - async for zone in client.async_get_logbook_entries( - filter_entities="device_tracker.pixel_8_pro_2", - start_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - datetime.timedelta(days=150), - end_timestamp=datetime.datetime.now(tz=pytz.timezone("Europe/Vienna")).replace(tzinfo=None) - ): - state = zone.state - when = round_minute(zone.when, 10).astimezone(pytz.timezone("Europe/Vienna")) - if state == "work" and last_state != "work": - if work_periods and when - work_periods[-1]["end"] < datetime.timedelta(minutes=30): - work_periods[-1]["end"] = when - else: - work_periods.append({"start": when, "end": when}) - elif last_state == "work": - work_periods[-1]["end"] = when - last_state = state - print(f"zone changed to {last_state} at {when}") - # it has to have the same format as the old get_calendar_data - d = {} - for period in work_periods: - sd = 
period["start"].astimezone( - pytz.timezone("Europe/Vienna")) - sd_r = sd + datetime.timedelta(days=1) if sd > sd.replace(hour=18, minute=0, second=0) else sd - start_day = sd_r.strftime("%Y-%m-%d") - ed = period["end"].astimezone( - pytz.timezone("Europe/Vienna")) - ed_r = ed + datetime.timedelta(days=1) if ed > ed.replace(hour=18, minute=0, second=0) else ed - end_day = ed_r.strftime("%Y-%m-%d") - - if start_day not in d: - d[start_day] = [] - if end_day not in d: - d[end_day] = [] - thresh = datetime.datetime(year=period["end"].year, month=period["end"].month, day=period["end"].day, hour=18, - tzinfo=period["end"].tzinfo) - if start_day != end_day: - dur_first = thresh - period["start"] - if dur_first >= datetime.timedelta(hours=24): - dur_first -= datetime.timedelta(hours=24) - dur_second = period["end"] - thresh - if dur_second <= datetime.timedelta(hours=0): - dur_second += datetime.timedelta(hours=24) - total_dur = dur_first + dur_second - # split stats into two parts based on duration of each part - d[start_day].append( - {"relative_start": period["start"] - (thresh - datetime.timedelta(days=1)), "duration": dur_first}) - d[end_day].append( - {"relative_start": datetime.timedelta(), "duration": dur_second}) - else: - relative_start = period["start"] - (thresh - datetime.timedelta(days=1)) - if relative_start >= datetime.timedelta(hours=24): - relative_start -= datetime.timedelta(hours=24) - d[start_day].append( - {"relative_start": relative_start, "duration": period["end"] - period["start"]}) - return d - -def round_minute(date: datetime = None, round_to: int = 1): - """ - round datetime object to minutes - """ - if not date: - date = datetime.datetime.now() - minute = round(date.minute / round_to) * round_to - date = date.replace(minute=0, second=0, microsecond=0) - return date + datetime.timedelta(minutes=minute) - -async def setup(bot): - await bot.add_cog(Oura(bot)) From 67cfe9a27b542164f20dba57ffb7d414e4c5b871 Mon Sep 17 00:00:00 2001 From: haloooloolo 
<03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:17:45 +0000 Subject: [PATCH 022/279] remove boiler template --- .../plugins/support_utils/support_utils.py | 73 ++++--------------- 1 file changed, 15 insertions(+), 58 deletions(-) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 9bcacc73..13f3626d 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -17,8 +17,7 @@ async def generate_template_embed(db, template_name: str): - # get the boiler message from the database - template = await db.support_bot.find_one({'_id': template_name}) + template = await db.support_bot.find_one({"_id": template_name}) if not template: return None # get the last log entry from the db @@ -27,8 +26,7 @@ async def generate_template_embed(db, template_name: str): {"template": template_name}, sort=[("ts", -1)] ) - - e = Embed(title=template['title'], description=template['description']) + e = Embed(title=template["title"], description=template["description"]) if last_edit and template_name != "announcement": e.description += f"\n\n*Last Edited by <@{last_edit['author']['id']}> *" return e @@ -57,7 +55,7 @@ def __init__(self, user: User): async def delete(self, interaction: Interaction, button: ui.Button): if (interaction.user == self.user) or has_perms(interaction): await interaction.message.delete() - log.warning(f"Support template deleted by {interaction.user} in {interaction.channel}") + log.warning(f"Support template message deleted by {interaction.user} in {interaction.channel}") class AdminModal(ui.Modal, title="Change Template Message"): @@ -125,8 +123,8 @@ async def on_submit(self, interaction: Interaction) -> None: {"$set": {"title": self.title_field.value, "description": self.description_field.value}} ) content = ( - f"This is a preview of the '{self.template_name}' template.\n" - f"You can change it using the 'Edit' button." 
+ f"This is a preview of the `{self.template_name}` template.\n" + f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.db, self.template_name) await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) @@ -152,15 +150,7 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): ephemeral=True ) return - if name == "boiler": - await interaction.response.send_message( - embed=Embed( - title="Error", - description=f"The template '{name}' cannot be used." - ), - ephemeral=True - ) - return + # respond with the template embed if e := (await generate_template_embed(db, name)): await interaction.response.send_message( @@ -184,28 +174,16 @@ def __init__(self, bot: RocketWatch): self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch @app_commands.command(name="use") - async def _use_1(self, interaction: Interaction, name: str, mention: User | None): - await _use(self.db, interaction, name, mention) - - @app_commands.command(name="template") - async def _use_2(self, interaction: Interaction, name: str, mention: User | None): + async def _use(self, interaction: Interaction, name: str, mention: User | None): await _use(self.db, interaction, name, mention) - @_use_1.autocomplete("name") - @_use_2.autocomplete("name") + @_use.autocomplete("name") async def match_template(self, interaction: Interaction, current: str): return [ Choice( - name=c["_id"], - value=c["_id"] + name=c["_id"], value=c["_id"] ) for c in await self.db.support_bot.find( - { - "_id": { - "$regex": current, - "$options": "i", - "$ne" : "boiler" if interaction.command.name != "edit" else None - } - } + {"_id": {"$regex": current, "$options": "i"}} ).to_list(25) ] @@ -221,18 +199,6 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch - @Cog.listener() - async def on_ready(self): - # insert the boiler message into the database, 
if it doesn't already exist - await self.db.support_bot.update_one( - {'_id': 'boiler'}, - {'$setOnInsert': { - 'title' : 'Support Message', - 'description': 'This is a support message.' - }}, - upsert=True - ) - @subgroup.command() async def add(self, interaction: Interaction, name: str): if not has_perms(interaction): @@ -254,8 +220,8 @@ async def add(self, interaction: Interaction, name: str): {"_id": name, "title": "Insert Title here", "description": "Insert Description here"} ) content = ( - f"This is a preview of the '{name}' template.\n" - f"You can change it using the 'Edit' button." + f"This is a preview of the `{name}` template.\n" + f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.db, name) await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @@ -280,8 +246,8 @@ async def edit(self, interaction: Interaction, name: str): return content = ( - f"This is a preview of the '{name}' template.\n" - f"You can change it using the 'Edit' button." + f"This is a preview of the `{name}` template.\n" + f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.db, name) await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) @@ -292,14 +258,6 @@ async def remove(self, interaction: Interaction, name: str): await interaction.response.send_message( embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) return - if name == "boiler": - await interaction.edit_original_response( - embed=Embed( - title="Error", - description=f"The template '{name}' cannot be removed." 
- ), - ) - return await interaction.response.defer(ephemeral=True) # check if the template exists in the db template = await self.db.support_bot.find_one({"_id": name}) @@ -379,8 +337,7 @@ async def match_template(self, interaction: Interaction, current: str): { "_id": { "$regex": current, - "$options": "i", - "$ne" : "boiler" if interaction.command.name != "edit" else None + "$options": "i" } } ).to_list(25) From c8ea0b1ef2fa5e5d811c7d4db61af605d169123e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:18:02 +0000 Subject: [PATCH 023/279] clean up upkeep tasks --- .../plugins/minipool_task/minipool_task.py | 13 +-- .../minipools_upkeep_task.py | 31 +++--- rocketwatch/plugins/node_task/node_task.py | 94 +++++++------------ 3 files changed, 53 insertions(+), 85 deletions(-) diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py index 0fd4fd32..6c56161e 100644 --- a/rocketwatch/plugins/minipool_task/minipool_task.py +++ b/rocketwatch/plugins/minipool_task/minipool_task.py @@ -57,12 +57,11 @@ async def before_loop(self): def get_untracked_minipools(self) -> set[ChecksumAddress]: minipool_count = rp.call("rocketMinipoolManager.getMinipoolCount") minipool_addresses = [] - for i in range(0, minipool_count, self.batch_size): - log.debug(f"getting minipool addresses for {i}/{minipool_count}") - i_end = min(i + self.batch_size, minipool_count) + for index_batch in as_chunks(range(minipool_count), self.batch_size): minipool_addresses += [ w3.toChecksumAddress(r.results[0]) for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolAt(i) for i in range(i, i_end)).results] + self.minipool_manager.functions.getMinipoolAt(i) for i in index_batch).results + ] # remove address that are already in the minipool collection tracked_addresses = self.db.minipools.distinct("address") return set(minipool_addresses) - set(tracked_addresses) @@ -72,12 +71,10 @@ 
def get_public_keys(self, addresses): # optimizing this doesn't seem to help much, so keep it simple for readability # batch the same way as get_untracked_minipools minipool_pubkeys = [] - for i in range(0, len(addresses), self.batch_size): - log.debug(f"getting minipool pubkeys for {i}/{len(addresses)}") - i_end = min(i + self.batch_size, len(addresses)) + for address_batch in as_chunks(addresses, self.batch_size): minipool_pubkeys += [ f"0x{r.results[0].hex()}" for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolPubkey(a) for a in addresses[i:i_end]).results] + self.minipool_manager.functions.getMinipoolPubkey(a) for a in address_batch).results] return minipool_pubkeys @timerun diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 246177e5..169d2975 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -1,13 +1,15 @@ -import asyncio import logging -from concurrent.futures import ThreadPoolExecutor import pymongo -from discord.ext import commands, tasks -from discord.ext.commands import hybrid_command + from motor.motor_asyncio import AsyncIOMotorClient from multicall import Call +from discord import Interaction +from discord.ext import commands, tasks +from discord.app_commands import command +from discord.utils import as_chunks + from rocketwatch import RocketWatch from utils import solidity from utils.embeds import Embed, el_explorer_url @@ -29,6 +31,7 @@ class MinipoolsUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.batch_size = 1000 self.loop.start() def cog_unload(self): @@ -61,16 +64,8 @@ async def get_minipool_stats(self, minipools): lambda x: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), x], [((x, "EthBalance"), 
solidity.to_float)]) ] minipool_stats = {} - batch_size = 10_000 // len(lambs) - for i in range(0, len(minipools), batch_size): - i_end = min(i + batch_size, len(minipools)) - log.debug(f"getting minipool stats for {i}-{i_end}") - addresses = minipools[i:i_end] - calls = [ - Call(*lamb(a)) - for a in addresses - for lamb in lambs - ] + for minipool_batch in as_chunks(minipools, self.batch_size // len(lambs)): + calls = [Call(*lamb(a)) for a in minipool_batch for lamb in lambs] res = await rp.multicall2(calls) # add data to mini pool stats dict (address => {func_name: value}) # strip get from function name @@ -97,9 +92,9 @@ async def upkeep_minipools(self): await self.db.minipools.bulk_write(bulk, ordered=False) logging.info("Updated minipool states") - @hybrid_command() - async def delegate_stats(self, ctx): - await ctx.defer(ephemeral=is_hidden(ctx)) + @command() + async def delegate_stats(self, interaction: Interaction): + await interaction.response.defer(ephemeral=is_hidden(interaction)) # get stats about delegates # we want to show the distribution of minipools that are using each delegate distribution_stats = await self.db.minipools_new.aggregate([ @@ -135,7 +130,7 @@ async def delegate_stats(self, ctx): d['_id'] = "Yes" if d['_id'] else "No" desc += f"{s}**{d['_id']}**: {d['count']} ({d['count'] / c_sum * 100:.2f}%)\n" e.description = desc - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(self): diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 0d95c4cb..7c1c1cc7 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -7,6 +7,7 @@ from pymongo import UpdateOne, UpdateMany from discord.ext import tasks, commands +from discord.utils import as_chunks from rocketwatch import RocketWatch from utils import solidity @@ -100,12 +101,10 @@ async def add_untracked_minipools(self): return log.debug(f"Latest minipool in db: 
{latest_db}, latest minipool in rp: {latest_rp}") # batch into self.batch_size minipools at a time, between latest_id and minipool_count - for i in range(latest_db + 1, latest_rp + 1, self.batch_size): - i_end = min(i + self.batch_size, latest_rp + 1) - log.debug(f"Getting untracked minipools ({i} to {i_end})") + for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): data |= await rp.multicall2([ Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) - for i in range(i, i_end) + for i in index_batch ]) log.debug(f"Inserting {len(data)} new minipools into db") self.db.minipools_new.insert_many([ @@ -130,15 +129,11 @@ async def add_static_data_to_minipools(self): log.debug("No minipools need to be updated with static data") return data = {} - batch_size = self.batch_size // len(lambs) - for i in range(0, len(minipool_addresses), batch_size): - i_end = min(i + batch_size, len(minipool_addresses)) - log.debug(f"Getting minipool static data ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for a in minipool_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for minipool_batch in as_chunks(minipool_addresses),self.batch_size // len(lambs): + res = await rp.multicall2( + [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: @@ -175,15 +170,11 @@ async def update_dynamic_minipool_metadata(self): minipool_addresses = self.db.minipools_new.distinct("address") data = {} att_count = 0 - batch_size = self.batch_size // len(lambs) - for i in range(0, len(minipool_addresses), batch_size): - i_end = min(i + batch_size, len(minipool_addresses)) - log.debug(f"Getting minipool metadata ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for a in minipool_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for minipool_batch in 
as_chunks(minipool_addresses, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: @@ -219,13 +210,12 @@ def add_static_deposit_data_to_minipools(self): nd = rp.get_contract_by_name("rocketNodeDeposit") mm = rp.get_contract_by_name("rocketMinipoolManager") data = {} - for i in range(0, len(minipools), self.batch_size): - i_end = min(i + self.batch_size, len(minipools)) + + for minipool_batch in as_chunks(minipools, self.batch_size): # turn status time of first and last minipool into blocks - block_start = ts_to_block(minipools[i]["status_time"]) - 1 - block_end = ts_to_block(minipools[i_end - 1]["status_time"]) + 1 - a = [m["address"] for m in minipools[i:i_end]] - log.debug(f"Getting minipool deposit data ({i} to {i_end})") + block_start = ts_to_block(minipool_batch[0]["status_time"]) - 1 + block_end = ts_to_block(minipool_batch[-1]["status_time"]) + 1 + a = [m["address"] for m in minipool_batch] f_deposits = get_logs(nd.events.DepositReceived, block_start, block_end) f_creations = get_logs(mm.events.MinipoolCreated, block_start, block_end) @@ -283,11 +273,9 @@ def add_static_beacon_data_to_minipools(self): # we need to do smaller bulks as the pubkey is qutie long and we dont want to make the query url too long data = {} # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] - for i in range(0, len(public_keys), self.batch_size): - i_end = min(i + self.batch_size, len(public_keys)) - log.debug(f"Getting beacon data for minipools ({i} to {i_end})") + for pubkey_batch in as_chunks(public_keys, self.batch_size): # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=public_keys[i:i_end])["data"] + beacon_data = bacon.get_validators("head", ids=pubkey_batch)["data"] # update data dict with results for d in beacon_data: 
data[d["validator"]["pubkey"]] = int(d["index"]) @@ -314,11 +302,9 @@ def update_dynamic_minipool_beacon_metadata(self): validator_indexes = [i for i in validator_indexes if i is not None] data = {} # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] - for i in range(0, len(validator_indexes), self.batch_size): - i_end = min(i + self.batch_size, len(validator_indexes)) - log.debug(f"Getting beacon data for minipools ({i} to {i_end})") + for index_batch in as_chunks(validator_indexes, self.batch_size): # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=validator_indexes[i:i_end])["data"] + beacon_data = bacon.get_validators("head", ids=index_batch)["data"] # update data dict with results for d in beacon_data: data[int(d["index"])] = { @@ -373,13 +359,11 @@ async def add_untracked_node_operators(self): if latest_db == latest_rp: log.debug("No new nodes") return - # batch into 10k nodes at a time, between latest_id and latest_rp - for i in range(latest_db + 1, latest_rp + 1, self.batch_size): - i_end = min(i + self.batch_size, latest_rp + 1) - log.debug(f"Getting untracked node ({i} to {i_end})") + # batch into self.batch_size nodes at a time, between latest_id and latest_rp + for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): data |= await rp.multicall2([ Call(nm.address, [rp.seth_sig(nm.abi, "getNodeAt"), i], [(i, None)]) - for i in range(i, i_end) + for i in index_batch ]) log.debug(f"Inserting {len(data)} new nodes into db") self.db.node_operators_new.insert_many([ @@ -402,15 +386,11 @@ async def add_static_data_to_node_operators(self): log.debug("No node operators need to be updated with static data") return data = {} - batch_size = self.batch_size // len(lambs) - for i in range(0, len(node_addresses), batch_size): - i_end = min(i + batch_size, len(node_addresses)) - log.debug(f"Getting node operators static data ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(a)) - for 
a in node_addresses[i:i_end] - for lamb in lambs - ], require_success=False) + for node_batch in as_chunks(node_addresses, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(a)) for a in node_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: @@ -468,15 +448,11 @@ async def update_dynamic_node_operator_metadata(self): nodes = list(self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1})) data = {} att_count = 0 - batch_size = self.batch_size // len(lambs) - for i in range(0, len(nodes), batch_size): - i_end = min(i + batch_size, len(nodes)) - log.debug(f"Getting node operator metadata ({i} to {i_end})") - res = await rp.multicall2([ - Call(*lamb(n)) - for n in nodes[i:i_end] - for lamb in lambs - ], require_success=False) + for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): + res = await rp.multicall2( + [Call(*lamb(n)) for n in node_batch for lamb in lambs], + require_success=False + ) # update data dict with results for (address, variable_name), value in res.items(): if address not in data: From 3fd16d3d62e4ee9a442ae16722cf1bc36a33887b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 30 Apr 2025 23:19:09 +0000 Subject: [PATCH 024/279] more app commands --- rocketwatch/plugins/cow_orders/cow_orders.py | 24 ++++++++-------- rocketwatch/plugins/debug/debug.py | 28 +++++++++---------- rocketwatch/plugins/governance/governance.py | 9 +++--- .../pinned_messages/pinned_messages.py | 5 ++-- rocketwatch/plugins/rpips/rpips.py | 20 ++++++------- rocketwatch/plugins/tvl/tvl.py | 20 ++++++------- rocketwatch/utils/embeds.py | 2 +- rocketwatch/utils/rocketpool.py | 3 +- rocketwatch/utils/visibility.py | 2 +- 9 files changed, 56 insertions(+), 57 deletions(-) diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 
ec1b980c..6bf00fbd 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -4,10 +4,11 @@ import pymongo import requests from datetime import timezone - -from discord.ext.commands import Context, hybrid_command from web3.datastructures import MutableAttributeDict as aDict +from discord import Interaction +from discord.app_commands import command + from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg @@ -23,7 +24,7 @@ class CowOrders(EventPlugin): def __init__(self, bot: RocketWatch): - super().__init__(bot, timedelta(seconds=60)) + super().__init__(bot, timedelta(minutes=5)) self.state = "OK" self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch # create the cow_orders collection if it doesn't exist @@ -39,16 +40,17 @@ def __init__(self, bot: RocketWatch): str(rp.get_address_by_name("rocketTokenRETH")).lower() ] - @hybrid_command() - async def cow(self, ctx: Context, tnx: str): + @command() + async def cow(self, interaction: Interaction, tnx: str): # https://etherscan.io/tx/0x47d96c6310f08b473f2c9948d6fbeef1084f0b393c2263d2fc8d5dc624f97fe3 if "etherscan.io/tx/" not in tnx: - await ctx.send("nop", ephemeral=True) - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - e = Embed() + await interaction.response.send_message("nop", ephemeral=True) + return + + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) url = tnx.replace("etherscan.io", "explorer.cow.fi") - e.description = f"[cow explorer]({url})" - await ctx.send(embed=e) + embed = Embed(description = f"[cow explorer]({url})") + await interaction.followup.send(embed=embed) def _get_new_events(self) -> list[Event]: if self.state == "RUNNING": @@ -71,7 +73,7 @@ def check_for_new_events(self): # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - response = requests.get("https://cow-proxy.invis.workers.dev/mainnet/api/v1/auction") + response = 
requests.get("https://api.cow.fi/mainnet/api/v1/auction") if response.status_code != 200: log.error("Cow API returned non-200 status code: %s", response.text) raise Exception("Cow API returned non-200 status code") diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index d09b4c94..2fb7a5ba 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -7,7 +7,7 @@ import humanize import requests from colorama import Fore, Style -from discord import File, Object, Interaction +from discord import File, Interaction from discord.app_commands import Choice, command, guilds, describe from discord.ext.commands import Cog, is_owner from motor.motor_asyncio import AsyncIOMotorClient @@ -51,7 +51,7 @@ async def on_ready(self): # --------- PRIVATE OWNER COMMANDS --------- # @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def raise_exception(self, interaction: Interaction): """ @@ -61,7 +61,7 @@ async def raise_exception(self, interaction: Interaction): raise Exception("this should never happen wtf is your filesystem") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def get_members_of_role(self, interaction: Interaction, guild_id: str, role_id: str): """Get members of a role""" @@ -82,7 +82,7 @@ async def get_members_of_role(self, interaction: Interaction, guild_id: str, rol # list all roles of a guild with name and id @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def get_roles(self, interaction: Interaction, guild_id: str): """Get roles of a guild""" @@ -100,7 +100,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): await interaction.followup.send(content=f"```{repr(err)}```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + 
@guilds(cfg["discord.owner.server_id"]) @is_owner() async def delete_msg(self, interaction: Interaction, message_url: str): """ @@ -114,7 +114,7 @@ async def delete_msg(self, interaction: Interaction, message_url: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str = None): """ @@ -130,7 +130,7 @@ async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_nam await interaction.followup.send(content=f"```Input:\n{data}```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def debug_transaction(self, interaction: Interaction, tnx_hash: str): """ @@ -144,7 +144,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): await interaction.followup.send(content="```No revert reason Available```") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def purge_minipools(self, interaction: Interaction, confirm: bool = False): """ @@ -158,7 +158,7 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def purge_minipools_new(self, interaction: Interaction, confirm: bool = False): """ @@ -172,7 +172,7 @@ async def purge_minipools_new(self, interaction: Interaction, confirm: bool = Fa await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def sync_commands(self, interaction: Interaction): """ @@ -183,7 +183,7 @@ async def sync_commands(self, interaction: Interaction): await 
interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def talk(self, interaction: Interaction, channel: str, message: str): """ @@ -195,7 +195,7 @@ async def talk(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def announce(self, interaction: Interaction, channel: str, message: str): """ @@ -209,7 +209,7 @@ async def announce(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def restore_support_template(self, interaction: Interaction, template_name: str, message_url: str): await interaction.response.defer(ephemeral=True) @@ -253,7 +253,7 @@ async def restore_support_template(self, interaction: Interaction, template_name await interaction.followup.send(content="Done") @command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def restore_missed_events(self, interaction: Interaction, tx_hash: str): import pickle diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 753ff351..5ce9bcd3 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -58,7 +58,8 @@ async def _get_active_snapshot_proposals(self) -> list[Snapshot.Proposal]: async def _get_draft_rpips(self) -> list[RPIPs.RPIP]: try: - return [rpip for rpip in RPIPs.get_all_rpips() if (rpip.status == "Draft")][::-1] + statuses = {"Draft", "Review"} + return [rpip for rpip in RPIPs.get_all_rpips() if (rpip.status in statuses)][::-1] except Exception as e: await 
self.bot.report_error(e) return [] @@ -100,7 +101,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: dao = SecurityCouncil() if proposals := self._get_active_dao_proposals(dao): embed.description += "### Security Council\n" - embed.description += "- **Active proposals**\n" + embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) # --------- ORACLE DAO --------- # @@ -108,7 +109,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: dao = OracleDAO() if proposals := self._get_active_dao_proposals(dao): embed.description += "### Oracle DAO\n" - embed.description += "- **Active proposals**\n" + embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) # --------- PROTOCOL DAO --------- # @@ -127,7 +128,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: section_content += f" {i}. [{title}]({proposal.url})\n" if draft_rpips := await self._get_draft_rpips(): - section_content += "- **RPIPs in draft status**\n" + section_content += "- **RPIPs in review or draft status**\n" for i, rpip in enumerate(draft_rpips, start=1): title = sanitize(rpip.title, 40) section_content += f" {i}. 
[{title}]({rpip.url}) (RPIP-{rpip.number})\n" diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index a7cad1fe..f73c7288 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta from motor.motor_asyncio import AsyncIOMotorClient -from discord import Object from discord.app_commands import guilds from discord.ext import commands, tasks from discord.ext.commands import hybrid_command, is_owner @@ -70,7 +69,7 @@ async def run_loop(self): await self.bot.report_error(err) @hybrid_command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def pin(self, ctx, channel_id, title, description): await ctx.defer() @@ -97,7 +96,7 @@ async def pin(self, ctx, channel_id, title, description): await ctx.send("Created pinned message") @hybrid_command() - @guilds(Object(id=cfg["discord.owner.server_id"])) + @guilds(cfg["discord.owner.server_id"]) @is_owner() async def unpin(self, ctx, channel_id): await ctx.defer() diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 870301f7..2b9604ba 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -3,12 +3,12 @@ from typing import Optional, Any from bs4 import BeautifulSoup -from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command -from discord.app_commands import Choice, describe from cachetools.func import ttl_cache +from discord import Interaction +from discord.ext.commands import Cog +from discord.app_commands import Choice, command, describe + from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed @@ -18,15 +18,15 @@ log.setLevel(cfg["log_level"]) -class RPIPs(commands.Cog): +class RPIPs(Cog): def 
__init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command() + @command() @describe(name="RPIP name") - async def rpip(self, ctx: Context, name: str): + async def rpip(self, interaction: Interaction, name: str): """Show information about a specific RPIP.""" - await ctx.defer() + await interaction.response.defer() embed = Embed() embed.set_author(name="🔗 Data from rpips.rocketpool.net", url="https://rpips.rocketpool.net") @@ -47,7 +47,7 @@ async def rpip(self, ctx: Context, name: str): else: embed.description = "No matching RPIPs." - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) class RPIP: __slots__ = ( @@ -107,7 +107,7 @@ def __getattr__(self, key: str) -> Any: raise AttributeError(f"RPIP has no attribute '{key}'") @rpip.autocomplete("name") - async def _get_rpip_names(self, ctx: Context, current: str) -> list[Choice[str]]: + async def _get_rpip_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: choices = [] for rpip in self.get_all_rpips(): if current.lower() in (name := rpip.full_title).lower(): diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index bdd08631..e6837c39 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -2,11 +2,12 @@ import humanize from colorama import Style -from discord.app_commands import describe -from discord.ext import commands -from discord.ext.commands import Context, hybrid_command from motor.motor_asyncio import AsyncIOMotorClient +from discord import Interaction +from discord.ext.commands import Cog +from discord.app_commands import command, describe + from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg @@ -48,20 +49,18 @@ def split_rewards_logic(balance, node_share, commission, force_base=False): return d -class TVL(commands.Cog): +class TVL(Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") - 
@hybrid_command() + @command() @describe(show_all="Also show entries with 0 value") - async def tvl(self, - ctx: Context, - show_all: bool = False): + async def tvl(self, interaction: Interaction, show_all: bool = False): """ Show the total value locked in the Protocol. """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) data = { "Total RPL Locked": { "Staked RPL" : { @@ -241,7 +240,6 @@ async def tvl(self, data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"][ "_val"] += beacon_balance beacon_balance = 0 - beacon_rewards = max(0, beacon_balance - 32) if beacon_balance > 0: d = split_rewards_logic(beacon_balance, node_share, commission, force_base=True) data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] @@ -454,7 +452,7 @@ def set_val_of_branch(branch, unit): closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust(max([len(line) for line in test.split("\n")])-1) e.description = f"```ansi\n{test}\n{closer}```" e.set_footer(text="\"that looks good to me\" - invis 2023") - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index a2347be4..67005e4b 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -279,7 +279,7 @@ def assemble(args) -> Embed: match args.event_name: case "pdao_set_delegate": - use_large = (args.votingPower >= 250) + use_large = (args.votingPower >= 200) case "eth_deposit_event": use_large = (amount >= 32) case "rpl_stake_event": diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 8a47c614..933c7e88 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -1,7 +1,6 @@ import logging import os from pathlib import Path -from typing import Optional from bidict 
import bidict from cachetools import cached, FIFOCache @@ -222,7 +221,7 @@ def get_eth_usdc_price(self) -> float: return 1 / UniswapV3.Pool(pool_address).get_normalized_price() @ttl_cache(ttl=60) - def get_reth_eth_price(self) -> Optional[float]: + def get_reth_eth_price(self) -> float: from utils.liquidity import UniswapV3 pool_address = self.get_address_by_name("UniV3_rETH_ETH") return UniswapV3.Pool(pool_address).get_normalized_price() diff --git a/rocketwatch/utils/visibility.py b/rocketwatch/utils/visibility.py index 0a5ffd84..661b5a43 100644 --- a/rocketwatch/utils/visibility.py +++ b/rocketwatch/utils/visibility.py @@ -14,4 +14,4 @@ def is_hidden_weak(interaction: Context | Interaction): def is_hidden_role_controlled(interaction: Interaction): # reuses the has_perms function from support_utils, but overrides it when is_hidden would return false - return not has_perms(interaction, "") if is_hidden(interaction) else False + return not has_perms(interaction) if is_hidden(interaction) else False From 2179b6527c1aa03b34fe285d67cf5c8e9760cd0e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 1 May 2025 19:10:35 +0000 Subject: [PATCH 025/279] fix as_chunks call --- rocketwatch/plugins/node_task/node_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 7c1c1cc7..ece8bee0 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -129,7 +129,7 @@ async def add_static_data_to_minipools(self): log.debug("No minipools need to be updated with static data") return data = {} - for minipool_batch in as_chunks(minipool_addresses),self.batch_size // len(lambs): + for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], require_success=False From 
f612b51029067dd812a7fb6546f7b11c7338292c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:07:27 +0000 Subject: [PATCH 026/279] fix div by zero --- rocketwatch/plugins/constellation/constellation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/constellation/constellation.py b/rocketwatch/plugins/constellation/constellation.py index 761b2e1d..60d2fee3 100644 --- a/rocketwatch/plugins/constellation/constellation.py +++ b/rocketwatch/plugins/constellation/constellation.py @@ -97,7 +97,7 @@ async def constellation(self, interaction: Interaction): min_rpl_stake_ratio: float = solidity.to_float(info_calls["minimumStakeRatio"]) rpl_ratio: float = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - rpl_stake_perc: float = rpl_staked * rpl_ratio / eth_matched + rpl_stake_perc: float = (rpl_staked * rpl_ratio / eth_matched) if (eth_matched > 0) else 0.0 balance_eth: float = solidity.to_float(w3.eth.getBalance(distributor_contract.address)) balance_rpl: float = solidity.to_float(rp.call("rocketTokenRPL.balanceOf", distributor_contract.address)) From 7f37ca325d63ed84a6c1eda5d1b9c32df3fda8d7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:07:43 +0000 Subject: [PATCH 027/279] update scam detection --- .../plugins/detect_scam/detect_scam.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 48adbf32..6aab1f58 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -120,8 +120,8 @@ def __init__(self, bot: RocketWatch): self._message_react_cache = TTLCache(maxsize=1000, ttl=300) self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") - self.basic_url_pattern = 
re.compile(r"https?:\/\/([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") - self.invite_pattern = re.compile(r"((discord(app)?\.com\/invite)|((dsc|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") + self.invite_pattern = re.compile(r"((discord(app)?\.com\/invite)|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") self.message_report_menu = ContextMenu( name="Report Message", @@ -313,6 +313,7 @@ def _discord_invite(self, message: Message) -> Optional[str]: def _ticket_system(self, message: Message) -> Optional[str]: # message contains one of the relevant keyword combinations and a link txt = self._get_message_content(message) + log.debug(f"message content: {txt}") if not self.basic_url_pattern.search(txt): return None @@ -327,7 +328,7 @@ def _ticket_system(self, message: Message) -> Optional[str]: ], ("support team", "supp0rt", "🎫", "🎟️", "m0d"), [ - ("ask", "seek", "request", "contact"), + ("get", "ask", "seek", "request", "contact"), ("help", "assistance", "service", "support") ], [ @@ -515,12 +516,18 @@ async def on_thread_create(self, thread: Thread) -> None: log.warning(f"Ignoring thread creation in {thread.guild.id}") return - keywords = ("support", "ticket", "assistance", "🎫", "🎟️") - if not any(kw in thread.name.lower() for kw in keywords): - log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") + keywords = ("support", "tick", "assistance", "error", "🎫", "🎟️") + if any(kw in thread.name.lower() for kw in keywords): + await self.report_thread(thread, "Illegitimate support thread") return + names = (".", "!", "///") + if thread.name.strip().lower() in names: + await self.report_thread(thread, "Illegitimate support thread") + return + + log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") + - await self.report_thread(thread, "Illegitimate support thread") @Cog.listener() async def on_raw_thread_update(self, event: 
RawThreadUpdateEvent) -> None: From d5d4dc38669cbe0ab0c87a9d89b23e9473aef9fe Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 22 May 2025 23:08:02 +0000 Subject: [PATCH 028/279] rate limit improvements --- rocketwatch/plugins/event_core/event_core.py | 2 +- rocketwatch/plugins/events/events.py | 37 +++++++------------- rocketwatch/plugins/forum/forum.py | 8 ++--- 3 files changed, 17 insertions(+), 30 deletions(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 97b0516f..315fb763 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -49,7 +49,7 @@ def __init__(self, bot: RocketWatch): def cog_unload(self) -> None: self.loop.cancel() - @tasks.loop(seconds=12) + @tasks.loop(seconds=30) async def loop(self) -> None: p_id = time.time() self.monitor.ping(state="run", series=p_id) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 81077bdf..c672110b 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -7,7 +7,7 @@ from discord import Interaction from discord.ext.commands import is_owner from discord.app_commands import command, guilds -from eth_typing import ChecksumAddress, BlockNumber +from eth_typing.evm import ChecksumAddress, BlockNumber from hexbytes import HexBytes from web3._utils.filters import Filter from web3.datastructures import MutableAttributeDict as aDict @@ -38,7 +38,6 @@ def __init__(self, bot: RocketWatch): self._partial_filters = partial_filters self.event_map = event_map self.topic_map = topic_map - self.active_filters: list[Filter] = [] def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: with open("./plugins/events/events.json") as f: @@ -65,7 +64,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: try: topic = contract.events[event_name].build_filter().topics[0] 
except ABIEventFunctionNotFound as e: - self.bot.report_error(e) + log.exception(e) log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") continue @@ -169,13 +168,16 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): await interaction.followup.send(content="No events found.") def _get_new_events(self) -> list[Event]: - if not self.active_filters: - from_block = self.last_served_block + 1 - self.lookback_distance - self.active_filters = [pf(from_block, "latest") for pf in self._partial_filters] + from_block = self.last_served_block + 1 - self.lookback_distance + return self.get_past_events(from_block, self._pending_block) + def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + log.debug(f"Fetching events in [{from_block}, {to_block}]") + log.debug(f"Using {len(self._partial_filters)} filters") + events = [] - for event_filter in self.active_filters: - events.extend(event_filter.get_new_entries()) + for pf in self._partial_filters: + events.extend(pf(from_block, to_block).get_all_entries()) messages, contract_upgrade_block = self.process_events(events) if not contract_upgrade_block: @@ -187,27 +189,12 @@ def _get_new_events(self) -> list[Event]: try: rp.flush() self.__init__(self.bot) - self.start_tracking(BlockNumber(contract_upgrade_block + 1)) - messages.extend(self._get_new_events()) - return messages + return messages + self.get_past_events(contract_upgrade_block + 1, to_block) except Exception as err: # rollback to pre upgrade config if this goes wrong self._partial_filters, self.event_map, self.topic_map = old_config - self.active_filters.clear() raise err - def start_tracking(self, block: BlockNumber) -> None: - super().start_tracking(block) - self.active_filters.clear() - - def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: - events = [] - for pf in self._partial_filters: - events.extend(pf(from_block, to_block).get_all_entries()) 
- - messages, _ = self.process_events(events) - return messages - def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], Optional[BlockNumber]]: events.sort(key=lambda e: (e.blockNumber, e.logIndex)) messages = [] @@ -253,7 +240,6 @@ def hash_args(_args: aDict) -> None: event_name = event.args.get("event_name", event_name) else: log.warning(f"Skipping unknown event {n}.{event.event}") - elif event.get("event") in self.event_map: event_name = self.event_map[event.event] if event_name in ["contract_upgraded", "contract_added"]: @@ -267,6 +253,7 @@ def hash_args(_args: aDict) -> None: event_name = event.args.get("event_name", event_name) if (event_name is None) or (embed is None): + log.debug(f"Skipping event {event}") continue # get the event offset based on the lowest event log index of events with the same txn hashes and block hashes diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 91a0043b..4e5fef1a 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -7,7 +7,7 @@ from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from discord.app_commands import Choice, choices +from discord.app_commands import Choice from rocketwatch import RocketWatch from utils.cfg import cfg @@ -82,7 +82,7 @@ def datetime_to_epoch(_dt: str) -> int: return topics @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_popular_topics(period: Period) -> list[Topic]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/top.json?period={period}") @@ -91,7 +91,7 @@ async def get_popular_topics(period: Period) -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_recent_topics() -> list[Topic]: async with 
aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/latest.json") @@ -100,7 +100,7 @@ async def get_recent_topics() -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=1) + @retry_async(tries=3, delay=2, backoff=2) async def get_top_users(period: Period, order_by: UserMetric) -> list[User]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/directory_items.json?period={period}&order={order_by}") From 7330a07657810183a0d4c553095dbd54c9bf9c55 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 24 May 2025 11:52:06 +0000 Subject: [PATCH 029/279] add support ticket to scam detection --- rocketwatch/plugins/detect_scam/detect_scam.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 6aab1f58..fb770f59 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -319,7 +319,7 @@ def _ticket_system(self, message: Message) -> Optional[str]: keywords = ( [ - ("open", "create", "raise", "raisse"), + ("support", "open", "create", "raise", "raisse"), "ticket" ], [ @@ -331,6 +331,10 @@ def _ticket_system(self, message: Message) -> Optional[str]: ("get", "ask", "seek", "request", "contact"), ("help", "assistance", "service", "support") ], + [ + ("relay"), + ("query", "question", "inquiry") + ], [ ("instant", "live"), "chat" From c5f80c10a2879c2260bf26878e46252e852db8b6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 25 May 2025 10:45:20 +0000 Subject: [PATCH 030/279] fix followup send --- rocketwatch/plugins/debug/debug.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 2fb7a5ba..67bb0207 100644 --- 
a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -303,7 +303,7 @@ async def color_test(self, interaction: Interaction): continue payload += f"\n{fg}Hello World" payload += f"{Style.RESET_ALL}```" - await interaction.followup.reply(content=payload) + await interaction.followup.send(content=payload) @command() async def asian_restaurant_name(self, interaction: Interaction): @@ -312,7 +312,7 @@ async def asian_restaurant_name(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) a = requests.get("https://www.dotomator.com/api/random_name.json?type=asian").json()["name"] - await interaction.followup.reply(a) + await interaction.followup.send(a) @command() async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): From ebde40fb9b5c619fd2de8a458be1f5ea0804d5d5 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 9 Jul 2025 10:06:27 +0000 Subject: [PATCH 031/279] fix Snapshot render --- rocketwatch/plugins/snapshot/snapshot.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 0bf66eda..715f3486 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -613,9 +613,9 @@ async def snapshot_votes(self, interaction: Interaction): # make sure proportions don't become too skewed if total_width < total_height: proposal_width = (total_height - h_spacing * (num_cols - 1)) // num_cols - total_width = (proposal_width * num_cols) + h_spacing * (num_cols - 1) + pad_left + pad_right + total_width = (proposal_width * num_cols) + h_spacing * (num_cols - 1) - canvas = ImageCanvas(total_width + pad_top + pad_bottom, total_height + pad_left + pad_right) + canvas = ImageCanvas(total_width + pad_left + pad_right, total_height + pad_top + pad_bottom) # draw proposals in num_rows x num_cols grid y_offset 
= pad_top From 9fdd5b56acc11b1b707e63328d83745d54738093 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 9 Jul 2025 10:06:42 +0000 Subject: [PATCH 032/279] fix forum topic URL --- rocketwatch/plugins/forum/forum.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 4e5fef1a..4b84a78e 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -38,7 +38,7 @@ class Topic: @property def url(self) -> str: - return f"{Forum.DOMAIN}/t/{self.slug}" + return f"{Forum.DOMAIN}/t/{self.slug}/{self.id}" def __str__(self) -> str: return self.title From 2a262de41ab418fee97b5ce3d237e6f0dc0fd92a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:20:55 +0000 Subject: [PATCH 033/279] add operator to dissolve event --- rocketwatch/plugins/events/events.py | 2 ++ rocketwatch/strings/embeds.en.json | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index c672110b..b4baa2ea 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -771,6 +771,8 @@ def share_repr(percentage: float) -> str: event_name = "minipool_dissolve_event" case _: return None + + args.operator = rp.call("rocketMinipoolDelegate.getNodeAddress", address=args.minipool) if event_name in ["minipool_bond_reduce_event", "minipool_vacancy_prepared_event", "minipool_withdrawal_processed_event", "minipool_bond_reduction_started_event", diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 25d8b57e..456b7fb3 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -175,6 +175,11 @@ "title": ":chart_with_upwards_trend: RPL Inflation Occurred", "description": "%{value} new RPL minted! 
The new total supply is %{total_supply} RPL." }, + "rpl_migration_event": { + "title": ":arrows_counterclockwise: RPL Migration", + "description": "%{from} migrated **%{amount} RPL V1** to the new token contract!", + "description_small": ":arrows_counterclockwise: %{from} migrated **%{amount} RPL**!" + }, "milestone_rpl_stake": { "title": ":tada: Milestone Reached", "description": "%{result_value} RPL has been staked by node operators!" @@ -334,7 +339,7 @@ }, "minipool_dissolve_event": { "title": ":rotating_light: Minipool Dissolved", - "description": "Minipool %{minipool} failed to stake its assigned ETH and has been dissolved!" + "description": "Minipool %{minipool} owned by operator %{operator} failed to stake its assigned ETH and has been dissolved!" }, "vacant_minipool_scrub_event": { "title": ":rotating_light: Vacant Minipool Scrubbed", From 59355d4995cfa1f6c10a9f038b96bcb4c1743047 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:21:20 +0000 Subject: [PATCH 034/279] add RPL migration event --- rocketwatch/plugins/events/events.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index b0593a95..392bdf6c 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -158,6 +158,10 @@ { "event_name": "Transfer", "name": "rpl_transfer_event" + }, + { + "event_name": "RPLFixedSupplyBurn", + "name": "rpl_migration_event" } ] }, From de0d8f6b9abc7e6571b0ff672fd870f700917056 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:21:31 +0000 Subject: [PATCH 035/279] remove rocketscan links --- rocketwatch/utils/embeds.py | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 67005e4b..ef46cc93 100644 --- a/rocketwatch/utils/embeds.py +++ 
b/rocketwatch/utils/embeds.py @@ -103,21 +103,8 @@ def el_explorer_url( url = f"{cfg['execution_layer.explorer']}/address/{target}" target = w3.toChecksumAddress(target) - # rocketscan url stuff - rocketscan_chains = { - "mainnet": "https://rocketscan.io", - "holesky": "https://holesky.rocketscan.io", - } - - if cfg["rocketpool.chain"] in rocketscan_chains: - rocketscan_url = rocketscan_chains[cfg["rocketpool.chain"]] - - if rp.call("rocketMinipoolManager.getMinipoolExists", target, block=block): - url = f"{rocketscan_url}/minipool/{target}" - elif rp.call("rocketNodeManager.getNodeExists", target, block=block): - if rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block) and prefix != -1: - prefix += ":cup_with_straw:" - url = f"{rocketscan_url}/node/{target}" + if prefix != -1 and rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): + prefix += ":cup_with_straw:" n_key = f"addresses.{target}" if not name and (n := _(n_key)) != n_key: @@ -284,6 +271,8 @@ def assemble(args) -> Embed: use_large = (amount >= 32) case "rpl_stake_event": use_large = (amount >= ((3 * 2.4) / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")))) + case "rpl_stake_event": + use_large = (amount >= 10_000) case "cs_deposit_eth_event" | "cs_withdraw_eth_event": use_large = (args["assets"] >= 32) case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": From 128451ab91cd7ac8a7892decd0dab4144296bdb6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:21:56 +0000 Subject: [PATCH 036/279] add RPL USDC pair on Binance --- rocketwatch/plugins/wall/wall.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 64029d94..9de4116b 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -40,7 +40,7 @@ class Wall(commands.Cog): def __init__(self, bot: RocketWatch): self.bot 
= bot self.cex: set[CEX] = { - Binance("RPL", ["USDT"]), + Binance("RPL", ["USDT", "USDC"]), Coinbase("RPL", ["USDC"]), GateIO("RPL", ["USDT"]), OKX("RPL", ["USDT"]), @@ -295,6 +295,7 @@ async def on_fail() -> None: buffer = BytesIO() fig = self._plot_data(x, rpl_usd, rpl_eth, cex_data, dex_data) fig.savefig(buffer, format="png") + fig.clf() buffer.seek(0) embed.set_author(name="🔗 Data from CEX APIs and Mainnet") From 3b510c897251922d9cb1aaeef73172d0455b3e63 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:22:21 +0000 Subject: [PATCH 037/279] async unload --- rocketwatch/plugins/activity/activity.py | 2 +- rocketwatch/plugins/apr/apr.py | 2 +- .../plugins/detect_scam/detect_scam.py | 20 +++++++++++++------ rocketwatch/plugins/event_core/event_core.py | 2 +- .../plugins/minipool_task/minipool_task.py | 4 ++-- .../minipools_upkeep_task.py | 5 ++--- rocketwatch/plugins/node_task/node_task.py | 4 ++-- .../pinned_messages/pinned_messages.py | 2 +- 8 files changed, 24 insertions(+), 17 deletions(-) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 97cf7281..050cea6f 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -18,7 +18,7 @@ def __init__(self, bot: RocketWatch): self.monitor = Monitor("update-activity", api_key=cfg["other.secrets.cronitor"]) self.loop.start() - def cog_unload(self): + async def cog_unload(self): self.loop.cancel() @tasks.loop(seconds=60) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 72a94d80..1f76a202 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -46,7 +46,7 @@ def __init__(self, bot: RocketWatch): self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch self.loop.start() - def cog_unload(self): + async def cog_unload(self): self.loop.cancel() @tasks.loop(seconds=60) diff --git 
a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index fb770f59..bfd5c441 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -137,7 +137,7 @@ def __init__(self, bot: RocketWatch): ) self.bot.tree.add_command(self.user_report_menu) - def cog_unload(self) -> None: + async def cog_unload(self) -> None: self.bot.tree.remove_command(self.message_report_menu.name, type=self.message_report_menu.type) self.bot.tree.remove_command(self.user_report_menu.name, type=self.user_report_menu.type) @@ -221,10 +221,11 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional "There is no ticket system for support on this server.\n" "Ignore this thread and any invites or DMs you may receive." )) + thread_owner = await self.bot.get_or_fetch_user(thread.owner_id) report.description += ( "\n" f"Thread Name: `{thread.name}`\n" - f"User ID: `{thread.owner}` ({thread.owner.mention})\n" + f"User ID: `{thread_owner.id}` ({thread_owner.mention})\n" f"Thread ID: `{thread.id}` ({thread.jump_url})\n" "\n" "Please review and take appropriate action." 
@@ -356,9 +357,16 @@ def txt_contains(_x: list | tuple | str) -> bool: def _paperhands(self, message: Message) -> Optional[str]: # message contains the word "paperhand" and a link txt = self._get_message_content(message) - # if has http and contains the word paperhand or paperhold - if (any(x in txt for x in ["paperhand", "paper hand", "paperhold", "pages.dev", "web.app"]) and "http" in txt) or "pages.dev" in txt: - return "The linked website is most likely a wallet drainer" + if "http" not in txt: + return None + + reason = "The linked website is most likely a wallet drainer" + if any(x in txt for x in ["paperhand", "paper hand", "paperhold", "pages.dev", "web.app"]): + return reason + + if any(x in txt for x in ["mint", "opensea"]) and any(x in txt for x in ["vercel.app"]): + return reason + return None # contains @here or @everyone but doesn't actually have the permission to do so @@ -465,7 +473,7 @@ async def _on_message_delete(self, message_id: int) -> None: return channel = await self.bot.get_or_fetch_channel(report["channel_id"]) - with contextlib.suppress(errors.NotFound, errors.Forbidden): + with contextlib.suppress(errors.NotFound, errors.Forbidden, errors.HTTPException): message = await channel.fetch_message(report["warning_id"]) await message.delete() diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 315fb763..d349fda6 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -46,7 +46,7 @@ def __init__(self, bot: RocketWatch): self.monitor = Monitor("gather-new-events", api_key=cfg["other.secrets.cronitor"]) self.loop.start() - def cog_unload(self) -> None: + async def cog_unload(self) -> None: self.loop.cancel() @tasks.loop(seconds=30) diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py index 6c56161e..b09822e2 100644 --- a/rocketwatch/plugins/minipool_task/minipool_task.py 
+++ b/rocketwatch/plugins/minipool_task/minipool_task.py @@ -32,10 +32,10 @@ def __init__(self, bot: RocketWatch): self.batch_size = 1000 self.loop.start() - def cog_unload(self): + async def cog_unload(self): self.loop.cancel() - @tasks.loop(seconds=60 ** 2) + @tasks.loop(minutes=15) async def loop(self): p_id = time.time() self.monitor.ping(state='run', series=p_id) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 169d2975..14a4733c 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -34,11 +34,10 @@ def __init__(self, bot: RocketWatch): self.batch_size = 1000 self.loop.start() - def cog_unload(self): + async def cog_unload(self): self.loop.cancel() - # every 6.4 minutes - @tasks.loop(seconds=solidity.BEACON_EPOCH_LENGTH) + @tasks.loop(minutes=15) async def loop(self): try: await self.upkeep_minipools() diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index ece8bee0..6589807c 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -56,10 +56,10 @@ def __init__(self, bot: RocketWatch): self.batch_size = 1000 self.loop.start() - def cog_unload(self): + async def cog_unload(self): self.loop.cancel() - @tasks.loop(seconds=solidity.BEACON_EPOCH_LENGTH) + @tasks.loop(minutes=15) async def loop(self): p_id = time.time() self.monitor.ping(state="run", series=p_id) diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index f73c7288..de2ba702 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -119,7 +119,7 @@ async def unpin(self, ctx, channel_id): # rest is done by the run_loop await ctx.send("Disabled pinned 
message") - def cog_unload(self): + async def cog_unload(self): self.run_loop.cancel() From a2e9b5f7c744b1a548356d37084344361cb26422 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 10 Jul 2025 16:22:35 +0000 Subject: [PATCH 038/279] fix render bg color --- rocketwatch/utils/image.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/utils/image.py b/rocketwatch/utils/image.py index 99c0b468..c62b5ccd 100644 --- a/rocketwatch/utils/image.py +++ b/rocketwatch/utils/image.py @@ -33,7 +33,7 @@ class FontVariant(str, Enum): class ImageCanvas(ImageDraw): # default color matches Discord mobile dark mode Embed - def __init__(self, width: int, height: int, bg_color: Color = (37, 39, 26)): + def __init__(self, width: int, height: int, bg_color: Color = (57, 58, 64)): p_img = PillowImage.new('RGB', (width, height), color=bg_color) super().__init__(p_img) self.image = Image(p_img) From 9f165de4e4618746a832b2b90f7a807ea5d1813d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 11 Jul 2025 09:18:11 +0000 Subject: [PATCH 039/279] fix large event threshold for RPL migration --- rocketwatch/utils/embeds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index ef46cc93..70c1c98a 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -271,8 +271,8 @@ def assemble(args) -> Embed: use_large = (amount >= 32) case "rpl_stake_event": use_large = (amount >= ((3 * 2.4) / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")))) - case "rpl_stake_event": - use_large = (amount >= 10_000) + case "rpl_migration_event": + use_large = (amount >= 1000) case "cs_deposit_eth_event" | "cs_withdraw_eth_event": use_large = (args["assets"] >= 32) case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": From 2368bb6f7d934c82bd188377f27b7e913aa5d4f3 Mon Sep 17 00:00:00 2001 From: haloooloolo 
<03_sharks.guises@icloud.com> Date: Tue, 7 Oct 2025 12:27:26 +0000 Subject: [PATCH 040/279] RockSolid events --- rocketwatch/contracts/RockSolidVault.abi.json | 2466 +++++++++++++++++ rocketwatch/plugins/events/events.json | 13 + rocketwatch/plugins/events/events.py | 4 +- rocketwatch/strings/embeds.en.json | 10 + rocketwatch/utils/embeds.py | 8 +- 5 files changed, 2497 insertions(+), 4 deletions(-) create mode 100644 rocketwatch/contracts/RockSolidVault.abi.json diff --git a/rocketwatch/contracts/RockSolidVault.abi.json b/rocketwatch/contracts/RockSolidVault.abi.json new file mode 100644 index 00000000..f628a39c --- /dev/null +++ b/rocketwatch/contracts/RockSolidVault.abi.json @@ -0,0 +1,2466 @@ +[ + { + "inputs": [ + { + "internalType": "bool", + "name": "disable", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "maxRate", + "type": "uint256" + } + ], + "name": "AboveMaxRate", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "target", + "type": "address" + } + ], + "name": "AddressEmptyCode", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "AddressInsufficientBalance", + "type": "error" + }, + { + "inputs": [], + "name": "CantDepositNativeToken", + "type": "error" + }, + { + "inputs": [], + "name": "Closed", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "allowance", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "needed", + "type": "uint256" + } + ], + "name": "ERC20InsufficientAllowance", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "balance", + "type": "uint256" + }, + { + "internalType": 
"uint256", + "name": "needed", + "type": "uint256" + } + ], + "name": "ERC20InsufficientBalance", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "approver", + "type": "address" + } + ], + "name": "ERC20InvalidApprover", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "receiver", + "type": "address" + } + ], + "name": "ERC20InvalidReceiver", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + } + ], + "name": "ERC20InvalidSender", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "ERC20InvalidSpender", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "max", + "type": "uint256" + } + ], + "name": "ERC4626ExceededMaxDeposit", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "max", + "type": "uint256" + } + ], + "name": "ERC4626ExceededMaxMint", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "max", + "type": "uint256" + } + ], + "name": "ERC4626ExceededMaxRedeem", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "max", + "type": "uint256" + } + ], + "name": "ERC4626ExceededMaxWithdraw", + "type": 
"error" + }, + { + "inputs": [], + "name": "ERC7540InvalidOperator", + "type": "error" + }, + { + "inputs": [], + "name": "ERC7540PreviewDepositDisabled", + "type": "error" + }, + { + "inputs": [], + "name": "ERC7540PreviewMintDisabled", + "type": "error" + }, + { + "inputs": [], + "name": "ERC7540PreviewRedeemDisabled", + "type": "error" + }, + { + "inputs": [], + "name": "ERC7540PreviewWithdrawDisabled", + "type": "error" + }, + { + "inputs": [], + "name": "EnforcedPause", + "type": "error" + }, + { + "inputs": [], + "name": "ExpectedPause", + "type": "error" + }, + { + "inputs": [], + "name": "FailedInnerCall", + "type": "error" + }, + { + "inputs": [], + "name": "InvalidInitialization", + "type": "error" + }, + { + "inputs": [], + "name": "MathOverflowedMulDiv", + "type": "error" + }, + { + "inputs": [], + "name": "NewTotalAssetsMissing", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "enum State", + "name": "currentState", + "type": "uint8" + } + ], + "name": "NotClosing", + "type": "error" + }, + { + "inputs": [], + "name": "NotInitializing", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "enum State", + "name": "currentState", + "type": "uint8" + } + ], + "name": "NotOpen", + "type": "error" + }, + { + "inputs": [], + "name": "NotWhitelisted", + "type": "error" + }, + { + "inputs": [], + "name": "OnlyAsyncDepositAllowed", + "type": "error" + }, + { + "inputs": [], + "name": "OnlyOneRequestAllowed", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "safe", + "type": "address" + } + ], + "name": "OnlySafe", + "type": "error" + }, + { + "inputs": [], + "name": "OnlySyncDepositAllowed", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "valuationManager", + "type": "address" + } + ], + "name": "OnlyValuationManager", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "whitelistManager", + "type": "address" + } + ], + "name": 
"OnlyWhitelistManager", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + } + ], + "name": "OwnableInvalidOwner", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "OwnableUnauthorizedAccount", + "type": "error" + }, + { + "inputs": [], + "name": "RequestIdNotClaimable", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "name": "RequestNotCancelable", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "token", + "type": "address" + } + ], + "name": "SafeERC20FailedOperation", + "type": "error" + }, + { + "inputs": [], + "name": "ValuationUpdateNotAllowed", + "type": "error" + }, + { + "inputs": [], + "name": "WrongNewTotalAssets", + "type": "error" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "name": "Deposit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + 
"type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "name": "DepositRequest", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "DepositRequestCanceled", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "name": "DepositSync", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "oldReceiver", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "newReceiver", + "type": "address" + } + ], + "name": "FeeReceiverUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "oldHighWaterMark", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newHighWaterMark", + "type": "uint256" + } + ], + "name": "HighWaterMarkUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint64", + "name": "version", + "type": "uint64" + } + ], + "name": "Initialized", + "type": "event" + }, + { + "anonymous": false, + 
"inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "totalAssets", + "type": "uint256" + } + ], + "name": "NewTotalAssetsUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "operator", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "approved", + "type": "bool" + } + ], + "name": "OperatorSet", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "previousOwner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnershipTransferStarted", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "previousOwner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnershipTransferred", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "Paused", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "internalType": "uint16", + "name": "managementRate", + "type": "uint16" + }, + { + "internalType": "uint16", + "name": "performanceRate", + "type": "uint16" + } + ], + "indexed": false, + "internalType": "struct Rates", + "name": "oldRates", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint16", + "name": "managementRate", + "type": "uint16" + }, + { + "internalType": "uint16", + "name": "performanceRate", + "type": "uint16" + } + ], + "indexed": false, + "internalType": "struct Rates", + "name": "newRate", + "type": "tuple" + }, + { + 
"indexed": false, + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + } + ], + "name": "RatesUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "name": "RedeemRequest", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "referral", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "name": "Referral", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint40", + "name": "epochId", + "type": "uint40" + }, + { + "indexed": true, + "internalType": "uint40", + "name": "settledId", + "type": "uint40" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalAssets", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalSupply", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assetsDeposited", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "sharesMinted", + "type": "uint256" + } + ], + "name": "SettleDeposit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + 
"internalType": "uint40", + "name": "epochId", + "type": "uint40" + }, + { + "indexed": true, + "internalType": "uint40", + "name": "settledId", + "type": "uint40" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalAssets", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalSupply", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assetsWithdrawed", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "sharesBurned", + "type": "uint256" + } + ], + "name": "SettleRedeem", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "enum State", + "name": "state", + "type": "uint8" + } + ], + "name": "StateUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint128", + "name": "oldLifespan", + "type": "uint128" + }, + { + "indexed": false, + "internalType": "uint128", + "name": "newLifespan", + "type": "uint128" + } + ], + "name": "TotalAssetsLifespanUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "totalAssets", + "type": "uint256" + } + ], + "name": "TotalAssetsUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "Unpaused", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": 
"address", + "name": "oldManager", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "newManager", + "type": "address" + } + ], + "name": "ValuationManagerUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "WhitelistDisabled", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "oldManager", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "newManager", + "type": "address" + } + ], + "name": "WhitelistManagerUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "authorized", + "type": "bool" + } + ], + "name": "WhitelistUpdated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "name": "Withdraw", + "type": "event" + }, + { + "inputs": [], + "name": "MAX_MANAGEMENT_RATE", + "outputs": [ + { + "internalType": "uint16", + "name": "", + "type": "uint16" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MAX_PERFORMANCE_RATE", + "outputs": [ + { + "internalType": "uint16", + "name": "", + "type": "uint16" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MAX_PROTOCOL_RATE", + "outputs": [ + { + "internalType": "uint16", + 
"name": "", + "type": "uint16" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "acceptOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "accounts", + "type": "address[]" + } + ], + "name": "addToWhitelist", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "asset", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "cancelRequestDeposit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "sharesToRedeem", + "type": "uint256" + } + ], + "name": "claimSharesAndRequestRedeem", + "outputs": [ + { + "internalType": "uint40", + "name": "requestId", + "type": "uint40" + } + ], + "stateMutability": "nonpayable", + 
"type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "controllers", + "type": "address[]" + } + ], + "name": "claimSharesOnBehalf", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "claimableDepositRequest", + "outputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "claimableRedeemRequest", + "outputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newTotalAssets", + "type": "uint256" + } + ], + "name": "close", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "name": "convertToAssets", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "name": "convertToAssets", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "name": "convertToShares", + "outputs": [ + { + "internalType": 
"uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "name": "convertToShares", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [ + { + "internalType": "uint8", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "deposit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + } + ], + "name": "deposit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "disableWhitelist", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "expireTotalAssets", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "feeRates", + "outputs": [ + { + "components": [ + { + "internalType": "uint16", + "name": "managementRate", + "type": "uint16" + }, + { + "internalType": "uint16", + "name": "performanceRate", + "type": "uint16" + } + ], + "internalType": "struct Rates", + "name": "", + 
"type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getRolesStorage", + "outputs": [ + { + "components": [ + { + "internalType": "address", + "name": "whitelistManager", + "type": "address" + }, + { + "internalType": "address", + "name": "feeReceiver", + "type": "address" + }, + { + "internalType": "address", + "name": "safe", + "type": "address" + }, + { + "internalType": "contract FeeRegistry", + "name": "feeRegistry", + "type": "address" + }, + { + "internalType": "address", + "name": "valuationManager", + "type": "address" + } + ], + "internalType": "struct Roles.RolesStorage", + "name": "_rolesStorage", + "type": "tuple" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "address", + "name": "feeRegistry", + "type": "address" + }, + { + "internalType": "address", + "name": "wrappedNativeToken", + "type": "address" + } + ], + "name": "initialize", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "initiateClosing", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "internalType": "address", + "name": "operator", + "type": "address" + } + ], + "name": "isOperator", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isTotalAssetsValid", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "isWhitelisted", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + 
} + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "maxDeposit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "maxMint", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "maxRedeem", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "maxWithdraw", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + } + ], + "name": "mint", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "mint", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + 
"internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "owner", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "pause", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "paused", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "pendingDepositRequest", + "outputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "pendingOwner", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "pendingRedeemRequest", + "outputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "name": "previewDeposit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "name": "previewMint", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": 
"uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "name": "previewRedeem", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "name": "previewWithdraw", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "redeem", + "outputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "renounceOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "internalType": "address", + "name": "owner", + "type": "address" + } + ], + "name": "requestDeposit", + "outputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "referral", + "type": "address" + } + ], + "name": "requestDeposit", + 
"outputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + }, + { + "internalType": "address", + "name": "owner", + "type": "address" + } + ], + "name": "requestRedeem", + "outputs": [ + { + "internalType": "uint256", + "name": "requestId", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "accounts", + "type": "address[]" + } + ], + "name": "revokeFromWhitelist", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "safe", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "operator", + "type": "address" + }, + { + "internalType": "bool", + "name": "approved", + "type": "bool" + } + ], + "name": "setOperator", + "outputs": [ + { + "internalType": "bool", + "name": "success", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newTotalAssets", + "type": "uint256" + } + ], + "name": "settleDeposit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newTotalAssets", + "type": "uint256" + } + ], + "name": "settleRedeem", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "share", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + 
"internalType": "bytes4", + "name": "interfaceId", + "type": "bytes4" + } + ], + "name": "supportsInterface", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "referral", + "type": "address" + } + ], + "name": "syncDeposit", + "outputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [], + "name": "totalAssets", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + 
"type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "transferOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "unpause", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_feeReceiver", + "type": "address" + } + ], + "name": "updateFeeReceiver", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newTotalAssets", + "type": "uint256" + } + ], + "name": "updateNewTotalAssets", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint16", + "name": "managementRate", + "type": "uint16" + }, + { + "internalType": "uint16", + "name": "performanceRate", + "type": "uint16" + } + ], + "internalType": "struct Rates", + "name": "newRates", + "type": "tuple" + } + ], + "name": "updateRates", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint128", + "name": "lifespan", + "type": "uint128" + } + ], + "name": "updateTotalAssetsLifespan", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_valuationManager", + "type": "address" + } + ], + "name": "updateValuationManager", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_whitelistManager", + "type": "address" + } + ], + "name": "updateWhitelistManager", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + 
"stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "assets", + "type": "uint256" + }, + { + "internalType": "address", + "name": "receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "controller", + "type": "address" + } + ], + "name": "withdraw", + "outputs": [ + { + "internalType": "uint256", + "name": "shares", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } +] diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index 392bdf6c..d80640d2 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -500,6 +500,19 @@ "name": "contract_added" } ] + }, + { + "contract_name": "RockSolidVault", + "events": [ + { + "event_name": "DepositSync", + "name": "rocksolid_deposit_event" + }, + { + "event_name": "RedeemRequest", + "name": "rocksolid_withdrawal_event" + } + ] } ] } diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index b4baa2ea..c72000b0 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -500,9 +500,11 @@ def share_repr(percentage: float) -> str: "sdao_member_request_leave_event" ]: args.nodeAddress = el_explorer_url(args.nodeAddress, block=(event.blockNumber - 1)) - elif event_name.startswith("cs_deposit") or event_name.startswith("cs_withdraw"): + elif event_name.startswith("cs_deposit") or event_name.startswith("cs_withdraw") or event_name.startswith("rocksolid_deposit"): args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) + elif event_name.startswith("rocksolid_withdraw"): + args.shares = solidity.to_float(args.shares) elif event_name == "cs_max_validator_change_event": args.oldLimit, args.newLimit = args.oldValue, args.newValue if args.newLimit > args.oldLimit: diff --git a/rocketwatch/strings/embeds.en.json 
b/rocketwatch/strings/embeds.en.json index 456b7fb3..4185b283 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -577,5 +577,15 @@ "title": ":money_mouth: Large Exit Arbitrage", "description": "%{caller} earned **%{profit} ETH** with a %{amount} ETH flash loan!", "description_small": ":money_mouth: %{caller} earned **%{profit} ETH** from an exit arbitrage!" + }, + "rocksolid_deposit_event": { + "title": "<:rocksolid:1425091714267480158> RockSolid rETH Deposit", + "description": "**%{assets} rETH** deposited for **%{shares} rock.rETH**!", + "description_small": "<:rocksolid:1425091714267480158> %{sender} deposited **%{assets} rETH** for **%{shares} rock.rETH**!" + }, + "rocksolid_withdrawal_event": { + "title": "<:rocksolid:1425091714267480158> RockSolid rETH Withdrawal", + "description": "New redemption request for **%{shares} rock.rETH**!", + "description_small": "<:rocksolid:1425091714267480158> %{sender} requested redemption of **%{shares} rock.rETH**!" 
} } diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 70c1c98a..9bf850dd 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -274,11 +274,13 @@ def assemble(args) -> Embed: case "rpl_migration_event": use_large = (amount >= 1000) case "cs_deposit_eth_event" | "cs_withdraw_eth_event": - use_large = (args["assets"] >= 32) + use_large = (args["assets"] >= 100) case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": use_large = (args["assets"] >= 16 / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice"))) - case "exit_arbitrage_event": - use_large = args["amount"] >= 100 + case "rocksolid_deposit_event": + use_large = args["assets"] >= 100 + case "rocksolid_withdrawal_event": + use_large = args["shares"] >= 100 case _: use_large = (amount >= 100) From e847aae9dc404963bc0e529a3f8c6be93c8298a6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 7 Oct 2025 12:35:41 +0000 Subject: [PATCH 041/279] small scam detection improvements --- rocketwatch/plugins/detect_scam/detect_scam.py | 17 ++++++++++++----- rocketwatch/utils/readable.py | 2 +- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index bfd5c441..ed382336 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -307,8 +307,16 @@ def _markdown_link_trick(self, message: Message) -> Optional[str]: def _discord_invite(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) - if self.invite_pattern.search(txt): - return "Invite to external server" + if match := self.invite_pattern.search(txt): + link = match.group(0) + if not any(domain in link for domain in cfg["youtu.be", "youtube.com"]): + return "Invite to external server" + return None + + def _tap_on_this(self, message: Message) -> Optional[str]: + txt = 
self._get_message_content(message) + if txt.startswith("tap on") and "bio" in txt: + return "Tap on deez nuts nerd" return None def _ticket_system(self, message: Message) -> Optional[str]: @@ -432,6 +440,7 @@ async def on_message(self, message: Message) -> None: self._markdown_link_trick, self._paperhands, self._discord_invite, + self._tap_on_this, self._mention_everyone, ] for check in checks: @@ -529,7 +538,7 @@ async def on_thread_create(self, thread: Thread) -> None: return keywords = ("support", "tick", "assistance", "error", "🎫", "🎟️") - if any(kw in thread.name.lower() for kw in keywords): + if any(kw in thread.name.lower() for kw in keywords) or re.search(r"(-|–|—)\d{3,}", thread.name): await self.report_thread(thread, "Illegitimate support thread") return names = (".", "!", "///") @@ -538,8 +547,6 @@ async def on_thread_create(self, thread: Thread) -> None: return log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") - - @Cog.listener() async def on_raw_thread_update(self, event: RawThreadUpdateEvent) -> None: diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index b541e87e..8271ee91 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -38,7 +38,7 @@ def uptime(time, highres= False): parts.append('%d minute%s' % (minutes, 's' if minutes != 1 else '')) if time or not parts: - parts.append('%.2f seconds' % time) + parts.append('%.0f seconds' % time) return " ".join(parts[:2] if not highres else parts) From 0443b234abd2f85a2ab7f7d2e68fb6c915faaefe Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 7 Oct 2025 12:35:58 +0000 Subject: [PATCH 042/279] better collateral distribution range --- rocketwatch/plugins/collateral/collateral.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index b99c5d48..2f954d9d 100644 --- 
a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -90,7 +90,7 @@ def get_average_collateral_percentage_per_node(collateral_cap, bonded): # calculate percentage percentage = rpl_stake_value / minipool_value * 100 # round percentage to 5% steps - percentage = (percentage // 5) * 5 + percentage = (percentage * 10 // 5) / 2 # add to result if percentage not in result: result[percentage] = [] @@ -218,27 +218,25 @@ def node_minipools(node): @hybrid_command() @describe(raw="Show Raw Distribution Data", - cap_collateral="Cap Collateral to 150%", bonded="Calculate collateral as percent of bonded eth instead of borrowed") async def collateral_distribution(self, ctx: Context, raw: bool = False, - cap_collateral: bool = True, - collateral_cap: int = 150, + collateral_cap: int = 15, bonded: bool = False): """ Show the distribution of collateral across nodes. """ await ctx.defer(ephemeral=is_hidden(ctx)) - data = get_average_collateral_percentage_per_node(collateral_cap or 150 if cap_collateral else None, bonded) + data = get_average_collateral_percentage_per_node(collateral_cap, bonded) counts = [] for collateral, nodes in data.items(): counts.extend([collateral] * len(nodes)) - counts = list(sorted(counts)) - bins = np.bincount(counts) - distribution = [(i, bins[i]) for i in range(len(bins)) if i % 5 == 0] + counts = np.array(list(sorted(counts))) + bins = np.bincount((counts * 2).astype(int)) + distribution = [(i / 2, bins[i]) for i in range(len(bins))] # If the raw data were requested, print them and exit early if raw: @@ -264,7 +262,7 @@ async def collateral_distribution(self, ax.set_ylim(top=(ax.get_ylim()[1] * 1.1)) ax.yaxis.set_visible(False) ax.get_xaxis().set_major_formatter(FuncFormatter( - lambda n, _: f"{x_keys[n] if n < len(x_keys) else 0}{'+' if n == len(x_keys)-1 and cap_collateral else ''}%") + lambda n, _: f"{x_keys[n] if n < len(x_keys) else 0}{'+' if n == len(x_keys)-1 else ''}%") ) staked_distribution = [ @@ 
-272,7 +270,7 @@ async def collateral_distribution(self, ] bars = dict(staked_distribution) - line = ax2.plot(x_keys, [bars.get(int(x), 0) for x in x_keys]) + line = ax2.plot(x_keys, [bars.get(float(x), 0) for x in x_keys]) ax2.set_ylim(top=(ax2.get_ylim()[1] * 1.1)) ax2.tick_params(axis='y', colors=line[0].get_color()) ax2.get_yaxis().set_major_formatter(FuncFormatter(lambda y, _: f"{int(y / 10 ** 3)}k")) From cbd4818ab6390ccd90ba27d5bcff07731452d13d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 7 Oct 2025 12:40:44 +0000 Subject: [PATCH 043/279] weakly hidden collateral command --- rocketwatch/plugins/collateral/collateral.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 2f954d9d..913f43cb 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -18,7 +18,7 @@ from utils.cfg import cfg from utils.embeds import Embed, resolve_ens from utils.rocketpool import rp -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak log = logging.getLogger("collateral") log.setLevel(cfg["log_level"]) @@ -113,7 +113,7 @@ async def node_tvl_vs_collateral(self, """ Show a scatter plot of collateral ratios for given node TVLs """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) display_name = None address = None @@ -227,7 +227,7 @@ async def collateral_distribution(self, """ Show the distribution of collateral across nodes. 
""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) data = get_average_collateral_percentage_per_node(collateral_cap, bonded) From 3f37d5a7d74e93ac92449c241ec15b59466b4981 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 9 Oct 2025 17:16:42 +0000 Subject: [PATCH 044/279] ignore small rETH burns --- rocketwatch/plugins/events/events.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index c72000b0..dd09ccf3 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -439,8 +439,7 @@ def handle_event(event_name: str, event: aDict) -> Optional[Embed]: d = args.currRETHRate - args.prevRETHRate if d > 0 or abs(d) < 0.00001: return None - - if "price_update_event" in event_name: + elif "price_update_event" in event_name: args.value = args.rplPrice next_period = rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) + rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) args.rewardPeriodEnd = next_period @@ -452,8 +451,7 @@ def handle_event(event_name: str, event: aDict) -> Optional[Embed]: # if it will update before the next period, skip if not (ts < next_period < earliest_next_update): return None - - if event_name == "bootstrap_pdao_setting_multi_event": + elif event_name == "bootstrap_pdao_setting_multi_event": description_parts = [] for i in range(len(args.settingContractNames)): value_raw = args.values[i] @@ -639,29 +637,26 @@ def share_repr(percentage: float) -> str: include_payload=("add" in event_name), include_votes=("add" not in event_name), ) - # add inflation and new supply if inflation occurred - if "rpl_inflation" in event_name: + elif "rpl_inflation" in event_name: args.total_supply = int(solidity.to_float(rp.call("rocketTokenRPL.totalSupply"))) args.inflation = 
round(rp.get_annual_rpl_inflation() * 100, 4) - - if "auction_bid_event" in event_name: + elif "auction_bid_event" in event_name: eth = solidity.to_float(args.bidAmount) price = solidity.to_float( rp.call("rocketAuctionManager.getLotPriceAtBlock", args.lotIndex, args.blockNumber)) args.rplAmount = eth / price - if event_name in ["rpl_stake_event", "rpl_withdraw_event"]: # get eth price by multiplying the amount by the current RPL ratio rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) args.amount = solidity.to_float(args.amount) args.ethAmount = args.amount * rpl_ratio - if event_name in ["node_merkle_rewards_claimed"]: + elif event_name in ["node_merkle_rewards_claimed"]: rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) args.amountRPL = sum(solidity.to_float(r) for r in args.amountRPL) args.amountETH = sum(solidity.to_float(e) for e in args.amountETH) args.ethAmount = args.amountRPL * rpl_ratio - if "transfer_event" in event_name: + elif "transfer_event" in event_name: token_prefix = event_name.split("_", 1)[0] args.amount = args.value / 10**18 if args["from"] in cfg["rocketpool.dao_multsigs"]: @@ -670,6 +665,10 @@ def share_repr(percentage: float) -> str: args.symbol = token_contract.functions.symbol().call() elif token_prefix != "reth": return None + elif event_name == "reth_burn_event": + # filter small burns < 1 rETH + if solidity.to_float(args.amount) < 1: + return None # reject if the amount is not major if any([event_name == "reth_transfer_event" and args.amount < 1000, From cf488389a7297e92398f3cbd5516220c9f560c01 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 9 Oct 2025 17:16:49 +0000 Subject: [PATCH 045/279] scam detection fix --- rocketwatch/plugins/detect_scam/detect_scam.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 
ed382336..78a51edf 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -145,12 +145,19 @@ async def cog_unload(self) -> None: def _get_message_content(message: Message, *, preserve_formatting: bool = False) -> str: text = "" if message.content: - content = message.content if preserve_formatting else message.content.replace("\n", "") + content = message.content + if not preserve_formatting: + content = content.replace("\n> ", "") + content = content.replace("\n", " ") text += content + "\n" if message.embeds: for embed in message.embeds: text += f"---\n Embed: {embed.title}\n{embed.description}\n---\n" - return text if preserve_formatting else parse.unquote(text).lower() + + if not preserve_formatting: + text = parse.unquote(text).lower() + + return text async def _generate_message_report(self, message: Message, reason: str) -> Optional[tuple[Embed, Embed, File]]: try: @@ -309,7 +316,7 @@ def _discord_invite(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) if match := self.invite_pattern.search(txt): link = match.group(0) - if not any(domain in link for domain in cfg["youtu.be", "youtube.com"]): + if not any(domain in link for domain in ["youtu.be", "youtube.com"]): return "Invite to external server" return None From 320306b64de8c486a36d155aff5111e4bfc20401 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 10 Oct 2025 17:54:49 +0000 Subject: [PATCH 046/279] transliterate unicode into ascii --- .../plugins/detect_scam/detect_scam.py | 44 +++++++++++-------- rocketwatch/requirements.txt | 1 + 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 78a51edf..f3069fe7 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -5,6 +5,7 @@ import regex as re from urllib import 
parse +from anyascii import anyascii from typing import Optional from datetime import datetime, timezone, timedelta @@ -121,7 +122,7 @@ def __init__(self, bot: RocketWatch): self._message_react_cache = TTLCache(maxsize=1000, ttl=300) self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") - self.invite_pattern = re.compile(r"((discord(app)?\.com\/invite)|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + self.invite_pattern = re.compile(r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") self.message_report_menu = ContextMenu( name="Report Message", @@ -148,14 +149,16 @@ def _get_message_content(message: Message, *, preserve_formatting: bool = False) content = message.content if not preserve_formatting: content = content.replace("\n> ", "") - content = content.replace("\n", " ") + content = content.replace("\n", "") text += content + "\n" if message.embeds: for embed in message.embeds: text += f"---\n Embed: {embed.title}\n{embed.description}\n---\n" if not preserve_formatting: - text = parse.unquote(text).lower() + text = parse.unquote(text) + text = anyascii(text) + text = text.lower() return text @@ -316,15 +319,17 @@ def _discord_invite(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) if match := self.invite_pattern.search(txt): link = match.group(0) - if not any(domain in link for domain in ["youtu.be", "youtube.com"]): + trusted_domains = ["youtu.be", "youtube.com", "tenor.com", "giphy.com", "imgur.com"] + if not any(domain in link for domain in trusted_domains): return "Invite to external server" return None def _tap_on_this(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) - if txt.startswith("tap on") and "bio" in txt: - return "Tap on deez nuts nerd" - return None + keywords = ( + [("tap on", "click on"), "proper"] + ) + 
return "Tap on deez nuts nerd" if self.__txt_contains(txt, keywords) else None def _ticket_system(self, message: Message) -> Optional[str]: # message contains one of the relevant keyword combinations and a link @@ -342,7 +347,7 @@ def _ticket_system(self, message: Message) -> Optional[str]: ("contact", "reach out", "report", [("talk", "speak"), ("to", "with")], "ask"), ("admin", "mod", "administrator", "moderator") ], - ("support team", "supp0rt", "🎫", "🎟️", "m0d"), + ("support team", "supp0rt", "🎫", "🎟️", "m0d", "tlcket"), [ ("get", "ask", "seek", "request", "contact"), ("help", "assistance", "service", "support") @@ -357,17 +362,18 @@ def _ticket_system(self, message: Message) -> Optional[str]: ] ) - def txt_contains(_x: list | tuple | str) -> bool: - match _x: - case str(): - return (re.search(rf"\b{_x}\b", txt) is not None) - case tuple(): - return any(map(txt_contains, _x)) - case list(): - return all(map(txt_contains, _x)) - return False - - return "There is no ticket system in this server." if txt_contains(keywords) else None + return "There is no ticket system in this server." 
if self.__txt_contains(txt, keywords) else None + + @staticmethod + def __txt_contains(txt: str, kw: list | tuple | str) -> bool: + match kw: + case str(): + return kw in txt + case tuple(): + return any(map(lambda w: DetectScam.__txt_contains(txt, w), kw)) + case list(): + return all(map(lambda w: DetectScam.__txt_contains(txt, w), kw)) + return False def _paperhands(self, message: Message) -> Optional[str]: # message contains the word "paperhand" and a link diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 728956de..1e272ca3 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -49,3 +49,4 @@ eth-typing==2.2.1 hexbytes==0.3.1 eth-utils==1.10.0 tabulate==0.9.0 +anyascii==0.3.3 From 79d5ba6744061c9d1ff9132b7fa9c656f994a67d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 13 Oct 2025 13:29:07 +0000 Subject: [PATCH 047/279] async node task --- .../plugins/beacon_states/beacon_states.py | 4 +- rocketwatch/plugins/node_task/node_task.py | 72 +++++++++---------- rocketwatch/requirements.txt | 2 +- rocketwatch/utils/event_logs.py | 2 +- 4 files changed, 40 insertions(+), 40 deletions(-) diff --git a/rocketwatch/plugins/beacon_states/beacon_states.py b/rocketwatch/plugins/beacon_states/beacon_states.py index 7e1e72c3..1083555e 100644 --- a/rocketwatch/plugins/beacon_states/beacon_states.py +++ b/rocketwatch/plugins/beacon_states/beacon_states.py @@ -9,7 +9,7 @@ from utils.embeds import Embed, el_explorer_url from utils.readable import render_tree_legacy from utils.shared_w3 import w3 -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak log = logging.getLogger("beacon_states") log.setLevel(cfg["log_level"]) @@ -22,7 +22,7 @@ def __init__(self, bot: RocketWatch): @hybrid_command() async def beacon_states(self, ctx: Context): - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) # fetch from db res = await 
self.db.minipools_new.find({ "beacon.status": {"$exists": True} diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 6589807c..3f72cff5 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -4,7 +4,7 @@ import pymongo from multicall import Call from cronitor import Monitor -from pymongo import UpdateOne, UpdateMany +from pymongo import AsyncMongoClient, UpdateOne, UpdateMany from discord.ext import tasks, commands from discord.utils import as_chunks @@ -51,7 +51,7 @@ def is_true(_, b): class NodeTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 1000 self.loop.start() @@ -65,13 +65,13 @@ async def loop(self): self.monitor.ping(state="run", series=p_id) try: log.debug("starting node task") - self.check_indexes() + await self.check_indexes() await self.add_untracked_minipools() await self.add_static_data_to_minipools() await self.update_dynamic_minipool_metadata() - self.add_static_deposit_data_to_minipools() - self.add_static_beacon_data_to_minipools() - self.update_dynamic_minipool_beacon_metadata() + await self.add_static_deposit_data_to_minipools() + await self.add_static_beacon_data_to_minipools() + await self.update_dynamic_minipool_beacon_metadata() await self.add_untracked_node_operators() await self.add_static_data_to_node_operators() await self.update_dynamic_node_operator_metadata() @@ -92,7 +92,7 @@ async def add_untracked_minipools(self): latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 # get latest _id in minipools_new collection latest_db = 0 - if res := self.db.minipools_new.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.db.minipools_new.find_one(sort=[("_id", 
pymongo.DESCENDING)]): latest_db = res["_id"] data = {} # return early if we're up to date @@ -107,7 +107,7 @@ async def add_untracked_minipools(self): for i in index_batch ]) log.debug(f"Inserting {len(data)} new minipools into db") - self.db.minipools_new.insert_many([ + await self.db.minipools_new.insert_many([ {"_id": i, "address": a} for i, a in data.items() ]) @@ -122,7 +122,7 @@ async def add_static_data_to_minipools(self): lambda a: (mm.address, [rp.seth_sig(mm.abi, "getMinipoolPubkey"), a], [((a, "pubkey"), safe_to_hex)]), ] # get all minipool addresses from db that do not have a node operator assigned - minipool_addresses = self.db.minipools_new.distinct("address", {"node_operator": {"$exists": False}}) + minipool_addresses = await self.db.minipools_new.distinct("address", {"node_operator": {"$exists": False}}) # get node operator addresses from rp # return early if no minipools need to be updated if not minipool_addresses: @@ -147,7 +147,7 @@ async def add_static_data_to_minipools(self): {"$set": d}, ) for a, d in data.items() ] - self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static data") @timerun_async @@ -167,7 +167,7 @@ async def update_dynamic_minipool_metadata(self): lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) ] # get all minipool addresses from db - minipool_addresses = self.db.minipools_new.distinct("address") + minipool_addresses = await self.db.minipools_new.distinct("address") data = {} att_count = 0 for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): @@ -189,20 +189,20 @@ async def update_dynamic_minipool_metadata(self): {"$set": d} ) for a, d in data.items() ] - self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with metadata") return @timerun - def 
add_static_deposit_data_to_minipools(self): + async def add_static_deposit_data_to_minipools(self): # get all minipool addresses and their status time from db that : # - do not have a deposit_amount # - are in the initialised state # sort by status time - minipools = list(self.db.minipools_new.find( + minipools = await self.db.minipools_new.find( {"deposit_amount": {"$exists": False}, "status": "initialised"}, {"address": 1, "_id": 0, "status_time": 1} - ).sort("status_time", pymongo.ASCENDING)) + ).sort("status_time", pymongo.ASCENDING).to_list() # return early if no minipools need to be updated if not minipools: log.debug("No minipools need to be updated with static deposit data") @@ -258,14 +258,14 @@ def add_static_deposit_data_to_minipools(self): {"$set": d}, ) for a, d in data.items() ] - self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static deposit data") @timerun - def add_static_beacon_data_to_minipools(self): + async def add_static_beacon_data_to_minipools(self): # get all public keys from db where no validator_index is set - public_keys = self.db.minipools_new.distinct("pubkey", {"validator_index": {"$exists": False}}) + public_keys = await self.db.minipools_new.distinct("pubkey", {"validator_index": {"$exists": False}}) # return early if no minipools need to be updated if not public_keys: log.debug("No minipools need to be updated with static beacon data") @@ -290,14 +290,14 @@ def add_static_beacon_data_to_minipools(self): {"$set": {"validator_index": d}} ) for a, d in data.items() ] - self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static beacon data") @timerun - def update_dynamic_minipool_beacon_metadata(self): + async def update_dynamic_minipool_beacon_metadata(self): # basically same ordeal as above, but we use the validator index to get the data 
to improve performance # get all validator indexes from db - validator_indexes = self.db.minipools_new.distinct("validator_index") + validator_indexes = await self.db.minipools_new.distinct("validator_index") # remove None values validator_indexes = [i for i in validator_indexes if i is not None] data = {} @@ -330,19 +330,19 @@ def update_dynamic_minipool_beacon_metadata(self): {"$set": d} ) for a, d in data.items() ] - self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with dynamic beacon data") - def check_indexes(self): + async def check_indexes(self): log.debug("checking indexes") - self.db.minipools_new.create_index("address") - self.db.minipools_new.create_index("pubkey") - self.db.minipools_new.create_index("validator_index") - self.db.node_operators_new.create_index("address") + await self.db.minipools_new.create_index("address") + await self.db.minipools_new.create_index("pubkey") + await self.db.minipools_new.create_index("validator_index") + await self.db.node_operators_new.create_index("address") # proposal index creation that is for some reason here - self.db.proposals.create_index("validator") - self.db.proposals.create_index("validator") - self.db.proposals.create_index("slot", unique=True) + await self.db.proposals.create_index("validator") + await self.db.proposals.create_index("validator") + await self.db.proposals.create_index("slot", unique=True) log.debug("indexes checked") @timerun_async @@ -352,7 +352,7 @@ async def add_untracked_node_operators(self): latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 # get latest _id in node_operators_new collection latest_db = 0 - if res := self.db.node_operators_new.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.db.node_operators_new.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] data = {} # return early if we're up to date @@ -366,7 +366,7 @@ async def 
add_untracked_node_operators(self): for i in index_batch ]) log.debug(f"Inserting {len(data)} new nodes into db") - self.db.node_operators_new.insert_many([ + await self.db.node_operators_new.insert_many([ {"_id": i, "address": a} for i, a in data.items() ]) @@ -379,7 +379,7 @@ async def add_static_data_to_node_operators(self): lambda a: (ndf.address, [rp.seth_sig(ndf.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), ] # get all minipool addresses from db that do not have a node operator assigned - node_addresses = self.db.node_operators_new.distinct("address", {"fee_distributor_address": {"$exists": False}}) + node_addresses = await self.db.node_operators_new.distinct("address", {"fee_distributor_address": {"$exists": False}}) # get node operator addresses from rp # return early if no minipools need to be updated if not node_addresses: @@ -404,7 +404,7 @@ async def add_static_data_to_node_operators(self): {"$set": d}, ) for a, d in data.items() ] - self.db.node_operators_new.bulk_write(bulk, ordered=False) + await self.db.node_operators_new.bulk_write(bulk, ordered=False) log.debug("Node operators updated with static data") @timerun_async @@ -445,7 +445,7 @@ async def update_dynamic_node_operator_metadata(self): [((n["address"], "deposit_credit"), safe_to_float)]) ] # get all node operators from db, but we only care about the address and the fee_distributor_address - nodes = list(self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1})) + nodes = await self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1}).to_list() data = {} att_count = 0 for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): @@ -467,7 +467,7 @@ async def update_dynamic_node_operator_metadata(self): {"$set": d} ) for a, d in data.items() ] - self.db.node_operators_new.bulk_write(bulk, ordered=False) + await self.db.node_operators_new.bulk_write(bulk, ordered=False) log.debug("Node operators updated with 
metadata") async def setup(self): diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 1e272ca3..d3149858 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -38,7 +38,7 @@ HomeAssistant-API==4.2.2.post2 bs4==0.0.2 pydantic==2.8.2 pydantic_core==2.20.1 -pymongo==4.8.0 +pymongo==4.15.3 graphql_query==1.4.0 pillow==11.1.0 aiohttp==3.11.12 diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 12aca6cb..f72176ba 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -19,7 +19,7 @@ def get_logs( start_block = from_block end_block = to_block - log.debug(f"Fetching vote receipts in [{start_block}, {end_block}]") + log.debug(f"Fetching event logs in [{start_block}, {end_block}]") chunk_size = 50_000 from_block = start_block From de57cf4dab336ca8daea51572ada67aa8a3affc8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 13 Oct 2025 16:03:22 +0000 Subject: [PATCH 048/279] switch from motor to async pymongo --- rocketwatch/plugins/apr/apr.py | 4 ++-- rocketwatch/plugins/beacon_states/beacon_states.py | 4 ++-- rocketwatch/plugins/chat_summary/chat_summary.py | 4 ++-- rocketwatch/plugins/commissions/commissions.py | 4 ++-- rocketwatch/plugins/constellation/constellation.py | 4 ++-- rocketwatch/plugins/debug/debug.py | 4 ++-- rocketwatch/plugins/deposit_pool/deposit_pool.py | 4 ++-- rocketwatch/plugins/detect_scam/detect_scam.py | 4 ++-- rocketwatch/plugins/event_core/event_core.py | 4 ++-- rocketwatch/plugins/karma/karma.py | 5 ++--- rocketwatch/plugins/lottery/lottery.py | 5 ++--- rocketwatch/plugins/metrics/metrics.py | 4 ++-- .../minipools_upkeep_task/minipools_upkeep_task.py | 4 ++-- rocketwatch/plugins/node_task/node_task.py | 2 +- rocketwatch/plugins/pinned_messages/pinned_messages.py | 4 ++-- rocketwatch/plugins/proposals/proposals.py | 5 ++--- rocketwatch/plugins/random/random.py | 4 ++-- rocketwatch/plugins/rpl/rpl.py | 4 
++-- rocketwatch/plugins/scam_warning/scam_warning.py | 4 ++-- rocketwatch/plugins/support_utils/support_utils.py | 8 ++++---- rocketwatch/plugins/tvl/tvl.py | 4 ++-- rocketwatch/requirements.txt | 1 - 22 files changed, 43 insertions(+), 47 deletions(-) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 1f76a202..a4e0217b 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -9,7 +9,7 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib.dates import DateFormatter -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -43,7 +43,7 @@ def get_duration(d1, d2): class APR(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.loop.start() async def cog_unload(self): diff --git a/rocketwatch/plugins/beacon_states/beacon_states.py b/rocketwatch/plugins/beacon_states/beacon_states.py index 1083555e..f3ca35ab 100644 --- a/rocketwatch/plugins/beacon_states/beacon_states.py +++ b/rocketwatch/plugins/beacon_states/beacon_states.py @@ -2,7 +2,7 @@ from discord.ext import commands from discord.ext.commands import hybrid_command, Context -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -18,7 +18,7 @@ class BeaconStates(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @hybrid_command() async def beacon_states(self, ctx: Context): diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 
80f4db24..0bd5b39b 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -11,7 +11,7 @@ from discord.ext import commands from discord.ext.commands import Context, is_owner from discord.ext.commands import hybrid_command -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -27,7 +27,7 @@ def __init__(self, bot: RocketWatch): self.client = anthropic.AsyncAnthropic(api_key=cfg["other.secrets.anthropic"]) # log all possible engines self.tokenizer = tiktoken.encoding_for_model("gpt-4-turbo") - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @classmethod def message_to_text(cls, message, index): diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index 1d12b828..fca6012a 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -8,7 +8,7 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib import pyplot as plt -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -23,7 +23,7 @@ class Commissions(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot # connect to local mongodb - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @hybrid_command() async def commission_history(self, ctx: Context): diff --git a/rocketwatch/plugins/constellation/constellation.py b/rocketwatch/plugins/constellation/constellation.py index 60d2fee3..76fbf733 100644 --- a/rocketwatch/plugins/constellation/constellation.py +++ b/rocketwatch/plugins/constellation/constellation.py 
@@ -4,7 +4,7 @@ from discord import Interaction from discord.app_commands import command from discord.ext.commands import Cog -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -24,7 +24,7 @@ class Constellation(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch async def _fetch_num_operators(self) -> int: whitelist_contract = rp.get_contract_by_name("Constellation.Whitelist") diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 67bb0207..319bdd15 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -10,7 +10,7 @@ from discord import File, Interaction from discord.app_commands import Choice, command, guilds, describe from discord.ext.commands import Cog, is_owner -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -29,7 +29,7 @@ class Debug(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.contract_names = [] self.function_names = [] diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 00099342..41fe7ec3 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -2,7 +2,7 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from plugins.queue.queue import Queue @@ -20,7 +20,7 @@ class DepositPool(StatusPlugin): def __init__(self, 
bot: RocketWatch): super().__init__(bot) - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @staticmethod def get_deposit_pool_stats() -> Embed: diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index f3069fe7..c68c2bf6 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -32,7 +32,7 @@ ) from discord.ext.commands import Cog from discord.app_commands import command, guilds, ContextMenu -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -114,7 +114,7 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self._report_lock = asyncio.Lock() self._update_lock = asyncio.Lock() diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index d349fda6..134dabe1 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -13,7 +13,7 @@ from cronitor import Monitor from discord.ext import commands, tasks from eth_typing import BlockIdentifier, BlockNumber -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from web3.datastructures import MutableAttributeDict from rocketwatch import RocketWatch @@ -40,7 +40,7 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.state = self.State.OK self.channels = cfg["discord.channels"] - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.head_block: BlockIdentifier = cfg["events.genesis"] 
self.block_batch_size = cfg["events.block_batch_size"] self.monitor = Monitor("gather-new-events", api_key=cfg["other.secrets.cronitor"]) diff --git a/rocketwatch/plugins/karma/karma.py b/rocketwatch/plugins/karma/karma.py index c94c48f1..1510d2b9 100644 --- a/rocketwatch/plugins/karma/karma.py +++ b/rocketwatch/plugins/karma/karma.py @@ -3,8 +3,7 @@ from discord import app_commands, Interaction, User, AppCommandType from discord.app_commands.checks import cooldown from discord.ext.commands import Cog, GroupCog -from motor.motor_asyncio import AsyncIOMotorClient -from pymongo import IndexModel +from pymongo import AsyncMongoClient, IndexModel from rocketwatch import RocketWatch from utils.cfg import cfg @@ -18,7 +17,7 @@ class KarmaUtils(GroupCog, name="karma"): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self.menus = [] for c in [5,10]: self.menus.append(app_commands.ContextMenu( diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index c204dce4..429ea865 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -2,8 +2,7 @@ from discord.ext import commands from discord.ext.commands import hybrid_command, Context -from motor.motor_asyncio import AsyncIOMotorClient -from pymongo import InsertOne +from pymongo import AsyncMongoClient, InsertOne from rocketwatch import RocketWatch from utils.cfg import cfg @@ -21,7 +20,7 @@ class LotteryBase: def __init__(self): # connect to local mongodb - self.client = AsyncIOMotorClient(cfg["mongodb.uri"]) + self.client = AsyncMongoClient(cfg["mongodb.uri"]) self.db = self.client.get_database("rocketwatch") self.did_check = False diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 8d2fb97c..a724b4c6 100644 --- 
a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta from io import BytesIO -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from bson import SON from cachetools import TTLCache from discord import File @@ -25,7 +25,7 @@ class Metrics(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.notice_ttl_cache = TTLCache(math.inf, ttl=60 * 15) - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.collection = self.db.command_metrics @hybrid_command() diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 14a4733c..7b8c637b 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -2,7 +2,7 @@ import pymongo -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from multicall import Call from discord import Interaction @@ -30,7 +30,7 @@ def div_32(i: int): class MinipoolsUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.batch_size = 1000 self.loop.start() diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 3f72cff5..c0d8964b 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -4,7 +4,7 @@ import pymongo from multicall import Call from cronitor import Monitor -from pymongo import AsyncMongoClient, UpdateOne, UpdateMany +from pymongo import AsyncMongoClient UpdateOne, UpdateMany from discord.ext import tasks, commands from discord.utils import as_chunks diff 
--git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index de2ba702..664ed9d9 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -1,7 +1,7 @@ import logging from datetime import datetime, timedelta -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from discord.app_commands import guilds from discord.ext import commands, tasks from discord.ext.commands import hybrid_command, is_owner @@ -17,7 +17,7 @@ class PinnedMessages(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch if not self.run_loop.is_running() and bot.is_ready(): self.run_loop.start() diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index f29faa87..d2ce8460 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -13,8 +13,7 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib import pyplot as plt -from motor.motor_asyncio import AsyncIOMotorClient -from pymongo import ReplaceOne +from pymongo import AsyncMongoClient, ReplaceOne from wordcloud import WordCloud from rocketwatch import RocketWatch @@ -120,7 +119,7 @@ def __init__(self, bot: RocketWatch): self.rocketscan_proposals_url = "https://rocketscan.io/api/mainnet/beacon/blocks/all" self.last_chore_run = 0 # connect to local mongodb - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self.created_view = False async def create_minipool_proposal_view(self): diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index a56f55ac..f08aa783 
100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -10,7 +10,7 @@ from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -29,7 +29,7 @@ class Random(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @hybrid_command() async def dice(self, ctx: Context, dice_string: str = "1d6"): diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index cb7cdb55..1abd8798 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -8,7 +8,7 @@ from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -26,7 +26,7 @@ class RPL(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @hybrid_command() async def rpl_apr(self, ctx: Context): diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index 859449d0..91dd7eb7 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -3,7 +3,7 @@ from discord import errors from discord.ext import commands -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -17,7 +17,7 @@ class 
ScamWarning(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self.channel_ids = set(cfg["rocketpool.dm_warning.channels"]) self.inactivity_cooldown = timedelta(days=90) self.failure_cooldown = timedelta(days=1) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 13f3626d..d3ab01f5 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -6,7 +6,7 @@ from discord import app_commands, ui, Interaction, TextStyle, ButtonStyle, File, User from discord.app_commands import Group, Choice, choices from discord.ext.commands import Cog, GroupCog -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -34,7 +34,7 @@ async def generate_template_embed(db, template_name: str): # Define a simple View that gives us a counter button class AdminView(ui.View): - def __init__(self, db: AsyncIOMotorClient, template_name: str): + def __init__(self, db: AsyncMongoClient, template_name: str): super().__init__() self.db = db self.template_name = template_name @@ -171,7 +171,7 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): class SupportGlobal(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @app_commands.command(name="use") async def _use(self, interaction: Interaction, name: str, mention: User | None): @@ -197,7 +197,7 @@ class SupportUtils(GroupCog, name="support"): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).rocketwatch + self.db = 
AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @subgroup.command() async def add(self, interaction: Interaction, name: str): diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index e6837c39..8a3051ae 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -2,7 +2,7 @@ import humanize from colorama import Style -from motor.motor_asyncio import AsyncIOMotorClient +from pymongo import AsyncMongoClient from discord import Interaction from discord.ext.commands import Cog @@ -52,7 +52,7 @@ def split_rewards_logic(balance, node_share, commission, force_base=False): class TVL(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncIOMotorClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @command() @describe(show_all="Also show entries with 0 value") diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index d3149858..198f8581 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -14,7 +14,6 @@ pytz==2022.7.1 matplotlib==3.7.1 scipy==1.11.3 inflect==7.3.1 -motor==3.1.1 wordcloud==1.9.4 web3-multicall==0.0.7 colorama==0.4.6 From c09a3b8f69628d364b0f84dcb4b54a7c6ccd370c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 14 Oct 2025 21:48:40 +0000 Subject: [PATCH 049/279] fix aggregate usage with AsyncMongoClient --- rocketwatch/plugins/apr/apr.py | 8 +++---- rocketwatch/plugins/karma/karma.py | 8 +++---- rocketwatch/plugins/lottery/lottery.py | 9 ++++--- rocketwatch/plugins/metrics/metrics.py | 24 +++++++++---------- .../minipools_upkeep_task.py | 8 +++---- rocketwatch/plugins/proposals/proposals.py | 16 ++++++------- rocketwatch/plugins/random/random.py | 4 ++-- rocketwatch/plugins/rpl/rpl.py | 12 +++++----- .../plugins/support_utils/support_utils.py | 4 ++-- rocketwatch/plugins/tvl/tvl.py | 20 ++++++++-------- 10 files changed, 56 
insertions(+), 57 deletions(-) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index a4e0217b..240999af 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -99,7 +99,7 @@ async def reth_apr(self, ctx: Context): return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools_new.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', @@ -170,7 +170,7 @@ async def reth_apr(self, ctx: Context): } } } - ]).to_list(length=1) + ])).to_list(length=1) node_fee = tmp[0]["average"] if len(tmp) > 0 else 20 peth_share = tmp[0]["used_pETH_share"] if len(tmp) > 0 else 0.75 @@ -270,7 +270,7 @@ async def node_apr(self, ctx: Context): return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools_new.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', @@ -341,7 +341,7 @@ async def node_apr(self, ctx: Context): } } } - ]).to_list(length=1) + ])).to_list(length=1) node_fee = tmp[0]["average"] if len(tmp) > 0 else 0.2 peth_share = tmp[0]["used_pETH_share"] if len(tmp) > 0 else 0.75 diff --git a/rocketwatch/plugins/karma/karma.py b/rocketwatch/plugins/karma/karma.py index 1510d2b9..536fba9a 100644 --- a/rocketwatch/plugins/karma/karma.py +++ b/rocketwatch/plugins/karma/karma.py @@ -102,7 +102,7 @@ async def remove_user_points(self, interaction: Interaction, user: User): async def top(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) # find the top karma users - top = await self.db.karma.aggregate([ + top = await (await self.db.karma.aggregate([ {"$group": {"_id": "$user", "points": {"$sum": "$points"}}}, {"$sort": {"points": -1}}, {"$limit": 10}, @@ -118,7 +118,7 @@ async def top(self, interaction: 
Interaction): "as" : "top_issuer" }}, {"$project": {"_id": 1, "points": 1, "issuer": {"$arrayElemAt": ["$top_issuer._id", 0]}}} - ]).to_list(length=10) + ])).to_list(length=10) e = Embed(title="Top 10 Karma Users") des = "" for i, u in enumerate(top): @@ -146,10 +146,10 @@ async def user(self, interaction: Interaction, user: User = None): await interaction.edit_original_response(content=f"`{user.global_name or user.name}` has no points!") return # fetch total score for user - total = await self.db.karma.aggregate([ + total = await (await self.db.karma.aggregate([ {"$match": {"user": user.id}}, {"$group": {"_id": "$user", "points": {"$sum": "$points"}}} - ]).to_list(length=1) + ])).to_list(length=1) e = Embed(title=f"Points held by {user.global_name or user.name}") des = "" if total: diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 429ea865..9a2aaf5f 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -61,8 +61,8 @@ async def load_sync_committee(self, period): InsertOne({"index": i, "validator": int(validator)}) for i, validator in enumerate(validators) ] - async with await self.client.start_session() as s: - async with s.start_transaction(): + async with self.client.start_session() as session: + async with await session.start_transaction(): await col.delete_many({}) await col.bulk_write(payload) @@ -100,9 +100,8 @@ async def get_validators_for_sync_committee_period(self, period): '$ne': None } } - }]).to_list(length=None) - - return data + }]) + return await data.to_list() async def generate_sync_committee_description(self, period): await self.load_sync_committee(period) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index a724b4c6..bcfa115f 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -49,38 +49,38 @@ async def metrics(self, ctx: Context): desc += f"Total Commands 
Handled:\n\t{total_commands_handled}\n\n" # get the average command response time in the last 7 days - avg_response_time = await self.collection.aggregate([ + avg_response_time = await (await self.collection.aggregate([ {'$match': {'timestamp': {'$gte': start}}}, {'$group': {'_id': None, 'avg': {'$avg': '$took'}}} - ]).to_list(length=1) + ])).to_list(length=1) if avg_response_time[0]['avg'] is not None: desc += f"Average Command Response Time:\n\t{avg_response_time[0]['avg']:.03} seconds\n\n" # get completed rate in the last 7 days - completed_rate = await self.collection.aggregate([ + completed_rate = await (await self.collection.aggregate([ {'$match': {'timestamp': {'$gte': start}, 'status': 'completed'}}, {'$group': {'_id': None, 'count': {'$sum': 1}}} - ]).to_list(length=1) + ])).to_list(length=1) if completed_rate: percent = completed_rate[0]['count'] / (total_commands_handled - 1) desc += f"Command Success Rate:\n\t{percent:.03%}\n\n" # get the 5 most used commands of the last 7 days - most_used_commands = await self.collection.aggregate([ + most_used_commands = await (await self.collection.aggregate([ {'$match': {'timestamp': {'$gte': start}}}, {'$group': {'_id': '$command', 'count': {'$sum': 1}}}, {'$sort': {'count': -1}} - ]).to_list(length=5) + ])).to_list(length=5) desc += "Top 5 Commands based on usage:\n" for command in most_used_commands: desc += f" - {command['_id']}: {command['count']}\n" # get the top 5 channels of the last 7 days - top_channels = await self.collection.aggregate([ + top_channels = await (await self.collection.aggregate([ {'$match': {'timestamp': {'$gte': start}}}, {'$group': {'_id': '$channel', 'count': {'$sum': 1}}}, {'$sort': {'count': -1}} - ]).to_list(length=5) + ])).to_list(length=5) desc += "\nTop 5 Channels based on commands handled:\n" for channel in top_channels: desc += f" - {channel['_id']['name']}: {channel['count']}\n" @@ -95,7 +95,7 @@ async def metrics_chart(self, ctx): await ctx.defer(ephemeral=is_hidden(ctx)) # 
generate mathplotlib chart that shows monthly command usage and monthly event emission, in separate subplots - command_usage = await self.collection.aggregate([ + command_usage = await (await self.collection.aggregate([ { '$group': { '_id' : { @@ -108,8 +108,8 @@ async def metrics_chart(self, ctx): { '$sort': SON([('_id.year', 1), ('_id.month', 1)]) } - ]).to_list(None) - event_emission = await self.db.event_queue.aggregate([ + ])).to_list(None) + event_emission = await (await self.db.event_queue.aggregate([ { '$group': { '_id' : { @@ -122,7 +122,7 @@ async def metrics_chart(self, ctx): { '$sort': SON([('_id.year', 1), ('_id.month', 1)]) } - ]).to_list(None) + ])).to_list(None) # create a new figure fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 7b8c637b..7ea6712b 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -96,17 +96,17 @@ async def delegate_stats(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) # get stats about delegates # we want to show the distribution of minipools that are using each delegate - distribution_stats = await self.db.minipools_new.aggregate([ + distribution_stats = await (await self.db.minipools_new.aggregate([ {"$match": {"effective_delegate": {"$exists": True}}}, {"$group": {"_id": "$effective_delegate", "count": {"$sum": 1}}}, {"$sort": {"count": -1}}, - ]).to_list(None) + ])).to_list() # and the percentage of minipools that are using the useLatestDelegate flag - use_latest_delegate_stats = await self.db.minipools_new .aggregate([ + use_latest_delegate_stats = await (await self.db.minipools_new.aggregate([ {"$match": {"use_latest_delegate": {"$exists": True}}}, {"$group": {"_id": "$use_latest_delegate", "count": {"$sum": 
1}}}, {"$sort": {"count": -1}}, - ]).to_list(None) + ])).to_list() e = Embed() e.title = "Delegate Stats" desc = "**Effective Delegate Distribution of Minipools:**\n" diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index d2ce8460..11eeefe8 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -236,7 +236,7 @@ async def chore(self, ctx: Context): @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): - distribution = await self.db.minipool_proposals.aggregate([ + distribution = await (await self.db.minipool_proposals.aggregate([ { '$project': { 'attribute' : f'$latest_proposal.{attribute}', @@ -258,7 +258,7 @@ async def gather_attribute(self, attribute, remove_allnodes=False): 'count': 1 } } - ]).to_list(length=None) + ])).to_list() if remove_allnodes: d = {'remove_from_total': {'count': 0, 'validator_count': 0}} for entry in distribution: @@ -309,7 +309,7 @@ async def version_chart(self, ctx: Context): # get version used after max_slot - look_back # and have at least 10 occurrences start_slot = max_slot - look_back - recent_versions = await self.db.proposals.aggregate([ + recent_versions = await (await self.db.proposals.aggregate([ { '$match': { 'slot' : { @@ -328,7 +328,7 @@ async def version_chart(self, ctx: Context): '_id': -1 } } - ]).to_list(None) + ])).to_list() recent_versions = [v['_id'] for v in recent_versions] data = {} versions = [] @@ -550,11 +550,11 @@ async def comments(self, ctx: Context): color_func=lambda *args, **kwargs: "rgb(235, 142, 85)") # aggregate comments with their count - comments = await self.db.proposals.aggregate([ + comments = await (await self.db.proposals.aggregate([ {"$match": {"comment": {"$exists": 1}}}, {"$group": {"_id": "$comment", "count": {"$sum": 1}}}, {"$sort": {"count": -1, "slot": -1}} - ]).to_list(None) + ])).to_list() comment_words = {x['_id']: x["count"] for x in comments} # generate 
word cloud @@ -580,7 +580,7 @@ async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_ await msg.edit(content="generating client combo ranking...") # aggregate [consensus, execution] pair counts - client_pairs = await self.db.minipool_proposals.aggregate([ + client_pairs = await (await self.db.minipool_proposals.aggregate([ { "$match": { "latest_proposal.consensus_client": {"$ne": "Unknown"}, @@ -603,7 +603,7 @@ async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_ "count": -1 } } - ]).to_list(None) + ])).to_list() e = Embed(title=f"Client Combo Ranking{' without Allnodes' if remove_allnodes else ''}") diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index f08aa783..48505f1f 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -181,7 +181,7 @@ async def smoothie(self, ctx: Context): e = Embed(title="Smoothing Pool") smoothie_eth = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) - data = await self.db.minipools_new.aggregate([ + data = await (await self.db.minipools_new.aggregate([ { '$match': { 'beacon.status': { @@ -258,7 +258,7 @@ async def smoothie(self, ctx: Context): } } } - ]).to_list(length=None) + ])).to_list() if not data: await ctx.send("no minipools found", ephemeral=True) return diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 1abd8798..18afed4d 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -37,7 +37,7 @@ async def rpl_apr(self, ctx: Context): e = Embed() reward_duration = rp.call("rocketRewardsPool.getClaimIntervalTime") - total_rpl_staked = await self.db.node_operators_new.aggregate([ + total_rpl_staked = await (await self.db.node_operators_new.aggregate([ { '$group': { '_id' : 'out', @@ -46,7 +46,7 @@ async def rpl_apr(self, ctx: Context): } } } - ]).next() + ])).next() total_rpl_staked = 
total_rpl_staked["total_effective_rpl_stake"] # track down the rewards for node operators from the last reward period @@ -112,7 +112,7 @@ async def effective_rpl_staked(self, ctx: Context): total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalRPLStake")) e.add_field(name="Total RPL Staked:", value=f"{humanize.intcomma(total_rpl_staked, 2)} RPL", inline=False) # get effective RPL staked - effective_rpl_stake = await self.db.node_operators_new.aggregate([ + effective_rpl_stake = await (await self.db.node_operators_new.aggregate([ { '$group': { '_id' : 'out', @@ -121,7 +121,7 @@ async def effective_rpl_staked(self, ctx: Context): } } } - ]).next() + ])).next() effective_rpl_stake = effective_rpl_stake["total_effective_rpl_stake"] # calculate percentage staked percentage_staked = effective_rpl_stake / total_rpl_staked e.add_field(name="Effective RPL Staked:", value=f"{humanize.intcomma(effective_rpl_stake, 2)} RPL " @@ -143,7 +143,7 @@ async def withdrawable_rpl(self, e = Embed() img = BytesIO() - data = await self.db.node_operators_new.aggregate([ + data = await (await self.db.node_operators_new.aggregate([ { '$match': { 'staking_minipool_count': { @@ -164,7 +164,7 @@ async def withdrawable_rpl(self, 'rpl_stake': 1 } } - ]).to_list(length=None) + ])).to_list() rpl_eth_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) # calculate withdrawable RPL at various RPL ETH prices diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index d3ab01f5..3fd432f9 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -288,7 +288,7 @@ async def remove(self, interaction: Interaction, name: str): async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection - templates 
= await self.db.support_bot.aggregate([ + templates = await (await self.db.support_bot.aggregate([ { "$lookup": { "from": "support_bot_dumps", @@ -303,7 +303,7 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): "last_edited_date": {"$arrayElemAt": ["$dump.ts", 0]} } } - ]).to_list(None) + ])).to_list() # sort the templates by the specified order if isinstance(order_by, Choice): order_by = order_by.value diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 8a3051ae..faf6b1b3 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -125,7 +125,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Minipools that are flagged as initialised have the following applied to them: # - They have 1 ETH staked on the beacon chain. # - They have not yet received 31 ETH from the Deposit Pool. - tmp = await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools_new.aggregate([ { '$match': { 'status': 'initialised', @@ -139,7 +139,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } } } - ]).to_list(1) + ])).to_list(1) if tmp: data["Total ETH Locked"]["Minipools Stake"]["Queued Minipools"]["_val"] = tmp[0]["beacon_balance"] @@ -148,7 +148,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have deposited 1 ETH to the Beacon Chain. # - They have 31 ETH from the Deposit Pool in their contract waiting to be staked as well. # - They are currently in the scrubbing process (should be 12 hours) or have not yet initiated the second phase. 
- tmp = await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools_new.aggregate([ { '$match': { 'status': 'prelaunch', @@ -165,7 +165,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } } } - ]).to_list(1) + ])).to_list(1) if tmp: data["Total ETH Locked"]["Minipools Stake"]["Pending Minipools"]["_val"] = tmp[0]["beacon_balance"] + tmp[0][ "execution_balance"] @@ -177,7 +177,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. # - The 31 ETH that was waiting in their address was moved back to the Deposit Pool (This can cause the Deposit Pool # to grow beyond its Cap, check the bellow comment for information about that). - tmp = await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools_new.aggregate([ { '$match': { 'status': 'dissolved', @@ -194,7 +194,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } } } - ]).to_list(1) + ])).to_list(1) if len(tmp) > 0: tmp = tmp[0] data["Total ETH Locked"]["Minipools Stake"]["Dissolved Minipools"]["Locked on Beacon Chain"]["_val"] = tmp[ @@ -271,7 +271,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. 
smoothie_balance = solidity.to_float(w3.eth.getBalance(rp.get_address_by_name("rocketSmoothingPool"))) - tmp = await self.db.node_operators_new.aggregate([ + tmp = await (await self.db.node_operators_new.aggregate([ { '$match': { 'smoothing_pool_registration_state': True, @@ -324,7 +324,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } } } - ]).to_list(None) + ])).to_list() if len(tmp) > 0: data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["Node Share"][ "_val"] = smoothie_balance * tmp[0]["avg_node_share"] @@ -364,7 +364,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) # create _value string for each branch. the _value is the sum of all _val or _val values in the children - tmp = await self.db.node_operators_new.aggregate([ + tmp = await (await self.db.node_operators_new.aggregate([ { '$match': { 'fee_distributor_eth_balance': { @@ -416,7 +416,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } } } - ]).to_list(None) + ])).to_list() if len(tmp) > 0: data["Total ETH Locked"]["Undistributed Balances"]["Node Distributor Contracts"]["Node Share"]["_val"] = tmp[0][ "node_share"] From dec70360c7fffa7913cf4aa3a906e847a730b955 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:45:15 +0000 Subject: [PATCH 050/279] fix async db call --- rocketwatch/plugins/event_core/event_core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 134dabe1..1e873be4 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -172,7 +172,7 @@ async def gather_new_events(self) -> None: await self.db.event_queue.insert_many(events) self.head_block = target_block - self.db.last_checked_block.replace_one( 
+ await self.db.last_checked_block.replace_one( {"_id": "events"}, {"_id": "events", "block": to_block}, upsert=True From 55b1f5f2c18a9dabcb315b77fd94a70af91a5512 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:45:26 +0000 Subject: [PATCH 051/279] add withdrawn beacon state --- rocketwatch/plugins/beacon_states/beacon_states.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/beacon_states/beacon_states.py b/rocketwatch/plugins/beacon_states/beacon_states.py index f3ca35ab..e2323c81 100644 --- a/rocketwatch/plugins/beacon_states/beacon_states.py +++ b/rocketwatch/plugins/beacon_states/beacon_states.py @@ -31,7 +31,8 @@ async def beacon_states(self, ctx: Context): "pending": {}, "active" : {}, "exiting": {}, - "exited" : {} + "exited" : {}, + "withdrawn": {} } exiting_valis = [] for minipool in res: @@ -48,11 +49,17 @@ async def beacon_states(self, ctx: Context): case "active_slashed": data["exiting"]["slashed"] = data["exiting"].get("slashed", 0) + 1 exiting_valis.append(minipool) - case "exited_unslashed" | "exited_slashed" | "withdrawal_possible" | "withdrawal_done": + case "exited_unslashed" | "exited_slashed" | "withdrawal_possible": if minipool["beacon"]["slashed"]: data["exited"]["slashed"] = data["exited"].get("slashed", 0) + 1 else: data["exited"]["voluntarily"] = data["exited"].get("voluntarily", 0) + 1 + exiting_valis.append(minipool) + case "withdrawal_done": + if minipool["beacon"]["slashed"]: + data["withdrawn"]["slashed"] = data["withdrawn"].get("slashed", 0) + 1 + else: + data["withdrawn"]["unslashed"] = data["withdrawn"].get("unslashed", 0) + 1 case _: logging.warning(f"Unknown status {minipool['status']}") @@ -78,7 +85,7 @@ async def beacon_states(self, ctx: Context): node_operators.sort(key=lambda x: x[1], reverse=True) description += "" # use el_explorer_url - description += ", ".join([f"{el_explorer_url(w3.toChecksumAddress(v))} ({c})" for v, c 
in node_operators[:16]]) + description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in node_operators[:16]]) # append ",…" if more than 16 if len(node_operators) > 16: description += ",…" From cc6b485a7bc2a9e6621ed043a5a4c8083c9d2385 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:45:43 +0000 Subject: [PATCH 052/279] fix incorrect event detection --- rocketwatch/plugins/events/events.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index dd09ccf3..86985736 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -386,7 +386,7 @@ def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: if not any([ rp.call("rocketMinipoolManager.getMinipoolExists", receipt.to), rp.call("rocketMinipoolManager.getMinipoolExists", event.address), - rp.get_name_by_address(receipt.to), + rp.get_name_by_address(receipt.to) not in [None, "multicall3"], rp.get_name_by_address(event.address) ]): # some random contract we don't care about @@ -429,8 +429,7 @@ def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: return self.handle_event(event_name, event) - @staticmethod - def handle_event(event_name: str, event: aDict) -> Optional[Embed]: + def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: args = aDict(event.args) if "negative_rETH_ratio_update_event" in event_name: @@ -502,6 +501,8 @@ def share_repr(percentage: float) -> str: args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) elif event_name.startswith("rocksolid_withdraw"): + assets = rp.call("RockSolidVault.convertToAssets", args.shares, args.requestId, block=event.blockNumber) + args.assets = solidity.to_float(assets) args.shares = solidity.to_float(args.shares) elif event_name == 
"cs_max_validator_change_event": args.oldLimit, args.newLimit = args.oldValue, args.newValue From b0d03775fa4fef5c42587721a55af3fa6a3a1b1c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:45:52 +0000 Subject: [PATCH 053/279] fix import --- rocketwatch/plugins/node_task/node_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index c0d8964b..3f72cff5 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -4,7 +4,7 @@ import pymongo from multicall import Call from cronitor import Monitor -from pymongo import AsyncMongoClient UpdateOne, UpdateMany +from pymongo import AsyncMongoClient, UpdateOne, UpdateMany from discord.ext import tasks, commands from discord.utils import as_chunks From 9e21ac3c77d5c05f23f8c0db629c6fa24f9e09a3 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:46:05 +0000 Subject: [PATCH 054/279] remove advanced tx link --- rocketwatch/utils/readable.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 8271ee91..4348fa47 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -62,10 +62,7 @@ def cl_explorer_url(target, name=None): def advanced_tnx_url(tx_hash): - chain = cfg["rocketpool.chain"] - if chain not in ["mainnet"]: - return "" - return f"[[A]](https://ethtx.info/{chain}/{tx_hash})" + return "" def render_tree_legacy(data: dict, name: str) -> str: From 6fd0b3de9f9c3ce2d7fa8e53633eb779cbde6fa6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:46:21 +0000 Subject: [PATCH 055/279] add more trusted domains --- .gitignore | 5 ++++- rocketwatch/plugins/detect_scam/detect_scam.py | 5 ++++- 2 files changed, 8 insertions(+), 2 
deletions(-) diff --git a/.gitignore b/.gitignore index 510dae18..db8e76ce 100644 --- a/.gitignore +++ b/.gitignore @@ -119,4 +119,7 @@ dmypy.json # state state.db */main.cfg -mongodb/ \ No newline at end of file +mongodb/ + +# helper scripts +*.sh \ No newline at end of file diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index c68c2bf6..46de1a29 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -319,7 +319,10 @@ def _discord_invite(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) if match := self.invite_pattern.search(txt): link = match.group(0) - trusted_domains = ["youtu.be", "youtube.com", "tenor.com", "giphy.com", "imgur.com"] + trusted_domains = [ + "youtu.be", "youtube.com", "tenor.com", "giphy.com", + "imgur.com", "bluesky.app" + ] if not any(domain in link for domain in trusted_domains): return "Invite to external server" return None From 480c9cf186fa0385799fc3b8c6077da814877f77 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 24 Oct 2025 22:46:32 +0000 Subject: [PATCH 056/279] add RockSolid milestone --- rocketwatch/main.cfg.sample | 1 + rocketwatch/plugins/milestones/milestones.json | 10 ++++++++++ rocketwatch/strings/embeds.en.json | 14 +++++++++----- rocketwatch/utils/embeds.py | 7 +++---- 4 files changed, 23 insertions(+), 9 deletions(-) diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index add70996..f02688e1 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -62,6 +62,7 @@ rocketpool: { BalancerVault: "0xBA12222222228d8Ba445958a75a0704d566BF2C8" UniV3_USDC_ETH: "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640" UniV3_rETH_ETH: "0x553e9C493678d8606d6a5ba284643dB2110Df823" + RockSolidVault: "0x936faCdf10c8c36294e7b9d28345255539d81bc7" } } modules: { diff --git a/rocketwatch/plugins/milestones/milestones.json 
b/rocketwatch/plugins/milestones/milestones.json index 91a648d7..d9eb4206 100644 --- a/rocketwatch/plugins/milestones/milestones.json +++ b/rocketwatch/plugins/milestones/milestones.json @@ -56,5 +56,15 @@ "formatter": "", "min": 50, "step_size": 100 + }, + { + "id": "milestone_rocksolid_tvl", + "function": "call", + "args": [ + "RockSolidVault.totalAssets" + ], + "formatter": "to_float", + "min": 1000, + "step_size": 1000 } ] diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 4185b283..15e80726 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -198,12 +198,16 @@ }, "milestone_registered_nodes": { "title": ":tada: Milestone Reached", - "description": "%{result_value} Nodes have been registered!" + "description": "%{result_value} nodes have been registered!" }, "milestone_rpl_swapped": { "title": ":tada: Milestone Reached", "description": "%{result_value}% of all RPL has been exchanged for the new version!" }, + "milestone_rocksolid_tvl": { + "title": ":tada: Milestone Reached", + "description": "%{result_value} rETH deposited into the RockSolid vault!" + }, "bootstrap_odao_member": { "title": ":satellite_orbital: oDAO Bootstrap Mode: Member Added", "description": "%{nodeAddress} added as a new oDAO member!" @@ -580,12 +584,12 @@ }, "rocksolid_deposit_event": { "title": "<:rocksolid:1425091714267480158> RockSolid rETH Deposit", - "description": "**%{assets} rETH** deposited for **%{shares} rock.rETH**!", - "description_small": "<:rocksolid:1425091714267480158> %{sender} deposited **%{assets} rETH** for **%{shares} rock.rETH**!" + "description": "**%{assets} rETH** deposited into the RockSolid vault!", + "description_small": "<:rocksolid:1425091714267480158> %{sender} deposited **%{assets} rETH** into the RockSolid vault!" 
}, "rocksolid_withdrawal_event": { "title": "<:rocksolid:1425091714267480158> RockSolid rETH Withdrawal", - "description": "New redemption request for **%{shares} rock.rETH**!", - "description_small": "<:rocksolid:1425091714267480158> %{sender} requested redemption of **%{shares} rock.rETH**!" + "description": "New withdrawal request for **%{assets} rETH** from the RockSolid vault!", + "description_small": "<:rocksolid:1425091714267480158> %{sender} withdrawing **%{assets} rETH** from the RockSolid vault!" } } diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 9bf850dd..ee94a823 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -278,9 +278,9 @@ def assemble(args) -> Embed: case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": use_large = (args["assets"] >= 16 / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice"))) case "rocksolid_deposit_event": - use_large = args["assets"] >= 100 + use_large = args["assets"] >= 50 case "rocksolid_withdrawal_event": - use_large = args["shares"] >= 100 + use_large = args["shares"] >= 50 case _: use_large = (amount >= 100) @@ -440,8 +440,7 @@ def assemble(args) -> Embed: # show transaction hash if possible if "transactionHash" in args: content = f"{args.transactionHash}{advanced_tnx_url(args.transactionHash_raw)}" - e.add_field(name="Transaction Hash", - value=content) + e.add_field(name="Transaction Hash", value=content) # show sender address if senders := [value for key, value in args.items() if key.lower() in ["sender", "from"]]: From e081553f35892543a8d5e0f1d445b756c22b1c03 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 25 Oct 2025 09:20:30 +0000 Subject: [PATCH 057/279] /rocksolid command --- rocketwatch/plugins/apr/apr.py | 4 +- rocketwatch/plugins/event_core/event_core.py | 2 +- .../pinned_messages/pinned_messages.py | 2 +- rocketwatch/plugins/rocksolid/rocksolid.py | 143 ++++++++++++++++++ rocketwatch/rocketwatch.py | 2 +- 
rocketwatch/strings/embeds.en.json | 2 +- rocketwatch/utils/rocketpool.py | 5 +- 7 files changed, 153 insertions(+), 7 deletions(-) create mode 100644 rocketwatch/plugins/rocksolid/rocksolid.py diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 240999af..68c9e9b1 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -245,11 +245,11 @@ async def reth_apr(self, ctx: Context): e.set_image(url="attachment://reth_apr.png") - e.add_field(name="Current Average Effective Commission:", + e.add_field(name="Current Average Effective Commission", value=f"{node_fee:.2%} (Observed pETH Share: {peth_share:.2%})", inline=False) - e.add_field(name="Effectiveness:", + e.add_field(name="Effectiveness", value=f"{y_effectiveness[-1]:.2%}", inline=False) await ctx.send(embed=e, file=File(img, "reth_apr.png")) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 1e873be4..ffd7822b 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -233,7 +233,7 @@ def try_load(_entry: dict, _key: str) -> Optional[Any]: async def update_status_messages(self) -> None: configs = cfg.get("events.status_message", {}) - for state_message in (await self.db.state_messages.find().to_list(None)): + for state_message in (await self.db.state_messages.find().to_list()): if state_message["_id"] not in configs: log.debug(f"No config for state message ID {state_message['_id']}, removing message") await self._replace_or_add_status("", None, state_message) diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 664ed9d9..d413422b 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -31,7 +31,7 @@ async def on_ready(self): @tasks.loop(seconds=60.0) async def run_loop(self): # get all pinned 
messages in db - messages = await self.db.pinned_messages.find().to_list(length=None) + messages = await self.db.pinned_messages.find().to_list() for message in messages: # if it's older than 6 hours and not disabled, mark as disabled if message["created_at"] + timedelta(hours=6) < datetime.utcnow() and not message["disabled"]: diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py new file mode 100644 index 00000000..c56e5ccb --- /dev/null +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -0,0 +1,143 @@ +import logging +from datetime import datetime, timedelta +from io import BytesIO + +import matplotlib.pyplot as plt +from matplotlib.dates import DateFormatter +from discord import File +from discord import Interaction +from discord.app_commands import command +from discord.ext.commands import Cog +from pymongo import AsyncMongoClient, InsertOne + +from rocketwatch import RocketWatch +from utils import solidity +from utils.cfg import cfg +from utils.shared_w3 import w3 +from utils.rocketpool import rp +from utils.visibility import is_hidden_weak +from utils.block_time import block_to_ts, ts_to_block +from utils.embeds import Embed, el_explorer_url +from utils.event_logs import get_logs + + +cog_id = "rocksolid" +log = logging.getLogger(cog_id) +log.setLevel(cfg["log_level"]) + + +class RockSolid(Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + self.deployment_block = 23237366 + + async def _fetch_asset_updates(self) -> list[tuple[int, float]]: + vault_contract = rp.get_contract_by_name("RockSolidVault") + + if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): + last_checked_block = db_entry["block"] + else: + last_checked_block = 23237366 # contract deployment + + b_from = last_checked_block + 1 + b_to = w3.eth.get_block_number() + + updates = [] + + async for doc in self.db.rocksolid.find({}): + 
updates.append((doc["time"], doc["assets"])) + + payload = [] + for event_log in get_logs(vault_contract.events.TotalAssetsUpdated, b_from, b_to): + ts = block_to_ts(event_log.blockNumber) + assets = solidity.to_float(event_log.args.totalAssets) + updates.append((ts, assets)) + payload.append(InsertOne({"time": ts, "assets": assets})) + + if payload: + await self.db.rocksolid.bulk_write(payload) + + await self.db.last_checked_block.replace_one( + {"_id": cog_id}, + {"_id": cog_id, "block": b_to}, + upsert=True + ) + + return updates + + @command() + async def rocksolid(self, interaction: Interaction): + """ + Summary of RockSolid rETH vault stats. + """ + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + + current_block = w3.eth.get_block_number() + now = block_to_ts(current_block) + apy_7d_block = ts_to_block(now - timedelta(days=7).total_seconds()) + apy_30d_block = ts_to_block(now - timedelta(days=30).total_seconds()) + apy_90d_block = ts_to_block(now - timedelta(days=90).total_seconds()) + + def get_eth_rate(block_number: int) -> int: + block_number = max(block_number, self.deployment_block) + reth_value = rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) + return rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) + + current_eth_rate = get_eth_rate(current_block) + apy_7d = (current_eth_rate / get_eth_rate(apy_7d_block) - 1) * (365 / 7) * 100 + apy_30d = (current_eth_rate / get_eth_rate(apy_30d_block) - 1) * (365 / 30) * 100 + apy_90d = (current_eth_rate / get_eth_rate(apy_90d_block) - 1) * (365 / 90) * 100 + + tvl_reth = solidity.to_float(rp.call("RockSolidVault.totalAssets")) + tvl_rock_reth = solidity.to_float(rp.call("RockSolidVault.totalSupply")) + + asset_updates: list[tuple[int, float]] = await self._fetch_asset_updates() + current_date = datetime.fromtimestamp(asset_updates[0][0]).date() - timedelta(days=1) + current_assets = 0.0 + + x, y = [], [] + for ts, assets in asset_updates: + 
update_date = datetime.fromtimestamp(ts).date() + while current_date < update_date: + x.append(current_date) + y.append(current_assets) + current_date += timedelta(days=1) + + current_date = update_date + current_assets = assets + + x.append(current_date) + y.append(current_assets) + + fig, ax = plt.subplots(figsize=(6, 2)) + ax.grid() + + ax.plot(x, y, color="#50b1f7") + ax.xaxis.set_major_formatter(DateFormatter("%b %d")) + ax.set_ylabel("rETH deposited") + ax.set_xlim((x[0], x[-1])) + ax.set_ylim((y[0], y[-1] * 1.01)) + + img = BytesIO() + fig.tight_layout() + fig.savefig(img, format='png') + img.seek(0) + plt.clf() + + ca_reth = rp.get_address_by_name("rocketTokenRETH") + ca_rock_reth = rp.get_address_by_name("RockSolidVault") + + embed = Embed(title="<:rocksolid:1425091714267480158> RockSolid rETH Vault") + embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if (apy_7d_block >= self.deployment_block) else "-") + embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if (apy_30d_block >= self.deployment_block) else "-") + embed.add_field(name="90d APY", value=f"{apy_90d:.2f}%" if (apy_90d_block >= self.deployment_block) else "-") + embed.add_field(name="TVL", value=f"`{tvl_reth:,.2f}` {el_explorer_url(ca_reth, name=' rETH')}") + embed.add_field(name="Supply", value=f"`{tvl_rock_reth:,.2f}` {el_explorer_url(ca_rock_reth, name=' rock.rETH')}") + embed.set_image(url="attachment://rocksolid_tvl.png") + + await interaction.followup.send(embed=embed, file=File(img, "rocksolid_tvl.png")) + + +async def setup(bot): + await bot.add_cog(RockSolid(bot)) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index abd7f857..08740787 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -135,7 +135,7 @@ async def report_error(self, exception: Exception, ctx: Optional[Context] = None err_description += ( f"\n```" f"{ctx.command.name = }\n" - f"ctx.command.params = {getattr(ctx.command, 'params')}\n" + f"ctx.command.params = 
{getattr(ctx.command, 'params', '')}\n" f"{ctx.channel = }\n" f"{ctx.author = }" f"```" diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 15e80726..8e8514de 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -590,6 +590,6 @@ "rocksolid_withdrawal_event": { "title": "<:rocksolid:1425091714267480158> RockSolid rETH Withdrawal", "description": "New withdrawal request for **%{assets} rETH** from the RockSolid vault!", - "description_small": "<:rocksolid:1425091714267480158> %{sender} withdrawing **%{assets} rETH** from the RockSolid vault!" + "description_small": "<:rocksolid:1425091714267480158> %{sender} requested a withdrawal for **%{assets} rETH** from the RockSolid vault!" } } diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 933c7e88..fe62325f 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -3,6 +3,7 @@ from pathlib import Path from bidict import bidict +from eth_typing import BlockIdentifier from cachetools import cached, FIFOCache from cachetools.func import ttl_cache from multicall import Call, Multicall @@ -195,9 +196,11 @@ def get_function(self, path, *args, historical=False, address=None, mainnet=Fals if not address: address = self.get_address_by_name(name) contract = self.assemble_contract(name, address, historical, mainnet) + if "(" in path and ")" in path: + return contract.get_function_by_signature(function)(*args) return contract.functions[function](*args) - def call(self, path, *args, block="latest", address=None, mainnet=False): + def call(self, path, *args, block: BlockIdentifier = "latest", address=None, mainnet=False): log.debug(f"Calling {path} (block={block})") return self.get_function(path, *args, historical=block != "latest", address=address, mainnet=mainnet).call(block_identifier=block) From d37ba628e8f9feffe83915a691ae52b5434da4d4 Mon Sep 17 00:00:00 2001 From: haloooloolo 
<03_sharks.guises@icloud.com> Date: Sat, 25 Oct 2025 16:16:46 +0000 Subject: [PATCH 058/279] tweak y axis label --- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index c56e5ccb..ffcdc588 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -115,7 +115,7 @@ def get_eth_rate(block_number: int) -> int: ax.plot(x, y, color="#50b1f7") ax.xaxis.set_major_formatter(DateFormatter("%b %d")) - ax.set_ylabel("rETH deposited") + ax.set_ylabel("AUM (rETH)") ax.set_xlim((x[0], x[-1])) ax.set_ylim((y[0], y[-1] * 1.01)) From 9676e068493badcef5977aa6981f15209478529c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 25 Oct 2025 16:23:48 +0000 Subject: [PATCH 059/279] use self.deployment_block --- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index ffcdc588..88d30e09 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -38,7 +38,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): last_checked_block = db_entry["block"] else: - last_checked_block = 23237366 # contract deployment + last_checked_block = self.deployment_block b_from = last_checked_block + 1 b_to = w3.eth.get_block_number() From 3c07675a56f9fee40360edfc4e81e7d56b8559ef Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 24 Nov 2025 17:42:10 +0000 Subject: [PATCH 060/279] allowlist some users for support template --- rocketwatch/main.cfg.sample | 1 + rocketwatch/plugins/support_utils/support_utils.py | 1 + 2 files changed, 2 insertions(+) diff --git 
a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index f02688e1..6028c787 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -34,6 +34,7 @@ mongodb: { rocketpool: { chain: "mainnet" support: { + user_ids: [] role_ids: [] server_id: -1 channel_id: -1 diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 3fd432f9..f5e7e8f7 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -132,6 +132,7 @@ async def on_submit(self, interaction: Interaction) -> None: def has_perms(interaction: Interaction): return any([ + interaction.user.id in cfg["rocketpool.support.user_ids"], any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), cfg["discord.owner.user_id"] == interaction.user.id, interaction.user.guild_permissions.moderate_members and interaction.guild.id == cfg["rocketpool.support.server_id"] From 461a67ab96ce9010e66a280cad47a6b9a5382f08 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 24 Nov 2025 17:42:21 +0000 Subject: [PATCH 061/279] more minipool states --- .../plugins/beacon_states/beacon_states.py | 43 ++++++++++--------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/rocketwatch/plugins/beacon_states/beacon_states.py b/rocketwatch/plugins/beacon_states/beacon_states.py index e2323c81..f35adae2 100644 --- a/rocketwatch/plugins/beacon_states/beacon_states.py +++ b/rocketwatch/plugins/beacon_states/beacon_states.py @@ -32,7 +32,8 @@ async def beacon_states(self, ctx: Context): "active" : {}, "exiting": {}, "exited" : {}, - "withdrawn": {} + "withdrawn": {}, + "closed": {} } exiting_valis = [] for minipool in res: @@ -50,16 +51,16 @@ async def beacon_states(self, ctx: Context): data["exiting"]["slashed"] = data["exiting"].get("slashed", 0) + 1 exiting_valis.append(minipool) case "exited_unslashed" | "exited_slashed" | 
"withdrawal_possible": - if minipool["beacon"]["slashed"]: - data["exited"]["slashed"] = data["exited"].get("slashed", 0) + 1 - else: - data["exited"]["voluntarily"] = data["exited"].get("voluntarily", 0) + 1 + status_2 = "slashed" if minipool["beacon"]["slashed"] else "voluntarily" + data["exited"][status_2] = data["exited"].get(status_2, 0) + 1 exiting_valis.append(minipool) case "withdrawal_done": - if minipool["beacon"]["slashed"]: - data["withdrawn"]["slashed"] = data["withdrawn"].get("slashed", 0) + 1 + status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" + if minipool["execution_balance"] > 0: + data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 + exiting_valis.append(minipool) else: - data["withdrawn"]["unslashed"] = data["withdrawn"].get("unslashed", 0) + 1 + data["closed"][status_2] = data["closed"].get(status_2, 0) + 1 case _: logging.warning(f"Unknown status {minipool['status']}") @@ -67,14 +68,17 @@ async def beacon_states(self, ctx: Context): description = "```\n" # render dict as a tree like structure description += render_tree_legacy(data, "Minipool States") - if 0 < len(exiting_valis) <= 24: + + if len(exiting_valis) == 0: + description += "```" + elif len(exiting_valis) < 24: description += "\n\n--- Exiting Minipools ---\n\n" # array of validator attribute, sorted by index - valis = sorted([v["validator_index"] for v in exiting_valis], key=lambda x: x) + valis = sorted([v["validator_index"] for v in exiting_valis]) description += ", ".join([str(v) for v in valis]) description += "```" - elif len(exiting_valis) > 24: - description += "```\n**Exiting Node Operators:**\n" + else: + description += "```\n**Exiting Node Operators**\n" node_operators = {} # dedupe, add count of validators with matching node operator for v in exiting_valis: @@ -83,14 +87,13 @@ async def beacon_states(self, ctx: Context): node_operators = list(node_operators.items()) # sort by count node_operators.sort(key=lambda x: x[1], reverse=True) - 
description += "" - # use el_explorer_url - description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in node_operators[:16]]) - # append ",…" if more than 16 - if len(node_operators) > 16: - description += ",…" - else: - description += "```" + # create description + max_list_length = 16 + description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in node_operators[:max_list_length]]) + if len(node_operators) > max_list_length: + remaining_no = len(node_operators) - max_list_length + remaining_validators = sum([c for _, c in node_operators[max_list_length:]]) + description += f", and {remaining_no} more ({remaining_validators})" embed.description = description await ctx.send(embed=embed) From e78c1c72e1757ef1ac8ed70db13238c0f36b8b67 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 24 Nov 2025 17:42:57 +0000 Subject: [PATCH 062/279] spelling / phrasing tweaks --- rocketwatch/plugins/milestones/milestones.json | 4 ++-- rocketwatch/strings/addresses.en.json | 8 +++++--- rocketwatch/strings/embeds.en.json | 4 ++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/rocketwatch/plugins/milestones/milestones.json b/rocketwatch/plugins/milestones/milestones.json index d9eb4206..0e26b32a 100644 --- a/rocketwatch/plugins/milestones/milestones.json +++ b/rocketwatch/plugins/milestones/milestones.json @@ -64,7 +64,7 @@ "RockSolidVault.totalAssets" ], "formatter": "to_float", - "min": 1000, - "step_size": 1000 + "min": 0, + "step_size": 5000 } ] diff --git a/rocketwatch/strings/addresses.en.json b/rocketwatch/strings/addresses.en.json index ce854020..8a96d280 100644 --- a/rocketwatch/strings/addresses.en.json +++ b/rocketwatch/strings/addresses.en.json @@ -4,14 +4,14 @@ "0xF0138d2e4037957D7b37De312a16a88A7f83A32a": "🛠️Invis", "0x75Cf8e1F8F4fbF4C7BB216E450BCff5F51Ab3E5A": "🛠️Invis", "0x701F4dcEAD1049FA01F321d49F6dca525cF4A5A5": "MEEK", - 
"0x17Fa597cEc16Ab63A7ca00Fb351eb4B29Ffa6f46": "thomas", + "0x17Fa597cEc16Ab63A7ca00Fb351eb4B29Ffa6f46": "Thomas", "0x78072BA5f77d01B3f5B1098df73176933da02A7A": "markobarko", "0x5e624FAEDc7AA381b574c3C2fF1731677Dd2ee1d": "jamescarnley", "0xb8ed9ea221bf33d37360A76DDD52bA7b1E66AA5C": "Lovinall #1", "0xca317A4ecCbe0Dd5832dE2A7407e3c03F88b2CdD": "Lovinall #2", "0x64627611655C8CdcaFaE7607589b4483a1578f4A": "Darcius", "0x33043c521E9c3e80E0c05A2c25f2e894FefC0328": "cjrtp", - "0xc942B5aA63A3410a13358a7a3aEdF33d9e9D3AC3": "langers", + "0xc942B5aA63A3410a13358a7a3aEdF33d9e9D3AC3": "Langers", "0x8630eE161EF00dE8E70F19Bf5fE5a06046898990": "Marceau.eth #2", "0x01A2a10ed806d4e65Ad92c2c6b10bC4D5F37001e": "onethousand.eth", "0x75C8F18e401113167A43bB21556cc132BF8C7ca9": "onethousand.eth", @@ -86,5 +86,7 @@ "0x3666f603Cc164936C1b87e207F36BEBa4AC5f18a": "Hop: L1 USDC bridge", "0x076732017b95A98A618BC9eEc3523A0058366807": "Cakepie Reward Distributor", "0xfEb352930cA196a80B708CDD5dcb4eCA94805daB": "Paladin V2.1 QuestBoard veBAL", - "0x2A906f92B0378Bb19a3619E2751b1e0b8cab6B29": "Constellation Supernode" + "0x2A906f92B0378Bb19a3619E2751b1e0b8cab6B29": "Constellation Supernode", + "0xFD857D3cFcb942039388FBd44c18163f91552b35": "Dev Wallet", + "0x89Af09B5fA88B8989BA5a8960982cCCCA0BEa6F0": "Core Team" } diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 8e8514de..374c7afa 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -177,7 +177,7 @@ }, "rpl_migration_event": { "title": ":arrows_counterclockwise: RPL Migration", - "description": "%{from} migrated **%{amount} RPL V1** to the new token contract!", + "description": "%{from} migrated **%{amount} RPL v1** to the new token contract!", "description_small": ":arrows_counterclockwise: %{from} migrated **%{amount} RPL**!" 
}, "milestone_rpl_stake": { @@ -323,7 +323,7 @@ }, "pdao_spend_treasury": { "title": ":bank: DAO Treasury Spend", - "description": "**%{amount} RPL** from treasury sent to %{to}!" + "description": "**%{amount} RPL** from treasury sent to %{recipientAddress}!" }, "pdao_spend_treasury_recurring_new": { "title": ":bank: DAO Treasury: New Recurring Spend", From 518116f69fccdf18b30344c65ee55fac3f9ce1ab Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 24 Nov 2025 17:43:33 +0000 Subject: [PATCH 063/279] use modern snake case methods in web3py --- rocketwatch/plugins/cow_orders/cow_orders.py | 8 +-- rocketwatch/plugins/debug/debug.py | 15 +++++- rocketwatch/plugins/events/events.py | 14 ++--- .../plugins/minipool_task/minipool_task.py | 12 ++--- .../minipools_upkeep_task.py | 2 +- rocketwatch/plugins/node_task/node_task.py | 2 +- rocketwatch/plugins/queue/queue.py | 2 +- rocketwatch/plugins/random/random.py | 2 +- rocketwatch/plugins/rocksolid/rocksolid.py | 53 +++++++++++-------- rocketwatch/plugins/rpl/rpl.py | 2 +- .../plugins/transactions/transactions.py | 4 +- rocketwatch/utils/embeds.py | 6 +-- rocketwatch/utils/rocketpool.py | 6 +-- 13 files changed, 73 insertions(+), 55 deletions(-) diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 6bf00fbd..7a6f8e48 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -149,7 +149,7 @@ def check_for_new_events(self): data = aDict({}) data["cow_uid"] = order["uid"] - data["cow_owner"] = w3.toChecksumAddress(order["owner"]) + data["cow_owner"] = w3.to_checksum_address(order["owner"]) decimals = 18 # base the event_name depending on if its buying or selling RPL if order["sellToken"] in self.tokens: @@ -159,7 +159,7 @@ def check_for_new_events(self): data["ratio"] = int(order["sellAmount"]) / int(order["buyAmount"]) # store rpl and other token amount data["ourAmount"] = 
solidity.to_float(int(order["sellAmount"])) - s = rp.assemble_contract(name="ERC20", address=w3.toChecksumAddress(order["buyToken"])) + s = rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) try: decimals = s.functions.decimals().call() except: @@ -170,7 +170,7 @@ def check_for_new_events(self): data["event_name"] = f"cow_order_buy_{token}_found" # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["buyAmount"])) - s = rp.assemble_contract(name="ERC20", address=w3.toChecksumAddress(order["sellToken"])) + s = rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) try: decimals = s.functions.decimals().call() except: @@ -182,7 +182,7 @@ def check_for_new_events(self): data["otherToken"] = s.functions.symbol().call() except: data["otherToken"] = "UNKWN" - if s.address == w3.toChecksumAddress("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"): + if s.address == w3.to_checksum_address("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"): data["otherToken"] = "ETH" data["deadline"] = int(order["validTo"]) # if the rpl value in usd is less than 25k, ignore it diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 319bdd15..35e2c028 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -112,6 +112,19 @@ async def delete_msg(self, interaction: Interaction, message_url: str): msg = await channel.fetch_message(int(message_id)) await msg.delete() await interaction.followup.send(content="Done") + + @command() + @guilds(cfg["discord.owner.server_id"]) + @is_owner() + async def edit_embed(self, interaction: Interaction, message_url: str, new_description: str): + await interaction.response.defer(ephemeral=True) + channel_id, message_id = message_url.split("/")[-2:] + channel = await self.bot.get_or_fetch_channel(int(channel_id)) + msg = await channel.fetch_message(int(message_id)) + embed = msg.embeds[0] + embed.description 
= new_description + await msg.edit(embed=embed) + await interaction.followup.send(content="Done") @command() @guilds(cfg["discord.owner.server_id"]) @@ -390,7 +403,7 @@ async def call( args = json.loads(json_args) if not isinstance(args, list): args = [args] - v = rp.call(function, *args, block=block, address=w3.toChecksumAddress(address) if address else None) + v = rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) except Exception as err: await interaction.followup.send(content=f"Exception: ```{repr(err)}```") return diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 86985736..9a82203d 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -408,7 +408,7 @@ def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: with warnings.catch_warnings(): warnings.simplefilter("ignore") deposit_contract = rp.get_contract_by_name("casperDeposit") - processed_logs = deposit_contract.events.DepositEvent().processReceipt(receipt) + processed_logs = deposit_contract.events.DepositEvent().process_receipt(receipt) # attempt to retrieve the pubkey if processed_logs: @@ -463,7 +463,7 @@ def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3.toChecksumAddress(value_raw) + value = w3.to_checksum_address(value_raw) case _: value = "???" 
description_parts.append( @@ -538,8 +538,8 @@ def share_repr(percentage: float) -> str: rpl = rp.get_address_by_name("rocketTokenRPL") if args.signerToken != rpl and args.senderToken != rpl: return None - args.seller = w3.toChecksumAddress(f"0x{event.topics[2][-40:]}") - args.buyer = w3.toChecksumAddress(f"0x{event.topics[3][-40:]}") + args.seller = w3.to_checksum_address(f"0x{event.topics[2][-40:]}") + args.buyer = w3.to_checksum_address(f"0x{event.topics[3][-40:]}") # token names s = rp.assemble_contract(name="ERC20", address=args.signerToken) args.sellToken = s.functions.symbol().call() @@ -738,7 +738,7 @@ def share_repr(percentage: float) -> str: ee = rp.get_contract_by_name("rocketNodeDeposit").events.DepositReceived() with warnings.catch_warnings(): warnings.simplefilter("ignore") - processed_logs = ee.processReceipt(receipt) + processed_logs = ee.process_receipt(receipt) for deposit_event in processed_logs: # needs to be within 5 before the event if event.logIndex - 6 < deposit_event.logIndex < event.logIndex: @@ -749,9 +749,9 @@ def share_repr(percentage: float) -> str: e = rp.get_contract_by_name("rocketVault").events.EtherWithdrawn() with warnings.catch_warnings(): warnings.simplefilter("ignore") - processed_logs = e.processReceipt(receipt) + processed_logs = e.process_receipt(receipt) - deposit_contract = bytes(w3.soliditySha3(["string"], ["rocketNodeDeposit"])) + deposit_contract = bytes(w3.solidity_keccak(["string"], ["rocketNodeDeposit"])) for withdraw_event in processed_logs: # event.logindex 44, withdraw_event.logindex 50, rough distance like that # reminder order is different than the previous example diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py index b09822e2..882dc82c 100644 --- a/rocketwatch/plugins/minipool_task/minipool_task.py +++ b/rocketwatch/plugins/minipool_task/minipool_task.py @@ -59,7 +59,7 @@ def get_untracked_minipools(self) -> set[ChecksumAddress]: 
minipool_addresses = [] for index_batch in as_chunks(range(minipool_count), self.batch_size): minipool_addresses += [ - w3.toChecksumAddress(r.results[0]) for r in rp.multicall.aggregate( + w3.to_checksum_address(r.results[0]) for r in rp.multicall.aggregate( self.minipool_manager.functions.getMinipoolAt(i) for i in index_batch).results ] # remove address that are already in the minipool collection @@ -79,25 +79,25 @@ def get_public_keys(self, addresses): @timerun def get_node_operator(self, addresses): - base_contract = rp.assemble_contract("rocketMinipool", w3.toChecksumAddress(addresses[0])) + base_contract = rp.assemble_contract("rocketMinipool", w3.to_checksum_address(addresses[0])) func = base_contract.functions.getNodeAddress() minipool_contracts = [] for a in addresses: tmp = copy.deepcopy(func) - tmp.address = w3.toChecksumAddress(a) + tmp.address = w3.to_checksum_address(a) minipool_contracts.append(tmp) node_addresses = rp.multicall.aggregate(minipool_contracts) - node_addresses = [w3.toChecksumAddress(r.results[0]) for r in node_addresses.results] + node_addresses = [w3.to_checksum_address(r.results[0]) for r in node_addresses.results] return node_addresses @timerun def get_node_fee(self, addresses): - base_contract = rp.assemble_contract("rocketMinipool", w3.toChecksumAddress(addresses[0])) + base_contract = rp.assemble_contract("rocketMinipool", w3.to_checksum_address(addresses[0])) func = base_contract.functions.getNodeFee() minipool_contracts = [] for a in addresses: tmp = copy.deepcopy(func) - tmp.address = w3.toChecksumAddress(a) + tmp.address = w3.to_checksum_address(a) minipool_contracts.append(tmp) node_fees = rp.multicall.aggregate(minipool_contracts) node_fees = [to_float(r.results[0]) for r in node_fees.results] diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index 7ea6712b..bdb140d8 100644 --- 
a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -116,7 +116,7 @@ async def delegate_stats(self, interaction: Interaction): rp.uncached_get_address_by_name("rocketMinipoolDelegate") for d in distribution_stats: # I HATE THE CHECKSUMMED ADDRESS REQUIREMENTS I HATE THEM SO MUCH - a = w3.toChecksumAddress(d['_id']) + a = w3.to_checksum_address(d['_id']) name = s_hex(a) if a == rp.get_address_by_name("rocketMinipoolDelegate"): name += " (Latest)" diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 3f72cff5..d66502e5 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -96,7 +96,7 @@ async def add_untracked_minipools(self): latest_db = res["_id"] data = {} # return early if we're up to date - if latest_db == latest_rp: + if latest_db >= latest_rp: log.debug("No new minipools") return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 18bd3151..b9ec6e9b 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -49,7 +49,7 @@ def get_minipool_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} minipools in the queue""" queue_contract = rp.get_contract_by_name("addressQueueStorage") - key = w3.soliditySha3(["string"], ["minipools.available.variable"]) + key = w3.solidity_keccak(["string"], ["minipools.available.variable"]) q_len = queue_contract.functions.getLength(key).call() start = max(start, 0) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 48505f1f..e0255133 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -144,7 +144,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): 
try: if ".eth" in address: address = ens.resolve_name(address) - address = w3.toChecksumAddress(address) + address = w3.to_checksum_address(address) except (ValueError, TypeError): e.description = "Invalid address" await ctx.send(embed=e) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 88d30e09..a0ee3ee3 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -1,6 +1,7 @@ import logging from datetime import datetime, timedelta from io import BytesIO +from typing import Optional import matplotlib.pyplot as plt from matplotlib.dates import DateFormatter @@ -29,7 +30,8 @@ class RockSolid(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + self.client = AsyncMongoClient(cfg["mongodb.uri"]) + self.db = self.client.rocketwatch self.deployment_block = 23237366 async def _fetch_asset_updates(self) -> list[tuple[int, float]]: @@ -48,21 +50,22 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: async for doc in self.db.rocksolid.find({}): updates.append((doc["time"], doc["assets"])) - payload = [] + db_operations = [] for event_log in get_logs(vault_contract.events.TotalAssetsUpdated, b_from, b_to): ts = block_to_ts(event_log.blockNumber) assets = solidity.to_float(event_log.args.totalAssets) updates.append((ts, assets)) - payload.append(InsertOne({"time": ts, "assets": assets})) + db_operations.append(InsertOne({"time": ts, "assets": assets})) - if payload: - await self.db.rocksolid.bulk_write(payload) - - await self.db.last_checked_block.replace_one( - {"_id": cog_id}, - {"_id": cog_id, "block": b_to}, - upsert=True - ) + async with self.client.start_session() as session: + async with await session.start_transaction(): + if db_operations: + await self.db.rocksolid.bulk_write(db_operations) + await self.db.last_checked_block.replace_one( + {"_id": cog_id}, + {"_id": cog_id, 
"block": b_to}, + upsert=True + ) return updates @@ -75,20 +78,24 @@ async def rocksolid(self, interaction: Interaction): current_block = w3.eth.get_block_number() now = block_to_ts(current_block) - apy_7d_block = ts_to_block(now - timedelta(days=7).total_seconds()) - apy_30d_block = ts_to_block(now - timedelta(days=30).total_seconds()) - apy_90d_block = ts_to_block(now - timedelta(days=90).total_seconds()) def get_eth_rate(block_number: int) -> int: block_number = max(block_number, self.deployment_block) reth_value = rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) - return rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) + return rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) current_eth_rate = get_eth_rate(current_block) - apy_7d = (current_eth_rate / get_eth_rate(apy_7d_block) - 1) * (365 / 7) * 100 - apy_30d = (current_eth_rate / get_eth_rate(apy_30d_block) - 1) * (365 / 30) * 100 - apy_90d = (current_eth_rate / get_eth_rate(apy_90d_block) - 1) * (365 / 90) * 100 + def get_apy(days: int) -> Optional[float]: + reference_block = ts_to_block(now - timedelta(days=days).total_seconds()) + if reference_block < self.deployment_block: + return None + return (current_eth_rate / get_eth_rate(reference_block) - 1) * (365 / days) * 100 + + apy_7d = get_apy(days=7) + apy_30d = get_apy(days=30) + apy_90d = get_apy(days=90) + tvl_reth = solidity.to_float(rp.call("RockSolidVault.totalAssets")) tvl_rock_reth = solidity.to_float(rp.call("RockSolidVault.totalSupply")) @@ -129,14 +136,14 @@ def get_eth_rate(block_number: int) -> int: ca_rock_reth = rp.get_address_by_name("RockSolidVault") embed = Embed(title="<:rocksolid:1425091714267480158> RockSolid rETH Vault") - embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if (apy_7d_block >= self.deployment_block) else "-") - embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if (apy_30d_block >= self.deployment_block) else "-") - embed.add_field(name="90d APY", 
value=f"{apy_90d:.2f}%" if (apy_90d_block >= self.deployment_block) else "-") + embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if apy_7d else "-") + embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if apy_30d else "-") + embed.add_field(name="90d APY", value=f"{apy_90d:.2f}%" if apy_90d else "-") embed.add_field(name="TVL", value=f"`{tvl_reth:,.2f}` {el_explorer_url(ca_reth, name=' rETH')}") embed.add_field(name="Supply", value=f"`{tvl_rock_reth:,.2f}` {el_explorer_url(ca_rock_reth, name=' rock.rETH')}") - embed.set_image(url="attachment://rocksolid_tvl.png") + embed.set_image(url="attachment://rocksolid-tvl.png") - await interaction.followup.send(embed=embed, file=File(img, "rocksolid_tvl.png")) + await interaction.followup.send(embed=embed, file=File(img, "rocksolid-tvl.png")) async def setup(bot): diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 18afed4d..4b9088b5 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -53,7 +53,7 @@ async def rpl_apr(self, ctx: Context): contract = rp.get_contract_by_name("rocketVault") m = ts_to_block(rp.call("rocketRewardsPool.getClaimIntervalTimeStart")) events = contract.events["TokenDeposited"].getLogs(argument_filters={ - "by": w3.soliditySha3( + "by": w3.solidity_keccak( ["string", "address"], ["rocketMerkleDistributorMainnet", rp.get_address_by_name("rocketTokenRPL")]) }, fromBlock=m - 10000, toBlock=m + 10000) diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index aa32a456..bf80852f 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -159,7 +159,7 @@ def create_embeds(event_name: str, event: aDict) -> list[Embed]: # get the amount of dequeues that happened in this transaction using the event logs with warnings.catch_warnings(): warnings.simplefilter("ignore") - processed_logs = event.processReceipt(receipt) + 
processed_logs = event.process_receipt(receipt) args.count = len(processed_logs) elif "SettingBool" in args.function_name: args.value = bool(args.value) @@ -196,7 +196,7 @@ def share_repr(percentage: float) -> str: value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3.toChecksumAddress(value_raw) + value = w3.to_checksum_address(value_raw) case _: value = "???" description_parts.append( diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index ee94a823..78aa9c2f 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -63,7 +63,7 @@ async def resolve_ens(ctx, node_address): # if it's just an address, look for a reverse record try: - address = w3.toChecksumAddress(node_address) + address = w3.to_checksum_address(node_address) except Exception: await ctx.send("Invalid address") return None, None @@ -101,7 +101,7 @@ def el_explorer_url( if w3.isAddress(target): # sanitize address url = f"{cfg['execution_layer.explorer']}/address/{target}" - target = w3.toChecksumAddress(target) + target = w3.to_checksum_address(target) if prefix != -1 and rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): prefix += ":cup_with_straw:" @@ -205,7 +205,7 @@ def prepare_args(args): if w3.isAddress(arg_value): # get rocketpool related holdings value for this address - address = w3.toChecksumAddress(arg_value) + address = w3.to_checksum_address(arg_value) prefix = get_sea_creature_for_address(address) # handle validators diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index fe62325f..4ef623e5 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -103,7 +103,7 @@ def get_address_by_name(self, name): def uncached_get_address_by_name(self, name, block="latest"): log.debug(f"Retrieving address for {name} Contract") - sha3 = w3.soliditySha3(["string", "string"], ["contract.address", name]) + sha3 = w3.solidity_keccak(["string", "string"], 
["contract.address", name]) address = self.get_contract_by_name("rocketStorage", historical=block != "latest").functions.getAddress(sha3).call(block_identifier=block) if not w3.toInt(hexstr=address): raise NoAddressFound(f"No address found for {name} Contract") @@ -144,7 +144,7 @@ def get_abi_by_name(self, name): def uncached_get_abi_by_name(self, name): log.debug(f"Retrieving abi for {name} Contract") - sha3 = w3.soliditySha3(["string", "string"], ["contract.abi", name]) + sha3 = w3.solidity_keccak(["string", "string"], ["contract.abi", name]) compressed_string = self.get_contract_by_name("rocketStorage").functions.getString(sha3).call() if not compressed_string: raise Exception(f"No abi found for {name} Contract") @@ -196,8 +196,6 @@ def get_function(self, path, *args, historical=False, address=None, mainnet=Fals if not address: address = self.get_address_by_name(name) contract = self.assemble_contract(name, address, historical, mainnet) - if "(" in path and ")" in path: - return contract.get_function_by_signature(function)(*args) return contract.functions[function](*args) def call(self, path, *args, block: BlockIdentifier = "latest", address=None, mainnet=False): From 1ea6a5bed0cd9ff05cb53552017db0aac4ed4cf4 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 13 Dec 2025 00:30:58 +0000 Subject: [PATCH 064/279] going through chaaaaanges --- compose.yaml | 2 +- rocketwatch/plugins/detect_scam/detect_scam.py | 1 + rocketwatch/plugins/node_task/node_task.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/compose.yaml b/compose.yaml index 2edd5fcd..44a63944 100644 --- a/compose.yaml +++ b/compose.yaml @@ -1,6 +1,6 @@ services: rocketwatch: - image: invisiblesymbol/rocketwatch + image: haloooloolo/rocketwatch build: ./rocketwatch volumes: - ./rocketwatch/contracts/rocketpool:/app/contracts/rocketpool diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 
46de1a29..d2534d12 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -52,6 +52,7 @@ class Color: def is_reputable(user: Member) -> bool: return any(( user.id == cfg["discord.owner.user_id"], + user.id in cfg["rocketpool.support.user_ids"], {role.id for role in user.roles} & set(cfg["rocketpool.support.role_ids"]), user.guild_permissions.moderate_members )) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index d66502e5..7ac2ebe6 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -53,7 +53,7 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) - self.batch_size = 1000 + self.batch_size = 50 self.loop.start() async def cog_unload(self): From 698037ab6417d9473fe8dc047a20ba1f29562a12 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 13 Dec 2025 00:47:15 +0000 Subject: [PATCH 065/279] switch to maintained watchtower fork --- compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yaml b/compose.yaml index 44a63944..037f64eb 100644 --- a/compose.yaml +++ b/compose.yaml @@ -31,7 +31,7 @@ services: - "127.0.0.1:27017:27017" watchtower: - image: containrrr/watchtower + image: nickfedor/watchtower volumes: - /var/run/docker.sock:/var/run/docker.sock command: --interval 30 From 589ae1366e9b61ece02fe3baa34718c626105b75 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 19 Dec 2025 19:16:13 +0000 Subject: [PATCH 066/279] smaller bulk writes --- rocketwatch/plugins/node_task/node_task.py | 185 +++++++++--------- .../plugins/support_utils/support_utils.py | 2 +- 2 files changed, 97 insertions(+), 90 deletions(-) diff --git a/rocketwatch/plugins/node_task/node_task.py 
b/rocketwatch/plugins/node_task/node_task.py index 7ac2ebe6..dad19eca 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -59,7 +59,7 @@ def __init__(self, bot: RocketWatch): async def cog_unload(self): self.loop.cancel() - @tasks.loop(minutes=15) + @tasks.loop(minutes=60) async def loop(self): p_id = time.time() self.monitor.ping(state="run", series=p_id) @@ -94,23 +94,24 @@ async def add_untracked_minipools(self): latest_db = 0 if res := await self.db.minipools_new.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] - data = {} # return early if we're up to date if latest_db >= latest_rp: log.debug("No new minipools") return + log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") # batch into self.batch_size minipools at a time, between latest_id and minipool_count for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data |= await rp.multicall2([ + data = await rp.multicall2([ Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) for i in index_batch ]) - log.debug(f"Inserting {len(data)} new minipools into db") - await self.db.minipools_new.insert_many([ - {"_id": i, "address": a} - for i, a in data.items() - ]) + log.debug(f"Inserting {len(data)} new minipools into db") + await self.db.minipools_new.insert_many([ + {"_id": i, "address": a} + for i, a in data.items() + ]) + log.debug("New minipools inserted") @timerun_async @@ -128,8 +129,9 @@ async def add_static_data_to_minipools(self): if not minipool_addresses: log.debug("No minipools need to be updated with static data") return - data = {} + for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): + data = {} res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], require_success=False @@ -139,15 +141,15 @@ async def add_static_data_to_minipools(self): if address not in data: data[address] = {} 
data[address][variable_name] = value - log.debug(f"Updating {len(data)} minipools with static data") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug(f"Updating {len(data)} minipools with static data") + # update minipools in db + bulk = [ + UpdateOne( + {"address": a}, + {"$set": d}, + ) for a, d in data.items() + ] + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static data") @timerun_async @@ -169,7 +171,6 @@ async def update_dynamic_minipool_metadata(self): # get all minipool addresses from db minipool_addresses = await self.db.minipools_new.distinct("address") data = {} - att_count = 0 for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], @@ -180,18 +181,17 @@ async def update_dynamic_minipool_metadata(self): if address not in data: data[address] = {} data[address][variable_name] = value - att_count += 1 - log.debug(f"Updating {att_count} minipool attributes in db") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + # update minipools in db + log.debug(f"Updating {len(res)} minipool attributes in db") + bulk = [ + UpdateOne( + {"address": a}, + {"$set": d} + ) for a, d in data.items() + ] + await self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug("Minipools updated with metadata") - return @timerun async def add_static_deposit_data_to_minipools(self): @@ -225,6 +225,7 @@ async def add_static_deposit_data_to_minipools(self): # map to pairs of 2 prepared_events = [] last_addition_is_creation = False + while events: # get event e = events.pop(0) @@ -237,6 +238,7 @@ async def add_static_deposit_data_to_minipools(self): 
elif e["event"] == "DepositReceived" and last_addition_is_creation: prepared_events[-1].insert(0, e) last_addition_is_creation = e["event"] == "MinipoolCreated" + for e in prepared_events: assert "amount" in e[0]["args"] assert "minipool" in e[1]["args"] @@ -247,21 +249,23 @@ async def add_static_deposit_data_to_minipools(self): continue amount = solidity.to_float(e[0]["args"]["amount"]) data[mp] = {"deposit_amount": amount} - if len(data) == 0: - log.debug("No minipools need to be updated with static deposit data") - return - log.debug(f"Updating {len(data)} minipools with static deposit data") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + + if not data: + log.debug("No minipools need to be updated with static deposit data") + continue + + log.debug(f"Updating {len(data)} minipools with static deposit data") + # update minipools in db + bulk = [ + UpdateOne( + {"address": a}, + {"$set": d}, + ) for a, d in data.items() + ] + await self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug("Minipools updated with static deposit data") - @timerun async def add_static_beacon_data_to_minipools(self): # get all public keys from db where no validator_index is set @@ -270,27 +274,27 @@ async def add_static_beacon_data_to_minipools(self): if not public_keys: log.debug("No minipools need to be updated with static beacon data") return - # we need to do smaller bulks as the pubkey is qutie long and we dont want to make the query url too long - data = {} + + # we need to do smaller bulks as the pubkey is quite long and we dont want to make the query url too long # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] for pubkey_batch in as_chunks(public_keys, self.batch_size): + data = {} # get beacon data for public keys beacon_data = bacon.get_validators("head", ids=pubkey_batch)["data"] # update data dict with results for 
d in beacon_data: data[d["validator"]["pubkey"]] = int(d["index"]) - if not data: - log.debug("No minipools need to be updated with static beacon data") - return - log.debug(f"Updating {len(data)} minipools with static beacon data") - # update minipools in db - bulk = [ - UpdateMany( - {"pubkey": a}, - {"$set": {"validator_index": d}} - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + + log.debug(f"Updating {len(data)} minipools with static beacon data") + # update minipools in db + bulk = [ + UpdateMany( + {"pubkey": a}, + {"$set": {"validator_index": d}} + ) for a, d in data.items() + ] + await self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug("Minipools updated with static beacon data") @timerun @@ -300,9 +304,9 @@ async def update_dynamic_minipool_beacon_metadata(self): validator_indexes = await self.db.minipools_new.distinct("validator_index") # remove None values validator_indexes = [i for i in validator_indexes if i is not None] - data = {} # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] for index_batch in as_chunks(validator_indexes, self.batch_size): + data = {} # get beacon data for public keys beacon_data = bacon.get_validators("head", ids=index_batch)["data"] # update data dict with results @@ -322,15 +326,16 @@ async def update_dynamic_minipool_beacon_metadata(self): "withdrawable_epoch" : int(d["validator"]["withdrawable_epoch"]) if int( d["validator"]["withdrawable_epoch"]) < 2 ** 32 else None, }} - log.debug(f"Updating {len(data)} minipools with dynamic beacon data") - # update minipools in db - bulk = [ - UpdateMany( - {"validator_index": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug(f"Updating {len(data)} minipools with dynamic beacon data") + # update minipools in db + bulk = [ + UpdateMany( + {"validator_index": a}, + {"$set": d} + ) for a, d in data.items() + ] + await 
self.db.minipools_new.bulk_write(bulk, ordered=False) + log.debug("Minipools updated with dynamic beacon data") async def check_indexes(self): @@ -385,8 +390,9 @@ async def add_static_data_to_node_operators(self): if not node_addresses: log.debug("No node operators need to be updated with static data") return - data = {} + for node_batch in as_chunks(node_addresses, self.batch_size // len(lambs)): + data = {} res = await rp.multicall2( [Call(*lamb(a)) for a in node_batch for lamb in lambs], require_success=False @@ -396,15 +402,16 @@ async def add_static_data_to_node_operators(self): if address not in data: data[address] = {} data[address][variable_name] = value - log.debug(f"Updating {len(data)} node operators with static data") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.node_operators_new.bulk_write(bulk, ordered=False) + log.debug(f"Updating {len(data)} node operators with static data") + # update minipools in db + bulk = [ + UpdateOne( + {"address": a}, + {"$set": d}, + ) for a, d in data.items() + ] + await self.db.node_operators_new.bulk_write(bulk, ordered=False) + log.debug("Node operators updated with static data") @timerun_async @@ -446,9 +453,8 @@ async def update_dynamic_node_operator_metadata(self): ] # get all node operators from db, but we only care about the address and the fee_distributor_address nodes = await self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1}).to_list() - data = {} - att_count = 0 for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): + data = {} res = await rp.multicall2( [Call(*lamb(n)) for n in node_batch for lamb in lambs], require_success=False @@ -458,17 +464,18 @@ async def update_dynamic_node_operator_metadata(self): if address not in data: data[address] = {} data[address][variable_name] = value - att_count += 1 - log.debug(f"Updating {att_count} node operator attributes in db") - # update 
minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.node_operators_new.bulk_write(bulk, ordered=False) + log.debug(f"Updating {len(res)} node operator attributes in db") + # update minipools in db + bulk = [ + UpdateOne( + {"address": a}, + {"$set": d} + ) for a, d in data.items() + ] + await self.db.node_operators_new.bulk_write(bulk, ordered=False) + log.debug("Node operators updated with metadata") + async def setup(self): await self.add_cog(NodeTask(self)) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index f5e7e8f7..759d41d2 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -130,7 +130,7 @@ async def on_submit(self, interaction: Interaction) -> None: await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) -def has_perms(interaction: Interaction): +def has_perms(user: Interaction): return any([ interaction.user.id in cfg["rocketpool.support.user_ids"], any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), From 61d0739f9353f0e1f66721a9fda410fd95d9db84 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 19 Dec 2025 21:05:58 +0000 Subject: [PATCH 067/279] fix data initialization --- rocketwatch/plugins/node_task/node_task.py | 23 +++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index dad19eca..735681de 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -170,13 +170,13 @@ async def update_dynamic_minipool_metadata(self): ] # get all minipool addresses from db minipool_addresses = await self.db.minipools_new.distinct("address") - data = {} for minipool_batch in 
as_chunks(minipool_addresses, self.batch_size // len(lambs)): res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], require_success=False ) # update data dict with results + data = {} for (address, variable_name), value in res.items(): if address not in data: data[address] = {} @@ -209,7 +209,6 @@ async def add_static_deposit_data_to_minipools(self): return nd = rp.get_contract_by_name("rocketNodeDeposit") mm = rp.get_contract_by_name("rocketMinipoolManager") - data = {} for minipool_batch in as_chunks(minipools, self.batch_size): # turn status time of first and last minipool into blocks @@ -239,6 +238,7 @@ async def add_static_deposit_data_to_minipools(self): prepared_events[-1].insert(0, e) last_addition_is_creation = e["event"] == "MinipoolCreated" + data = {} for e in prepared_events: assert "amount" in e[0]["args"] assert "minipool" in e[1]["args"] @@ -326,15 +326,16 @@ async def update_dynamic_minipool_beacon_metadata(self): "withdrawable_epoch" : int(d["validator"]["withdrawable_epoch"]) if int( d["validator"]["withdrawable_epoch"]) < 2 ** 32 else None, }} - log.debug(f"Updating {len(data)} minipools with dynamic beacon data") - # update minipools in db - bulk = [ - UpdateMany( - {"validator_index": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + + log.debug(f"Updating {len(data)} minipools with dynamic beacon data") + # update minipools in db + bulk = [ + UpdateMany( + {"validator_index": a}, + {"$set": d} + ) for a, d in data.items() + ] + await self.db.minipools_new.bulk_write(bulk, ordered=False) log.debug("Minipools updated with dynamic beacon data") From 796bd6480720cf97a10f77407d2a5b6c76d5cbf5 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 20 Dec 2025 19:11:23 +0000 Subject: [PATCH 068/279] fix /lottery --- rocketwatch/plugins/lottery/lottery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 9a2aaf5f..5ce2cbef 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -45,7 +45,7 @@ async def load_sync_committee(self, period): sync_period += 1 res = bacon._make_get_request(f"/eth/v1/beacon/states/head/sync_committees?epoch={sync_period * 256}") data = res["data"] - self.db.sync_committee_stats.replace_one({"period": period}, + await self.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, "end_epoch" : (sync_period + 1) * 256, From 7787874b358aa51478e7146b5af00612094ebc8f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 20 Dec 2025 23:43:26 +0000 Subject: [PATCH 069/279] only show delegate stats for active pools --- .../minipools_upkeep_task.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index bdb140d8..b16a2ef6 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -15,7 +15,7 @@ from utils.embeds import Embed, el_explorer_url from utils.readable import s_hex from utils.shared_w3 import w3 -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak from utils.cfg import cfg from utils.rocketpool import rp from utils.time_debug import timerun_async @@ -93,22 +93,23 @@ async def upkeep_minipools(self): @command() async def delegate_stats(self, interaction: Interaction): - await interaction.response.defer(ephemeral=is_hidden(interaction)) - # get stats about delegates + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + # only consider active minipools + minipool_filter = {"beacon.status": {"$in": 
["pending_initialized", "pending_queued", "active_ongoing"]}} # we want to show the distribution of minipools that are using each delegate distribution_stats = await (await self.db.minipools_new.aggregate([ - {"$match": {"effective_delegate": {"$exists": True}}}, + {"$match": minipool_filter}, {"$group": {"_id": "$effective_delegate", "count": {"$sum": 1}}}, {"$sort": {"count": -1}}, ])).to_list() # and the percentage of minipools that are using the useLatestDelegate flag use_latest_delegate_stats = await (await self.db.minipools_new.aggregate([ - {"$match": {"use_latest_delegate": {"$exists": True}}}, + {"$match": minipool_filter}, {"$group": {"_id": "$use_latest_delegate", "count": {"$sum": 1}}}, {"$sort": {"count": -1}}, ])).to_list() e = Embed() - e.title = "Delegate Stats" + e.title = "Delegate Stats (Active Minipools)" desc = "**Effective Delegate Distribution of Minipools:**\n" c_sum = sum(d['count'] for d in distribution_stats) s = "\u00A0" * 4 @@ -120,14 +121,14 @@ async def delegate_stats(self, interaction: Interaction): name = s_hex(a) if a == rp.get_address_by_name("rocketMinipoolDelegate"): name += " (Latest)" - desc += f"{s}{el_explorer_url(a, name)}: {d['count']} ({d['count'] / c_sum * 100:.2f}%)\n" + desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" desc += "\n" desc += "**Minipools configured to always use latest delegate:**\n" c_sum = sum(d['count'] for d in use_latest_delegate_stats) for d in use_latest_delegate_stats: # true = yes, false = no d['_id'] = "Yes" if d['_id'] else "No" - desc += f"{s}**{d['_id']}**: {d['count']} ({d['count'] / c_sum * 100:.2f}%)\n" + desc += f"{s}**{d['_id']}**: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" e.description = desc await interaction.followup.send(embed=e) From 6ff0a9e3f07f3a10f79807a51ad183086e7a4650 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 22 Dec 2025 00:16:36 +0000 Subject: [PATCH 070/279] fix asset conversion 
for RockSolid withdrawals --- rocketwatch/plugins/events/events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 9a82203d..dd346c5e 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -501,7 +501,7 @@ def share_repr(percentage: float) -> str: args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) elif event_name.startswith("rocksolid_withdraw"): - assets = rp.call("RockSolidVault.convertToAssets", args.shares, args.requestId, block=event.blockNumber) + assets = rp.call("RockSolidVault.convertToAssets", args.shares, block=event.blockNumber) args.assets = solidity.to_float(assets) args.shares = solidity.to_float(args.shares) elif event_name == "cs_max_validator_change_event": From d69d2eac3f3440769a5fe2dfb5645e123e75e9d1 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:42:08 +0000 Subject: [PATCH 071/279] miscellaneous changes --- .github/workflows/docker-ci.yml | 4 +- .gitmodules | 2 +- rocketwatch/plugins/about/about.py | 35 ++++---------- rocketwatch/plugins/collateral/collateral.py | 48 +++++++++---------- rocketwatch/plugins/dao/dao.py | 5 +- .../plugins/support_utils/support_utils.py | 2 +- 6 files changed, 36 insertions(+), 60 deletions(-) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 27cc2a4a..3a700033 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -16,7 +16,7 @@ jobs: name: Login to DockerHub uses: docker/login-action@v2 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} + username: haloooloolo password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push @@ -24,7 +24,7 @@ jobs: with: context: "{{defaultContext}}:rocketwatch" push: true - tags: invisiblesymbol/rocketwatch:latest + tags: haloooloolo/rocketwatch:latest no-cache: true platforms: 
linux/amd64 diff --git a/.gitmodules b/.gitmodules index f04a9117..d9ac7a15 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,4 @@ [submodule "rocketwatch/contracts/rocketpool"] path = rocketwatch/contracts/rocketpool url = https://github.com/rocket-pool/rocketpool - branch = houston + branch = v1.4 diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 6d85e7df..f9e2c74f 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -1,5 +1,6 @@ import os import time +import logging from urllib.parse import urlencode import humanize @@ -15,11 +16,13 @@ from utils.cfg import cfg from utils.embeds import Embed from utils.embeds import el_explorer_url -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak psutil.getloadavg() BOOT_TIME = time.time() +log = logging.getLogger("about") +log.setLevel(cfg["log_level"]) class About(commands.Cog): def __init__(self, bot: RocketWatch): @@ -29,7 +32,7 @@ def __init__(self, bot: RocketWatch): @hybrid_command() async def about(self, ctx: Context): """Bot and Server Information""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed() g = self.bot.guilds code_time = None @@ -76,11 +79,13 @@ async def about(self, ctx: Context): bot_uptime = time.time() - BOOT_TIME e.add_field(name="Bot Uptime", value=f"{readable.uptime(bot_uptime)}") + repo_name = "haloooloolo/rocketwatch" + # show credits try: contributors = [ f"[{c['login']}]({c['html_url']}) ({c['contributions']})" - for c in requests.get("https://api.github.com/repos/InvisibleSymbol/rocketwatch/contributors").json() + for c in requests.get(f"https://api.github.com/repos/{repo_name}/contributors").json() if "bot" not in c["login"].lower() ] contributors_str = ", ".join(contributors[:10]) @@ -92,30 +97,6 @@ async def about(self, ctx: Context): await ctx.send(embed=e) - @hybrid_command() - async def donate(self, ctx: Context): - 
"""Donate to the Bot Developer""" - await ctx.defer(ephemeral=True) - e = Embed() - e.title = "Donate to the Developer" - e.description = "I hope my bot has been useful to you, it has been a fun experience building it!\n" \ - "Donations will help me keep doing what I love (and pay the server bills haha)\n\n" \ - "I accept Donations on all Ethereum related Chains! (Mainnet, Polygon, Rollups, etc.)" - e.add_field(name="Donation Address", - value="[`0xinvis.eth`](https://etherscan.io/address/0xf0138d2e4037957d7b37de312a16a88a7f83a32a)") - - # add address qrcode - query_string = urlencode({ - "chs" : "128x128", - "cht" : "qr", - "chl" : "0xF0138d2e4037957D7b37De312a16a88A7f83A32a", - "choe": "UTF-8", - "chld": "L|0" - }) - e.set_image(url=f"https://chart.googleapis.com/chart?{query_string}") - - e.set_footer(text="Thank you for your support! <3") - await ctx.send(embed=e) async def setup(bot): diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 913f43cb..852ed3ab 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -1,5 +1,6 @@ import logging from io import BytesIO +from typing import Optional import inflect import matplotlib as mpl @@ -69,32 +70,38 @@ def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]] } -def get_average_collateral_percentage_per_node(collateral_cap, bonded): +def get_average_collateral_percentage_per_node(collateral_cap: Optional[int], bonded: bool): # get stakes for each node stakes = list(get_node_minipools_and_collateral().values()) # get the current rpl price - rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - - result = {} - # process the data + rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) + + node_collaterals = [] for node in stakes: # get the minipool eth value minipool_value = int(node["eb16s"]) * 16 + int(node["eb8s"]) * (8 if bonded else 24) 
if not minipool_value: continue # rpl stake value - rpl_stake_value = solidity.to_float(node["rplStaked"]) * rpl_price + rpl_stake = solidity.to_float(node["rplStaked"]) + rpl_stake_value = rpl_stake * rpl_price # cap rpl stake at x% of minipool_value using collateral_cap + collateral = rpl_stake_value / minipool_value * 100 if collateral_cap: - rpl_stake_value = min(rpl_stake_value, minipool_value * collateral_cap / 100) + collateral = min(collateral, collateral_cap) # calculate percentage - percentage = rpl_stake_value / minipool_value * 100 - # round percentage to 5% steps - percentage = (percentage * 10 // 5) / 2 - # add to result + node_collaterals.append((rpl_stake, collateral)) + + effective_bound = max(perc for rpl, perc in node_collaterals) + possible_step_sizes = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100] + step_size = possible_step_sizes[np.argmin([abs(effective_bound / 30 - s) for s in possible_step_sizes])] + + result = {} + for rpl_stake, percentage in node_collaterals: + percentage = step_size * (percentage * 10 // (step_size * 10)) if percentage not in result: result[percentage] = [] - result[percentage].append(rpl_stake_value / rpl_price) + result[percentage].append(rpl_stake) return result @@ -230,13 +237,8 @@ async def collateral_distribution(self, await ctx.defer(ephemeral=is_hidden_weak(ctx)) data = get_average_collateral_percentage_per_node(collateral_cap, bonded) - - counts = [] - for collateral, nodes in data.items(): - counts.extend([collateral] * len(nodes)) - counts = np.array(list(sorted(counts))) - bins = np.bincount((counts * 2).astype(int)) - distribution = [(i / 2, bins[i]) for i in range(len(bins))] + distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] + counts = sum(([collateral] * num_nodes for collateral, num_nodes in distribution), []) # If the raw data were requested, print them and exit early if raw: @@ -257,19 +259,13 @@ async def collateral_distribution(self, 
ax.set_xticklabels(x_keys, rotation='vertical') ax.set_xlabel(f"Collateral Percent of { 'Bonded' if bonded else 'Borrowed'} Eth") - for label in ax.xaxis.get_major_ticks()[1::2]: - label.label.set_visible(False) ax.set_ylim(top=(ax.get_ylim()[1] * 1.1)) ax.yaxis.set_visible(False) ax.get_xaxis().set_major_formatter(FuncFormatter( lambda n, _: f"{x_keys[n] if n < len(x_keys) else 0}{'+' if n == len(x_keys)-1 else ''}%") ) - staked_distribution = [ - (collateral, sum(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0]) - ] - - bars = dict(staked_distribution) + bars = {collateral: sum(nodes) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])} line = ax2.plot(x_keys, [bars.get(float(x), 0) for x in x_keys]) ax2.set_ylim(top=(ax2.get_ylim()[1] * 1.1)) ax2.tick_params(axis='y', colors=line[0].get_color()) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 6f81834b..a46bec42 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -15,7 +15,7 @@ from utils import solidity from utils.cfg import cfg from utils.embeds import Embed -from utils.visibility import is_hidden, is_hidden_weak +from utils.visibility import is_hidden_weak from utils.dao import DefaultDAO, OracleDAO, SecurityCouncil, ProtocolDAO from utils.views import PageView from utils.embeds import el_explorer_url @@ -123,8 +123,7 @@ async def dao_votes( full: bool = False ) -> None: """Show currently active on-chain proposals""" - visibility = is_hidden(interaction) if full else is_hidden_weak(interaction) - await interaction.response.defer(ephemeral=visibility) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) match dao_name: case "pDAO": diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 759d41d2..f5e7e8f7 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ 
b/rocketwatch/plugins/support_utils/support_utils.py @@ -130,7 +130,7 @@ async def on_submit(self, interaction: Interaction) -> None: await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) -def has_perms(user: Interaction): +def has_perms(interaction: Interaction): return any([ interaction.user.id in cfg["rocketpool.support.user_ids"], any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), From 65508358638420b7e3c887446459f852effc6b3c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:42:16 +0000 Subject: [PATCH 072/279] minipool_states --- .../plugins/beacon_states/beacon_states.py | 103 ------------- .../minipool_states/minipool_states.py | 138 ++++++++++++++++++ 2 files changed, 138 insertions(+), 103 deletions(-) delete mode 100644 rocketwatch/plugins/beacon_states/beacon_states.py create mode 100644 rocketwatch/plugins/minipool_states/minipool_states.py diff --git a/rocketwatch/plugins/beacon_states/beacon_states.py b/rocketwatch/plugins/beacon_states/beacon_states.py deleted file mode 100644 index f35adae2..00000000 --- a/rocketwatch/plugins/beacon_states/beacon_states.py +++ /dev/null @@ -1,103 +0,0 @@ -import logging - -from discord.ext import commands -from discord.ext.commands import hybrid_command, Context -from pymongo import AsyncMongoClient - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed, el_explorer_url -from utils.readable import render_tree_legacy -from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak - -log = logging.getLogger("beacon_states") -log.setLevel(cfg["log_level"]) - - -class BeaconStates(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - - @hybrid_command() - async def beacon_states(self, ctx: Context): - await 
ctx.defer(ephemeral=is_hidden_weak(ctx)) - # fetch from db - res = await self.db.minipools_new.find({ - "beacon.status": {"$exists": True} - }).to_list(None) - data = { - "pending": {}, - "active" : {}, - "exiting": {}, - "exited" : {}, - "withdrawn": {}, - "closed": {} - } - exiting_valis = [] - for minipool in res: - match minipool["beacon"]["status"]: - case "pending_initialized": - data["pending"]["initialized"] = data["pending"].get("initialized", 0) + 1 - case "pending_queued": - data["pending"]["queued"] = data["pending"].get("queued", 0) + 1 - case "active_ongoing": - data["active"]["ongoing"] = data["active"].get("ongoing", 0) + 1 - case "active_exiting": - data["exiting"]["voluntarily"] = data["exiting"].get("voluntarily", 0) + 1 - exiting_valis.append(minipool) - case "active_slashed": - data["exiting"]["slashed"] = data["exiting"].get("slashed", 0) + 1 - exiting_valis.append(minipool) - case "exited_unslashed" | "exited_slashed" | "withdrawal_possible": - status_2 = "slashed" if minipool["beacon"]["slashed"] else "voluntarily" - data["exited"][status_2] = data["exited"].get(status_2, 0) + 1 - exiting_valis.append(minipool) - case "withdrawal_done": - status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" - if minipool["execution_balance"] > 0: - data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 - exiting_valis.append(minipool) - else: - data["closed"][status_2] = data["closed"].get(status_2, 0) + 1 - case _: - logging.warning(f"Unknown status {minipool['status']}") - - embed = Embed(title="Beacon Chain Minipool States", color=0x00ff00) - description = "```\n" - # render dict as a tree like structure - description += render_tree_legacy(data, "Minipool States") - - if len(exiting_valis) == 0: - description += "```" - elif len(exiting_valis) < 24: - description += "\n\n--- Exiting Minipools ---\n\n" - # array of validator attribute, sorted by index - valis = sorted([v["validator_index"] for v in exiting_valis]) - 
description += ", ".join([str(v) for v in valis]) - description += "```" - else: - description += "```\n**Exiting Node Operators**\n" - node_operators = {} - # dedupe, add count of validators with matching node operator - for v in exiting_valis: - node_operators[v["node_operator"]] = node_operators.get(v["node_operator"], 0) + 1 - # turn into list - node_operators = list(node_operators.items()) - # sort by count - node_operators.sort(key=lambda x: x[1], reverse=True) - # create description - max_list_length = 16 - description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in node_operators[:max_list_length]]) - if len(node_operators) > max_list_length: - remaining_no = len(node_operators) - max_list_length - remaining_validators = sum([c for _, c in node_operators[max_list_length:]]) - description += f", and {remaining_no} more ({remaining_validators})" - - embed.description = description - await ctx.send(embed=embed) - - -async def setup(self): - await self.add_cog(BeaconStates(self)) diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py new file mode 100644 index 00000000..055fa243 --- /dev/null +++ b/rocketwatch/plugins/minipool_states/minipool_states.py @@ -0,0 +1,138 @@ +import logging + +from discord.ext import commands +from discord.ext.commands import hybrid_command, Context +from pymongo import AsyncMongoClient + +from rocketwatch import RocketWatch +from utils.cfg import cfg +from utils.embeds import Embed, el_explorer_url +from utils.readable import render_tree_legacy +from utils.shared_w3 import w3 +from utils.visibility import is_hidden_weak + +log = logging.getLogger("beacon_states") +log.setLevel(cfg["log_level"]) + + +class MinipoolStates(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") + + @hybrid_command() + async def minipool_states(self, ctx: 
Context): + """Show minipool counts by beacon chain and contract status""" + await ctx.defer(ephemeral=is_hidden_weak(ctx)) + # fetch from db + res = await self.db.minipools_new.find({ + "beacon.status": {"$exists": True} + }).to_list(None) + data = { + "pending": {}, + "active" : {}, + "exiting": {}, + "exited" : {}, + "withdrawn": {}, + "closed": {} + } + exiting_valis = [] + withdrawn_valis = [] + for minipool in res: + match minipool["beacon"]["status"]: + case "pending_initialized": + data["pending"]["initialized"] = data["pending"].get("initialized", 0) + 1 + case "pending_queued": + data["pending"]["queued"] = data["pending"].get("queued", 0) + 1 + case "active_ongoing": + data["active"]["ongoing"] = data["active"].get("ongoing", 0) + 1 + case "active_exiting": + data["exiting"]["voluntarily"] = data["exiting"].get("voluntarily", 0) + 1 + exiting_valis.append(minipool) + case "active_slashed": + data["exiting"]["slashed"] = data["exiting"].get("slashed", 0) + 1 + exiting_valis.append(minipool) + case "exited_unslashed" | "exited_slashed" | "withdrawal_possible": + status_2 = "slashed" if minipool["beacon"]["slashed"] else "voluntarily" + data["exited"][status_2] = data["exited"].get(status_2, 0) + 1 + exiting_valis.append(minipool) + case "withdrawal_done": + status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" + if minipool["execution_balance"] > 0: + data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 + withdrawn_valis.append(minipool) + else: + data["closed"][status_2] = data["closed"].get(status_2, 0) + 1 + case _: + logging.warning(f"Unknown status {minipool['status']}") + + embed = Embed(title="Minipool States", color=0x00ff00) + description = "```\n" + # render dict as a tree like structure + description += render_tree_legacy(data, "Minipools") + + total_listed_valis = len(exiting_valis) + len(withdrawn_valis) + + if total_listed_valis == 0: + description += "```" + elif total_listed_valis < 24: + description += "\n" 
+ if len(exiting_valis) > 0: + description += "\n--- Exiting Minipools ---\n\n" + valis = sorted([v["validator_index"] for v in exiting_valis]) + description += ", ".join([str(v) for v in valis]) + if len(withdrawn_valis) > 0: + description += "\n--- Withdrawn Minipools ---\n\n" + valis = sorted([v["validator_index"] for v in withdrawn_valis]) + description += ", ".join([str(v) for v in valis]) + description += "```" + else: + description += "```" + + node_operators = [] + for valis in (exiting_valis, withdrawn_valis): + valis_no = {} + # dedupe, add count of validators with matching node operator + for v in valis: + valis_no[v["node_operator"]] = valis_no.get(v["node_operator"], 0) + 1 + # turn into list + valis_no = list(valis_no.items()) + # sort by count + valis_no.sort(key=lambda x: x[1], reverse=True) + node_operators.append(valis_no) + + exiting_node_operators, withdrawn_node_operators = node_operators + max_total_list_length = 16 + + if len(exiting_node_operators) + len(withdrawn_node_operators) <= max_total_list_length: + num_exiting = len(exiting_node_operators) + num_withdrawn = len(withdrawn_node_operators) + elif len(exiting_node_operators) >= len(withdrawn_node_operators): + num_withdrawn = min(len(withdrawn_node_operators), max_total_list_length // 2) + num_exiting = max_total_list_length - num_withdrawn + else: + num_exiting = min(len(exiting_node_operators), max_total_list_length // 2) + num_withdrawn = max_total_list_length - num_exiting + + if num_exiting > 0: + description += "\n**Exiting Node Operators**\n" + description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) + if remaining_no := exiting_node_operators[num_exiting:]: + num_remaining_valis = sum([c for _, c in remaining_no]) + description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += "\n" + if num_withdrawn > 0: + description += "\n**Withdrawn Node Operators**\n" + description += ", 
".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) + if remaining_no := withdrawn_node_operators[num_withdrawn:]: + num_remaining_valis = sum([c for _, c in remaining_no]) + description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += "\n" + + + embed.description = description + await ctx.send(embed=embed) + + +async def setup(self): + await self.add_cog(MinipoolStates(self)) From 74c3a6c39f3d0ffd16b761a06170f8d55e6150a7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:42:47 +0000 Subject: [PATCH 073/279] LEB4 count in deposit pool stats --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 41fe7ec3..8579c489 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -57,10 +57,10 @@ def get_deposit_pool_stats() -> Embed: embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all minipools." 
else: lines = [] - if (num_leb8 := int(dp_balance // 24)) > 0: - lines.append(f"**`{num_leb8:>4}`** 8 ETH minipools (24 ETH from DP)") + if (num_eb4 := int(dp_balance // 28)) > 0: + lines.append(f"**`{num_eb4:>4}`** 4 ETH validators (28 ETH from DP)") if (num_credit := int(dp_balance // 32)) > 0: - lines.append(f"**`{num_credit:>4}`** credit minipools (32 ETH from DP)") + lines.append(f"**`{num_credit:>4}`** credit validators (32 ETH from DP)") if lines: embed.add_field(name="Enough For", value="\n".join(lines), inline=False) From 68c8130393746baea8f15cede49bbb9e405ae0f9 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:43:09 +0000 Subject: [PATCH 074/279] more scam message logging --- rocketwatch/plugins/detect_scam/detect_scam.py | 5 +++-- rocketwatch/plugins/events/events.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index d2534d12..cbeb233c 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -338,7 +338,6 @@ def _tap_on_this(self, message: Message) -> Optional[str]: def _ticket_system(self, message: Message) -> Optional[str]: # message contains one of the relevant keyword combinations and a link txt = self._get_message_content(message) - log.debug(f"message content: {txt}") if not self.basic_url_pattern.search(txt): return None @@ -436,7 +435,9 @@ async def _reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: return "Reaction spam by message author" if (reaction_count >= 8) else None @Cog.listener() - async def on_message(self, message: Message) -> None: + async def on_message(self, message: Message) -> None: + log.debug(f"Message(id={message.id}, author={message.author}, channel={message.channel}, content=\"{message.content}\", embeds={message.embeds})") + if message.author.bot: log.warning("Ignoring message 
sent by bot") return diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index dd346c5e..6e9fe870 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -566,8 +566,8 @@ def share_repr(percentage: float) -> str: receipt = None if cfg["rocketpool.chain"] == "mainnet": receipt = w3.eth.get_transaction_receipt(event.transactionHash) - args.tnx_fee = solidity.to_float(receipt["gasUsed"] * receipt["effectiveGasPrice"]) - args.tnx_fee_usd = round(rp.get_eth_usdc_price() * args.tnx_fee, 2) + args.tnx_fee = receipt["gasUsed"] * receipt["effectiveGasPrice"] + args.tnx_fee_usd = round(rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) args.caller = receipt["from"] # add transaction hash and block number to args From 8f6d8e4227ebc7b6045f4f33c1d6c0fd3a2466a6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:43:21 +0000 Subject: [PATCH 075/279] new fee_distribution command --- .../fee_distribution/fee_distribution.py | 98 +++++++++++++++++++ .../node_fee_distribution.py | 73 -------------- 2 files changed, 98 insertions(+), 73 deletions(-) create mode 100644 rocketwatch/plugins/fee_distribution/fee_distribution.py delete mode 100644 rocketwatch/plugins/node_fee_distribution/node_fee_distribution.py diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py new file mode 100644 index 00000000..976573d1 --- /dev/null +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -0,0 +1,98 @@ +import logging +from io import BytesIO +from typing import Literal + +from discord import Interaction, File +from discord.ext import commands +from discord.app_commands import command +from pymongo import AsyncMongoClient +from matplotlib import pyplot as plt + +from rocketwatch import RocketWatch +from utils.cfg import cfg +from utils.embeds import Embed +from utils.visibility import 
is_hidden_weak +from utils.readable import render_tree_legacy + +log = logging.getLogger("fee_distribution") +log.setLevel(cfg["log_level"]) + + +class FeeDistribution(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + + @command() + async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", "pie"]): + """ + Show the distribution of minipool commission percentages. + """ + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + + e = Embed() + e.title = "Minipool Fee Distribution" + + tree = {} + fig, axs = plt.subplots(1, 2) + + for i, bond in enumerate([8, 16]): + result = await self.db.minipools_new.aggregate([ + { + "$match": { + "node_deposit_balance": bond, + "beacon.status": "active_ongoing" + } + }, + { + "$group": { + "_id" : { "$round": ["$node_fee", 2] }, + "count": { "$sum": 1 } + } + }, + { + "$sort": { "_id": 1 } + } + ]) + + labels = [] + sizes = [] + subtree = {} + + for entry in await result.to_list(): + fee_percentage = entry['_id'] * 100 + labels.append(f"{fee_percentage:.0f}%") + sizes.append(entry["count"]) + subtree[labels[-1]] = sizes[-1] + + ax = axs[i] + total = sum(sizes) + tree[f"{bond} ETH"] = subtree + + # avoid overlapping labels for small slices + for i in range(len(sizes)): + if sizes[i] < 0.05 * total: + labels[i] = "" + + ax.set_title(f"{bond} ETH") + ax.pie(sizes, labels=labels, autopct=lambda p: f"{p * total / 100:.0f}" if (p >= 5) else "") + + if mode == "tree": + e.description = f"```\n{render_tree_legacy(tree, 'Minipools')}\n```" + await interaction.followup.send(embed=e) + elif mode == "pie": + img = BytesIO() + fig.tight_layout() + fig.savefig(img, format='png') + img.seek(0) + fig.clear() + plt.close() + + file_name = "fee_distribution.png" + e.set_image(url=f"attachment://{file_name}") + await interaction.followup.send(embed=e, file=File(img, filename=file_name)) + + + +async def setup(bot): + await 
bot.add_cog(FeeDistribution(bot)) diff --git a/rocketwatch/plugins/node_fee_distribution/node_fee_distribution.py b/rocketwatch/plugins/node_fee_distribution/node_fee_distribution.py deleted file mode 100644 index 1e26a08b..00000000 --- a/rocketwatch/plugins/node_fee_distribution/node_fee_distribution.py +++ /dev/null @@ -1,73 +0,0 @@ -import logging - -import numpy as np -from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed -from utils.etherscan import get_recent_account_transactions -from utils.rocketpool import rp -from utils.visibility import is_hidden - -log = logging.getLogger("node_fee_distribution") -log.setLevel(cfg["log_level"]) - - -def get_percentiles(percentiles, values): - return {p: np.percentile(values, p, interpolation='nearest') for p in percentiles} - - -class NodeFeeDistribution(commands.Cog): - PERCENTILES = [1, 10, 25, 50, 75, 90, 99] - - def __init__(self, bot: RocketWatch): - self.bot = bot - - @hybrid_command() - async def node_fee_distribution(self, ctx: Context): - """ - Show the distribution of node expenses due to gas fees. 
- """ - await ctx.defer(ephemeral=is_hidden(ctx)) - - e = Embed() - e.title = "Node Fee Distributions" - e.description = "" - - deposit_txs = await get_recent_account_transactions( - rp.get_address_by_name("rocketNodeDeposit")) - rpl_staking_txs = await get_recent_account_transactions( - rp.get_address_by_name("rocketNodeStaking")) - first = True - - for title, txs in [('Minipool Deposit', deposit_txs), ('RPL Staking', rpl_staking_txs)]: - if not first: - e.description += "\n" - else: - first = False - - if len(txs) > 0: - since = min([int(x["timeStamp"]) for x in txs.values()]) - gas = [int(x["gasPrice"]) // int(1E9) for x in txs.values()] - totals = [int(x["gasUsed"]) * int(x["gasPrice"]) / - 1E18 for x in txs.values()] - gas_percentiles = get_percentiles(NodeFeeDistribution.PERCENTILES, gas) - fee_percentiles = get_percentiles(NodeFeeDistribution.PERCENTILES, totals) - - e.description += f"**{title} Fees:**\n" - e.description += f"_Since _\n```" - e.description += f"Minimum: {min(gas)} gwei gas, {min(totals):.4f} eth total\n" - for p in NodeFeeDistribution.PERCENTILES: - e.description += f"{str(p):>2}th percentile: {int(gas_percentiles[p]):>4} gwei gas, {fee_percentiles[p]:.4f} eth total\n" - e.description += f"Maximum: {max(gas)} gwei gas, {max(totals):.4f} eth total```\n" - else: - e.description += f"No recent {title} transactions found.\n" - - await ctx.send(embed=e) - - -async def setup(bot): - await bot.add_cog(NodeFeeDistribution(bot)) From 6c29326219a595ff79dcbe044c78ffd6d4a6a4bd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Feb 2026 19:43:35 +0000 Subject: [PATCH 076/279] Saturn upgrade event --- .../plugins/transactions/functions.json | 3 +++ .../plugins/transactions/transactions.py | 4 +++- rocketwatch/strings/addresses.en.json | 4 +++- rocketwatch/strings/embeds.en.json | 4 ++++ rocketwatch/utils/embeds.py | 18 ++++++++++++++---- 5 files changed, 27 insertions(+), 6 deletions(-) diff --git 
a/rocketwatch/plugins/transactions/functions.json b/rocketwatch/plugins/transactions/functions.json index eb8a2cc7..c82eea82 100644 --- a/rocketwatch/plugins/transactions/functions.json +++ b/rocketwatch/plugins/transactions/functions.json @@ -68,5 +68,8 @@ }, "rocketUpgradeOneDotThreeDotOne": { "execute": "houston_hotfix_upgrade_triggered" + }, + "rocketUpgradeOneDotFour": { + "execute": "saturn_one_upgrade_triggered" } } diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index bf80852f..185042d8 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -139,21 +139,23 @@ def create_embeds(event_name: str, event: aDict) -> list[Embed]: args.transactionHash = event.hash.hex() args.blockNumber = event.blockNumber - receipt = w3.eth.get_transaction_receipt(args.transactionHash) # oDAO bootstrap doesn't emit an event if "odao_disable" in event_name and not args.confirmDisableBootstrapMode: return [] elif event_name == "pdao_set_delegate": + receipt = w3.eth.get_transaction_receipt(args.transactionHash) args.delegator = receipt["from"] args.delegate = args.get("delegate") or args.get("newDelegate") args.votingPower = solidity.to_float(rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber)) if (args.votingPower < 50) or (args.delegate == args.delegator): return [] elif "failed_deposit" in event_name: + receipt = w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] args.burnedValue = solidity.to_float(event.gasPrice * receipt.gasUsed) elif "deposit_pool_queue" in event_name: + receipt = w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] event = rp.get_contract_by_name("rocketMinipoolQueue").events.MinipoolDequeued() # get the amount of dequeues that happened in this transaction using the event logs diff --git a/rocketwatch/strings/addresses.en.json 
b/rocketwatch/strings/addresses.en.json index 8a96d280..f5946ded 100644 --- a/rocketwatch/strings/addresses.en.json +++ b/rocketwatch/strings/addresses.en.json @@ -88,5 +88,7 @@ "0xfEb352930cA196a80B708CDD5dcb4eCA94805daB": "Paladin V2.1 QuestBoard veBAL", "0x2A906f92B0378Bb19a3619E2751b1e0b8cab6B29": "Constellation Supernode", "0xFD857D3cFcb942039388FBd44c18163f91552b35": "Dev Wallet", - "0x89Af09B5fA88B8989BA5a8960982cCCCA0BEa6F0": "Core Team" + "0x89Af09B5fA88B8989BA5a8960982cCCCA0BEa6F0": "Core Team", + "0x9Ca1d6E730Eb9fbfD45c9FF5F0AC4E3d172d8F4d": "RockSolid Vault", + "0xc23b28337896ab92d7e8ed0303cec0609a58143b": "elpresidank" } diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 374c7afa..ae082c90 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -481,6 +481,10 @@ "title": ":tada: Houston Hotfix Upgrade Complete!", "description": "" }, + "saturn_one_upgrade_triggered": { + "title": ":ringed_planet: Saturn 1 Upgrade Complete!", + "description": "" + }, "unsteth_withdrawal_requested_event": { "title": ":money_with_wings: Large stETH Withdrawal Requested", "description": "%{owner} has requested a withdrawal of **%{amountOfStETH} stETH**!" 
diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 78aa9c2f..708d4ff6 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -263,6 +263,8 @@ def assemble(args) -> Embed: e.set_image(url="https://i.imgur.com/XT5qPWf.png") case "houston_hotfix_upgrade_triggered": e.set_image(url="https://i.imgur.com/JcQS3Sh.png") + case "saturn_one_upgrade_triggered": + e.set_image(url="https://i.imgur.com/n3wMCOA.png") match args.event_name: case "pdao_set_delegate": @@ -488,8 +490,16 @@ def assemble(args) -> Embed: # show the transaction fees if "tnx_fee" in args: - e.add_field(name="Transaction Fee", - value=f"{args.tnx_fee} ETH ({args.tnx_fee_usd} USDC)", - inline=False) - + tnx_fee_wei = args.tnx_fee_raw + if tnx_fee_wei >= 10**15: + tnx_fee_eth = round(tnx_fee_wei / 10**18, 3) + value = f"{tnx_fee_eth:,} ETH ({args.tnx_fee_usd} USDC)" + elif tnx_fee_wei >= 10**9: + tnx_fee_gwei = round(tnx_fee_wei / 10**9) + value = f"{tnx_fee_gwei:,} Gwei ({args.tnx_fee_usd} USDC)" + else: + value = f"{tnx_fee_wei:,} Wei ({args.tnx_fee_usd} USDC)" + + e.add_field(name="Transaction Fee", value=value, inline=False) + return e From cd4cd5067ab2a2f64ff247cb9a566896853389e7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Feb 2026 00:58:57 +0000 Subject: [PATCH 077/279] update queue for megapools --- .../plugins/deposit_pool/deposit_pool.py | 79 ++------ rocketwatch/plugins/queue/queue.py | 180 +++++++++++++----- rocketwatch/utils/rocketpool.py | 18 +- 3 files changed, 171 insertions(+), 106 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 8579c489..941344c1 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -46,15 +46,25 @@ def get_deposit_pool_stats() -> Embed: embed.add_field(name="Maximum Size", value=f"{deposit_cap:,} ETH") embed.add_field(name="Status", 
value=dp_status, inline=False) - display_limit = 3 - queue_length, queue_content = Queue.get_minipool_queue(display_limit) - if queue_length > 0: - embed.description = f"**Minipool Queue ({queue_length})**\n" - embed.description += queue_content - if queue_length > display_limit: - embed.description += f"{display_limit + 1}. `...`\n" - queue_capacity = max(queue_length * 31 - dp_balance, 0.0) - embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all minipools." + display_limit = 2 + exp_queue_length, exp_queue_content = Queue.get_express_queue(display_limit) + std_queue_length, std_queue_content = Queue.get_standard_queue(display_limit) + total_queue_length = exp_queue_length + std_queue_length + if (total_queue_length) > 0: + embed.description = "" + if exp_queue_length > 0: + embed.description += f"**Express Queue ({exp_queue_length})**\n" + embed.description += exp_queue_content + if exp_queue_length > display_limit: + embed.description += f"{display_limit + 1}. `...`\n" + if std_queue_length > 0: + embed.description += f"**Standard Queue ({std_queue_length})**\n" + embed.description += std_queue_content + if std_queue_length > display_limit: + embed.description += f"{display_limit + 1}. `...`\n" + + queue_capacity = max(total_queue_length * 31 - dp_balance, 0.0) + embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." 
else: lines = [] if (num_eb4 := int(dp_balance // 28)) > 0: @@ -149,57 +159,6 @@ async def get_status(self) -> Embed: embed.add_field(name="Secondary Market", value=f"rETH is trading {rate_status}", inline=False) return embed - @hybrid_command() - async def atlas_queue(self, ctx): - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - - e = Embed() - e.title = "Atlas Queue Stats" - - data = await self.db.minipools_new.aggregate([ - { - '$match': { - 'status' : 'initialised', - 'deposit_amount': { - '$gt': 1 - } - } - }, { - '$group': { - '_id' : 'total', - 'value' : { - '$sum': { - '$subtract': [ - '$deposit_amount', 1 - ] - } - }, - 'count' : { - '$sum': 1 - }, - 'count_16': { - '$sum': { - '$floor': { - '$divide': [ - '$node_deposit_balance', 16 - ] - } - } - } - } - } - ]).to_list(None) - - total = int(data[0]['value']) - count = data[0]['count'] - count_16 = int(data[0]['count_16']) - count_8 = count - count_16 - - e.description = f"Amount deposited into deposit pool by queued minipools: **{total} ETH**\n" \ - f"Non-credit minipools in the queue: **{count}** (16 ETH: **{count_16}**, 8 ETH: **{count_8}**)\n" \ - - await ctx.send(embed=e) - async def setup(bot): await bot.add_cog(DepositPool(bot)) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index b9ec6e9b..2a3c6d25 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -1,11 +1,13 @@ import math import logging +from typing import Literal, NamedTuple + from cachetools.func import ttl_cache from discord import Interaction from discord.app_commands import command from discord.ext.commands import Cog -from eth_typing import ChecksumAddress +from eth_typing import ChecksumAddress, BlockIdentifier from rocketwatch import RocketWatch from utils import solidity @@ -22,19 +24,34 @@ class Queue(Cog): + class Entry(NamedTuple): + megapool: ChecksumAddress + validator_id: int + bond: int # always 4,000 for now + deposit_size: int # always 32,000 for 
now + def __init__(self, bot: RocketWatch): self.bot = bot - class MinipoolPageView(PageView): - def __init__(self): + class ValidatorPageView(PageView): + def __init__(self, queue_type: Literal["combined", "standard", "express"]): super().__init__(page_size=15) + if queue_type == "standard": + self.queue_name = "Validator Standard Queue" + self.content_loader = Queue.get_standard_queue + elif queue_type == "express": + self.queue_name = "Validator Express Queue" + self.content_loader = Queue.get_express_queue + else: + self.queue_name = "Validator Queue" + self.content_loader = Queue.get_combined_queue @property def _title(self) -> str: - return "Minipool Queue" + return self.queue_name - async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: - queue_length, queue_content = Queue.get_minipool_queue( + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: + queue_length, queue_content = self.content_loader( limit=(to_idx - from_idx + 1), start=from_idx ) return queue_length, queue_content @@ -43,51 +60,124 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: @ttl_cache(ttl=600) def _cached_el_url(address, prefix="") -> str: return el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) - + @staticmethod - def get_minipool_queue(limit: int, start: int = 0) -> tuple[int, str]: - """Get the next {limit} minipools in the queue""" - - queue_contract = rp.get_contract_by_name("addressQueueStorage") - key = w3.solidity_keccak(["string"], ["minipools.available.variable"]) - q_len = queue_contract.functions.getLength(key).call() - - start = max(start, 0) - limit = min(limit, q_len - start) - - if limit <= 0: - return 0, "" - - queue: list[ChecksumAddress] = [ - w3.to_checksum_address(res.results[0]) for res in rp.multicall.aggregate([ - queue_contract.functions.getItem(key, i) for i in range(start, start + limit) - ]).results - ] - mp_contracts = [rp.assemble_contract("rocketMinipool", 
address=minipool) for minipool in queue] - nodes: list[ChecksumAddress] = [ - w3.to_checksum_address(res.results[0]) for res in rp.multicall.aggregate([ - contract.functions.getNodeAddress() for contract in mp_contracts - ]).results - ] - status_times: list[int] = [ - res.results[0] for res in rp.multicall.aggregate([ - contract.functions.getStatusTime() for contract in mp_contracts - ]).results - ] - + def __format_queue_entries(entries: list['Queue.Entry'], offset: int = 0) -> str: content = "" - for i, minipool in enumerate(queue[:limit]): - mp_label = Queue._cached_el_url(minipool, -1) - node_label = Queue._cached_el_url(nodes[i]) - content += f"{start+i+1}. {mp_label} :construction_site: by {node_label}\n" + for i, entry in enumerate(entries): + node_address = rp.call("rocketMegapoolDelegate.getNodeAddress", address=entry.megapool) + node_label = Queue._cached_el_url(node_address) + content += f"{offset+i+1}. {node_label} #{entry.validator_id}\n" + return content + + @staticmethod + def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: + """Get the next {limit} validators in the standard queue""" + q_len, entries = Queue._get_queue("deposit.queue.standard", limit, start) + return q_len, Queue.__format_queue_entries(entries, start) + + @staticmethod + def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: + """Get the next {limit} validators in the express queue""" + q_len, entries = Queue._get_queue("deposit.queue.express", limit, start) + return q_len, Queue.__format_queue_entries(entries, start) + + @staticmethod + def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: + list_contract = rp.get_contract_by_name("linkedListStorage") + raw_entries, _ = list_contract.functions.scan(namespace, 0, start + limit).call(block_identifier=block_identifier) + return [Queue.Entry(*entry) for entry in raw_entries][start:] + + @staticmethod + def _get_queue(namespace: str, limit: 
int, start: int = 0) -> tuple[int, list['Queue.Entry']]: + if not rp.is_saturn_deployed() or limit <= 0: + return 0, [] + + list_contract = rp.get_contract_by_name("linkedListStorage") + queue_namespace = bytes(w3.solidity_keccak(["string"], [namespace])) + + start = max(start, 0) + latest_block = w3.eth.get_block_number() + q_len = list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) + + if start >= q_len: + return q_len, [] - return q_len, content + return q_len, Queue._scan_list(queue_namespace, start, limit, latest_block) + + @staticmethod + def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_standard: int, express_rate: int) -> tuple[int, int]: + total_entries = end - start + 1 # end is inclusive + num_express = total_entries // (express_rate + 1) + # express queue is used when index % (express_queue_rate + 1) != express_queue_rate + # this checks whether we "cross" an extra express queue slot in the interval + if ((end + 1) % (express_rate + 1)) < (start % (express_rate + 1)): + num_express += 1 + + num_express = min(num_express, len_express) + # if express queue runs out, remaining entries are taken from standard queue + num_standard = min(total_entries - num_express, len_standard) + # if standard queue runs out, remaining entries are taken from express queue + if (num_express + num_standard) < total_entries: + num_express = min(total_entries - num_standard, len_express) + + return num_express, num_standard + + + @staticmethod + def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: + """Get the next {limit} validators in the combined queue (express + standard)""" + + latest_block = w3.eth.get_block_number() + express_queue_rate = rp.call("rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block) + queue_index = rp.call("rocketDepositPool.getQueueIndex", block=latest_block) + + list_contract = rp.get_contract_by_name("linkedListStorage") + exp_namespace = 
bytes(w3.solidity_keccak(["string"], ["deposit.queue.express"])) + std_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.standard"])) + + express_queue_length = list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) + standard_queue_length = list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) + q_len = express_queue_length + standard_queue_length + + if start >= q_len: + return q_len, "" + + start_express_queue, start_standard_queue = Queue._get_entries_used_in_interval( + queue_index, + queue_index + start - 1, + express_queue_length, + standard_queue_length, express_queue_rate + ) + limit_express_queue, limit_standard_queue = Queue._get_entries_used_in_interval( + queue_index + start, + queue_index + start + limit - 1, + express_queue_length - start_express_queue, + standard_queue_length - start_standard_queue, + express_queue_rate + ) + + express_entries_rev = Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block)[::-1] + standard_entries_rev = Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block)[::-1] + queue_entries = [] + + for i in range(len(express_entries_rev ) + len(standard_entries_rev)): + effective_queue_index = queue_index + start + i + is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate + if is_express and express_entries_rev: + queue_entries.append(express_entries_rev.pop()) + elif standard_entries_rev: + queue_entries.append(standard_entries_rev.pop()) + else: + queue_entries.append(express_entries_rev.pop()) + + return q_len, Queue.__format_queue_entries(queue_entries, start) @command() - async def queue(self, interaction: Interaction): - """Show the minipool queue""" + async def queue(self, interaction: Interaction, queue_type: Literal["combined", "standard", "express"] = "combined"): + """Show the RP validator queue""" await 
interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - view = Queue.MinipoolPageView() + view = Queue.ValidatorPageView(queue_type) embed = await view.load() await interaction.followup.send(embed=embed, view=view) diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 4ef623e5..2390196e 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -137,7 +137,23 @@ def get_revert_reason(tnx): return "Hidden Error" else: return None - + + def get_string(self, key: str) -> str: + sha3 = w3.solidity_keccak(["string"], [key]) + return self.get_contract_by_name("rocketStorage").functions.getString(sha3).call() + + def get_uint(self, key: str) -> int: + sha3 = w3.solidity_keccak(["string"], [key]) + return self.get_contract_by_name("rocketStorage").functions.getUint(sha3).call() + + def get_protocol_version(self) -> tuple: + version_string = self.get_string("protocol.version") + return tuple(map(int, version_string.split("."))) + + def is_saturn_deployed(self) -> bool: + protocol_version = self.get_protocol_version() + return protocol_version >= (1, 4) + @cached(cache=ABI_CACHE) def get_abi_by_name(self, name): return self.uncached_get_abi_by_name(name) From f94caf9c6642ce853e0962bc5934d1aadd4114a2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Feb 2026 02:06:34 +0000 Subject: [PATCH 078/279] queue improvements --- rocketwatch/plugins/queue/queue.py | 62 +++++++++++++++++------------- 1 file changed, 35 insertions(+), 27 deletions(-) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 2a3c6d25..911c36d4 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -62,25 +62,20 @@ def _cached_el_url(address, prefix="") -> str: return el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) @staticmethod - def __format_queue_entries(entries: list['Queue.Entry'], offset: int = 0) -> str: 
- content = "" - for i, entry in enumerate(entries): - node_address = rp.call("rocketMegapoolDelegate.getNodeAddress", address=entry.megapool) - node_label = Queue._cached_el_url(node_address) - content += f"{offset+i+1}. {node_label} #{entry.validator_id}\n" - return content + def __format_queue_entry(entry: 'Queue.Entry') -> str: + node_address = rp.call("rocketMegapoolDelegate.getNodeAddress", address=entry.megapool) + node_label = Queue._cached_el_url(node_address) + return f"{node_label} #{entry.validator_id + 1}" @staticmethod def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the standard queue""" - q_len, entries = Queue._get_queue("deposit.queue.standard", limit, start) - return q_len, Queue.__format_queue_entries(entries, start) + return Queue._get_queue("deposit.queue.standard", limit, start) @staticmethod def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the express queue""" - q_len, entries = Queue._get_queue("deposit.queue.express", limit, start) - return q_len, Queue.__format_queue_entries(entries, start) + return Queue._get_queue("deposit.queue.express", limit, start) @staticmethod def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: @@ -89,9 +84,9 @@ def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: Block return [Queue.Entry(*entry) for entry in raw_entries][start:] @staticmethod - def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, list['Queue.Entry']]: + def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: if not rp.is_saturn_deployed() or limit <= 0: - return 0, [] + return 0, "" list_contract = rp.get_contract_by_name("linkedListStorage") queue_namespace = bytes(w3.solidity_keccak(["string"], [namespace])) @@ -101,9 +96,16 @@ def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, 
list['Q q_len = list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) if start >= q_len: - return q_len, [] + return q_len, "" + + queue_entries = Queue._scan_list(queue_namespace, start, limit, latest_block) + + content = "" + for i, entry in enumerate(queue_entries): + entry_str = Queue.__format_queue_entry(entry) + content += f"{start+i+1}. {entry_str}\n" - return q_len, Queue._scan_list(queue_namespace, start, limit, latest_block) + return q_len, content @staticmethod def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_standard: int, express_rate: int) -> tuple[int, int]: @@ -122,7 +124,6 @@ def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_st num_express = min(total_entries - num_standard, len_express) return num_express, num_standard - @staticmethod def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: @@ -159,19 +160,26 @@ def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: express_entries_rev = Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block)[::-1] standard_entries_rev = Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block)[::-1] - queue_entries = [] - - for i in range(len(express_entries_rev ) + len(standard_entries_rev)): + + index_digits = len(str(q_len)) + content = "" + for i in range(len(express_entries_rev) + len(standard_entries_rev)): effective_queue_index = queue_index + start + i - is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate - if is_express and express_entries_rev: - queue_entries.append(express_entries_rev.pop()) - elif standard_entries_rev: - queue_entries.append(standard_entries_rev.pop()) + is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate + if (is_express and express_entries_rev) or (not standard_entries_rev): + entry = express_entries_rev.pop() + express_pos = 
start_express_queue + limit_express_queue - len(express_entries_rev) + queue_pos = f"E{express_pos:0{index_digits}}" else: - queue_entries.append(express_entries_rev.pop()) - - return q_len, Queue.__format_queue_entries(queue_entries, start) + entry = standard_entries_rev.pop() + standard_pos = start_standard_queue + limit_standard_queue - len(standard_entries_rev) + queue_pos = f"S{standard_pos:0{index_digits}}" + + overall_pos = start + i + 1 + entry_str = Queue.__format_queue_entry(entry) + content += f"{overall_pos}. ({queue_pos}) {entry_str}\n" + + return q_len, content @command() async def queue(self, interaction: Interaction, queue_type: Literal["combined", "standard", "express"] = "combined"): From cff197e2c1b8d4ff7bcbf484949d382c2fd51ac2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Feb 2026 02:30:30 +0000 Subject: [PATCH 079/279] formatting tweaks --- rocketwatch/plugins/queue/queue.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 911c36d4..8910cd9b 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -3,6 +3,7 @@ from typing import Literal, NamedTuple +from functools import cache from cachetools.func import ttl_cache from discord import Interaction from discord.app_commands import command @@ -61,11 +62,16 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: def _cached_el_url(address, prefix="") -> str: return el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) + @staticmethod + @cache + def _megapool_to_node(megapool_address) -> ChecksumAddress: + return rp.call("rocketMegapoolDelegate.getNodeAddress", address=megapool_address) + @staticmethod def __format_queue_entry(entry: 'Queue.Entry') -> str: - node_address = rp.call("rocketMegapoolDelegate.getNodeAddress", address=entry.megapool) + node_address = 
Queue._megapool_to_node(entry.megapool) node_label = Queue._cached_el_url(node_address) - return f"{node_label} #{entry.validator_id + 1}" + return f"{node_label} #`{entry.validator_id + 1}`" @staticmethod def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: @@ -177,7 +183,7 @@ def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: overall_pos = start + i + 1 entry_str = Queue.__format_queue_entry(entry) - content += f"{overall_pos}. ({queue_pos}) {entry_str}\n" + content += f"{overall_pos}. (`{queue_pos}`) {entry_str}\n" return q_len, content From 113121671ecbab49831558f67d41b4077b93817e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Feb 2026 15:59:39 +0000 Subject: [PATCH 080/279] basic Saturn 1 events --- rocketwatch/plugins/events/events.json | 53 ++++++++++++++++++++------ rocketwatch/plugins/events/events.py | 35 ++++++++++++----- rocketwatch/plugins/queue/queue.py | 2 +- rocketwatch/strings/embeds.en.json | 30 ++++++++++++--- rocketwatch/utils/embeds.py | 10 ++--- 5 files changed, 96 insertions(+), 34 deletions(-) diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index d80640d2..6dc41bc0 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -81,6 +81,10 @@ { "event_name": "DepositRecycled", "name": "pool_deposit_recycled_event" + }, + { + "event_name": "QueueExited", + "name": "validator_queue_exited_event" } ] }, @@ -199,7 +203,7 @@ "contract_name": "rocketNodeStaking", "events": [ { - "event_name": "RPLStaked", + "event_name": "RPLStaked(address,address,uint256,uint256)", "name": "rpl_stake_event" }, { @@ -310,6 +314,14 @@ { "event_name": "Withdrawal", "name": "eth_withdraw_event" + }, + { + "event_name": "DepositReceived", + "name": "validator_deposit_event" + }, + { + "event_name": "MultiDepositReceived", + "name": "validator_multi_deposit_event" } ] }, @@ -456,6 +468,19 @@ "name": 
"cs_rpl_target_ratio_change_event" } ] + }, + { + "contract_name": "RockSolidVault", + "events": [ + { + "event_name": "DepositSync", + "name": "rocksolid_deposit_event" + }, + { + "event_name": "RedeemRequest", + "name": "rocksolid_withdrawal_event" + } + ] } ], "global": [ @@ -488,29 +513,33 @@ } ] }, - { - "contract_name": "rocketDAONodeTrustedUpgrade", + { + "contract_name": "rocketMegapoolDelegate", "events": [ { - "event_name": "ContractUpgraded", - "name": "contract_upgraded" + "event_name": "MegapoolValidatorAssigned", + "name": "megapool_validator_assigned_event" }, { - "event_name": "ContractAdded", - "name": "contract_added" + "event_name": "MegapoolValidatorExiting", + "name": "megapool_validator_exiting_event" + }, + { + "event_name": "MegapoolValidatorExited", + "name": "megapool_validator_exited_event" } ] }, { - "contract_name": "RockSolidVault", + "contract_name": "rocketDAONodeTrustedUpgrade", "events": [ { - "event_name": "DepositSync", - "name": "rocksolid_deposit_event" + "event_name": "ContractUpgraded", + "name": "contract_upgraded" }, { - "event_name": "RedeemRequest", - "name": "rocksolid_withdrawal_event" + "event_name": "ContractAdded", + "name": "contract_added" } ] } diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 6e9fe870..ecd67c2c 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -62,6 +62,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: for event in group["events"]: event_name = event["event_name"] try: + log.info(f"Adding filter for {contract_name}.{event_name}") topic = contract.events[event_name].build_filter().topics[0] except ABIEventFunctionNotFound as e: log.exception(e) @@ -383,9 +384,20 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: receipt = w3.eth.get_transaction_receipt(event.transactionHash) + + 
def is_minipool(_address: ChecksumAddress) -> bool: + return rp.call("rocketMinipoolManager.getMinipoolExists", _address) + + def is_megapool(_address: ChecksumAddress) -> bool: + sha3 = w3.solidity_keccak(["string", "address"], ["megapool.exists", _address]) + return rp.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() + + is_minipool_event = is_minipool(event.address) or is_minipool(receipt.to) + is_megapool_event = is_megapool(event.address) or is_megapool(receipt.to) + if not any([ - rp.call("rocketMinipoolManager.getMinipoolExists", receipt.to), - rp.call("rocketMinipoolManager.getMinipoolExists", event.address), + is_minipool_event, + is_megapool_event, rp.get_name_by_address(receipt.to) not in [None, "multicall3"], rp.get_name_by_address(event.address) ]): @@ -423,9 +435,13 @@ def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: if (n := rp.get_name_by_address(receipt["to"])) is None or not n.startswith("rocket"): event.args["from"] = receipt["to"] event.args["caller"] = receipt["from"] - - # and add the minipool address, which is the origin of the event - event.args.minipool = event.address + + if is_minipool_event: + # and add the minipool address, which is the origin of the event + event.args.minipool = event.address + if is_megapool_event: + event.args.megapool = event.address + event.args.node = rp.call("rocketMegapoolDelegate.getNodeAddress", address=event.address) return self.handle_event(event_name, event) @@ -653,10 +669,7 @@ def share_repr(percentage: float) -> str: args.amount = solidity.to_float(args.amount) args.ethAmount = args.amount * rpl_ratio elif event_name in ["node_merkle_rewards_claimed"]: - rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - args.amountRPL = sum(solidity.to_float(r) for r in args.amountRPL) - args.amountETH = sum(solidity.to_float(e) for e in args.amountETH) - args.ethAmount = args.amountRPL * rpl_ratio + return None # TODO elif "transfer_event" in 
event_name: token_prefix = event_name.split("_", 1)[0] args.amount = args.value / 10**18 @@ -670,6 +683,10 @@ def share_repr(percentage: float) -> str: # filter small burns < 1 rETH if solidity.to_float(args.amount) < 1: return None + elif event_name == "validator_multi_deposit_event": + args.amount = args.totalBond + if args.numberOfValidators == 1: + event_name = "validator_deposit_event" # reject if the amount is not major if any([event_name == "reth_transfer_event" and args.amount < 1000, diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 8910cd9b..5c014db8 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -71,7 +71,7 @@ def _megapool_to_node(megapool_address) -> ChecksumAddress: def __format_queue_entry(entry: 'Queue.Entry') -> str: node_address = Queue._megapool_to_node(entry.megapool) node_label = Queue._cached_el_url(node_address) - return f"{node_label} #`{entry.validator_id + 1}`" + return f"{node_label} #`{entry.validator_id}`" @staticmethod def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index ae082c90..9fc6957f 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -366,12 +366,10 @@ "description": "Minipool %{minipoolAddress} has had its Penalty increased to %{penalty_perc}%!" }, "node_smoothing_pool_joined": { - "title": ":cup_with_straw: Node Operator Joined Smoothing Pool", - "description": "Node operator %{node} joined the smoothing pool with their %{minipoolCount} minipools!" + "description_small": "Node operator %{node} joined the smoothing pool with their %{minipoolCount} minipools!" }, "node_smoothing_pool_left": { - "title": ":leaves: Node Operator Left Smoothing Pool", - "description": "Node operator %{node} has left the smoothing pool with their %{minipoolCount} minipools!" 
+ "description_small": "Node operator %{node} has left the smoothing pool with their %{minipoolCount} minipools!" }, "auction_lot_create_event": { "title": ":scales: Lot Created", @@ -430,13 +428,33 @@ "description": "Migration of solo validator %{pubkey} to minipool %{minipool} with a bond of **%{bondAmount} ETH** was initiated!" }, "minipool_failed_deposit": { - "title": ":fire: Failed Minipool Deposit", - "description": ":fire_engine: %{node} burned **%{burnedValue} ETH** trying to create a minipool! :fire_engine:" + "title": ":fire: Failed Validator Deposit", + "description": ":fire_engine: %{node} burned **%{burnedValue} ETH** trying to create a validator! :fire_engine:" }, "minipool_slash_event": { "title": ":rotating_light: Minipool Slashed", "description": "Minipool %{minipool} has been slashed by %{slasher}" }, + "validator_deposit_event": { + "description_small": ":construction_site: %{from} created a validator with a **%{amount} ETH** bond!" + }, + "validator_multi_deposit_event": { + "title": ":construction_site: Multi Validator Deposit", + "description": "**%{numberOfValidators} validators** created with a total bond of **%{amount} ETH**!", + "description_small": ":construction_site: %{from} created **%{numberOfValidators} validators** with a **%{amount} ETH** bond!" + }, + "megapool_validator_assigned_event": { + "description_small": ":handshake: Validator %{validatorId} of node %{node} has been assigned funds from the deposit pool!" + }, + "megapool_validator_exiting_event": { + "description_small": ":octagonal_sign: Validator %{validatorId} of node %{node} has started exiting!" + }, + "megapool_validator_exited_event": { + "description_small": ":leaves: Validator %{validatorId} of node %{node} has exited!" + }, + "validator_queue_exited_event": { + "description_small": ":leaves: %{nodeAddress} has removed a validator from the queue!" 
+ }, "otc_swap_event": { "title": ":currency_exchange: OTC Swap", "description": "%{seller_clean} exchanged %{sellAmount} %{sellToken} for %{buyAmount} %{buyToken} with %{buyer_clean}" diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 708d4ff6..0d9d175c 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -239,17 +239,13 @@ def assemble(args) -> Embed: # do this here before the amounts are converted to a string amount = args.get("amount") or args.get("ethAmount", 0) # raise Exception(str((args, args.assets, args.event_name))) - if any(( - ("pool_deposit" in args.event_name and amount >= 1000), - (args.event_name == "eth_deposit_event" and amount >= 500), - (args.event_name == "cs_deposit_eth_event" and args.assets >= 500) - )): + if ("pool_deposit" in args.event_name) and (amount >= 1000): e.set_image(url="https://media.giphy.com/media/VIX2atZr8dCKk5jF6L/giphy.gif") elif any(kw in args.event_name for kw in ["_scrub_event", "_dissolve_event", "_slash_event", "finality_delay_event"]): e.set_image(url="https://c.tenor.com/p3hWK5YRo6IAAAAC/this-is-fine-dog.gif") elif "_proposal_smoothie_" in args.event_name: e.set_image(url="https://cdn.discordapp.com/attachments/812745786638336021/1106983677130461214/butta-commie-filter.png") - elif "sdao_member_kick_multi" in args.event_name: + elif "sdao_member_kick" in args.event_name: e.set_image(url="https://media1.tenor.com/m/Xuv3IEoH1a4AAAAC/youre-fired-donald-trump.gif") match args.event_name: @@ -283,6 +279,8 @@ def assemble(args) -> Embed: use_large = args["assets"] >= 50 case "rocksolid_withdrawal_event": use_large = args["shares"] >= 50 + case "validator_multi_deposit_event": + use_large = args["numberOfValidators"] >= 5 case _: use_large = (amount >= 100) From 862caf60fca9e9777ab7f729c7455cfbfe94b687 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Feb 2026 20:19:46 +0000 Subject: [PATCH 081/279] dissolve and penalty events --- 
rocketwatch/plugins/events/events.json | 8 ++++++++ rocketwatch/plugins/events/events.py | 7 +++++-- rocketwatch/strings/embeds.en.json | 12 ++++++++++-- rocketwatch/utils/embeds.py | 2 ++ 4 files changed, 25 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index 6dc41bc0..9b41f974 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -527,6 +527,14 @@ { "event_name": "MegapoolValidatorExited", "name": "megapool_validator_exited_event" + }, + { + "event_name": "MegapoolValidatorDissolved", + "name": "megapool_validator_dissolve_event" + }, + { + "event_name": "MegapoolPenaltyApplied", + "name": "megapool_penalty_event" } ] }, diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index ecd67c2c..cbbf058c 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -732,8 +732,11 @@ def share_repr(percentage: float) -> str: else: event_name = "odao_member_challenge_rejected_event" if "node_smoothing_pool_state_changed" in event_name: - # geet minipool count - args.minipoolCount = rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) + validator_count = rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) + megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) + if megapool_address != "0x0000000000000000000000000000000000000000": + validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) + args.validatorCount = validator_count if args.state: event_name = "node_smoothing_pool_joined" else: diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 9fc6957f..e24ada55 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -366,10 +366,10 @@ "description": "Minipool %{minipoolAddress} has had its Penalty increased to 
%{penalty_perc}%!" }, "node_smoothing_pool_joined": { - "description_small": "Node operator %{node} joined the smoothing pool with their %{minipoolCount} minipools!" + "description_small": ":cup_with_straw: %{node} joined the smoothing pool with their %{validatorCount} validators!" }, "node_smoothing_pool_left": { - "description_small": "Node operator %{node} has left the smoothing pool with their %{minipoolCount} minipools!" + "description_small": ":cup_with_straw: %{node} has left the smoothing pool with their %{validatorCount} validators!" }, "auction_lot_create_event": { "title": ":scales: Lot Created", @@ -452,6 +452,14 @@ "megapool_validator_exited_event": { "description_small": ":leaves: Validator %{validatorId} of node %{node} has exited!" }, + "megapool_validator_dissolve_event": { + "title": ":rotating_light: Validator Dissolved", + "description": ":leaves: Validator %{validatorId} of node %{node} has been dissolved!" + }, + "megapool_penalty_event": { + "title": ":police_car: Megapool Penalty Applied", + "description": "Node %{node} has been penalized for **%{amount} ETH**!" + }, "validator_queue_exited_event": { "description_small": ":leaves: %{nodeAddress} has removed a validator from the queue!" 
}, diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 0d9d175c..6ce424c5 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -243,6 +243,8 @@ def assemble(args) -> Embed: e.set_image(url="https://media.giphy.com/media/VIX2atZr8dCKk5jF6L/giphy.gif") elif any(kw in args.event_name for kw in ["_scrub_event", "_dissolve_event", "_slash_event", "finality_delay_event"]): e.set_image(url="https://c.tenor.com/p3hWK5YRo6IAAAAC/this-is-fine-dog.gif") + elif "_penalty" in args.event_name: + e.set_image(url="https://i.giphy.com/jmSjPi6soIoQCFwaXJ.webp") elif "_proposal_smoothie_" in args.event_name: e.set_image(url="https://cdn.discordapp.com/attachments/812745786638336021/1106983677130461214/butta-commie-filter.png") elif "sdao_member_kick" in args.event_name: From 6b6612ddd4555207d55ec17636ac0def2738b098 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Feb 2026 08:48:21 +0000 Subject: [PATCH 082/279] handle missing global contracts --- rocketwatch/contracts/rocketpool | 2 +- rocketwatch/plugins/events/events.py | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/rocketwatch/contracts/rocketpool b/rocketwatch/contracts/rocketpool index a08da963..fb7d9c42 160000 --- a/rocketwatch/contracts/rocketpool +++ b/rocketwatch/contracts/rocketpool @@ -1 +1 @@ -Subproject commit a08da9639b8a1619c06f6ec314e36b4765b9452c +Subproject commit fb7d9c428dc3dddc3fbd3e634e3cb365655df89e diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index cbbf058c..fa78dc09 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -86,7 +86,12 @@ def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"] # generate filters for global events for group in config["global"]: - contract = rp.assemble_contract(name=group["contract_name"]) + try: + contract = rp.assemble_contract(name=group["contract_name"]) 
+ except Exception as e: + log.warning(f"Failed to get contract {group['contract_name']}: {e}") + continue + for event in group["events"]: event_map[event["event_name"]] = event["name"] def super_builder(_contract, _event) -> PartialFilter: From 350a930a5d38603cfc39195d2a0e7c53ba686c1a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Feb 2026 23:23:13 +0000 Subject: [PATCH 083/279] rename delegate_stats --- .../minipools_upkeep_task/minipools_upkeep_task.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py index b16a2ef6..5e29a938 100644 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py @@ -92,7 +92,8 @@ async def upkeep_minipools(self): logging.info("Updated minipool states") @command() - async def delegate_stats(self, interaction: Interaction): + async def minipool_delegates(self, interaction: Interaction): + """Show stats for minipool delegate adoption""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) # only consider active minipools minipool_filter = {"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}} @@ -109,8 +110,8 @@ async def delegate_stats(self, interaction: Interaction): {"$sort": {"count": -1}}, ])).to_list() e = Embed() - e.title = "Delegate Stats (Active Minipools)" - desc = "**Effective Delegate Distribution of Minipools:**\n" + e.title = "Minipool Delegate Stats" + desc = "**Effective Delegate Distribution:**\n" c_sum = sum(d['count'] for d in distribution_stats) s = "\u00A0" * 4 # latest delegate acording to rp @@ -123,7 +124,7 @@ async def delegate_stats(self, interaction: Interaction): name += " (Latest)" desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" 
desc += "\n" - desc += "**Minipools configured to always use latest delegate:**\n" + desc += "**Use Latest Delegate:**\n" c_sum = sum(d['count'] for d in use_latest_delegate_stats) for d in use_latest_delegate_stats: # true = yes, false = no From 095f40ebe9698eb348d52c749612c06bd923d7b7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Feb 2026 23:23:22 +0000 Subject: [PATCH 084/279] guard megapool check --- rocketwatch/plugins/events/events.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index fa78dc09..b9c5c97f 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -738,9 +738,10 @@ def share_repr(percentage: float) -> str: event_name = "odao_member_challenge_rejected_event" if "node_smoothing_pool_state_changed" in event_name: validator_count = rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) - megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) - if megapool_address != "0x0000000000000000000000000000000000000000": - validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) + if rp.is_saturn_deployed(): + megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) + if megapool_address != "0x0000000000000000000000000000000000000000": + validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) args.validatorCount = validator_count if args.state: event_name = "node_smoothing_pool_joined" From 224fd8ffaff2f02d5949a72e93a178fa4056bf5d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Feb 2026 23:32:16 +0000 Subject: [PATCH 085/279] fix combined queue logic --- rocketwatch/plugins/queue/queue.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git 
a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 5c014db8..a7afc209 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -115,19 +115,21 @@ def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: @staticmethod def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_standard: int, express_rate: int) -> tuple[int, int]: + log.debug(f"Calculating entries used in interval [{start}, {end}] with express_rate {express_rate} and queue lengths {len_express} (express) and {len_standard} (standard)") + total_entries = end - start + 1 # end is inclusive - num_express = total_entries // (express_rate + 1) - # express queue is used when index % (express_queue_rate + 1) != express_queue_rate + num_standard = total_entries // (express_rate + 1) + # standard queue is used when index % (express_queue_rate + 1) == express_queue_rate # this checks whether we "cross" an extra express queue slot in the interval if ((end + 1) % (express_rate + 1)) < (start % (express_rate + 1)): - num_express += 1 + num_standard += 1 - num_express = min(num_express, len_express) - # if express queue runs out, remaining entries are taken from standard queue - num_standard = min(total_entries - num_express, len_standard) + num_standard = min(num_standard, len_standard) # if standard queue runs out, remaining entries are taken from express queue + num_express = min(total_entries - num_standard, len_express) + # if express queue runs out, remaining entries are taken from standard queue if (num_express + num_standard) < total_entries: - num_express = min(total_entries - num_standard, len_express) + num_standard = min(total_entries - num_express, len_standard) return num_express, num_standard @@ -156,6 +158,8 @@ def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: express_queue_length, standard_queue_length, express_queue_rate ) + log.debug(f"{start_express_queue = }") + 
log.debug(f"{start_standard_queue = }") limit_express_queue, limit_standard_queue = Queue._get_entries_used_in_interval( queue_index + start, queue_index + start + limit - 1, @@ -163,6 +167,8 @@ def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: standard_queue_length - start_standard_queue, express_queue_rate ) + log.debug(f"{limit_express_queue = }") + log.debug(f"{limit_standard_queue = }") express_entries_rev = Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block)[::-1] standard_entries_rev = Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block)[::-1] From 67facbda60b1b60976d075d15375243fc444c1d0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 17 Feb 2026 00:00:14 +0000 Subject: [PATCH 086/279] force https for submodules --- .github/workflows/docker-ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 3a700033..9c0b90b3 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -9,6 +9,10 @@ jobs: docker: runs-on: ubuntu-latest steps: + - + name: Force HTTPS for Git submodules + run: | + git config --global url."https://github.com/".insteadOf git@github.com: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 From 6f4ce5fffa5f68bd5435887a2d722218a64ba88d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 17 Feb 2026 00:03:46 +0000 Subject: [PATCH 087/279] custom checkout --- .github/workflows/docker-ci.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 9c0b90b3..2d791496 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -9,21 +9,21 @@ jobs: docker: runs-on: ubuntu-latest steps: - - - name: Force HTTPS for Git submodules + - name: Force HTTPS for 
Git submodules run: | git config --global url."https://github.com/".insteadOf git@github.com: - - - name: Set up Docker Buildx + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - - - name: Login to DockerHub + - name: Login to DockerHub uses: docker/login-action@v2 with: username: haloooloolo password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Build and push + - name: Build and push uses: docker/build-push-action@v4 with: context: "{{defaultContext}}:rocketwatch" From 124a37b4918b2598621b64bf8fd5570697fa3f1c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 17 Feb 2026 00:05:12 +0000 Subject: [PATCH 088/279] fix context --- .github/workflows/docker-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 2d791496..bd004837 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -12,6 +12,7 @@ jobs: - name: Force HTTPS for Git submodules run: | git config --global url."https://github.com/".insteadOf git@github.com: + git config --global url."https://github.com/".insteadOf ssh://git@github.com/ - name: Checkout uses: actions/checkout@v4 with: @@ -26,7 +27,7 @@ jobs: - name: Build and push uses: docker/build-push-action@v4 with: - context: "{{defaultContext}}:rocketwatch" + context: ./rocketwatch push: true tags: haloooloolo/rocketwatch:latest no-cache: true From b7adafe3843b56f4a16eb1edc779f4807715dcac Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 17 Feb 2026 00:30:13 +0000 Subject: [PATCH 089/279] add cuteness --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 941344c1..b5feef32 100644 --- 
a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -53,12 +53,12 @@ def get_deposit_pool_stats() -> Embed: if (total_queue_length) > 0: embed.description = "" if exp_queue_length > 0: - embed.description += f"**Express Queue ({exp_queue_length})**\n" + embed.description += f"🐇 **Express Queue ({exp_queue_length})**\n" embed.description += exp_queue_content if exp_queue_length > display_limit: embed.description += f"{display_limit + 1}. `...`\n" if std_queue_length > 0: - embed.description += f"**Standard Queue ({std_queue_length})**\n" + embed.description += f"🐢 **Standard Queue ({std_queue_length})**\n" embed.description += std_queue_content if std_queue_length > display_limit: embed.description += f"{display_limit + 1}. `...`\n" From be3171c6fda8491a26d28a1f7d086d087aa87818 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 17 Feb 2026 14:06:50 +0000 Subject: [PATCH 090/279] update queue formatting --- rocketwatch/plugins/queue/queue.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index a7afc209..111c44f0 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -6,7 +6,7 @@ from functools import cache from cachetools.func import ttl_cache from discord import Interaction -from discord.app_commands import command +from discord.app_commands import command, describe from discord.ext.commands import Cog from eth_typing import ChecksumAddress, BlockIdentifier @@ -35,13 +35,13 @@ def __init__(self, bot: RocketWatch): self.bot = bot class ValidatorPageView(PageView): - def __init__(self, queue_type: Literal["combined", "standard", "express"]): + def __init__(self, lane: Literal["combined", "standard", "express"]): super().__init__(page_size=15) - if queue_type == "standard": - self.queue_name = "Validator Standard Queue" + 
if lane == "standard": + self.queue_name = "🐢 Validator Standard Queue" self.content_loader = Queue.get_standard_queue - elif queue_type == "express": - self.queue_name = "Validator Express Queue" + elif lane == "express": + self.queue_name = "🐇 Validator Express Queue" self.content_loader = Queue.get_express_queue else: self.queue_name = "Validator Queue" @@ -173,31 +173,32 @@ def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: express_entries_rev = Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block)[::-1] standard_entries_rev = Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block)[::-1] - index_digits = len(str(q_len)) + index_digits = len(str(max(standard_queue_length, express_queue_length))) content = "" for i in range(len(express_entries_rev) + len(standard_entries_rev)): effective_queue_index = queue_index + start + i is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate if (is_express and express_entries_rev) or (not standard_entries_rev): entry = express_entries_rev.pop() - express_pos = start_express_queue + limit_express_queue - len(express_entries_rev) - queue_pos = f"E{express_pos:0{index_digits}}" + # express_pos = start_express_queue + limit_express_queue - len(express_entries_rev) + lane_pos = "🐇" else: entry = standard_entries_rev.pop() - standard_pos = start_standard_queue + limit_standard_queue - len(standard_entries_rev) - queue_pos = f"S{standard_pos:0{index_digits}}" + # standard_pos = start_standard_queue + limit_standard_queue - len(standard_entries_rev) + lane_pos = "🐢" overall_pos = start + i + 1 entry_str = Queue.__format_queue_entry(entry) - content += f"{overall_pos}. (`{queue_pos}`) {entry_str}\n" + content += f"{overall_pos}. 
{lane_pos} {entry_str}\n" return q_len, content @command() - async def queue(self, interaction: Interaction, queue_type: Literal["combined", "standard", "express"] = "combined"): + @describe(lane="type of queue to display") + async def queue(self, interaction: Interaction, lane: Literal["combined", "standard", "express"] = "combined"): """Show the RP validator queue""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - view = Queue.ValidatorPageView(queue_type) + view = Queue.ValidatorPageView(lane) embed = await view.load() await interaction.followup.send(embed=embed, view=view) From c7d3c5bbb63a4329e8d1d9aa082864448c4264a0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 18 Feb 2026 01:00:34 +0000 Subject: [PATCH 091/279] adjust rocketNodeStaking calls for Saturn --- rocketwatch/plugins/milestones/milestones.json | 2 +- rocketwatch/plugins/rpl/rpl.py | 2 +- rocketwatch/plugins/tvl/tvl.py | 2 +- rocketwatch/utils/sea_creatures.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/milestones/milestones.json b/rocketwatch/plugins/milestones/milestones.json index 0e26b32a..f605cf77 100644 --- a/rocketwatch/plugins/milestones/milestones.json +++ b/rocketwatch/plugins/milestones/milestones.json @@ -3,7 +3,7 @@ "id": "milestone_rpl_stake", "function": "call", "args": [ - "rocketNodeStaking.getTotalRPLStake" + "rocketNodeStaking.getTotalStakedRPL" ], "formatter": "to_float", "min": 10000, diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 4b9088b5..0f026a3e 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -109,7 +109,7 @@ async def effective_rpl_staked(self, ctx: Context): await ctx.defer(ephemeral=is_hidden(ctx)) e = Embed() # get total RPL staked - total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalRPLStake")) + total_rpl_staked = 
solidity.to_float(rp.call("rocketNodeStaking.getTotalStakedRPL")) e.add_field(name="Total RPL Staked:", value=f"{humanize.intcomma(total_rpl_staked, 2)} RPL", inline=False) # get effective RPL staked effective_rpl_stake = await (await self.db.node_operators_new.aggregate([ diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index faf6b1b3..ae8596db 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -337,7 +337,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Staked RPL: This is all ETH that has been staked by Node Operators. data["Total RPL Locked"]["Staked RPL"]["Node Operators"]["_val"] = solidity.to_float( - rp.call("rocketNodeStaking.getTotalRPLStake")) + rp.call("rocketNodeStaking.getTotalStakedRPL")) # oDAO bonded RPL: RPL oDAO Members have to lock up to join it. This RPL can be slashed if they misbehave. data["Total RPL Locked"]["Staked RPL"]["oDAO Bond"]["_val"] = solidity.to_float( diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index b242aa31..9fb8f081 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -71,9 +71,9 @@ def get_holding_for_address(address): if "RETH" in contract_name: eth_balance += solidity.to_float(token.results[0]) * price_cache["reth_price"] # add eth they provided for minipools - eth_balance += solidity.to_int(rp.call("rocketNodeStaking.getNodeETHProvided", address)) + eth_balance += solidity.to_float(rp.call("rocketNodeStaking.getNodeETHBonded", address)) # add their staked RPL - staked_rpl = solidity.to_int(rp.call("rocketNodeStaking.getNodeRPLStake", address)) + staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getNodeStakedRPL", address)) eth_balance += staked_rpl * price_cache["rpl_price"] return eth_balance From ee6740a3a7ee124904eaec0d78b4d405cf63d2fa Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 18 Feb 2026 01:34:26 +0000 
Subject: [PATCH 092/279] more rocketNodeStaking updates --- rocketwatch/plugins/collateral/collateral.py | 2 +- rocketwatch/plugins/node_task/node_task.py | 2 +- rocketwatch/plugins/rewards/rewards.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 852ed3ab..7f42b437 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -58,7 +58,7 @@ def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]] minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 8 * 10**18) for node in node_batch ).results] rpl_stakes += [r.results[0] for r in rp.multicall.aggregate( - node_staking.functions.getNodeRPLStake(node) for node in node_batch + node_staking.functions.getNodeStakedRPL(node) for node in node_batch ).results] return { diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 735681de..7490ca2c 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -439,7 +439,7 @@ async def update_dynamic_node_operator_metadata(self): [((n["address"], "smoothing_pool_registration_state"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getAverageNodeFee"), n["address"]], [((n["address"], "average_node_fee"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeRPLStake"), n["address"]], + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), n["address"]], [((n["address"], "rpl_stake"), safe_to_float)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeEffectiveRPLStake"), n["address"]], [((n["address"], "effective_rpl_stake"), safe_to_float)]), diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 33bbbb65..63135a53 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ 
b/rocketwatch/plugins/rewards/rewards.py @@ -154,7 +154,7 @@ async def simulate_rewards( rpl_min: float = solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMinimumPerMinipoolStake", block=data_block)) rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) actual_borrowed_eth = solidity.to_float(rp.call("rocketNodeStaking.getNodeETHMatched", address, block=data_block)) - actual_rpl_stake = solidity.to_float(rp.call("rocketNodeStaking.getNodeRPLStake", address, block=data_block)) + actual_rpl_stake = solidity.to_float(rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) inflation_rate: int = rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) inflation_interval: int = rp.call("rocketTokenRPL.getInflationIntervalTime", block=data_block) From 70e7c04453fdf808035e264c252266111693ae56 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 18 Feb 2026 02:52:15 +0000 Subject: [PATCH 093/279] remove command notice --- rocketwatch/plugins/metrics/metrics.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index bcfa115f..799e615f 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -24,7 +24,6 @@ class Metrics(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.notice_ttl_cache = TTLCache(math.inf, ttl=60 * 15) self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.collection = self.db.command_metrics @@ -182,16 +181,7 @@ async def on_command(self, ctx): @commands.Cog.listener() async def on_command_completion(self, ctx): - log.info( - f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) completed successfully") - if not is_hidden(ctx) and ctx.author not in self.notice_ttl_cache: - self.notice_ttl_cache[ctx.author] = True - e = Embed() - e.title = 
'Did you know?' - e.description = "Calling this command (or any!) in other channels will make them only appear for you! " \ - "Give it a try next time!" - await ctx.reply(embed=e, ephemeral=True) - + log.info(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) completed successfully") try: # get the timestamp of when the command was called from the db data = await self.collection.find_one({'_id': ctx.interaction.id}) From 21dc72c9679c5b84aa92e401ed721f8aca44a6e0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Wed, 18 Feb 2026 08:36:49 +0000 Subject: [PATCH 094/279] stop updating effective stake for now --- rocketwatch/plugins/node_task/node_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index 7490ca2c..f84ab647 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -441,8 +441,8 @@ async def update_dynamic_node_operator_metadata(self): [((n["address"], "average_node_fee"), safe_to_float)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), n["address"]], [((n["address"], "rpl_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeEffectiveRPLStake"), n["address"]], - [((n["address"], "effective_rpl_stake"), safe_to_float)]), + # lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeEffectiveRPLStake"), n["address"]], + # [((n["address"], "effective_rpl_stake"), safe_to_float)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeETHCollateralisationRatio"), n["address"]], [((n["address"], "effective_node_share"), safe_inv)]), lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor_address"]], From 0c8281e36b86fa004aed9990f96a6971fe1c44df Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Feb 2026 13:27:01 +0000 Subject: [PATCH 095/279] 
tweak rETH deposit event --- rocketwatch/strings/embeds.en.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index e24ada55..2e068d34 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -72,8 +72,8 @@ }, "pool_deposit_event": { "title": ":rocket: Pool Deposit", - "description": "**%{amount} ETH** deposited into the deposit pool!", - "description_small": ":rocket: %{fancy_from} deposited **%{amount} ETH** into the deposit pool!" + "description": "**%{amount} ETH** deposited for rETH!", + "description_small": ":rocket: %{fancy_from} deposited **%{amount} ETH** for rETH!" }, "odao_rewards_snapshot_event": { "title": ":camera_with_flash: Reward Snapshot Published", From 2259214648267a954199bcaec04628010a652bec Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Feb 2026 13:27:15 +0000 Subject: [PATCH 096/279] fix DP queue capacity calculation --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index b5feef32..e1b690de 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -63,7 +63,7 @@ def get_deposit_pool_stats() -> Embed: if std_queue_length > display_limit: embed.description += f"{display_limit + 1}. `...`\n" - queue_capacity = max(total_queue_length * 31 - dp_balance, 0.0) + queue_capacity = max(total_queue_length * 32 - dp_balance, 0.0) embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." 
else: lines = [] From 64ec8cd7b86a6231a22ab7c36c7576d1fad55a0d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Feb 2026 22:04:24 +0000 Subject: [PATCH 097/279] add jump button to page view --- rocketwatch/utils/views.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index 7fc391db..ad6447de 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -18,8 +18,14 @@ def _title(self) -> str: @abstractmethod async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: pass + + def position_to_page_index(self, position: int) -> int: + return (position - 1) // self.page_size async def load(self) -> Embed: + if self.page_index < 0: + self.page_index = 0 + num_items, content = await self._load_content( (self.page_index * self.page_size), ((self.page_index + 1) * self.page_size - 1) @@ -31,7 +37,7 @@ async def load(self) -> Embed: self.clear_items() # remove buttons return embed - max_page_index = int(math.ceil(num_items / self.page_size)) - 1 + max_page_index = self.position_to_page_index(num_items) if self.page_index > max_page_index: # if the content changed and this is out of bounds, try again self.page_index = max_page_index @@ -53,3 +59,25 @@ async def next_page(self, interaction: Interaction, _) -> None: self.page_index += 1 embed = await self.load() await interaction.response.edit_message(embed=embed, view=self) + + class JumpToModal(ui.Modal, title="Jump To Position"): + def __init__(self, view: 'PageView'): + super().__init__() + self.view = view + self.position_field = ui.TextInput( + label="Position", + placeholder="Enter position to jump to", + required=True + ) + self.add_item(self.position_field) + + async def on_submit(self, interaction: Interaction) -> None: + position = int(self.position_field.value) + self.view.page_index = self.view.position_to_page_index(position) + embed = 
await self.view.load() + await interaction.response.edit_message(embed=embed, view=self.view) + + @ui.button(label="Jump", style=ButtonStyle.gray) + async def jump_to_position(self, interaction: Interaction, _) -> None: + modal = self.JumpToModal(self) + await interaction.response.send_modal(modal) From 80ca7ed6f8218c52804acddaf343875e5bef0bf2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 11:05:53 +0000 Subject: [PATCH 098/279] remove Saturn deployment guard --- rocketwatch/plugins/events/events.py | 7 +++---- rocketwatch/plugins/queue/queue.py | 2 +- rocketwatch/utils/rocketpool.py | 4 ---- 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index b9c5c97f..fa78dc09 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -738,10 +738,9 @@ def share_repr(percentage: float) -> str: event_name = "odao_member_challenge_rejected_event" if "node_smoothing_pool_state_changed" in event_name: validator_count = rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) - if rp.is_saturn_deployed(): - megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) - if megapool_address != "0x0000000000000000000000000000000000000000": - validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) + megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) + if megapool_address != "0x0000000000000000000000000000000000000000": + validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) args.validatorCount = validator_count if args.state: event_name = "node_smoothing_pool_joined" diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 111c44f0..1aa90323 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ 
-91,7 +91,7 @@ def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: Block @staticmethod def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: - if not rp.is_saturn_deployed() or limit <= 0: + if limit <= 0: return 0, "" list_contract = rp.get_contract_by_name("linkedListStorage") diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 2390196e..e84e43b2 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -149,10 +149,6 @@ def get_uint(self, key: str) -> int: def get_protocol_version(self) -> tuple: version_string = self.get_string("protocol.version") return tuple(map(int, version_string.split("."))) - - def is_saturn_deployed(self) -> bool: - protocol_version = self.get_protocol_version() - return protocol_version >= (1, 4) @cached(cache=ABI_CACHE) def get_abi_by_name(self, name): From b56d13aa787ecb4a2c390439b03ea571274ab5fe Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 11:06:16 +0000 Subject: [PATCH 099/279] more robust queue capacity calculation --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index e1b690de..0e949284 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -34,11 +34,11 @@ def get_deposit_pool_stats() -> Embed: dp_balance = solidity.to_float(multicall["getBalance"]) deposit_cap = solidity.to_int(multicall["getMaximumDepositPoolSize"]) + free_capacity = solidity.to_float(multicall["getMaximumDepositAmount"]) if deposit_cap - dp_balance < 0.01: dp_status = "Capacity reached!" else: - free_capacity = solidity.to_float(multicall["getMaximumDepositAmount"]) dp_status = f"Enough space for **{free_capacity:,.2f} ETH**." 
embed = Embed(title="Deposit Pool Stats") @@ -63,7 +63,7 @@ def get_deposit_pool_stats() -> Embed: if std_queue_length > display_limit: embed.description += f"{display_limit + 1}. `...`\n" - queue_capacity = max(total_queue_length * 32 - dp_balance, 0.0) + queue_capacity = max(free_capacity - deposit_cap, 0.0) embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." else: lines = [] From 9f352d9f5d1ca57ebf04e87108afdbd97c3d3e4c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 11:06:50 +0000 Subject: [PATCH 100/279] =?UTF-8?q?=F0=9F=90=94=F0=9F=8D=B2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../plugins/chicken_soup/chicken_soup.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 rocketwatch/plugins/chicken_soup/chicken_soup.py diff --git a/rocketwatch/plugins/chicken_soup/chicken_soup.py b/rocketwatch/plugins/chicken_soup/chicken_soup.py new file mode 100644 index 00000000..a7b2c1d5 --- /dev/null +++ b/rocketwatch/plugins/chicken_soup/chicken_soup.py @@ -0,0 +1,39 @@ +from discord import Interaction +from discord.ext import commands +from discord.app_commands import command +from rocketwatch import RocketWatch + +from datetime import datetime, timedelta + + +class ChickenSoup(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.duration = timedelta(minutes=5) + self.dispense_end = {} + + @command() + async def chicken_soup(self, interaction: Interaction): + self.dispense_end[interaction.channel_id] = datetime.now() + self.duration + await interaction.response.send_message( + "https://tenor.com/view/muppets-muppet-show-swedish-chef-chicken-pot-gif-9362214582988742217" + ) + + @commands.Cog.listener() + async def on_message(self, message) -> None: + if message.author == self.bot.user: + return + + if message.channel.id not in self.dispense_end: + return + + if datetime.now() > 
self.dispense_end[message.channel.id]: + del self.dispense_end[message.channel.id] + return + + await message.add_reaction("🐔") + await message.add_reaction("🍲") + + +async def setup(bot): + await bot.add_cog(ChickenSoup(bot)) From f85e62549a6e1f0bd961b4d9b26df19d8b667106 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 13:15:38 +0000 Subject: [PATCH 101/279] link to saturn-1.net --- rocketwatch/plugins/events/events.py | 11 ++--------- rocketwatch/utils/embeds.py | 26 +++++++++++++++++++------- rocketwatch/utils/rocketpool.py | 14 ++++++++++++-- 3 files changed, 33 insertions(+), 18 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index fa78dc09..9f6ea4ca 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -390,15 +390,8 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: receipt = w3.eth.get_transaction_receipt(event.transactionHash) - def is_minipool(_address: ChecksumAddress) -> bool: - return rp.call("rocketMinipoolManager.getMinipoolExists", _address) - - def is_megapool(_address: ChecksumAddress) -> bool: - sha3 = w3.solidity_keccak(["string", "address"], ["megapool.exists", _address]) - return rp.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() - - is_minipool_event = is_minipool(event.address) or is_minipool(receipt.to) - is_megapool_event = is_megapool(event.address) or is_megapool(receipt.to) + is_minipool_event = rp.is_minipool(event.address) or rp.is_minipool(receipt.to) + is_megapool_event = rp.is_megapool(event.address) or rp.is_megapool(receipt.to) if not any([ is_minipool_event, diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 6ce424c5..8f20c839 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -97,19 +97,32 @@ def el_explorer_url( 
name_fmt: Optional[Callable[[str], str]] = None, block="latest" ): - if w3.isAddress(target): # sanitize address - url = f"{cfg['execution_layer.explorer']}/address/{target}" target = w3.to_checksum_address(target) - - if prefix != -1 and rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): - prefix += ":cup_with_straw:" - + url = f"{cfg['execution_layer.explorer']}/address/{target}" + + chain = cfg["rocketpool.chain"] + dashboard_network = "" if (chain == "mainnet") else f"?network={chain}" + + if rp.is_node(target): + megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", target) + if megapool_address != "0x0000000000000000000000000000000000000000": + url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" + + if rp.is_megapool(target): + url = f"https://saturn-1.net/megapool/{target}{dashboard_network}" + + if rp.is_minipool(target): + pass # TODO add explorer url once supported + n_key = f"addresses.{target}" if not name and (n := _(n_key)) != n_key: name = n + if prefix != -1 and rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): + prefix += ":cup_with_straw:" + if not name and (member_id := rp.call("rocketDAONodeTrusted.getMemberID", target, block=block)): if prefix != -1: prefix += "🔮" @@ -179,7 +192,6 @@ def el_explorer_url( prefix = "" return f"{prefix}[{name}]({url})" - def prepare_args(args): for arg_key, arg_value in list(args.items()): # store raw value diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index e84e43b2..1a1763b4 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -3,7 +3,7 @@ from pathlib import Path from bidict import bidict -from eth_typing import BlockIdentifier +from eth_typing import BlockIdentifier, ChecksumAddress from cachetools import cached, FIFOCache from cachetools.func import ttl_cache from multicall import Call, Multicall @@ -39,7 +39,7 @@ def flush(self): 
self.CONTRACT_CACHE.clear() self.ABI_CACHE.clear() self.ADDRESS_CACHE.clear() - self.addresses = bidict() + self.addresses.clear() self._init_contract_addresses() def _init_contract_addresses(self) -> None: @@ -226,6 +226,16 @@ def get_percentage_rpl_swapped(self): value = solidity.to_float(self.call("rocketTokenRPL.totalSwappedRPL")) percentage = (value / 18_000_000) * 100 return round(percentage, 2) + + def is_node(self, address: ChecksumAddress) -> bool: + return self.call("rocketNodeManager.getNodeExists", address) + + def is_minipool(self, address: ChecksumAddress) -> bool: + return self.call("rocketMinipoolManager.getMinipoolExists", address) + + def is_megapool(self, address: ChecksumAddress) -> bool: + sha3 = w3.solidity_keccak(["string", "address"], ["megapool.exists", address]) + return self.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() @ttl_cache(ttl=60) def get_eth_usdc_price(self) -> float: From c78d20521c06fb4d730a4e0ff2a0e9587f29fa4a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 14:20:30 +0000 Subject: [PATCH 102/279] add assignment capacity info to DP status --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 0e949284..4ab4a475 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -65,6 +65,9 @@ def get_deposit_pool_stats() -> Embed: queue_capacity = max(free_capacity - deposit_cap, 0.0) embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." + possible_assignments = min(int(dp_balance // 32), total_queue_length) + if possible_assignments > 0: + embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignments**!" 
else: lines = [] if (num_eb4 := int(dp_balance // 28)) > 0: From 91fcad46d8a52f25d506bbee8a8a81fe7a068b7f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Feb 2026 14:30:18 +0000 Subject: [PATCH 103/279] fix newline handling --- rocketwatch/plugins/deposit_pool/deposit_pool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 4ab4a475..47f8eb3d 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -64,10 +64,10 @@ def get_deposit_pool_stats() -> Embed: embed.description += f"{display_limit + 1}. `...`\n" queue_capacity = max(free_capacity - deposit_cap, 0.0) - embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." + embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators.\n" possible_assignments = min(int(dp_balance // 32), total_queue_length) if possible_assignments > 0: - embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignments**!" 
+ embed.description += f"Sufficient balance for **{possible_assignments} deposit assignments**!\n" else: lines = [] if (num_eb4 := int(dp_balance // 28)) > 0: From 552d13a5f06e201d72c3738730ea749c568559a0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 27 Feb 2026 10:38:59 +0000 Subject: [PATCH 104/279] build minipool proposal index --- rocketwatch/plugins/apr/apr.py | 6 +- .../plugins/beacon_events/beacon_events.py | 3 +- .../plugins/deposit_pool/deposit_pool.py | 5 +- rocketwatch/plugins/lottery/lottery.py | 5 +- rocketwatch/plugins/node_task/node_task.py | 6 +- rocketwatch/plugins/proposals/proposals.py | 372 ++++++++---------- rocketwatch/plugins/random/random.py | 2 +- rocketwatch/utils/shared_w3.py | 108 ++--- 8 files changed, 202 insertions(+), 305 deletions(-) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 68c9e9b1..2d69511d 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -17,7 +17,7 @@ from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import w3, historical_w3 -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak log = logging.getLogger("apr") log.setLevel(cfg["log_level"]) @@ -87,7 +87,7 @@ async def on_error(self, err: Exception): @hybrid_command() async def reth_apr(self, ctx: Context): """Show the current rETH APR""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed() e.title = "Current rETH APR" e.description = "For some comparisons against other LST: [dune dashboard](https://dune.com/rp_community/lst-comparison)" @@ -257,7 +257,7 @@ async def reth_apr(self, ctx: Context): @hybrid_command() async def node_apr(self, ctx: Context): """Show the current node operator APR""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed() e.title = "Current NO APR" e.description = "Dashed red 
lines above and bellow the solid red one are leb8 and leb16 respectively. " \ diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 53879f43..a15fdb4d 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,6 +1,7 @@ import logging from typing import Optional, cast +import asyncio import pymongo import requests import eth_utils @@ -51,7 +52,7 @@ def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> lis def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: try: log.debug(f"Checking slot {slot_number}") - beacon_block = bacon.get_block(slot_number)["data"]["message"] + beacon_block = asyncio.run(bacon.get_block(slot_number))["data"]["message"] except ValueError as err: if err.args[0] == "Block does not exist": log.error(f"Beacon block {slot_number} not found, skipping.") diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 47f8eb3d..5dee621a 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -64,10 +64,11 @@ def get_deposit_pool_stats() -> Embed: embed.description += f"{display_limit + 1}. `...`\n" queue_capacity = max(free_capacity - deposit_cap, 0.0) - embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators.\n" possible_assignments = min(int(dp_balance // 32), total_queue_length) + + embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." if possible_assignments > 0: - embed.description += f"Sufficient balance for **{possible_assignments} deposit assignments**!\n" + embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignments**!" 
else: lines = [] if (num_eb4 := int(dp_balance // 28)) > 0: diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 5ce2cbef..19fbdfa3 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -39,12 +39,11 @@ async def _check_indexes(self): async def load_sync_committee(self, period): assert period in ["latest", "next"] await self._check_indexes() - h = bacon.get_block("head") + h = await bacon.get_block("head") sync_period = int(h['data']['message']['slot']) // 32 // 256 if period == "next": sync_period += 1 - res = bacon._make_get_request(f"/eth/v1/beacon/states/head/sync_committees?epoch={sync_period * 256}") - data = res["data"] + data = (await bacon.get_sync_committee(sync_period * 256))["data"] await self.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index f84ab647..f5a821cd 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -276,11 +276,10 @@ async def add_static_beacon_data_to_minipools(self): return # we need to do smaller bulks as the pubkey is quite long and we dont want to make the query url too long - # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] for pubkey_batch in as_chunks(public_keys, self.batch_size): data = {} # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=pubkey_batch)["data"] + beacon_data = (await bacon.get_validators("head", ids=pubkey_batch))["data"] # update data dict with results for d in beacon_data: data[d["validator"]["pubkey"]] = int(d["index"]) @@ -304,11 +303,10 @@ async def update_dynamic_minipool_beacon_metadata(self): validator_indexes = await self.db.minipools_new.distinct("validator_index") # remove None values validator_indexes = [i for i in validator_indexes if i is 
not None] - # endpoint = bacon.get_validators("head", ids=vali_indexes)["data"] for index_batch in as_chunks(validator_indexes, self.batch_size): data = {} # get beacon data for public keys - beacon_data = bacon.get_validators("head", ids=index_batch)["data"] + beacon_data = (await bacon.get_validators("head", ids=index_batch))["data"] # update data dict with results for d in beacon_data: data[int(d["index"])] = { diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 11eeefe8..ff2dda37 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -4,26 +4,28 @@ from datetime import datetime, timedelta from io import BytesIO -import aiohttp +import asyncio +from aiohttp.client_exceptions import ClientResponseError import matplotlib as mpl -import numpy as np -from PIL import Image from discord import File +from discord.utils import as_chunks from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib import pyplot as plt -from pymongo import AsyncMongoClient, ReplaceOne -from wordcloud import WordCloud +from pymongo import AsyncMongoClient +from cronitor import Monitor from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed from utils.solidity import beacon_block_to_date, date_to_beacon_block from utils.time_debug import timerun_async -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak +from utils.shared_w3 import bacon -log = logging.getLogger("proposals") +cog_id = "proposals" +log = logging.getLogger(cog_id) log.setLevel(cfg["log_level"]) LOOKUP = { @@ -35,30 +37,28 @@ "S": "Lodestar" }, "execution": { - "I": "Infura", - "P": "Pocket", "G": "Geth", "B": "Besu", "N": "Nethermind", + "R": "Reth", "X": "External" } } COLORS = { - "Nimbus" : "#cc9133", - "Prysm" : "#40bfbf", - "Lighthouse" : "#9933cc", - "Teku" : "#3357cc", 
- "Lodestar" : "#fb5b9d", - - "Infura" : "#ff2f00", - "Pocket" : "#e216e9", - "Geth" : "#40bfbf", - "Besu" : "#55aa7a", - "Nethermind" : "#2688d9", + "Nimbus" : "#CC9133", + "Prysm" : "#40BFBF", + "Lighthouse" : "#9933CC", + "Teku" : "#3357CC", + "Lodestar" : "#FB5B9D", + + "Geth" : "#40BFBF", + "Besu" : "#55AA7A", + "Nethermind" : "#2688D9", + "Reth" : "#CF0512", "External" : "#808080", - "Smart Node" : "#cc6e33", + "Smart Node" : "#CC6E33", "Allnodes" : "#4533cc", "No proposals yet": "#E0E0E0", "Unknown" : "#AAAAAA", @@ -73,14 +73,13 @@ # noinspection RegExpUnnecessaryNonCapturingGroup SMARTNODE_REGEX = re.compile(r"^RP(?:(?:-)([A-Z])([A-Z])?)? (?:v)?(\d+\.\d+\.\d+(?:-\w+)?)(?:(?:(?: \()|(?: gw:))(.+)(?:\)))?") - -def parse_propsal(entry): - graffiti = bytes.fromhex(entry["validator"]["graffiti"][2:]).decode("utf-8").rstrip('\x00') +def parse_proposal(beacon_block: dict) -> dict: + graffiti = bytes.fromhex(beacon_block["body"]["graffiti"][2:]).decode("utf-8").rstrip('\x00') data = { - "slot" : int(entry["number"]), - "validator": int(entry["validator"]["index"]), + "slot" : int(beacon_block["slot"]), + "validator": int(beacon_block["proposer_index"]), "graffiti" : graffiti, - } + } | PROPOSAL_TEMPLATE if m := SMARTNODE_REGEX.findall(graffiti): groups = m[0] # smart node proposal @@ -116,179 +115,172 @@ def parse_propsal(entry): class Proposals(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.rocketscan_proposals_url = "https://rocketscan.io/api/mainnet/beacon/blocks/all" - self.last_chore_run = 0 - # connect to local mongodb - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - self.created_view = False - - async def create_minipool_proposal_view(self): - if self.created_view: - return + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + self.monitor = Monitor("proposals-task", api_key=cfg["other.secrets.cronitor"]) + self.batch_size = 100 + self.bot.loop.create_task(self.loop()) + + async def 
loop(self): + await self.bot.wait_until_ready() + await self._create_indices() + while not self.bot.is_closed(): + p_id = time.time() + self.monitor.ping(state="run", series=p_id) + try: + await self.work() + self.monitor.ping(state="complete", series=p_id) + except Exception as err: + await self.bot.report_error(err) + self.monitor.ping(state="fail", series=p_id) + finally: + await asyncio.sleep(300) + + async def _create_indices(self): + await self.bot.wait_until_ready() + try: + await self.db.minipools_new.create_index([("validator_index", 1)]) + await self.db.proposals.create_index([("validator", 1), ("slot", -1)]) + log.info("Created indexes on minipools_new and proposals collections") + except Exception as e: + log.debug(f"Could not create indexes: {e}") + + async def work(self): + log.debug("starting proposal task") + await self.fetch_proposals() + await self.create_minipool_proposal_view() + log.debug("finished proposal task") + + async def fetch_proposals(self): + if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): + last_checked_slot = db_entry["slot"] + else: + last_checked_slot = 4700012 # last slot before merge + + latest_slot = int((await bacon.get_header("finalized"))["data"]["header"]["message"]["slot"]) + for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): + log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") + await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) + await self.db.last_checked_block.replace_one({"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True) + + async def fetch_proposal(self, slot: int) -> None: + try: + beacon_header = (await bacon.get_header(slot))["data"]["header"]["message"] + except ClientResponseError as e: + if e.status == 404: + return None + else: + raise e + + validator_index = int(beacon_header["proposer_index"]) + if not (minipool := (await self.db.minipools.find_one({"validator": validator_index}))): + return None + + 
beacon_block = (await bacon.get_block(slot))["data"]["message"] + proposal_data = parse_proposal(beacon_block) + await self.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) + + async def create_minipool_proposal_view(self): log.info("creating minipool proposal view") pipeline = [ { '$match': { - 'node_operator': { - '$ne': None - }, - 'beacon.status' : 'active_ongoing', - "status": "staking" + 'node_operator': {'$ne': None}, + 'beacon.status' : 'active_ongoing' } - }, { + }, + { '$lookup': { 'from' : 'proposals', 'localField' : 'validator_index', 'foreignField': 'validator', 'as' : 'proposals', 'pipeline' : [ - { - '$sort': { - 'slot': -1 - } - } + {'$sort': {'slot': -1}}, + {'$limit': 1} ] } - }, { - '$project': { - 'node_operator': 1, - 'validator' : 1, - 'proposal' : { - '$arrayElemAt': [ - '$proposals', 0 - ] - } - } - }, { - '$project': { - 'node_operator': 1, - 'validator' : "$validator_index", - 'slot' : '$proposal.slot' + }, + { + '$unwind': { + 'path': '$proposals', + 'preserveNullAndEmptyArrays': True } - }, { + }, + { '$group': { '_id' : '$node_operator', - 'slot' : { - '$max': '$slot' - }, - 'validator_count': { - '$sum': 1 - } - } - }, { - '$match': { - 'slot': { - '$ne': None - } - } - }, { - '$lookup': { - 'from' : 'proposals', - 'localField' : 'slot', - 'foreignField': 'slot', - 'as' : 'proposals' + 'validator_count': {'$sum': 1}, + 'latest_proposal': {'$first': '$proposals'} } - }, { + }, + { + '$match': {'latest_proposal': {'$ne': None}} + }, + { '$project': { - 'node_operator' : 1, - 'latest_proposal': { - '$arrayElemAt': [ - '$proposals', 0 - ] - }, - 'validator_count': 1 + '_id': '$_id', + 'node_operator': '$_id', + 'validator_count': 1, + 'latest_proposal': 1 } } ] await self.db.minipool_proposals.drop() - await self.db.create_collection( - "minipool_proposals", - viewOn="minipools_new", - pipeline=pipeline - ) - self.created_view = True - - async def gather_all_proposals(self): - log.info("getting all proposals 
using the rocketscan.dev API") - async with aiohttp.ClientSession() as session: - async with session.get(self.rocketscan_proposals_url) as resp: - if resp.status != 200: - log.error("failed to get proposals using the rocketscan.dev API") - return - proposals = await resp.json() - log.info("got all proposals using the rocketscan.dev API") - await self.db.proposals.bulk_write([ReplaceOne({"slot": int(entry["number"])}, - PROPOSAL_TEMPLATE | parse_propsal(entry), - upsert=True) for entry in proposals]) - log.info("finished gathering all proposals") - - async def chore(self, ctx: Context): - # only run if self.last_chore_run timestamp is older than 1 hour - msg = await ctx.send(content="doing chores...") - if (time.time() - self.last_chore_run) > 3600: - self.last_chore_run = time.time() - await msg.edit(content="gathering proposals...") - await self.gather_all_proposals() - await self.create_minipool_proposal_view() - else: - log.debug("skipping chore") - return msg + await self.db.create_collection("minipool_proposals", viewOn="minipools_new", pipeline=pipeline) @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): - distribution = await (await self.db.minipool_proposals.aggregate([ + # Build the match stage to filter out Allnodes if needed + match_stage = {} + if remove_allnodes: + match_stage['$match'] = {'latest_proposal.type': {'$ne': 'Allnodes'}} + + pipeline = [ { '$project': { 'attribute' : f'$latest_proposal.{attribute}', 'type' : '$latest_proposal.type', 'validator_count': 1 } - }, { + }, + { '$group': { - '_id' : ['$attribute', '$type'], - 'count' : { - '$sum': 1 - }, - 'validator_count': { - '$sum': '$validator_count' - } - } - }, { - '$sort': { - 'count': 1 + '_id' : {'attribute': '$attribute', 'type': '$type'}, + 'count' : {'$sum': 1}, + 'validator_count': {'$sum': '$validator_count'} } } - ])).to_list() + ] + + # Add match stage at the beginning if filtering Allnodes + if remove_allnodes: + pipeline.insert(0, match_stage) + 
+ distribution = await (await self.db.minipool_proposals.aggregate(pipeline)).to_list() + if remove_allnodes: d = {'remove_from_total': {'count': 0, 'validator_count': 0}} for entry in distribution: - if entry['_id'][1] == 'Allnodes': - d['remove_from_total']['count'] += entry['count'] - d['remove_from_total']['validator_count'] += entry['validator_count'] - else: - d[entry['_id'][0]] = entry + d[entry['_id']['attribute']] = entry return d else: - distribution = [entry | {'_id': entry['_id'][0]} for entry in distribution] - # merge entries that have the same _id by summing their attributes + # Convert nested _id structure and merge by attribute d = {} for entry in distribution: - if entry["_id"] in d: - d[entry["_id"]]["count"] += entry["count"] - d[entry["_id"]]["validator_count"] += entry["validator_count"] + key = entry['_id']['attribute'] + if key in d: + d[key]['count'] += entry['count'] + d[key]['validator_count'] += entry['validator_count'] else: - d[entry["_id"]] = entry - return d + d[key] = entry + return d @hybrid_command() async def version_chart(self, ctx: Context): """ Show a historical chart of used Smart Node versions """ - await ctx.defer(ephemeral=is_hidden(ctx)) - msg = await self.chore(ctx) - await msg.edit(content="generating version chart...") - + await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed(title="Version Chart") e.description = "The graph below shows proposal stats using a **5-day rolling window**, " \ "and **does not represent operator adoption**.\n" \ @@ -401,10 +393,10 @@ async def version_chart(self, ctx: Context): e.set_image(url="attachment://chart.png") # send data - await msg.edit(content="", embed=e, attachments=[File(img, filename="chart.png")]) + await ctx.send(embed=e, file=File(img, filename="chart.png")) img.close() - async def plot_axes_with_data(self, attr: str, ax1, ax2, name, remove_allnodes=False): + async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes=False): # group by client and get count 
data = await self.gather_attribute(attr, remove_allnodes) @@ -475,12 +467,11 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, name, remove_allnodes=F ) ax2.set_title("Node Operators", fontsize=22) - async def proposal_vs_node_operators_embed(self, attribute, name, msg, remove_allnodes=False): + async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnodes=False): fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8)) # iterate axes in pairs title = f"Rocket Pool {name} Distribution {'without Allnodes' if remove_allnodes else ''}" - await msg.edit(content=f"generating {attribute} distribution graph...") - await self.plot_axes_with_data(attribute, ax1, ax2, name, remove_allnodes) + await self.plot_axes_with_data(attribute, ax1, ax2, remove_allnodes) e = Embed(title=title) @@ -505,80 +496,29 @@ async def client_distribution(self, ctx: Context, remove_allnodes=False): """ Generate a distribution graph of clients. """ - await ctx.defer(ephemeral=is_hidden(ctx)) - msg = await self.chore(ctx) + await ctx.defer(ephemeral=is_hidden_weak(ctx)) embeds, files = [], [] for attr, name in [["consensus_client", "Consensus Client"], ["execution_client", "Execution Client"]]: - e, f = await self.proposal_vs_node_operators_embed(attr, name, msg, remove_allnodes) + e, f = await self.proposal_vs_node_operators_embed(attr, name, remove_allnodes) embeds.append(e) files.append(f) - await msg.edit(content="", embeds=embeds, attachments=files) + await ctx.send(embeds=embeds, files=files) @hybrid_command() async def user_distribution(self, ctx: Context): """ Generate a distribution graph of users. """ - await ctx.defer(ephemeral=is_hidden(ctx)) - msg = await self.chore(ctx) - e, f = await self.proposal_vs_node_operators_embed("type", "User", msg) - await msg.edit(content="", embed=e, attachments=[f]) - - @hybrid_command() - async def comments(self, ctx: Context): - """ - Generate a world cloud of comments. 
- """ - await ctx.defer(ephemeral=is_hidden(ctx)) - msg = await self.chore(ctx) - await msg.edit(content="generating comments word cloud...") - - # load image - mask = np.array(Image.open("./plugins/proposals/assets/logo-words.png")) - - # load font - font_path = "./plugins/proposals/assets/noto.ttf" - - wc = WordCloud(max_words=2 ** 16, - scale=2, - mask=mask, - max_font_size=100, - min_font_size=1, - background_color="white", - relative_scaling=0, - font_path=font_path, - color_func=lambda *args, **kwargs: "rgb(235, 142, 85)") - - # aggregate comments with their count - comments = await (await self.db.proposals.aggregate([ - {"$match": {"comment": {"$exists": 1}}}, - {"$group": {"_id": "$comment", "count": {"$sum": 1}}}, - {"$sort": {"count": -1, "slot": -1}} - ])).to_list() - comment_words = {x['_id']: x["count"] for x in comments} - - # generate word cloud - wc.fit_words(comment_words) - - # respond with image - img = BytesIO() - wc.to_image().save(img, format="png") - img.seek(0) - plt.close() - e = Embed(title="Rocket Pool Proposal Comments") - e.set_image(url="attachment://image.png") - await msg.edit(content="", embed=e, attachments=[File(img, filename="image.png")]) - img.close() + await ctx.defer(ephemeral=is_hidden_weak(ctx)) + embed, file = await self.proposal_vs_node_operators_embed("type", "User") + await ctx.send(embed=embed, file=file) @hybrid_command() async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_by_node_operators=False): """ Generate a ranking of most used execution and consensus clients. 
""" - await ctx.defer(ephemeral=is_hidden(ctx)) - msg = await self.chore(ctx) - await msg.edit(content="generating client combo ranking...") - + await ctx.defer(ephemeral=is_hidden_weak(ctx)) # aggregate [consensus, execution] pair counts client_pairs = await (await self.db.minipool_proposals.aggregate([ { @@ -621,7 +561,7 @@ async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_ for i, pair in enumerate(client_pairs) ) e.description = f"Currently showing {'node operator' if group_by_node_operators else 'validator'} counts\n```{desc}```" - await msg.edit(content="", embed=e) + await ctx.send(embed=e) async def setup(bot): diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index e0255133..61112392 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -121,7 +121,7 @@ async def dev_time(self, ctx: Context): e.add_field(name="Coordinated Universal Time", value=f"{dev_time.strftime(time_format)}\n" f"`{binary_day} (0x{uint_day:04x})`") - b = solidity.slot_to_beacon_day_epoch_slot(int(bacon.get_block("head")["data"]["message"]["slot"])) + b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_header("head"))["data"]["header"]["message"]["slot"])) e.add_field(name="Beacon Time", value=f"Day {b[0]}, {b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 89db7c35..e916231a 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -1,15 +1,14 @@ import logging import math -import circuitbreaker -import requests -from requests import HTTPError, ConnectTimeout +import aiohttp +from aiohttp.web import HTTPError +from eth_typing import BlockIdentifier from web3 import Web3, HTTPProvider -from web3.beacon import Beacon as Bacon from web3.middleware import geth_poa_middleware from utils.cfg import cfg -from utils.retry import retry 
+from utils.retry import retry_async log = logging.getLogger("shared_w3") log.setLevel(cfg["log_level"]) @@ -25,73 +24,32 @@ if "archive" in cfg['execution_layer.endpoint'].keys(): historical_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.archive'])) -endpoints = cfg["consensus_layer.endpoints"] -tmp = [] -exceptions = ( - HTTPError, ConnectionError, ConnectTimeout, requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout) -for fallback_endpoint in reversed(endpoints): - class SuperBacon(Bacon): - def __init__( - self, - base_url: str, - session: requests.Session = requests.Session(), - ) -> None: - super().__init__(base_url, session) - - @retry(tries=3 if tmp else 1, exceptions=exceptions, delay=0.5) - @retry(tries=5 if tmp else 1, exceptions=ValueError, delay=0.1) - @circuitbreaker.circuit(failure_threshold=2 if tmp else math.inf, - recovery_timeout=15, - expected_exception=exceptions, - fallback_function=tmp[-1].get_block if tmp else None, - name=f"get_block using {fallback_endpoint}") - def get_block(self, *args): - block_id = args[-1] - if len(args) > 1: - log.warning(f"falling back to {self.base_url} for block {block_id}") - endpoint = f"/eth/v2/beacon/blocks/{block_id}" - url = self.base_url + endpoint - response = self.session.get(url, timeout=(3.05, 20)) - if response.status_code == 404 and all(q in response.json()["message"].lower() for q in ["not", "found"]): - raise ValueError("Block does not exist") - response.raise_for_status() - return response.json() - - @retry(tries=3 if tmp else 1, exceptions=exceptions, delay=0.5) - @circuitbreaker.circuit(failure_threshold=2 if tmp else math.inf, - recovery_timeout=90, - fallback_function=tmp[-1].get_validator_balances if tmp else None, - name=f"get_validator_balances using {fallback_endpoint}") - def get_validator_balances(self, *args, **kwargs): - state_id = args[-1] - if len(args) > 1: - log.warning(f"falling back to {self.base_url} for validator balances {state_id}") - endpoint = 
f"/eth/v1/beacon/states/{state_id}/validator_balances" - # id array if present, and is array of ints - if "ids" in kwargs and all(isinstance(i, int) for i in kwargs['ids']): - # turn to array of strings - kwargs['ids'] = [str(i) for i in kwargs['ids']] - endpoint += f"?id={','.join(kwargs['ids'])}" - url = self.base_url + endpoint - response = self.session.get(url, timeout=(5, 30)) - response.raise_for_status() - return response.json() - - def get_validators(self, *args, **kwargs): - state_id = args[-1] - if len(args) > 1: - log.warning(f"falling back to {self.base_url} for validator balances {state_id}") - endpoint = f"/eth/v1/beacon/states/{state_id}/validators" - # id array if present, and is array of ints - if "ids" in kwargs and isinstance(kwargs["ids"], list): - # turn to array of strings - kwargs['ids'] = [str(i) for i in kwargs['ids']] - endpoint += f"?id={','.join(kwargs['ids'])}" - url = self.base_url + endpoint - response = self.session.get(url, timeout=(5, 30)) - response.raise_for_status() - return response.json() - - - tmp.append(SuperBacon(fallback_endpoint)) -bacon = tmp[-1] +class SuperBacon: + def __init__(self, base_url: str) -> None: + self.base_url = base_url + timeout = aiohttp.ClientTimeout(sock_connect=3.05, total=20) + self.session = aiohttp.ClientSession(raise_for_status=True, timeout=timeout) + + @retry_async(tries=3, exceptions=HTTPError, delay=0.5) + async def _make_get_request(self, url: str): + async with self.session.get(url) as response: + return await response.json() + + async def get_header(self, block_id: BlockIdentifier): + url = f"{self.base_url}/eth/v1/beacon/headers/{block_id}" + return await self._make_get_request(url) + + async def get_block(self, block_id: BlockIdentifier): + url = f"{self.base_url}/eth/v2/beacon/blocks/{block_id}" + return await self._make_get_request(url) + + async def get_validators(self, state_id, ids: list[int]): + id_str = ','.join([str(i) for i in ids]) + url = 
f"{self.base_url}/eth/v1/beacon/states/{state_id}/validators?id={id_str}" + return await self._make_get_request(url) + + async def get_sync_committee(self, epoch): + url = f"{self.base_url}/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" + return await self._make_get_request(url) + +bacon = SuperBacon(cfg["consensus_layer.endpoints"][-1]) From a5ffcc596e8e3009aecf18f467ebc9f52e65d960 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 27 Feb 2026 11:26:30 +0000 Subject: [PATCH 105/279] bring back the bacon --- rocketwatch/plugins/beacon_events/beacon_events.py | 3 +-- rocketwatch/plugins/proposals/proposals.py | 2 +- rocketwatch/utils/shared_w3.py | 4 ++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index a15fdb4d..b1ab5a41 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,7 +1,6 @@ import logging from typing import Optional, cast -import asyncio import pymongo import requests import eth_utils @@ -52,7 +51,7 @@ def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> lis def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: try: log.debug(f"Checking slot {slot_number}") - beacon_block = asyncio.run(bacon.get_block(slot_number))["data"]["message"] + beacon_block = await bacon.get_block(slot_number)["data"]["message"] except ValueError as err: if err.args[0] == "Block does not exist": log.error(f"Beacon block {slot_number} not found, skipping.") diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index ff2dda37..9870754d 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -142,7 +142,7 @@ async def _create_indices(self): await self.db.proposals.create_index([("validator", 
1), ("slot", -1)]) log.info("Created indexes on minipools_new and proposals collections") except Exception as e: - log.debug(f"Could not create indexes: {e}") + log.warning(f"Could not create indexes: {e}") async def work(self): log.debug("starting proposal task") diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index e916231a..51450e0a 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -1,7 +1,7 @@ import logging -import math import aiohttp +from web3.beacon import Beacon as Bacon from aiohttp.web import HTTPError from eth_typing import BlockIdentifier from web3 import Web3, HTTPProvider @@ -24,7 +24,7 @@ if "archive" in cfg['execution_layer.endpoint'].keys(): historical_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.archive'])) -class SuperBacon: +class SuperBacon(Bacon): def __init__(self, base_url: str) -> None: self.base_url = base_url timeout = aiohttp.ClientTimeout(sock_connect=3.05, total=20) From 447bb02a820294a83bf7f0d9bc48c91ab96acfe8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 27 Feb 2026 12:04:27 +0000 Subject: [PATCH 106/279] bacon strips --- .../plugins/beacon_events/beacon_events.py | 10 ++++---- rocketwatch/plugins/lottery/lottery.py | 4 ++-- rocketwatch/plugins/node_task/node_task.py | 4 ++-- rocketwatch/plugins/proposals/proposals.py | 6 ++--- rocketwatch/plugins/random/random.py | 2 +- rocketwatch/utils/shared_w3.py | 24 +++++++++---------- 6 files changed, 24 insertions(+), 26 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index b1ab5a41..2e54279c 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -51,12 +51,10 @@ def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> lis def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: try: 
log.debug(f"Checking slot {slot_number}") - beacon_block = await bacon.get_block(slot_number)["data"]["message"] - except ValueError as err: - if err.args[0] == "Block does not exist": - log.error(f"Beacon block {slot_number} not found, skipping.") - return [] - raise err + beacon_block = bacon.get_block(slot_number)["data"]["message"] + except requests.exceptions.HTTPError: + log.error(f"Beacon block {slot_number} not found, skipping.") + return [] events = self._get_slashings(beacon_block) if proposal_event := self._get_proposal(beacon_block): diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 19fbdfa3..5a44a6d0 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -39,11 +39,11 @@ async def _check_indexes(self): async def load_sync_committee(self, period): assert period in ["latest", "next"] await self._check_indexes() - h = await bacon.get_block("head") + h = await bacon.get_block_async("head") sync_period = int(h['data']['message']['slot']) // 32 // 256 if period == "next": sync_period += 1 - data = (await bacon.get_sync_committee(sync_period * 256))["data"] + data = (await bacon.get_sync_committee_async(sync_period * 256))["data"] await self.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/node_task/node_task.py index f5a821cd..0cc120b6 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/node_task/node_task.py @@ -279,7 +279,7 @@ async def add_static_beacon_data_to_minipools(self): for pubkey_batch in as_chunks(public_keys, self.batch_size): data = {} # get beacon data for public keys - beacon_data = (await bacon.get_validators("head", ids=pubkey_batch))["data"] + beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] # update data dict with results for d in beacon_data: 
data[d["validator"]["pubkey"]] = int(d["index"]) @@ -306,7 +306,7 @@ async def update_dynamic_minipool_beacon_metadata(self): for index_batch in as_chunks(validator_indexes, self.batch_size): data = {} # get beacon data for public keys - beacon_data = (await bacon.get_validators("head", ids=index_batch))["data"] + beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] # update data dict with results for d in beacon_data: data[int(d["index"])] = { diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 9870754d..353ebe83 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -156,7 +156,7 @@ async def fetch_proposals(self): else: last_checked_slot = 4700012 # last slot before merge - latest_slot = int((await bacon.get_header("finalized"))["data"]["header"]["message"]["slot"]) + latest_slot = int((await bacon.get_block_header_async("finalized"))["data"]["header"]["message"]["slot"]) for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) @@ -164,7 +164,7 @@ async def fetch_proposals(self): async def fetch_proposal(self, slot: int) -> None: try: - beacon_header = (await bacon.get_header(slot))["data"]["header"]["message"] + beacon_header = (await bacon.get_block_header_async(slot))["data"]["header"]["message"] except ClientResponseError as e: if e.status == 404: return None @@ -175,7 +175,7 @@ async def fetch_proposal(self, slot: int) -> None: if not (minipool := (await self.db.minipools.find_one({"validator": validator_index}))): return None - beacon_block = (await bacon.get_block(slot))["data"]["message"] + beacon_block = (await bacon.get_block_async(slot))["data"]["message"] proposal_data = parse_proposal(beacon_block) await self.db.proposals.update_one({"slot": slot}, 
{"$set": proposal_data}, upsert=True) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 61112392..d38ee75b 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -121,7 +121,7 @@ async def dev_time(self, ctx: Context): e.add_field(name="Coordinated Universal Time", value=f"{dev_time.strftime(time_format)}\n" f"`{binary_day} (0x{uint_day:04x})`") - b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_header("head"))["data"]["header"]["message"]["slot"])) + b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_block_header_async("head"))["data"]["header"]["message"]["slot"])) e.add_field(name="Beacon Time", value=f"Day {b[0]}, {b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 51450e0a..591a64e0 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -26,30 +26,30 @@ class SuperBacon(Bacon): def __init__(self, base_url: str) -> None: - self.base_url = base_url + super().__init__(base_url) timeout = aiohttp.ClientTimeout(sock_connect=3.05, total=20) - self.session = aiohttp.ClientSession(raise_for_status=True, timeout=timeout) + self.async_session = aiohttp.ClientSession(raise_for_status=True, timeout=timeout) @retry_async(tries=3, exceptions=HTTPError, delay=0.5) - async def _make_get_request(self, url: str): - async with self.session.get(url) as response: + async def _make_get_request_async(self, url: str): + async with self.async_session.get(url) as response: return await response.json() - async def get_header(self, block_id: BlockIdentifier): + async def get_block_header_async(self, block_id: BlockIdentifier): url = f"{self.base_url}/eth/v1/beacon/headers/{block_id}" - return await self._make_get_request(url) + return await self._make_get_request_async(url) - async def get_block(self, block_id: BlockIdentifier): + async 
def get_block_async(self, block_id: BlockIdentifier): url = f"{self.base_url}/eth/v2/beacon/blocks/{block_id}" - return await self._make_get_request(url) + return await self._make_get_request_async(url) - async def get_validators(self, state_id, ids: list[int]): + async def get_validators_async(self, state_id, ids: list[int]): id_str = ','.join([str(i) for i in ids]) url = f"{self.base_url}/eth/v1/beacon/states/{state_id}/validators?id={id_str}" - return await self._make_get_request(url) + return await self._make_get_request_async(url) - async def get_sync_committee(self, epoch): + async def get_sync_committee_async(self, epoch): url = f"{self.base_url}/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" - return await self._make_get_request(url) + return await self._make_get_request_async(url) bacon = SuperBacon(cfg["consensus_layer.endpoints"][-1]) From 9674f1a8633882dbfac331a5c1c7e6fc9cd62ebf Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 09:58:24 +0000 Subject: [PATCH 107/279] consolidate DB collections --- rocketwatch/plugins/apr/apr.py | 4 +- .../plugins/beacon_events/beacon_events.py | 4 +- .../plugins/commissions/commissions.py | 2 +- .../db_upkeep_task.py} | 109 +++++------ rocketwatch/plugins/debug/debug.py | 18 +- .../fee_distribution/fee_distribution.py | 2 +- .../minipool_delegates/minipool_delegates.py | 69 +++++++ .../minipool_distribution.py | 2 +- .../minipool_states/minipool_states.py | 4 +- .../plugins/minipool_task/minipool_task.py | 176 ------------------ .../minipools_upkeep_task.py | 138 -------------- rocketwatch/plugins/proposals/proposals.py | 36 ++-- rocketwatch/plugins/queue/queue.py | 51 ----- rocketwatch/plugins/random/random.py | 4 +- rocketwatch/plugins/rpl/rpl.py | 6 +- rocketwatch/plugins/tvl/tvl.py | 12 +- 16 files changed, 156 insertions(+), 481 deletions(-) rename rocketwatch/plugins/{node_task/node_task.py => db_upkeep_task/db_upkeep_task.py} (85%) create mode 100644 
rocketwatch/plugins/minipool_delegates/minipool_delegates.py delete mode 100644 rocketwatch/plugins/minipool_task/minipool_task.py delete mode 100644 rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 2d69511d..6bb81362 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -99,7 +99,7 @@ async def reth_apr(self, ctx: Context): return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', @@ -270,7 +270,7 @@ async def node_apr(self, ctx: Context): return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 2e54279c..5bffd126 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -89,7 +89,7 @@ def _get_slashings(self, beacon_block: dict) -> list[Event]: events = [] for slash in slashings: - minipool = self.db.minipools.find_one({"validator": int(slash["minipool"])}) + minipool = self.db.minipools.find_one({"validator_index": int(slash["minipool"])}) if not minipool: log.info(f"Skipping slashing of unknown validator {slash['minipool']}") continue @@ -124,7 +124,7 @@ def _get_proposal(self, beacon_block: dict) -> Optional[Event]: return None validator_index = int(beacon_block["proposer_index"]) - if not (minipool := self.db.minipools.find_one({"validator": validator_index})): + if not (minipool := self.db.minipools.find_one({"validator_index": validator_index})): # 
not proposed by a minipool return None diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index fca6012a..f7df5df4 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -34,7 +34,7 @@ async def commission_history(self, ctx: Context): e = Embed(title='Commission History') - minipools = await self.db.minipools.find().sort("validator", 1).to_list(None) + minipools = await self.db.minipools.find().sort("validator_index", 1).to_list(None) # create dot chart of minipools # x-axis: validator # y-axis: node_fee diff --git a/rocketwatch/plugins/node_task/node_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py similarity index 85% rename from rocketwatch/plugins/node_task/node_task.py rename to rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 0cc120b6..67c7325f 100644 --- a/rocketwatch/plugins/node_task/node_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -1,12 +1,13 @@ import logging import time +import asyncio import pymongo from multicall import Call from cronitor import Monitor from pymongo import AsyncMongoClient, UpdateOne, UpdateMany -from discord.ext import tasks, commands +from discord.ext import commands from discord.utils import as_chunks from rocketwatch import RocketWatch @@ -48,51 +49,47 @@ def is_true(_, b): return b is True -class NodeTask(commands.Cog): +class DBUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 50 - self.loop.start() - - async def cog_unload(self): - self.loop.cancel() + self.bot.loop.create_task(self.loop()) - @tasks.loop(minutes=60) async def loop(self): - p_id = time.time() - self.monitor.ping(state="run", series=p_id) - try: - log.debug("starting node task") - await self.check_indexes() - await 
self.add_untracked_minipools() - await self.add_static_data_to_minipools() - await self.update_dynamic_minipool_metadata() - await self.add_static_deposit_data_to_minipools() - await self.add_static_beacon_data_to_minipools() - await self.update_dynamic_minipool_beacon_metadata() - await self.add_untracked_node_operators() - await self.add_static_data_to_node_operators() - await self.update_dynamic_node_operator_metadata() - log.debug("node task finished") - self.monitor.ping(state="complete", series=p_id) - except Exception as err: - await self.bot.report_error(err) - self.monitor.ping(state="fail", series=p_id) - - @loop.before_loop - async def on_ready(self): await self.bot.wait_until_ready() + await self.check_indexes() + while not self.bot.is_closed(): + p_id = time.time() + self.monitor.ping(state="run", series=p_id) + try: + log.debug("starting db upkeep task") + await self.add_untracked_minipools() + await self.add_static_data_to_minipools() + await self.update_dynamic_minipool_metadata() + await self.add_static_deposit_data_to_minipools() + await self.add_static_beacon_data_to_minipools() + await self.update_dynamic_minipool_beacon_metadata() + await self.add_untracked_node_operators() + await self.add_static_data_to_node_operators() + await self.update_dynamic_node_operator_metadata() + log.debug("finished db upkeep task") + self.monitor.ping(state="complete", series=p_id) + except Exception as err: + await self.bot.report_error(err) + self.monitor.ping(state="fail", series=p_id) + finally: + await asyncio.sleep(600) @timerun_async async def add_untracked_minipools(self): # rocketMinipoolManager.getMinipoolAt(i) returns the address of the minipool at index i mm = rp.get_contract_by_name("rocketMinipoolManager") latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 - # get latest _id in minipools_new collection + # get latest _id in minipools collection latest_db = 0 - if res := await self.db.minipools_new.find_one(sort=[("_id", 
pymongo.DESCENDING)]): + if res := await self.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] # return early if we're up to date if latest_db >= latest_rp: @@ -107,7 +104,7 @@ async def add_untracked_minipools(self): for i in index_batch ]) log.debug(f"Inserting {len(data)} new minipools into db") - await self.db.minipools_new.insert_many([ + await self.db.minipools.insert_many([ {"_id": i, "address": a} for i, a in data.items() ]) @@ -123,7 +120,7 @@ async def add_static_data_to_minipools(self): lambda a: (mm.address, [rp.seth_sig(mm.abi, "getMinipoolPubkey"), a], [((a, "pubkey"), safe_to_hex)]), ] # get all minipool addresses from db that do not have a node operator assigned - minipool_addresses = await self.db.minipools_new.distinct("address", {"node_operator": {"$exists": False}}) + minipool_addresses = await self.db.minipools.distinct("address", {"node_operator": {"$exists": False}}) # get node operator addresses from rp # return early if no minipools need to be updated if not minipool_addresses: @@ -149,7 +146,7 @@ async def add_static_data_to_minipools(self): {"$set": d}, ) for a, d in data.items() ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static data") @timerun_async @@ -169,7 +166,7 @@ async def update_dynamic_minipool_metadata(self): lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) ] # get all minipool addresses from db - minipool_addresses = await self.db.minipools_new.distinct("address") + minipool_addresses = await self.db.minipools.distinct("address", {"finalized": {"$ne": True}}) for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): res = await rp.multicall2( [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], @@ -189,7 +186,7 @@ async def update_dynamic_minipool_metadata(self): {"$set": d} ) for a, d in 
data.items() ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) log.debug("Minipools updated with metadata") @@ -199,7 +196,7 @@ async def add_static_deposit_data_to_minipools(self): # - do not have a deposit_amount # - are in the initialised state # sort by status time - minipools = await self.db.minipools_new.find( + minipools = await self.db.minipools.find( {"deposit_amount": {"$exists": False}, "status": "initialised"}, {"address": 1, "_id": 0, "status_time": 1} ).sort("status_time", pymongo.ASCENDING).to_list() @@ -262,14 +259,14 @@ async def add_static_deposit_data_to_minipools(self): {"$set": d}, ) for a, d in data.items() ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static deposit data") @timerun async def add_static_beacon_data_to_minipools(self): # get all public keys from db where no validator_index is set - public_keys = await self.db.minipools_new.distinct("pubkey", {"validator_index": {"$exists": False}}) + public_keys = await self.db.minipools.distinct("pubkey", {"validator_index": {"$exists": False}}) # return early if no minipools need to be updated if not public_keys: log.debug("No minipools need to be updated with static beacon data") @@ -292,7 +289,7 @@ async def add_static_beacon_data_to_minipools(self): {"$set": {"validator_index": d}} ) for a, d in data.items() ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) log.debug("Minipools updated with static beacon data") @@ -300,7 +297,7 @@ async def add_static_beacon_data_to_minipools(self): async def update_dynamic_minipool_beacon_metadata(self): # basically same ordeal as above, but we use the validator index to get the data to improve performance # get all validator indexes from db - validator_indexes = await 
self.db.minipools_new.distinct("validator_index") + validator_indexes = await self.db.minipools.distinct("validator_index", {"beacon.status": {"$ne": "withdrawal_done"}}) # remove None values validator_indexes = [i for i in validator_indexes if i is not None] for index_batch in as_chunks(validator_indexes, self.batch_size): @@ -333,20 +330,16 @@ async def update_dynamic_minipool_beacon_metadata(self): {"$set": d} ) for a, d in data.items() ] - await self.db.minipools_new.bulk_write(bulk, ordered=False) + await self.db.minipools.bulk_write(bulk, ordered=False) log.debug("Minipools updated with dynamic beacon data") async def check_indexes(self): log.debug("checking indexes") - await self.db.minipools_new.create_index("address") - await self.db.minipools_new.create_index("pubkey") - await self.db.minipools_new.create_index("validator_index") - await self.db.node_operators_new.create_index("address") - # proposal index creation that is for some reason here - await self.db.proposals.create_index("validator") - await self.db.proposals.create_index("validator") - await self.db.proposals.create_index("slot", unique=True) + await self.db.minipools.create_index("address") + await self.db.minipools.create_index("pubkey") + await self.db.minipools.create_index("validator_index") + await self.db.node_operators.create_index("address") log.debug("indexes checked") @timerun_async @@ -354,9 +347,9 @@ async def add_untracked_node_operators(self): # rocketNodeManager.getNodeCount(i) returns the address of the node at index i nm = rp.get_contract_by_name("rocketNodeManager") latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 - # get latest _id in node_operators_new collection + # get latest _id in node_operators collection latest_db = 0 - if res := await self.db.node_operators_new.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] data = {} # return early if we're up to 
date @@ -370,7 +363,7 @@ async def add_untracked_node_operators(self): for i in index_batch ]) log.debug(f"Inserting {len(data)} new nodes into db") - await self.db.node_operators_new.insert_many([ + await self.db.node_operators.insert_many([ {"_id": i, "address": a} for i, a in data.items() ]) @@ -383,7 +376,7 @@ async def add_static_data_to_node_operators(self): lambda a: (ndf.address, [rp.seth_sig(ndf.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), ] # get all minipool addresses from db that do not have a node operator assigned - node_addresses = await self.db.node_operators_new.distinct("address", {"fee_distributor_address": {"$exists": False}}) + node_addresses = await self.db.node_operators.distinct("address", {"fee_distributor_address": {"$exists": False}}) # get node operator addresses from rp # return early if no minipools need to be updated if not node_addresses: @@ -409,7 +402,7 @@ async def add_static_data_to_node_operators(self): {"$set": d}, ) for a, d in data.items() ] - await self.db.node_operators_new.bulk_write(bulk, ordered=False) + await self.db.node_operators.bulk_write(bulk, ordered=False) log.debug("Node operators updated with static data") @@ -451,7 +444,7 @@ async def update_dynamic_node_operator_metadata(self): [((n["address"], "deposit_credit"), safe_to_float)]) ] # get all node operators from db, but we only care about the address and the fee_distributor_address - nodes = await self.db.node_operators_new.find({}, {"address": 1, "fee_distributor_address": 1}).to_list() + nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor_address": 1}).to_list() for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): data = {} res = await rp.multicall2( @@ -471,10 +464,10 @@ async def update_dynamic_node_operator_metadata(self): {"$set": d} ) for a, d in data.items() ] - await self.db.node_operators_new.bulk_write(bulk, ordered=False) + await self.db.node_operators.bulk_write(bulk, 
ordered=False) log.debug("Node operators updated with metadata") async def setup(self): - await self.add_cog(NodeTask(self)) + await self.add_cog(DBUpkeepTask(self)) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 35e2c028..8954e6bd 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -161,7 +161,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): @is_owner() async def purge_minipools(self, interaction: Interaction, confirm: bool = False): """ - Purge minipool collection, so it can be resynced from scratch in the next update. + Purge minipools collection, so it can be resynced from scratch in the next update. """ await interaction.response.defer(ephemeral=True) if not confirm: @@ -170,26 +170,12 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) await self.db.minipools.drop() await interaction.followup.send(content="Done") - @command() - @guilds(cfg["discord.owner.server_id"]) - @is_owner() - async def purge_minipools_new(self, interaction: Interaction, confirm: bool = False): - """ - Purge minipools_new collection, so it can be resynced from scratch in the next update. - """ - await interaction.response.defer(ephemeral=True) - if not confirm: - await interaction.followup.send("Not running. 
Set `confirm` to `true` to run.") - return - await self.db.minipools_new.drop() - await interaction.followup.send(content="Done") - @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() async def sync_commands(self, interaction: Interaction): """ - Full sync of the commands tree + Full sync of the command tree """ await interaction.response.defer(ephemeral=True) await self.bot.sync_commands() diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 976573d1..5ba8845f 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -37,7 +37,7 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", fig, axs = plt.subplots(1, 2) for i, bond in enumerate([8, 16]): - result = await self.db.minipools_new.aggregate([ + result = await self.db.minipools.aggregate([ { "$match": { "node_deposit_balance": bond, diff --git a/rocketwatch/plugins/minipool_delegates/minipool_delegates.py b/rocketwatch/plugins/minipool_delegates/minipool_delegates.py new file mode 100644 index 00000000..d78193a1 --- /dev/null +++ b/rocketwatch/plugins/minipool_delegates/minipool_delegates.py @@ -0,0 +1,69 @@ +import logging + +from pymongo import AsyncMongoClient + +from discord import Interaction +from discord.ext import commands +from discord.app_commands import command + +from rocketwatch import RocketWatch +from utils.embeds import Embed, el_explorer_url +from utils.readable import s_hex +from utils.shared_w3 import w3 +from utils.cfg import cfg +from utils.rocketpool import rp + +log = logging.getLogger("minipool_delegates") +log.setLevel(cfg["log_level"]) + + +class MinipoolDelegates(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + + @command() + async def minipool_delegates(self, interaction: Interaction): + """Show stats 
for minipool delegate adoption""" + await interaction.response.defer() + # only consider active minipools + minipool_filter = {"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}} + # we want to show the distribution of minipools that are using each delegate + distribution_stats = await (await self.db.minipools.aggregate([ + {"$match": minipool_filter}, + {"$group": {"_id": "$effective_delegate", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ])).to_list() + # and the percentage of minipools that are using the useLatestDelegate flag + use_latest_delegate_stats = await (await self.db.minipools.aggregate([ + {"$match": minipool_filter}, + {"$group": {"_id": "$use_latest_delegate", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ])).to_list() + e = Embed() + e.title = "Minipool Delegate Stats" + desc = "**Effective Delegate Distribution:**\n" + c_sum = sum(d['count'] for d in distribution_stats) + s = "\u00A0" * 4 + # latest delegate acording to rp + rp.uncached_get_address_by_name("rocketMinipoolDelegate") + for d in distribution_stats: + # I HATE THE CHECKSUMMED ADDRESS REQUIREMENTS I HATE THEM SO MUCH + a = w3.to_checksum_address(d['_id']) + name = s_hex(a) + if a == rp.get_address_by_name("rocketMinipoolDelegate"): + name += " (Latest)" + desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" + desc += "\n" + desc += "**Use Latest Delegate:**\n" + c_sum = sum(d['count'] for d in use_latest_delegate_stats) + for d in use_latest_delegate_stats: + # true = yes, false = no + d['_id'] = "Yes" if d['_id'] else "No" + desc += f"{s}**{d['_id']}**: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" + e.description = desc + await interaction.followup.send(embed=e) + + +async def setup(self): + await self.add_cog(MinipoolDelegates(self)) diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 
ce9b7cfd..4602e1de 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -71,7 +71,7 @@ def get_minipool_counts_per_node(self): } } ] - return [x["count"] for x in self.db.minipools_new.aggregate(pipeline)] + return [x["count"] for x in self.db.minipools.aggregate(pipeline)] @hybrid_command() @describe(raw="Show the raw Distribution Data") diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py index 055fa243..f625d0dc 100644 --- a/rocketwatch/plugins/minipool_states/minipool_states.py +++ b/rocketwatch/plugins/minipool_states/minipool_states.py @@ -25,7 +25,7 @@ async def minipool_states(self, ctx: Context): """Show minipool counts by beacon chain and contract status""" await ctx.defer(ephemeral=is_hidden_weak(ctx)) # fetch from db - res = await self.db.minipools_new.find({ + res = await self.db.minipools.find({ "beacon.status": {"$exists": True} }).to_list(None) data = { @@ -58,7 +58,7 @@ async def minipool_states(self, ctx: Context): exiting_valis.append(minipool) case "withdrawal_done": status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" - if minipool["execution_balance"] > 0: + if not minipool["finalized"]: data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 withdrawn_valis.append(minipool) else: diff --git a/rocketwatch/plugins/minipool_task/minipool_task.py b/rocketwatch/plugins/minipool_task/minipool_task.py deleted file mode 100644 index 882dc82c..00000000 --- a/rocketwatch/plugins/minipool_task/minipool_task.py +++ /dev/null @@ -1,176 +0,0 @@ -import asyncio -import copy -import logging -import time -from concurrent.futures import ThreadPoolExecutor - -from cronitor import Monitor -from pymongo import MongoClient -from requests.exceptions import HTTPError -from eth_typing import ChecksumAddress - -from discord.ext import commands, tasks -from 
discord.utils import as_chunks - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.rocketpool import rp -from utils.shared_w3 import w3, bacon -from utils.solidity import to_float -from utils.time_debug import timerun - -log = logging.getLogger("minipool_task") -log.setLevel(cfg["log_level"]) - - -class MinipoolTask(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = MongoClient(cfg["mongodb.uri"]).rocketwatch - self.minipool_manager = rp.get_contract_by_name("rocketMinipoolManager") - self.monitor = Monitor('gather-minipools', api_key=cfg["other.secrets.cronitor"]) - self.batch_size = 1000 - self.loop.start() - - async def cog_unload(self): - self.loop.cancel() - - @tasks.loop(minutes=15) - async def loop(self): - p_id = time.time() - self.monitor.ping(state='run', series=p_id) - executor = ThreadPoolExecutor() - loop = asyncio.get_event_loop() - futures = [loop.run_in_executor(executor, self.task)] - try: - await asyncio.gather(*futures) - self.monitor.ping(state="complete", series=p_id) - except Exception as err: - await self.bot.report_error(err) - self.monitor.ping(state="fail", series=p_id) - - @loop.before_loop - async def before_loop(self): - await self.bot.wait_until_ready() - - @timerun - def get_untracked_minipools(self) -> set[ChecksumAddress]: - minipool_count = rp.call("rocketMinipoolManager.getMinipoolCount") - minipool_addresses = [] - for index_batch in as_chunks(range(minipool_count), self.batch_size): - minipool_addresses += [ - w3.to_checksum_address(r.results[0]) for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolAt(i) for i in index_batch).results - ] - # remove address that are already in the minipool collection - tracked_addresses = self.db.minipools.distinct("address") - return set(minipool_addresses) - set(tracked_addresses) - - @timerun - def get_public_keys(self, addresses): - # optimizing this doesn't seem to help much, so keep it simple for readability 
- # batch the same way as get_untracked_minipools - minipool_pubkeys = [] - for address_batch in as_chunks(addresses, self.batch_size): - minipool_pubkeys += [ - f"0x{r.results[0].hex()}" for r in rp.multicall.aggregate( - self.minipool_manager.functions.getMinipoolPubkey(a) for a in address_batch).results] - return minipool_pubkeys - - @timerun - def get_node_operator(self, addresses): - base_contract = rp.assemble_contract("rocketMinipool", w3.to_checksum_address(addresses[0])) - func = base_contract.functions.getNodeAddress() - minipool_contracts = [] - for a in addresses: - tmp = copy.deepcopy(func) - tmp.address = w3.to_checksum_address(a) - minipool_contracts.append(tmp) - node_addresses = rp.multicall.aggregate(minipool_contracts) - node_addresses = [w3.to_checksum_address(r.results[0]) for r in node_addresses.results] - return node_addresses - - @timerun - def get_node_fee(self, addresses): - base_contract = rp.assemble_contract("rocketMinipool", w3.to_checksum_address(addresses[0])) - func = base_contract.functions.getNodeFee() - minipool_contracts = [] - for a in addresses: - tmp = copy.deepcopy(func) - tmp.address = w3.to_checksum_address(a) - minipool_contracts.append(tmp) - node_fees = rp.multicall.aggregate(minipool_contracts) - node_fees = [to_float(r.results[0]) for r in node_fees.results] - return node_fees - - @timerun - def get_validator_data(self, pubkeys): - result = {} - pubkeys_divisor = max(len(pubkeys) // 10, 1) # Make sure divisor is at least 1 to avoid division by zero - for i, pubkey in enumerate(pubkeys): - if i % pubkeys_divisor == 0: - log.debug(f"getting validator data for {i}/{len(pubkeys)}") - try: - data = bacon.get_validator(validator_id=pubkey, state_id="finalized") - except HTTPError: - continue - data = data["data"] - validator_id = int(data["index"]) - activation_epoch = int(data["validator"]["activation_epoch"]) - # The activation epoch is set to the possible maximum int if none has been determined yet. 
- # I don't check for an exact value because it turns out that nimbus uses uint64 while Teku uses int64. - # >=2**23 will be good enough for the next 100 years, after which neither this bot nor its creator will be alive. - if activation_epoch >= 2 ** 23: - continue - result[pubkey] = {"validator_id": validator_id, "activation_epoch": activation_epoch} - return result - - def check_indexes(self): - log.debug("checking indexes") - self.db.proposals.create_index("validator") - # self.db.minipools.create_index("validator", unique=True) - # remove the old unique validator index if it exists, create a new one without unique called validator_2 - if "validator_1" in self.db.minipools.index_information(): - self.db.minipools.drop_index("validator_1") - self.db.minipools.create_index("validator", name="validator_2") - self.db.proposals.create_index("slot", unique=True) - self.db.minipools.create_index("address") - log.debug("indexes checked") - - def task(self): - self.check_indexes() - log.debug("Gathering all untracked minipools...") - all_minipool_addresses = self.get_untracked_minipools() - if not all_minipool_addresses: - log.debug("No untracked minipools found.") - return - - log.debug(f"Found {len(all_minipool_addresses)} untracked minipools.") - for minipool_addresses in as_chunks(all_minipool_addresses, self.batch_size): - log.debug("Gathering minipool public keys...") - minipool_pubkeys = self.get_public_keys(minipool_addresses) - log.debug("Gathering minipool node operators...") - node_addresses = self.get_node_operator(minipool_addresses) - log.debug("Gathering minipool commission rates...") - node_fees = self.get_node_fee(minipool_addresses) - log.debug("Gathering minipool validator indexes...") - validator_data = self.get_validator_data(minipool_pubkeys) - data = [{ - "address" : a, - "pubkey" : p, - "node_operator" : n, - "node_fee" : f, - "validator" : validator_data[p]["validator_id"], - "activation_epoch": validator_data[p]["activation_epoch"] - } for a, p, 
n, f in zip(minipool_addresses, minipool_pubkeys, node_addresses, node_fees) if p in validator_data] - if data: - log.debug(f"Inserting {len(data)} minipools into the database...") - self.db.minipools.insert_many(data) - else: - log.debug("No new minipools with data found.") - - log.debug("Finished!") - - -async def setup(bot): - await bot.add_cog(MinipoolTask(bot)) diff --git a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py b/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py deleted file mode 100644 index 5e29a938..00000000 --- a/rocketwatch/plugins/minipools_upkeep_task/minipools_upkeep_task.py +++ /dev/null @@ -1,138 +0,0 @@ -import logging - -import pymongo - -from pymongo import AsyncMongoClient -from multicall import Call - -from discord import Interaction -from discord.ext import commands, tasks -from discord.app_commands import command -from discord.utils import as_chunks - -from rocketwatch import RocketWatch -from utils import solidity -from utils.embeds import Embed, el_explorer_url -from utils.readable import s_hex -from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak -from utils.cfg import cfg -from utils.rocketpool import rp -from utils.time_debug import timerun_async - -log = logging.getLogger("minipools_upkeep_task") -log.setLevel(cfg["log_level"]) - - -def div_32(i: int): - return solidity.to_float(i) / 32 - -class MinipoolsUpkeepTask(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - self.batch_size = 1000 - self.loop.start() - - async def cog_unload(self): - self.loop.cancel() - - @tasks.loop(minutes=15) - async def loop(self): - try: - await self.upkeep_minipools() - except Exception as err: - await self.bot.report_error(err) - - @loop.before_loop - async def on_ready(self): - await self.bot.wait_until_ready() - - @timerun_async - async def get_minipool_stats(self, minipools): - m_d = 
rp.get_contract_by_name("rocketMinipoolDelegate") - m = rp.assemble_contract("rocketMinipool", address=minipools[0]) - mc = rp.get_contract_by_name("multicall3") - lambs = [ - lambda x: (x, rp.seth_sig(m_d.abi, "getNodeFee"), [((x, "NodeFee"), solidity.to_float)]), - lambda x: (x, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((x, "Delegate"), None)]), - lambda x: (x, rp.seth_sig(m.abi, "getPreviousDelegate"), [((x, "PreviousDelegate"), None)]), - lambda x: (x, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((x, "UseLatestDelegate"), None)]), - lambda x: (x, rp.seth_sig(m.abi, "getNodeDepositBalance"), [((x, "NodeOperatorShare"), div_32)]), - # get balances of minipool as well - lambda x: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), x], [((x, "EthBalance"), solidity.to_float)]) - ] - minipool_stats = {} - for minipool_batch in as_chunks(minipools, self.batch_size // len(lambs)): - calls = [Call(*lamb(a)) for a in minipool_batch for lamb in lambs] - res = await rp.multicall2(calls) - # add data to mini pool stats dict (address => {func_name: value}) - # strip get from function name - for (address, variable_name), value in res.items(): - if address not in minipool_stats: - minipool_stats[address] = {} - minipool_stats[address][variable_name] = value - return minipool_stats - - async def upkeep_minipools(self): - logging.info("Updating minipool states") - a = await self.db.minipools.find().distinct("address") - b = await self.get_minipool_stats(a) - # update data in db using unordered bulk write - # note: this data is kept in the "meta" field of each minipool - bulk = [ - pymongo.UpdateOne( - {"address": address}, - {"$set": {"meta": stats}}, - upsert=True - ) for address, stats in b.items() - ] - - await self.db.minipools.bulk_write(bulk, ordered=False) - logging.info("Updated minipool states") - - @command() - async def minipool_delegates(self, interaction: Interaction): - """Show stats for minipool delegate adoption""" - await 
interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - # only consider active minipools - minipool_filter = {"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}} - # we want to show the distribution of minipools that are using each delegate - distribution_stats = await (await self.db.minipools_new.aggregate([ - {"$match": minipool_filter}, - {"$group": {"_id": "$effective_delegate", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}}, - ])).to_list() - # and the percentage of minipools that are using the useLatestDelegate flag - use_latest_delegate_stats = await (await self.db.minipools_new.aggregate([ - {"$match": minipool_filter}, - {"$group": {"_id": "$use_latest_delegate", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}}, - ])).to_list() - e = Embed() - e.title = "Minipool Delegate Stats" - desc = "**Effective Delegate Distribution:**\n" - c_sum = sum(d['count'] for d in distribution_stats) - s = "\u00A0" * 4 - # latest delegate acording to rp - rp.uncached_get_address_by_name("rocketMinipoolDelegate") - for d in distribution_stats: - # I HATE THE CHECKSUMMED ADDRESS REQUIREMENTS I HATE THEM SO MUCH - a = w3.to_checksum_address(d['_id']) - name = s_hex(a) - if a == rp.get_address_by_name("rocketMinipoolDelegate"): - name += " (Latest)" - desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" - desc += "\n" - desc += "**Use Latest Delegate:**\n" - c_sum = sum(d['count'] for d in use_latest_delegate_stats) - for d in use_latest_delegate_stats: - # true = yes, false = no - d['_id'] = "Yes" if d['_id'] else "No" - desc += f"{s}**{d['_id']}**: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" - e.description = desc - await interaction.followup.send(embed=e) - - -async def setup(self): - await self.add_cog(MinipoolsUpkeepTask(self)) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 353ebe83..3a41255c 100644 --- 
a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -55,7 +55,7 @@ "Geth" : "#40BFBF", "Besu" : "#55AA7A", "Nethermind" : "#2688D9", - "Reth" : "#CF0512", + "Reth" : "#760910", "External" : "#808080", "Smart Node" : "#CC6E33", @@ -122,12 +122,15 @@ def __init__(self, bot: RocketWatch): async def loop(self): await self.bot.wait_until_ready() - await self._create_indices() + await self.check_indexes() while not self.bot.is_closed(): p_id = time.time() self.monitor.ping(state="run", series=p_id) try: - await self.work() + log.debug("starting proposal task") + await self.fetch_proposals() + await self.create_minipool_proposal_view() + log.debug("finished proposal task") self.monitor.ping(state="complete", series=p_id) except Exception as err: await self.bot.report_error(err) @@ -135,21 +138,15 @@ async def loop(self): finally: await asyncio.sleep(300) - async def _create_indices(self): + async def check_indexes(self): await self.bot.wait_until_ready() try: - await self.db.minipools_new.create_index([("validator_index", 1)]) + await self.db.proposals.create_index("validator") + await self.db.proposals.create_index("slot", unique=True) await self.db.proposals.create_index([("validator", 1), ("slot", -1)]) - log.info("Created indexes on minipools_new and proposals collections") except Exception as e: log.warning(f"Could not create indexes: {e}") - async def work(self): - log.debug("starting proposal task") - await self.fetch_proposals() - await self.create_minipool_proposal_view() - log.debug("finished proposal task") - async def fetch_proposals(self): if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): last_checked_slot = db_entry["slot"] @@ -172,7 +169,7 @@ async def fetch_proposal(self, slot: int) -> None: raise e validator_index = int(beacon_header["proposer_index"]) - if not (minipool := (await self.db.minipools.find_one({"validator": validator_index}))): + if not (minipool := (await 
self.db.minipools.find_one({"validator_index": validator_index}))): return None beacon_block = (await bacon.get_block_async(slot))["data"]["message"] @@ -226,7 +223,7 @@ async def create_minipool_proposal_view(self): } ] await self.db.minipool_proposals.drop() - await self.db.create_collection("minipool_proposals", viewOn="minipools_new", pipeline=pipeline) + await self.db.create_collection("minipool_proposals", viewOn="minipools", pipeline=pipeline) @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): @@ -282,12 +279,7 @@ async def version_chart(self, ctx: Context): """ await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed(title="Version Chart") - e.description = "The graph below shows proposal stats using a **5-day rolling window**, " \ - "and **does not represent operator adoption**.\n" \ - "Versions with a proposal in the **last 2 days** are emphasized.\n\n" \ - "The percentages in the top left legend show the percentage of proposals observed in the last 5 days using that version.\n" \ - "**If an old version is shown as 10%, it means that it was 10% of the proposals in the last 5 days.**\n" \ - "_No it does not mean that the minipools simply haven't proposed with the new version yet._\n" \ + e.description = "The graph below shows proposal stats using a **5-day rolling window**.\n" \ "This only looks at proposals, it does not care about what individual minipools do." 
# get proposals # limit to 6 months @@ -404,7 +396,7 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes=False): minipools = sorted(minipools, key=lambda x: x[1]) # get total minipool count from rocketpool - unobserved_minipools = len(await self.db.minipools_new.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("_id")) - sum(d[1] for d in minipools) + unobserved_minipools = len(await self.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("_id")) - sum(d[1] for d in minipools) if "remove_from_total" in data: unobserved_minipools -= data["remove_from_total"]["validator_count"] minipools.insert(0, ("No proposals yet", unobserved_minipools)) @@ -420,7 +412,7 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes=False): node_operators = sorted(node_operators, key=lambda x: x[1]) # get total node operator count from rp - unobserved_node_operators = len(await self.db.minipools_new.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("node_operator")) - sum(d[1] for d in node_operators) + unobserved_node_operators = len(await self.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("node_operator")) - sum(d[1] for d in node_operators) if "remove_from_total" in data: unobserved_node_operators -= data["remove_from_total"]["count"] node_operators.insert(0, ("No proposals yet", unobserved_node_operators)) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 1aa90323..ccb08153 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -202,57 +202,6 @@ async def queue(self, interaction: Interaction, lane: Literal["combined", "stand embed = await view.load() await interaction.followup.send(embed=embed, view=view) - @command() - async def clear_queue(self, interaction: Interaction): - """Show gas price for clearing the queue using the rocketDepositPoolQueue 
contract""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - - e = Embed(title="Gas Prices for Dequeuing Minipools") - e.set_author( - name="🔗 Forum: Clear minipool queue contract", - url="https://dao.rocketpool.net/t/clear-minipool-queue-contract/670" - ) - - queue_length = rp.call("rocketMinipoolQueue.getTotalLength") - dp_balance = solidity.to_float(rp.call("rocketDepositPool.getBalance")) - match_amount = solidity.to_float(rp.call("rocketDAOProtocolSettingsMinipool.getVariableDepositAmount")) - max_dequeues = min(int(dp_balance / match_amount), queue_length) - - if max_dequeues > 0: - max_assignments = rp.call("rocketDAOProtocolSettingsDeposit.getMaximumDepositAssignments") - min_assignments = rp.call("rocketDAOProtocolSettingsDeposit.getMaximumDepositSocialisedAssignments") - - # half queue clear - half_clear_count = int(max_dequeues / 2) - half_clear_input = max_assignments * math.ceil(half_clear_count / min_assignments) - gas = rp.estimate_gas_for_call("rocketDepositPoolQueue.clearQueueUpTo", half_clear_input) - e.add_field( - name=f"Half Clear ({half_clear_count} MPs)", - value=f"`clearQueueUpTo({half_clear_input})`\n `{gas:,}` gas" - ) - - # full queue clear - full_clear_size = max_dequeues - full_clear_input = max_assignments * math.ceil(full_clear_size / min_assignments) - gas = rp.estimate_gas_for_call("rocketDepositPoolQueue.clearQueueUpTo", full_clear_input) - e.add_field( - name=f"Full Clear ({full_clear_size} MPs)", - value=f"`clearQueueUpTo({full_clear_input})`\n `{gas:,}` gas" - ) - elif queue_length > 0: - e.description = "Not enough funds in deposit pool to dequeue any minipools." - else: - e.description = "Queue is empty." 
- - # link to contract - e.add_field( - name="Contract", - value=el_explorer_url(rp.get_address_by_name('rocketDepositPoolQueue'), "RocketDepositPoolQueue"), - inline=False - ) - - await interaction.followup.send(embed=e) - async def setup(bot): await bot.add_cog(Queue(bot)) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index d38ee75b..182f4b7b 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -181,7 +181,7 @@ async def smoothie(self, ctx: Context): e = Embed(title="Smoothing Pool") smoothie_eth = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) - data = await (await self.db.minipools_new.aggregate([ + data = await (await self.db.minipools.aggregate([ { '$match': { 'beacon.status': { @@ -200,7 +200,7 @@ async def smoothie(self, ctx: Context): } }, { '$lookup': { - 'from' : 'node_operators_new', + 'from' : 'node_operators', 'localField' : '_id', 'foreignField': 'address', 'as' : 'meta' diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 0f026a3e..49a46ce8 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -37,7 +37,7 @@ async def rpl_apr(self, ctx: Context): e = Embed() reward_duration = rp.call("rocketRewardsPool.getClaimIntervalTime") - total_rpl_staked = await (await self.db.node_operators_new.aggregate([ + total_rpl_staked = await (await self.db.node_operators.aggregate([ { '$group': { '_id' : 'out', @@ -112,7 +112,7 @@ async def effective_rpl_staked(self, ctx: Context): total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalStakedRPL")) e.add_field(name="Total RPL Staked:", value=f"{humanize.intcomma(total_rpl_staked, 2)} RPL", inline=False) # get effective RPL staked - effective_rpl_stake = await (await self.db.node_operators_new.aggregate([ + effective_rpl_stake = await (await self.db.node_operators.aggregate([ { '$group': { '_id' : 'out', @@ -143,7 +143,7 
@@ async def withdrawable_rpl(self, e = Embed() img = BytesIO() - data = await (await self.db.node_operators_new.aggregate([ + data = await (await self.db.node_operators.aggregate([ { '$match': { 'staking_minipool_count': { diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index ae8596db..86d6fd34 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -125,7 +125,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Minipools that are flagged as initialised have the following applied to them: # - They have 1 ETH staked on the beacon chain. # - They have not yet received 31 ETH from the Deposit Pool. - tmp = await (await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools.aggregate([ { '$match': { 'status': 'initialised', @@ -148,7 +148,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have deposited 1 ETH to the Beacon Chain. # - They have 31 ETH from the Deposit Pool in their contract waiting to be staked as well. # - They are currently in the scrubbing process (should be 12 hours) or have not yet initiated the second phase. - tmp = await (await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools.aggregate([ { '$match': { 'status': 'prelaunch', @@ -177,7 +177,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. # - The 31 ETH that was waiting in their address was moved back to the Deposit Pool (This can cause the Deposit Pool # to grow beyond its Cap, check the bellow comment for information about that). 
- tmp = await (await self.db.minipools_new.aggregate([ + tmp = await (await self.db.minipools.aggregate([ { '$match': { 'status': 'dissolved', @@ -203,7 +203,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "execution_balance"] # Staking Minipools: - minipools = await self.db.minipools_new.find({ + minipools = await self.db.minipools.find({ 'status': {"$nin": ["initialised", "prelaunch", "dissolved"]}, 'node_deposit_balance': {"$exists": True}, }).to_list(None) @@ -271,7 +271,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. smoothie_balance = solidity.to_float(w3.eth.getBalance(rp.get_address_by_name("rocketSmoothingPool"))) - tmp = await (await self.db.node_operators_new.aggregate([ + tmp = await (await self.db.node_operators.aggregate([ { '$match': { 'smoothing_pool_registration_state': True, @@ -364,7 +364,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) # create _value string for each branch. 
the _value is the sum of all _val or _val values in the children - tmp = await (await self.db.node_operators_new.aggregate([ + tmp = await (await self.db.node_operators.aggregate([ { '$match': { 'fee_distributor_eth_balance': { From 222a0fe07bc76538d16223217d998b7c481ba238 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 10:38:38 +0000 Subject: [PATCH 108/279] switch config to single CL endpoint --- rocketwatch/main.cfg.sample | 11 ++++------- rocketwatch/utils/shared_w3.py | 2 +- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index 6028c787..8baf0ec1 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -14,18 +14,15 @@ discord: { execution_layer: { explorer: "https://etherscan.io" endpoint: { - current: "http://node1:8545" - mainnet: "http://node1:8545" - archive: "http://node1:8545" + current: "http://node:8545" + mainnet: "http://node:8545" + archive: "http://node:8545" } etherscan_secret: "" } consensus_layer: { explorer: "https://beaconcha.in" - endpoints: [ - "http://node1:5052", - "http://node2:5052" - ], + endpoint: "http://node:5052" beaconcha_secret: "" } mongodb: { diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 591a64e0..dc300257 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -52,4 +52,4 @@ async def get_sync_committee_async(self, epoch): url = f"{self.base_url}/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" return await self._make_get_request_async(url) -bacon = SuperBacon(cfg["consensus_layer.endpoints"][-1]) +bacon = SuperBacon(cfg["consensus_layer.endpoint"]) From 4132338226703d301e490dd9aa341d7d2aa23cdd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 12:00:39 +0000 Subject: [PATCH 109/279] user_distribute status --- .../plugins/db_upkeep_task/db_upkeep_task.py | 1 + 
.../minipool_states/minipool_states.py | 2 +- rocketwatch/plugins/proposals/proposals.py | 4 +- .../user_distribute/user_distribute.py | 99 +++++++++++++++++++ 4 files changed, 103 insertions(+), 3 deletions(-) create mode 100644 rocketwatch/plugins/user_distribute/user_distribute.py diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 67c7325f..d432abe5 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -163,6 +163,7 @@ async def update_dynamic_minipool_metadata(self): lambda a: (a, rp.seth_sig(m.abi, "getNodeFee"), [((a, "node_fee"), safe_to_float)]), lambda a: (a, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((a, "effective_delegate"), None)]), lambda a: (a, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((a, "use_latest_delegate"), None)]), + lambda a: (a, rp.seth_sig(m.abi, "getUserDistributed"), [((a, "user_distributed"), None)]), lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) ] # get all minipool addresses from db diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py index f625d0dc..61631cdb 100644 --- a/rocketwatch/plugins/minipool_states/minipool_states.py +++ b/rocketwatch/plugins/minipool_states/minipool_states.py @@ -58,7 +58,7 @@ async def minipool_states(self, ctx: Context): exiting_valis.append(minipool) case "withdrawal_done": status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" - if not minipool["finalized"]: + if minipool["execution_balance"] > 0: data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 withdrawn_valis.append(minipool) else: diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 3a41255c..89903581 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ 
b/rocketwatch/plugins/proposals/proposals.py @@ -13,7 +13,7 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib import pyplot as plt -from pymongo import AsyncMongoClient +from pymongo import AsyncMongoClient, ASCENDING, DESCENDING from cronitor import Monitor from rocketwatch import RocketWatch @@ -143,7 +143,7 @@ async def check_indexes(self): try: await self.db.proposals.create_index("validator") await self.db.proposals.create_index("slot", unique=True) - await self.db.proposals.create_index([("validator", 1), ("slot", -1)]) + await self.db.proposals.create_index([("validator", ASCENDING), ("slot", DESCENDING)]) except Exception as e: log.warning(f"Could not create indexes: {e}") diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py new file mode 100644 index 00000000..111d6b52 --- /dev/null +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -0,0 +1,99 @@ +import logging + +from discord.ext import commands +from discord.ext.commands import Context, hybrid_command +from pymongo import AsyncMongoClient, ASCENDING + +import time +from rocketwatch import RocketWatch +from utils.rocketpool import rp +from utils.cfg import cfg +from utils.embeds import Embed, el_explorer_url +from utils.shared_w3 import w3, bacon +from utils.views import PageView +from utils.visibility import is_hidden_weak + +log = logging.getLogger("user_distribute") +log.setLevel(cfg["log_level"]) + + +class UserDistribute(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") + + @hybrid_command() + async def minipool_user_distribute(self, ctx: Context): + """Show user distribute summary for minipools""" + await ctx.defer(ephemeral=is_hidden_weak(ctx)) + + head = await bacon.get_block_header_async("head") + current_epoch = 
int(head["data"]["header"]["message"]["slot"]) // 32 + threshold_epoch = current_epoch - 5000 + + minipools = await self.db.minipools.find({ + "user_distributed": False, + "status": "staking", + "execution_balance": {"$gte": 8}, + "beacon.withdrawable_epoch": {"$lt": threshold_epoch} + }).sort("beacon.withdrawable_epoch", ASCENDING).to_list() + + eligible = [] + pending = [] + distributable = [] + + min_pending_time = 2 ** 256 + min_distributable_time = 2 ** 256 + + current_time = int(time.time()) + ud_window_start = rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") + ud_window_end = ud_window_start + rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") + + for mp in minipools: + mp_address = w3.to_checksum_address(mp["address"]) + storage = w3.eth.get_storage_at(mp_address, 0x17) + user_distribute_time = int.from_bytes(storage, "big") + elapsed_time = current_time - user_distribute_time + + if elapsed_time >= ud_window_end: + eligible.append(mp) + elif elapsed_time < ud_window_start: + min_pending_time = min(ud_window_start, min_pending_time) + pending.append(mp) + elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp_address): # double check, DB may lag behind + min_distributable_time = min(ud_window_end, min_distributable_time) + distributable.append(mp) + + embed = Embed(title="User Distribute Status") + + embed.add_field( + name="Eligible", + value=f"**{len(eligible)}** minipool{'s' if len(eligible) != 1 else ''}", + inline=False + ) + + if pending: + earliest_ts = current_time + min_pending_time + embed.add_field( + name="Pending", + value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · first window opens ", + inline=False + ) + else: + embed.add_field(name="Pending", value="**0** minipools", inline=False) + + if distributable: + closes_ts = current_time + min_distributable_time + embed.add_field( + name="Distributable", + value=f"**{len(distributable)}** minipool{'s' if 
len(distributable) != 1 else ''} · first window closes ", + inline=False + ) + else: + embed.add_field(name="Distributable", value="**0** minipools", inline=False) + + await ctx.send(embed=embed) + + +async def setup(bot): + await bot.add_cog(UserDistribute(bot)) From 903af4d3e34cf89e6e9163630048176783ae8c38 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 12:16:34 +0000 Subject: [PATCH 110/279] fix timestamps --- .../plugins/user_distribute/user_distribute.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 111d6b52..e5d1f458 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -42,8 +42,8 @@ async def minipool_user_distribute(self, ctx: Context): pending = [] distributable = [] - min_pending_time = 2 ** 256 - min_distributable_time = 2 ** 256 + min_open_time = 2 ** 256 + min_close_time = 2 ** 256 current_time = int(time.time()) ud_window_start = rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") @@ -58,10 +58,10 @@ async def minipool_user_distribute(self, ctx: Context): if elapsed_time >= ud_window_end: eligible.append(mp) elif elapsed_time < ud_window_start: - min_pending_time = min(ud_window_start, min_pending_time) + min_open_time = min(user_distribute_time + ud_window_start, min_open_time) pending.append(mp) elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp_address): # double check, DB may lag behind - min_distributable_time = min(ud_window_end, min_distributable_time) + min_close_time = min(user_distribute_time + ud_window_end, min_close_time) distributable.append(mp) embed = Embed(title="User Distribute Status") @@ -73,20 +73,18 @@ async def minipool_user_distribute(self, ctx: Context): ) if pending: - earliest_ts = current_time + 
min_pending_time embed.add_field( name="Pending", - value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · first window opens ", + value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · next window opens ", inline=False ) else: embed.add_field(name="Pending", value="**0** minipools", inline=False) if distributable: - closes_ts = current_time + min_distributable_time embed.add_field( name="Distributable", - value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · first window closes ", + value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · next window closes ", inline=False ) else: From fd59b3d4c485e01788c0326e947f1e6861512963 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 13:31:45 +0000 Subject: [PATCH 111/279] add transaction instructions --- .../user_distribute/user_distribute.py | 65 +++++++++++++++++-- 1 file changed, 59 insertions(+), 6 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index e5d1f458..507b6e54 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,5 +1,8 @@ import logging +from io import StringIO +import discord +from discord import ui, ButtonStyle, Interaction from discord.ext import commands from discord.ext.commands import Context, hybrid_command from pymongo import AsyncMongoClient, ASCENDING @@ -8,7 +11,7 @@ from rocketwatch import RocketWatch from utils.rocketpool import rp from utils.cfg import cfg -from utils.embeds import Embed, el_explorer_url +from utils.embeds import Embed from utils.shared_w3 import w3, bacon from utils.views import PageView from utils.visibility import is_hidden_weak @@ -16,6 +19,53 @@ log = logging.getLogger("user_distribute") log.setLevel(cfg["log_level"]) +class InstructionsView(ui.View): + def 
__init__(self, eligible: list[dict], distributable: list[dict]): + super().__init__(timeout=None) + self.eligible = eligible + self.distributable = distributable + + @ui.button(label="Instructions", style=ButtonStyle.blurple) + async def instructions(self, interaction: Interaction, _) -> None: + mp_contract = rp.assemble_contract("rocketMinipoolDelegate") + bud_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="beginUserDistribute")[2:]) + dist_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:]) + + tuple_strs = [] + for mp in self.distributable: + tuple_strs.append(f"[\"{mp['address']}\", true, 0x{dist_calldata.hex()}]") + for mp in self.eligible: + tuple_strs.append(f"[\"{mp['address']}\", true, 0x{bud_calldata.hex()}]") + + input_data = "[" + ",".join(tuple_strs) + "]" + + etherscan_url = "https://etherscan.io/address/0xcA11bde05977b3631167028862bE2a173976CA11#writeContract#F2" + + embed = Embed(title="Distribution Instructions") + embed.description = ( + f"1. Open the [Multicall `aggregate3` function]({etherscan_url}) on Etherscan\n" + f"2. Enter `0` for `payableAmount (ether)`\n" + f"3. Paste the provided input data into the `calls (tuple[])` field\n" + f"4. Connect your wallet (`Connect to Web3`)\n" + f"5. Click `Write` and sign with your wallet\n" + ) + + actions = [] + if self.distributable: + actions.append(f"distribute the balance of **{len(self.eligible)}** minipools") + + if self.eligible: + actions.append(f"begin the user distribution process for **{len(self.eligible)}** minipools") + + embed.description += "\nThis will " + " and ".join(actions) + "." 
+ + file_data = StringIO(input_data) + await interaction.response.send_message( + embed=embed, + file=discord.File(file_data, filename="input_data.txt"), + ephemeral=True + ) + class UserDistribute(commands.Cog): def __init__(self, bot: RocketWatch): @@ -50,8 +100,8 @@ async def minipool_user_distribute(self, ctx: Context): ud_window_end = ud_window_start + rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") for mp in minipools: - mp_address = w3.to_checksum_address(mp["address"]) - storage = w3.eth.get_storage_at(mp_address, 0x17) + mp["address"] = w3.to_checksum_address(mp["address"]) + storage = w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time = int.from_bytes(storage, "big") elapsed_time = current_time - user_distribute_time @@ -60,7 +110,7 @@ async def minipool_user_distribute(self, ctx: Context): elif elapsed_time < ud_window_start: min_open_time = min(user_distribute_time + ud_window_start, min_open_time) pending.append(mp) - elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp_address): # double check, DB may lag behind + elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): # double check, DB may lag behind min_close_time = min(user_distribute_time + ud_window_end, min_close_time) distributable.append(mp) @@ -89,8 +139,11 @@ async def minipool_user_distribute(self, ctx: Context): ) else: embed.add_field(name="Distributable", value="**0** minipools", inline=False) - - await ctx.send(embed=embed) + + if eligible or distributable: + await ctx.send(embed=embed, view=InstructionsView(eligible, distributable)) + else: + await ctx.send(embed=embed) async def setup(bot): From 29a14196f832349146ca2d763448acd3b32af622 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 14:48:03 +0000 Subject: [PATCH 112/279] small refactor --- .../user_distribute/user_distribute.py | 48 ++++++++++--------- rocketwatch/utils/shared_w3.py | 4 +- 2 files 
changed, 28 insertions(+), 24 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 507b6e54..bb33546c 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,3 +1,4 @@ +import time import logging from io import StringIO @@ -7,7 +8,6 @@ from discord.ext.commands import Context, hybrid_command from pymongo import AsyncMongoClient, ASCENDING -import time from rocketwatch import RocketWatch from utils.rocketpool import rp from utils.cfg import cfg @@ -21,7 +21,7 @@ class InstructionsView(ui.View): def __init__(self, eligible: list[dict], distributable: list[dict]): - super().__init__(timeout=None) + super().__init__(timeout=300) self.eligible = eligible self.distributable = distributable @@ -59,10 +59,9 @@ async def instructions(self, interaction: Interaction, _) -> None: embed.description += "\nThis will " + " and ".join(actions) + "." 
- file_data = StringIO(input_data) await interaction.response.send_message( embed=embed, - file=discord.File(file_data, filename="input_data.txt"), + file=discord.File(StringIO(input_data), filename="input_data.txt"), ephemeral=True ) @@ -71,12 +70,8 @@ class UserDistribute(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - - @hybrid_command() - async def minipool_user_distribute(self, ctx: Context): - """Show user distribute summary for minipools""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - + + async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: head = await bacon.get_block_header_async("head") current_epoch = int(head["data"]["header"]["message"]["slot"]) // 32 threshold_epoch = current_epoch - 5000 @@ -92,9 +87,6 @@ async def minipool_user_distribute(self, ctx: Context): pending = [] distributable = [] - min_open_time = 2 ** 256 - min_close_time = 2 ** 256 - current_time = int(time.time()) ud_window_start = rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") ud_window_end = ud_window_start + rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") @@ -102,20 +94,29 @@ async def minipool_user_distribute(self, ctx: Context): for mp in minipools: mp["address"] = w3.to_checksum_address(mp["address"]) storage = w3.eth.get_storage_at(mp["address"], 0x17) - user_distribute_time = int.from_bytes(storage, "big") + user_distribute_time: int = int.from_bytes(storage, "big") elapsed_time = current_time - user_distribute_time - + if elapsed_time >= ud_window_end: - eligible.append(mp) + eligible.append((mp, user_distribute_time)) elif elapsed_time < ud_window_start: - min_open_time = min(user_distribute_time + ud_window_start, min_open_time) + mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): 
# double check, DB may lag behind - min_close_time = min(user_distribute_time + ud_window_end, min_close_time) + mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) + + return eligible, pending, distributable - embed = Embed(title="User Distribute Status") + @hybrid_command() + async def minipool_user_distribute(self, ctx: Context): + """Show user distribute summary for minipools""" + await ctx.defer(ephemeral=is_hidden_weak(ctx)) + eligible, pending, distributable = await self._fetch_minipools() + + embed = Embed(title="User Distribute Status") + embed.add_field( name="Eligible", value=f"**{len(eligible)}** minipool{'s' if len(eligible) != 1 else ''}", @@ -123,25 +124,28 @@ async def minipool_user_distribute(self, ctx: Context): ) if pending: + next_window_open = min(mp["ud_window_open"] for mp in pending) embed.add_field( name="Pending", - value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · next window opens ", + value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · next window opens ", inline=False ) else: embed.add_field(name="Pending", value="**0** minipools", inline=False) if distributable: + next_window_close = min(mp["ud_window_close"] for mp in distributable) embed.add_field( name="Distributable", - value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · next window closes ", + value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · next window closes ", inline=False ) else: embed.add_field(name="Distributable", value="**0** minipools", inline=False) if eligible or distributable: - await ctx.send(embed=embed, view=InstructionsView(eligible, distributable)) + # limit the number of distributions to not run out of gas + await ctx.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100])) else: await ctx.send(embed=embed) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 
dc300257..510ce9d0 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -35,11 +35,11 @@ async def _make_get_request_async(self, url: str): async with self.async_session.get(url) as response: return await response.json() - async def get_block_header_async(self, block_id: BlockIdentifier): + async def get_block_header_async(self, block_id: int | str): url = f"{self.base_url}/eth/v1/beacon/headers/{block_id}" return await self._make_get_request_async(url) - async def get_block_async(self, block_id: BlockIdentifier): + async def get_block_async(self, block_id: int | str): url = f"{self.base_url}/eth/v2/beacon/blocks/{block_id}" return await self._make_get_request_async(url) From 5cc6c4362d6e3e4c5aa4694eb4048a10a57c7c68 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 15:14:21 +0000 Subject: [PATCH 113/279] add warning for open distribution window --- rocketwatch/plugins/activity/activity.py | 10 ++--- rocketwatch/plugins/apr/apr.py | 10 ++--- rocketwatch/plugins/event_core/event_core.py | 12 +++--- .../user_distribute/user_distribute.py | 41 +++++++++++++++++-- 4 files changed, 53 insertions(+), 20 deletions(-) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 050cea6f..97c8d2a7 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -16,13 +16,13 @@ class RichActivity(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.monitor = Monitor("update-activity", api_key=cfg["other.secrets.cronitor"]) - self.loop.start() + self.task.start() async def cog_unload(self): - self.loop.cancel() + self.task.cancel() @tasks.loop(seconds=60) - async def loop(self): + async def task(self): self.monitor.ping() log.debug("Updating Discord activity") @@ -34,11 +34,11 @@ async def loop(self): ) ) - @loop.before_loop + @task.before_loop async def before_loop(self): await 
self.bot.wait_until_ready() - @loop.error + @task.error async def on_error(self, err: Exception): await self.bot.report_error(err) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 6bb81362..9a04370f 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -44,13 +44,13 @@ class APR(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - self.loop.start() + self.task.start() async def cog_unload(self): - self.loop.cancel() + self.task.cancel() @tasks.loop(seconds=60) - async def loop(self): + async def task(self): # get latest block update from the db latest_db_block = await self.db.reth_apr.find_one(sort=[("block", -1)]) latest_db_block = 0 if latest_db_block is None else latest_db_block["block"] @@ -76,11 +76,11 @@ async def loop(self): }) cursor_block = balance_block - 1 - @loop.before_loop + @task.before_loop async def before_loop(self): await self.bot.wait_until_ready() - @loop.error + @task.error async def on_error(self, err: Exception): await self.bot.report_error(err) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index ffd7822b..af8af6e8 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -44,13 +44,13 @@ def __init__(self, bot: RocketWatch): self.head_block: BlockIdentifier = cfg["events.genesis"] self.block_batch_size = cfg["events.block_batch_size"] self.monitor = Monitor("gather-new-events", api_key=cfg["other.secrets.cronitor"]) - self.loop.start() + self.task.start() async def cog_unload(self) -> None: - self.loop.cancel() + self.task.cancel() @tasks.loop(seconds=30) - async def loop(self) -> None: + async def task(self) -> None: p_id = time.time() self.monitor.ping(state="run", series=p_id) @@ -64,20 +64,20 @@ async def loop(self) -> None: await self.on_error(error) self.monitor.ping(state="fail", 
series=p_id) - @loop.before_loop + @task.before_loop async def before_loop(self) -> None: await self.bot.wait_until_ready() async def on_success(self) -> None: if self.state == self.State.ERROR: self.state = self.State.OK - self.loop.change_interval(seconds=12) + self.task.change_interval(seconds=12) async def on_error(self, error: Exception) -> None: await self.bot.report_error(error) if self.state == self.State.OK: self.state = self.State.ERROR - self.loop.change_interval(seconds=30) + self.task.change_interval(seconds=30) try: await self.show_service_interrupt() diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index bb33546c..249b67ec 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,10 +1,11 @@ import time import logging from io import StringIO +from typing import Optional import discord from discord import ui, ButtonStyle, Interaction -from discord.ext import commands +from discord.ext import commands, tasks from discord.ext.commands import Context, hybrid_command from pymongo import AsyncMongoClient, ASCENDING @@ -20,8 +21,8 @@ log.setLevel(cfg["log_level"]) class InstructionsView(ui.View): - def __init__(self, eligible: list[dict], distributable: list[dict]): - super().__init__(timeout=300) + def __init__(self, eligible: list[dict], distributable: list[dict], instruction_timeout: int): + super().__init__(timeout=instruction_timeout) self.eligible = eligible self.distributable = distributable @@ -70,6 +71,38 @@ class UserDistribute(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") + self.task.start() + + async def cog_unload(self): + self.task.cancel() + + @tasks.loop(hours=8) + async def task(self): + channel_id = cfg.get("discord.channels.user_distribute") + if not channel_id: + return + + channel = await 
self.bot.get_or_fetch_channel(channel_id) + + _, _, distributable = await self._fetch_minipools() + if not distributable: + return + + embed = Embed(title=":warning: User Distribution Window Open") + next_window_close = min(mp["ud_window_close"] for mp in distributable) + embed.description = ( + f"There are **{len(distributable)}** minipools eligible for distribution.\n" + f"The next window closes !" + ) + await channel.send(embed=embed, view=InstructionsView([], distributable[:100], instruction_timeout=(4 * 3600))) + + @task.before_loop + async def before_task(self): + await self.bot.wait_until_ready() + + @task.error + async def on_task_error(self, err: Exception): + await self.bot.report_error(err) async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: head = await bacon.get_block_header_async("head") @@ -145,7 +178,7 @@ async def minipool_user_distribute(self, ctx: Context): if eligible or distributable: # limit the number of distributions to not run out of gas - await ctx.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100])) + await ctx.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300)) else: await ctx.send(embed=embed) From 4ff427e8a16e1df591df65e582618584a71b8657 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 15:17:31 +0000 Subject: [PATCH 114/279] rename commands --- rocketwatch/plugins/proposals/proposals.py | 4 ++-- rocketwatch/plugins/user_distribute/user_distribute.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 89903581..827b0b56 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -497,9 +497,9 @@ async def client_distribution(self, ctx: Context, remove_allnodes=False): await ctx.send(embeds=embeds, files=files) @hybrid_command() - async 
def user_distribution(self, ctx: Context): + async def operator_type_distribution(self, ctx: Context): """ - Generate a distribution graph of users. + Generate a graph of NO groups. """ await ctx.defer(ephemeral=is_hidden_weak(ctx)) embed, file = await self.proposal_vs_node_operators_embed("type", "User") diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 249b67ec..0d3f5e44 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -142,7 +142,7 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: return eligible, pending, distributable @hybrid_command() - async def minipool_user_distribute(self, ctx: Context): + async def user_distribute_status(self, ctx: Context): """Show user distribute summary for minipools""" await ctx.defer(ephemeral=is_hidden_weak(ctx)) From 22bb74c115cd7b404dc367c752ae11e7a4f0ba16 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 18:54:35 +0000 Subject: [PATCH 115/279] add gas estimate --- .../user_distribute/user_distribute.py | 42 ++++++++++++------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 0d3f5e44..6f3e23fa 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,7 +1,7 @@ import time import logging from io import StringIO -from typing import Optional +from operator import itemgetter import discord from discord import ui, ButtonStyle, Interaction @@ -14,7 +14,6 @@ from utils.cfg import cfg from utils.embeds import Embed from utils.shared_w3 import w3, bacon -from utils.views import PageView from utils.visibility import is_hidden_weak log = logging.getLogger("user_distribute") @@ -32,15 +31,20 @@ 
async def instructions(self, interaction: Interaction, _) -> None: bud_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="beginUserDistribute")[2:]) dist_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:]) + calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] + calls += [(mp["address"], True, bud_calldata) for mp in self.eligible] + + multicall_contract = rp.get_contract_by_name("multicall3") + gas_used = multicall_contract.functions.aggregate3(calls).estimate_gas() + gas_price = w3.eth.gas_price + cost_eth = gas_used * gas_price / 1e18 + tuple_strs = [] - for mp in self.distributable: - tuple_strs.append(f"[\"{mp['address']}\", true, 0x{dist_calldata.hex()}]") - for mp in self.eligible: - tuple_strs.append(f"[\"{mp['address']}\", true, 0x{bud_calldata.hex()}]") - + for address, allow_failure, calldata in calls: + tuple_strs.append(f"[\"{address}\", {str(allow_failure).lower()}, 0x{calldata.hex()}]") + input_data = "[" + ",".join(tuple_strs) + "]" - - etherscan_url = "https://etherscan.io/address/0xcA11bde05977b3631167028862bE2a173976CA11#writeContract#F2" + etherscan_url = f"https://etherscan.io/address/{multicall_contract.address}#writeContract#F2" embed = Embed(title="Distribution Instructions") embed.description = ( @@ -59,6 +63,7 @@ async def instructions(self, interaction: Interaction, _) -> None: actions.append(f"begin the user distribution process for **{len(self.eligible)}** minipools") embed.description += "\nThis will " + " and ".join(actions) + "." 
+ embed.description += f"\nEstimated cost: **{cost_eth:,.5f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" await interaction.response.send_message( embed=embed, @@ -88,12 +93,14 @@ async def task(self): if not distributable: return - embed = Embed(title=":warning: User Distribution Window Open") - next_window_close = min(mp["ud_window_close"] for mp in distributable) + embed = Embed(title=":hourglass_flowing_sand: User Distribution Window Open") + count = len(distributable) + next_window_close = distributable[0]["ud_window_close"] embed.description = ( - f"There are **{len(distributable)}** minipools eligible for distribution.\n" + f"There {'are' if count != 1 else 'is'} **{count}** minipool{'s' if count != 1 else ''} eligible for distribution.\n" f"The next window closes !" ) + await channel.send(embed=embed, view=InstructionsView([], distributable[:100], instruction_timeout=(4 * 3600))) @task.before_loop @@ -103,7 +110,7 @@ async def before_task(self): @task.error async def on_task_error(self, err: Exception): await self.bot.report_error(err) - + async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: head = await bacon.get_block_header_async("head") current_epoch = int(head["data"]["header"]["message"]["slot"]) // 32 @@ -138,7 +145,10 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): # double check, DB may lag behind mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) - + + pending.sort(key=itemgetter("ud_window_open")) + distributable.sort(key=itemgetter("ud_window_close")) + return eligible, pending, distributable @hybrid_command() @@ -157,7 +167,7 @@ async def user_distribute_status(self, ctx: Context): ) if pending: - next_window_open = min(mp["ud_window_open"] for mp in pending) + next_window_open = pending[0]["ud_window_open"] embed.add_field( name="Pending", 
value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · next window opens ", @@ -167,7 +177,7 @@ async def user_distribute_status(self, ctx: Context): embed.add_field(name="Pending", value="**0** minipools", inline=False) if distributable: - next_window_close = min(mp["ud_window_close"] for mp in distributable) + next_window_close = distributable[0]["ud_window_close"] embed.add_field( name="Distributable", value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · next window closes ", From 145e22e961c9ec7d1faed338c83e222540520329 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 19:31:58 +0000 Subject: [PATCH 116/279] update from hybrid_command --- rocketwatch/plugins/proposals/proposals.py | 55 +++++++++---------- .../user_distribute/user_distribute.py | 12 ++-- 2 files changed, 33 insertions(+), 34 deletions(-) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 827b0b56..1afc8716 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -7,11 +7,10 @@ import asyncio from aiohttp.client_exceptions import ClientResponseError import matplotlib as mpl -from discord import File -from discord.utils import as_chunks +from discord import File, Interaction from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord.app_commands import command, describe +from discord.utils import as_chunks from matplotlib import pyplot as plt from pymongo import AsyncMongoClient, ASCENDING, DESCENDING from cronitor import Monitor @@ -272,21 +271,24 @@ async def gather_attribute(self, attribute, remove_allnodes=False): d[key] = entry return d - @hybrid_command() - async def version_chart(self, ctx: Context): + @command() + @describe(days="how many days to show history for") + async def version_chart(self, interaction: 
Interaction, days: int = 90): """ Show a historical chart of used Smart Node versions """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed(title="Version Chart") - e.description = "The graph below shows proposal stats using a **5-day rolling window**.\n" \ - "This only looks at proposals, it does not care about what individual minipools do." + e.description = ( + "The graph below shows proposal stats using a **5-day rolling window**. " + "It relies on proposal frequency to approximate adoption by active validator count." + ) # get proposals # limit to 6 months proposals = await self.db.proposals.find( { "version": {"$exists": 1}, - "slot" : {"$gt": date_to_beacon_block((datetime.now() - timedelta(days=180)).timestamp())} + "slot" : {"$gt": date_to_beacon_block((datetime.now() - timedelta(days=days)).timestamp())} }).sort("slot", 1).to_list(None) look_back = int(60 / 12 * 60 * 24 * 2) # last 2 days max_slot = proposals[-1]["slot"] @@ -374,9 +376,6 @@ async def version_chart(self, ctx: Context): plt.stackplot([x[-1], future_point], *last_y_values, colors=colors) plt.tight_layout() - # the title should mention that the /version_chart command contains more information about how this chart works. 
but short - plt.title("READ DESC OF /version_chart IF CONFUSED", y=0.95, fontsize=9) - # respond with image img = BytesIO() plt.savefig(img, format="png", bbox_inches="tight", dpi=300) @@ -385,10 +384,10 @@ async def version_chart(self, ctx: Context): e.set_image(url="attachment://chart.png") # send data - await ctx.send(embed=e, file=File(img, filename="chart.png")) + await interaction.followup.send(embed=e, file=File(img, filename="chart.png")) img.close() - async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes=False): + async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = False): # group by client and get count data = await self.gather_attribute(attr, remove_allnodes) @@ -459,7 +458,7 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes=False): ) ax2.set_title("Node Operators", fontsize=22) - async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnodes=False): + async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnodes: bool = False): fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8)) # iterate axes in pairs title = f"Rocket Pool {name} Distribution {'without Allnodes' if remove_allnodes else ''}" @@ -483,34 +482,34 @@ async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnode img.close() return e, f - @hybrid_command() - async def client_distribution(self, ctx: Context, remove_allnodes=False): + @command() + async def client_distribution(self, interaction: Interaction, remove_allnodes: bool = False): """ Generate a distribution graph of clients. 
""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) embeds, files = [], [] for attr, name in [["consensus_client", "Consensus Client"], ["execution_client", "Execution Client"]]: e, f = await self.proposal_vs_node_operators_embed(attr, name, remove_allnodes) embeds.append(e) files.append(f) - await ctx.send(embeds=embeds, files=files) + await interaction.followup.send(embeds=embeds, files=files) - @hybrid_command() - async def operator_type_distribution(self, ctx: Context): + @command() + async def operator_type_distribution(self, interaction: Interaction): """ Generate a graph of NO groups. """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) embed, file = await self.proposal_vs_node_operators_embed("type", "User") - await ctx.send(embed=embed, file=file) + await interaction.followup.send(embed=embed, file=file) - @hybrid_command() - async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_by_node_operators=False): + @command() + async def client_combo_ranking(self, interaction: Interaction, remove_allnodes: bool = False, group_by_node_operators: bool = False): """ Generate a ranking of most used execution and consensus clients. 
""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) # aggregate [consensus, execution] pair counts client_pairs = await (await self.db.minipool_proposals.aggregate([ { @@ -553,7 +552,7 @@ async def client_combo_ranking(self, ctx: Context, remove_allnodes=False, group_ for i, pair in enumerate(client_pairs) ) e.description = f"Currently showing {'node operator' if group_by_node_operators else 'validator'} counts\n```{desc}```" - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 6f3e23fa..8af4bb38 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -6,7 +6,7 @@ import discord from discord import ui, ButtonStyle, Interaction from discord.ext import commands, tasks -from discord.ext.commands import Context, hybrid_command +from discord.app_commands import command from pymongo import AsyncMongoClient, ASCENDING from rocketwatch import RocketWatch @@ -151,10 +151,10 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: return eligible, pending, distributable - @hybrid_command() - async def user_distribute_status(self, ctx: Context): + @command() + async def user_distribute_status(self, interaction: Interaction): """Show user distribute summary for minipools""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) eligible, pending, distributable = await self._fetch_minipools() @@ -188,9 +188,9 @@ async def user_distribute_status(self, ctx: Context): if eligible or distributable: # limit the number of distributions to not run out of gas - await ctx.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300)) + await 
interaction.followup.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300)) else: - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) async def setup(bot): From ce8cd1c541a4c865f6d6a859fdde227699fc8acd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 28 Feb 2026 23:25:51 +0000 Subject: [PATCH 117/279] support for more levels in legacy tree --- .../minipool_states/minipool_states.py | 10 +++- rocketwatch/utils/readable.py | 60 ++++++++++++------- 2 files changed, 47 insertions(+), 23 deletions(-) diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py index 61631cdb..ad3b24b4 100644 --- a/rocketwatch/plugins/minipool_states/minipool_states.py +++ b/rocketwatch/plugins/minipool_states/minipool_states.py @@ -65,10 +65,16 @@ async def minipool_states(self, ctx: Context): data["closed"][status_2] = data["closed"].get(status_2, 0) + 1 case _: logging.warning(f"Unknown status {minipool['status']}") - + + # collapse tree where possible + for status in list(data.keys()): + if len(data[status]) == 1: + sub_status = list(data[status].keys())[0] + data[status] = data[status][sub_status] + embed = Embed(title="Minipool States", color=0x00ff00) description = "```\n" - # render dict as a tree like structure + # render dict as a tree-like structure description += render_tree_legacy(data, "Minipools") total_listed_valis = len(exiting_valis) + len(withdrawn_valis) diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 4348fa47..bfdb8996 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -66,29 +66,47 @@ def advanced_tnx_url(tx_hash): def render_tree_legacy(data: dict, name: str) -> str: - # remove empty states - data = {k: v for k, v in data.items() if v} - strings = [] - values = [] - for i, (state, substates) in enumerate(data.items()): - c = 
sum(substates.values()) - l = "├" if i != len(data) - 1 else "└" - strings.append(f" {l}{state.title()}: ") - values.append(c) - l = "│" if i != len(data) - 1 else " " - for j, (substate, count) in enumerate(substates.items()): - sl = "├" if j != len(substates) - 1 else "└" - strings.append(f" {l} {sl}{substate.title()}: ") - values.append(count) + def render_branch(_data: dict[str, dict | int]) -> tuple[list, list, int]: + _strings = [] + _values = [] + count = 0 + + for i, (state, sub_data) in enumerate(_data.items()): + if not sub_data: + continue + + link = "├" if (i != len(_data) - 1) else "└" + _strings.append(f" {link}{state.title()}: ") + + if isinstance(sub_data, dict): + sub_strings, sub_values, sub_count = render_branch(sub_data) + sub_link = " │" if (i != len(_data) - 1) else " " + _strings.extend([sub_link + s for s in sub_strings]) + _values.append(sub_count) + _values.extend(sub_values) + count += sub_count + elif isinstance(sub_data, int): + _values.append(sub_data) + count += sub_data + + return _strings, _values, count + + strings, values, tree_sum = render_branch(data) + strings.insert(0, f"{name}:") + values.insert(0, tree_sum) + + fmt_values = [f"{v:,}" for v in values] + # longest string offset max_left_len = max(len(s) for s in strings) - max_right_len = max(len(str(v)) for v in values) - # right align all values - for i, v in enumerate(values): - strings[i] = strings[i].ljust(max_left_len) + str(v).rjust(max_right_len) - description = f"{name}:\n" - description += "\n".join(strings) - return description + max_right_len = max(len(v) for v in fmt_values) + + lines = [] + for s, v in zip(strings, fmt_values): + # right align all values + lines.append(s.ljust(max_left_len) + v.rjust(max_right_len)) + + return "\n".join(lines) def render_branch(k, v, prefix, current_depth=0, max_depth=0, reverse=False, m_prev=""): From 7407517cc23d313eaf8a66ae0d4ee9212462bb26 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 
Mar 2026 00:24:47 +0000 Subject: [PATCH 118/279] small tweak to DB attributes --- .../plugins/db_upkeep_task/db_upkeep_task.py | 65 ++++++++++++------- rocketwatch/plugins/random/random.py | 2 +- rocketwatch/plugins/rpl/rpl.py | 5 +- rocketwatch/plugins/tvl/tvl.py | 2 +- 4 files changed, 47 insertions(+), 27 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index d432abe5..5e363179 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -20,7 +20,7 @@ from utils.event_logs import get_logs -log = logging.getLogger("node_task") +log = logging.getLogger("db_upkeep_task") log.setLevel(cfg["log_level"]) @@ -56,7 +56,7 @@ def __init__(self, bot: RocketWatch): self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 50 self.bot.loop.create_task(self.loop()) - + async def loop(self): await self.bot.wait_until_ready() await self.check_indexes() @@ -65,15 +65,17 @@ async def loop(self): self.monitor.ping(state="run", series=p_id) try: log.debug("starting db upkeep task") + # node tasks + await self.add_untracked_node_operators() + await self.add_static_data_to_node_operators() + await self.update_dynamic_node_operator_metadata() + # minipool tasks await self.add_untracked_minipools() await self.add_static_data_to_minipools() - await self.update_dynamic_minipool_metadata() await self.add_static_deposit_data_to_minipools() await self.add_static_beacon_data_to_minipools() + await self.update_dynamic_minipool_metadata() await self.update_dynamic_minipool_beacon_metadata() - await self.add_untracked_node_operators() - await self.add_static_data_to_node_operators() - await self.update_dynamic_node_operator_metadata() log.debug("finished db upkeep task") self.monitor.ping(state="complete", series=p_id) except Exception as err: @@ -195,7 +197,7 @@ async def 
update_dynamic_minipool_metadata(self): async def add_static_deposit_data_to_minipools(self): # get all minipool addresses and their status time from db that : # - do not have a deposit_amount - # - are in the initialised state + # - are in the initialized state # sort by status time minipools = await self.db.minipools.find( {"deposit_amount": {"$exists": False}, "status": "initialised"}, @@ -372,12 +374,20 @@ async def add_untracked_node_operators(self): @timerun_async async def add_static_data_to_node_operators(self): - ndf = rp.get_contract_by_name("rocketNodeDistributorFactory") + df = rp.get_contract_by_name("rocketNodeDistributorFactory") + mf = rp.get_contract_by_name("rocketMegapoolFactory") lambs = [ - lambda a: (ndf.address, [rp.seth_sig(ndf.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), + lambda a: (df.address, [rp.seth_sig(df.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), + lambda a: (mf.address, [rp.seth_sig(mf.abi, "getExpectedAddress"), a], [((a, "megapool_address"), None)]), ] # get all minipool addresses from db that do not have a node operator assigned - node_addresses = await self.db.node_operators.distinct("address", {"fee_distributor_address": {"$exists": False}}) + node_addresses = await self.db.node_operators.distinct( + "address", + {"$or": [ + {"fee_distributor_address": {"$exists": False}}, + {"megapool_address": {"$exists": False} + }]} + ) # get node operator addresses from rp # return early if no minipools need to be updated if not node_addresses: @@ -409,43 +419,54 @@ async def add_static_data_to_node_operators(self): @timerun_async async def update_dynamic_node_operator_metadata(self): - ndf = rp.get_contract_by_name("rocketNodeDistributorFactory") + mf = rp.get_contract_by_name("rocketMegapoolFactory") nd = rp.get_contract_by_name("rocketNodeDeposit") nm = rp.get_contract_by_name("rocketNodeManager") mm = rp.get_contract_by_name("rocketMinipoolManager") ns = 
rp.get_contract_by_name("rocketNodeStaking") mc = rp.get_contract_by_name("multicall3") lambs = [ - lambda n: (ndf.address, [rp.seth_sig(ndf.abi, "getProxyAddress"), n["address"]], - [((n["address"], "fee_distributor_address"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getNodeWithdrawalAddress"), n["address"]], [((n["address"], "withdrawal_address"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getNodeTimezoneLocation"), n["address"]], [((n["address"], "timezone_location"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getFeeDistributorInitialised"), n["address"]], - [((n["address"], "fee_distributor_initialised"), None)]), - lambda n: ( - nm.address, [rp.seth_sig(nm.abi, "getRewardNetwork"), n["address"]], - [((n["address"], "reward_network"), None)]), + [((n["address"], "fee_distributor_initialized"), None)]), + lambda n: (nm.address, [rp.seth_sig(mf.abi, "getMegapoolDeployed"), n["address"]], + [((n["address"], "megapool_deployed"), is_true)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getSmoothingPoolRegistrationState"), n["address"]], - [((n["address"], "smoothing_pool_registration_state"), None)]), + [((n["address"], "smoothing_pool_registration"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getAverageNodeFee"), n["address"]], [((n["address"], "average_node_fee"), safe_to_float)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), n["address"]], [((n["address"], "rpl_stake"), safe_to_float)]), - # lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeEffectiveRPLStake"), n["address"]], - # [((n["address"], "effective_rpl_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLegacyStakedRPL"), n["address"]], + [((n["address"], "legacy_rpl_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeMegapoolStakedRPL"), n["address"]], + [((n["address"], "megapool_rpl_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLockedRPL"), 
n["address"]], + [((n["address"], "locked_rpl"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeUnstakingRPL"), n["address"]], + [((n["address"], "unstaking_rpl"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeRPLStakedTime"), n["address"]], + [((n["address"], "last_rpl_stake_time"), None)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLastUnstakeTime"), n["address"]], + [((n["address"], "last_rpl_unstake_time"), None)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeETHCollateralisationRatio"), n["address"]], [((n["address"], "effective_node_share"), safe_inv)]), lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor_address"]], [((n["address"], "fee_distributor_eth_balance"), safe_to_float)]), + lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["megapool_address"]], + [((n["address"], "megapool_eth_balance"), safe_to_float)]), lambda n: (mm.address, [rp.seth_sig(mm.abi, "getNodeStakingMinipoolCount"), n["address"]], [((n["address"], "staking_minipool_count"), None)]), lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeDepositCredit"), n["address"]], - [((n["address"], "deposit_credit"), safe_to_float)]) + [((n["address"], "node_credit"), safe_to_float)]), + lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeEthBalance"), n["address"]], + [((n["address"], "node_eth_balance"), safe_to_float)]) ] # get all node operators from db, but we only care about the address and the fee_distributor_address - nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor_address": 1}).to_list() + nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor_address": 1, "megapool_address": 1}).to_list() for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): data = {} res = await rp.multicall2( diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 182f4b7b..526e5bee 100644 --- 
a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -214,7 +214,7 @@ async def smoothie(self, ctx: Context): '$project': { '_id' : 1, 'count' : 1, - 'smoothie': '$meta.smoothing_pool_registration_state' + 'smoothie': '$meta.smoothing_pool_registration' } }, { '$group': { diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 49a46ce8..57af7873 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -152,7 +152,7 @@ async def withdrawable_rpl(self, } }, { '$project': { - 'ethStake': { + 'eth_stake': { '$multiply': [ '$effective_node_share', { '$multiply': [ @@ -180,8 +180,7 @@ async def withdrawable_rpl(self, liquid_rpl = 0 for node in data: - - eth_stake = node["ethStake"] + eth_stake = node["eth_stake"] rpl_stake = node["rpl_stake"] # if there are no pools, then all the RPL can be withdrawn diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 86d6fd34..0d4aa926 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -274,7 +274,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): tmp = await (await self.db.node_operators.aggregate([ { '$match': { - 'smoothing_pool_registration_state': True, + 'smoothing_pool_registration': True, 'staking_minipool_count' : { '$ne': 0 } From 5b64426f996caf6105fbfe0e6df7a51377b5bd9e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 Mar 2026 01:13:04 +0000 Subject: [PATCH 119/279] node operator db field groups --- .../plugins/db_upkeep_task/db_upkeep_task.py | 58 +++++++++---------- rocketwatch/plugins/tvl/tvl.py | 8 +-- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 5e363179..24fafa2d 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ 
b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -377,15 +377,15 @@ async def add_static_data_to_node_operators(self): df = rp.get_contract_by_name("rocketNodeDistributorFactory") mf = rp.get_contract_by_name("rocketMegapoolFactory") lambs = [ - lambda a: (df.address, [rp.seth_sig(df.abi, "getProxyAddress"), a], [((a, "fee_distributor_address"), None)]), - lambda a: (mf.address, [rp.seth_sig(mf.abi, "getExpectedAddress"), a], [((a, "megapool_address"), None)]), + lambda a: (df.address, [rp.seth_sig(df.abi, "getProxyAddress"), a], [((a, "fee_distributor.address"), None)]), + lambda a: (mf.address, [rp.seth_sig(mf.abi, "getExpectedAddress"), a], [((a, "megapool.address"), None)]), ] # get all minipool addresses from db that do not have a node operator assigned node_addresses = await self.db.node_operators.distinct( "address", {"$or": [ - {"fee_distributor_address": {"$exists": False}}, - {"megapool_address": {"$exists": False} + {"fee_distributor.address": {"$exists": False}}, + {"megapool.address": {"$exists": False} }]} ) # get node operator addresses from rp @@ -430,43 +430,43 @@ async def update_dynamic_node_operator_metadata(self): [((n["address"], "withdrawal_address"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getNodeTimezoneLocation"), n["address"]], [((n["address"], "timezone_location"), None)]), - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getFeeDistributorInitialised"), n["address"]], - [((n["address"], "fee_distributor_initialized"), None)]), - lambda n: (nm.address, [rp.seth_sig(mf.abi, "getMegapoolDeployed"), n["address"]], - [((n["address"], "megapool_deployed"), is_true)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getSmoothingPoolRegistrationState"), n["address"]], [((n["address"], "smoothing_pool_registration"), None)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getAverageNodeFee"), n["address"]], [((n["address"], "average_node_fee"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), 
n["address"]], - [((n["address"], "rpl_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLegacyStakedRPL"), n["address"]], - [((n["address"], "legacy_rpl_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeMegapoolStakedRPL"), n["address"]], - [((n["address"], "megapool_rpl_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLockedRPL"), n["address"]], - [((n["address"], "locked_rpl"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeUnstakingRPL"), n["address"]], - [((n["address"], "unstaking_rpl"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeRPLStakedTime"), n["address"]], - [((n["address"], "last_rpl_stake_time"), None)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLastUnstakeTime"), n["address"]], - [((n["address"], "last_rpl_unstake_time"), None)]), lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeETHCollateralisationRatio"), n["address"]], [((n["address"], "effective_node_share"), safe_inv)]), - lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor_address"]], - [((n["address"], "fee_distributor_eth_balance"), safe_to_float)]), - lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["megapool_address"]], - [((n["address"], "megapool_eth_balance"), safe_to_float)]), lambda n: (mm.address, [rp.seth_sig(mm.abi, "getNodeStakingMinipoolCount"), n["address"]], [((n["address"], "staking_minipool_count"), None)]), lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeDepositCredit"), n["address"]], [((n["address"], "node_credit"), safe_to_float)]), lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeEthBalance"), n["address"]], - [((n["address"], "node_eth_balance"), safe_to_float)]) + [((n["address"], "node_eth_balance"), safe_to_float)]), + lambda n: (nm.address, [rp.seth_sig(nm.abi, "getFeeDistributorInitialised"), n["address"]], + [((n["address"], 
"fee_distributor.initialized"), None)]), + lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor"]["address"]], + [((n["address"], "fee_distributor.eth_balance"), safe_to_float)]), + lambda n: (nm.address, [rp.seth_sig(mf.abi, "getMegapoolDeployed"), n["address"]], + [((n["address"], "megapool.deployed"), is_true)]), + lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["megapool"]["address"]], + [((n["address"], "megapool.eth_balance"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), n["address"]], + [((n["address"], "rpl.total_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLegacyStakedRPL"), n["address"]], + [((n["address"], "rpl.legacy_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeMegapoolStakedRPL"), n["address"]], + [((n["address"], "rpl.megapool_stake"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLockedRPL"), n["address"]], + [((n["address"], "rpl.locked"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeUnstakingRPL"), n["address"]], + [((n["address"], "rpl.unstaking"), safe_to_float)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeRPLStakedTime"), n["address"]], + [((n["address"], "rpl.last_stake_time"), None)]), + lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLastUnstakeTime"), n["address"]], + [((n["address"], "rpl.last_unstake_time"), None)]) ] - # get all node operators from db, but we only care about the address and the fee_distributor_address - nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor_address": 1, "megapool_address": 1}).to_list() + # get all node operators from db, but we only care about the address and the fee_distributor.address + nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor.address": 1, "megapool.address": 1}).to_list() for node_batch in as_chunks(nodes, self.batch_size 
// len(lambs)): data = {} res = await rp.multicall2( diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 0d4aa926..7091163d 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -367,13 +367,13 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): tmp = await (await self.db.node_operators.aggregate([ { '$match': { - 'fee_distributor_eth_balance': { + 'fee_distributor.eth_balance': { '$gt': 0 } } }, { '$project': { - 'fee_distributor_eth_balance': 1, + 'fee_distributor.eth_balance': 1, 'node_share' : { '$sum': [ '$effective_node_share', { @@ -392,12 +392,12 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): '$project': { 'node_share': { '$multiply': [ - '$fee_distributor_eth_balance', '$node_share' + '$fee_distributor.eth_balance', '$node_share' ] }, 'reth_share': { '$multiply': [ - '$fee_distributor_eth_balance', { + '$fee_distributor.eth_balance', { '$subtract': [ 1, '$node_share' ] From 0b754ef86dbf11c271c06da2e8b0e09d0a1cf3e2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 Mar 2026 01:13:37 +0000 Subject: [PATCH 120/279] fix RPL commands --- rocketwatch/plugins/rpl/rpl.py | 166 ++++++++++----------------------- 1 file changed, 50 insertions(+), 116 deletions(-) diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 57af7873..ceae8fad 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -3,21 +3,17 @@ import humanize import matplotlib.pyplot as plt -import numpy as np -from discord import File +from discord import File, Interaction from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord.app_commands import command from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg from utils.embeds import Embed -from 
utils.block_time import ts_to_block from utils.rocketpool import rp -from utils.shared_w3 import w3 -from utils.visibility import is_hidden +from utils.visibility import is_hidden_weak log = logging.getLogger("rpl") log.setLevel(cfg["log_level"]) @@ -28,120 +24,56 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - @hybrid_command() - async def rpl_apr(self, ctx: Context): + @command() + async def staked_rpl(self, interaction: Interaction): """ - Show the RPL APR. + Show the amount of RPL staked """ - await ctx.defer(ephemeral=is_hidden(ctx)) - e = Embed() - - reward_duration = rp.call("rocketRewardsPool.getClaimIntervalTime") - total_rpl_staked = await (await self.db.node_operators.aggregate([ - { - '$group': { - '_id' : 'out', - 'total_effective_rpl_stake': { - '$sum': '$effective_rpl_stake' - } - } - } - ])).next() - total_rpl_staked = total_rpl_staked["total_effective_rpl_stake"] - - # track down the rewards for node operators from the last reward period - contract = rp.get_contract_by_name("rocketVault") - m = ts_to_block(rp.call("rocketRewardsPool.getClaimIntervalTimeStart")) - events = contract.events["TokenDeposited"].getLogs(argument_filters={ - "by": w3.solidity_keccak( - ["string", "address"], - ["rocketMerkleDistributorMainnet", rp.get_address_by_name("rocketTokenRPL")]) - }, fromBlock=m - 10000, toBlock=m + 10000) - perc_nodes = solidity.to_float(rp.call("rocketRewardsPool.getClaimingContractPerc", "rocketClaimNode")) - perc_odao = solidity.to_float(rp.call("rocketRewardsPool.getClaimingContractPerc", "rocketClaimTrustedNode")) - node_operator_rewards = solidity.to_float(events[0].args.amount) * (perc_nodes / (perc_nodes + perc_odao)) - if not e: - raise Exception("no rpl deposit event found") - - xmin = total_rpl_staked * 0.66 - xmax = total_rpl_staked * 1.33 - x = np.linspace(xmin, xmax) - - def apr_curve(staked): - return (node_operator_rewards / staked) / (reward_duration / 60 / 60 / 
24) * 365 - - apr = apr_curve(total_rpl_staked) - y = apr_curve(x) - fig = plt.figure() - plt.plot(x, y, color=str(e.color)) - plt.xlim(xmin, xmax) - plt.ylim(apr_curve(xmax) * 0.9, apr_curve(xmin) * 1.1) - plt.plot(total_rpl_staked, apr, 'bo') - plt.annotate(f"{apr:.2%}", (total_rpl_staked, apr), - textcoords="offset points", xytext=(-10, -5), ha='right') - plt.annotate(f"{total_rpl_staked / 1000000:.2f} million staked", - (total_rpl_staked, apr), textcoords="offset points", xytext=(10, -5), ha='left') - plt.grid() - - ax = plt.gca() - ax.xaxis.set_major_formatter(lambda x, _: "{:.1f}m".format(x / 1000000)) - ax.yaxis.set_major_formatter("{x:.2%}") - ax.set_ylabel("APR") - ax.set_xlabel("RPL Staked") - fig.tight_layout() + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + + rpl_supply = solidity.to_float(rp.call("rocketTokenRPL.totalSupply")) + legacy_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) + megapool_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) + total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalStakedRPL")) + unstaked_rpl = rpl_supply - total_rpl_staked + + sizes = [legacy_staked_rpl, megapool_staked_rpl, unstaked_rpl] + labels = ["Legacy", "Megapools", "Unstaked"] + colors = ["#CC4400", "#FF6B00", "#808080"] + + fig, ax = plt.subplots() + ax.pie( + sizes, + labels=labels, + colors=colors, + autopct="%1.1f%%", + startangle=90, + wedgeprops={"linewidth": 0.5, "edgecolor": "white"}, + ) img = BytesIO() - fig.savefig(img, format='png') + fig.tight_layout() + fig.savefig(img, format="png") img.seek(0) - plt.close() - - e.title = "RPL APR Graph" - e.set_image(url="attachment://graph.png") - f = File(img, filename="graph.png") - await ctx.send(embed=e, files=[f]) + plt.close(fig) + + embed = Embed() + embed.title = "Staked RPL" + embed.add_field(name="Legacy", value=f"{humanize.intcomma(legacy_staked_rpl, 2)}", inline=True) + 
embed.add_field(name="Megapools", value=f"{humanize.intcomma(megapool_staked_rpl, 2)}", inline=True) + embed.add_field(name="Total Staked", value=f"{humanize.intcomma(total_rpl_staked, 2)}", inline=True) + embed.set_image(url="attachment://graph.png") + file = File(img, filename="graph.png") + + await interaction.followup.send(embed=embed, file=file) img.close() - @hybrid_command() - async def effective_rpl_staked(self, ctx: Context): - """ - Show the effective RPL staked by users - """ - await ctx.defer(ephemeral=is_hidden(ctx)) - e = Embed() - # get total RPL staked - total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalStakedRPL")) - e.add_field(name="Total RPL Staked:", value=f"{humanize.intcomma(total_rpl_staked, 2)} RPL", inline=False) - # get effective RPL staked - effective_rpl_stake = await (await self.db.node_operators.aggregate([ - { - '$group': { - '_id' : 'out', - 'total_effective_rpl_stake': { - '$sum': '$effective_rpl_stake' - } - } - } - ])).next() - effective_rpl_stake = effective_rpl_stake["total_effective_rpl_stake"] # calculate percentage staked - percentage_staked = effective_rpl_stake / total_rpl_staked - e.add_field(name="Effective RPL Staked:", value=f"{humanize.intcomma(effective_rpl_stake, 2)} RPL " - f"({percentage_staked:.2%})", inline=False) - # get total supply - total_rpl_supply = solidity.to_float(rp.call("rocketTokenRPL.totalSupply")) - # calculate total staked as a percentage of total supply - percentage_of_total_staked = total_rpl_staked / total_rpl_supply - e.add_field(name="Percentage of RPL Supply Staked:", value=f"{percentage_of_total_staked:.2%}", inline=False) - await ctx.send(embed=e) - - @hybrid_command() - async def withdrawable_rpl(self, - ctx: Context): + @command() + async def withdrawable_rpl(self, interaction: Interaction): """ Show the available liquidity at different RPL/ETH prices """ - await ctx.defer(ephemeral=is_hidden(ctx)) - e = Embed() - img = BytesIO() + await 
interaction.response.defer(ephemeral=is_hidden_weak(interaction)) data = await (await self.db.node_operators.aggregate([ { @@ -161,7 +93,7 @@ async def withdrawable_rpl(self, } ] }, - 'rpl_stake': 1 + 'rpl.legacy_stake': 1 } } ])).to_list() @@ -205,7 +137,7 @@ async def withdrawable_rpl(self, x, y = zip(*list(free_rpl_liquidity.values())) # plot the data - plt.plot(x, y, color=str(e.color)) + plt.plot(x, y, color=str(embed.color)) plt.plot(rpl_eth_price, current_withdrawable_rpl, 'bo') plt.xlim(min(x), max(x)) @@ -222,16 +154,18 @@ async def withdrawable_rpl(self, ax.yaxis.set_major_formatter(lambda x, _: "{:.1f}m".format(x / 1000000)) ax.xaxis.set_major_formatter(lambda x, _: "{:.4f}".format(x)) + img = BytesIO() plt.tight_layout() plt.savefig(img, format='png') img.seek(0) plt.close() - e.title = "Available RPL Liquidity" - e.set_image(url="attachment://graph.png") + embed = Embed() + embed.title = "Available RPL Liquidity" + embed.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") - await ctx.send(embed=e, files=[f]) + await interaction.followup.send(embed=embed, files=[f]) img.close() From da59ced17303e751bbd6cfaf1f04f0493b4be20a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 Mar 2026 01:40:05 +0000 Subject: [PATCH 121/279] add unstaking RPL --- rocketwatch/plugins/rpl/rpl.py | 39 ++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index ceae8fad..1b00b017 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -1,7 +1,6 @@ import logging from io import BytesIO -import humanize import matplotlib.pyplot as plt from discord import File, Interaction from discord.ext import commands @@ -34,19 +33,40 @@ async def staked_rpl(self, interaction: Interaction): rpl_supply = solidity.to_float(rp.call("rocketTokenRPL.totalSupply")) legacy_staked_rpl = 
solidity.to_float(rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) megapool_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) - total_rpl_staked = solidity.to_float(rp.call("rocketNodeStaking.getTotalStakedRPL")) - unstaked_rpl = rpl_supply - total_rpl_staked + staked_rpl = legacy_staked_rpl + megapool_staked_rpl + unstaking_rpl = (await (await self.db.node_operators.aggregate([ + { + '$group': { + '_id' : 'out', + 'total_unstaking_rpl_': { + '$sum': '$rpl.unstaking' + } + } + } + ])).next())['total_unstaking_rpl_'] + unstaked_rpl = rpl_supply - staked_rpl - unstaking_rpl - sizes = [legacy_staked_rpl, megapool_staked_rpl, unstaked_rpl] - labels = ["Legacy", "Megapools", "Unstaked"] - colors = ["#CC4400", "#FF6B00", "#808080"] + def fmt(v): + if v >= 1_000_000: + return f"{v / 1_000_000:.2f}M" + if v >= 1_000: + return f"{v / 1_000:.1f}K" + return f"{v:.0f}" + + sizes = [legacy_staked_rpl, megapool_staked_rpl, unstaking_rpl, unstaked_rpl] + labels = ["Legacy", "Megapools", "Unstaking", "Unstaked"] + colors = ["#CC4400", "#FF6B00", "#D2B48C", "#808080"] + + total = sum(sizes) + def autopct(pct): + return f"{fmt(pct / 100 * total)} ({pct:.1f}%)" fig, ax = plt.subplots() ax.pie( sizes, labels=labels, colors=colors, - autopct="%1.1f%%", + autopct=autopct, startangle=90, wedgeprops={"linewidth": 0.5, "edgecolor": "white"}, ) @@ -59,12 +79,9 @@ async def staked_rpl(self, interaction: Interaction): embed = Embed() embed.title = "Staked RPL" - embed.add_field(name="Legacy", value=f"{humanize.intcomma(legacy_staked_rpl, 2)}", inline=True) - embed.add_field(name="Megapools", value=f"{humanize.intcomma(megapool_staked_rpl, 2)}", inline=True) - embed.add_field(name="Total Staked", value=f"{humanize.intcomma(total_rpl_staked, 2)}", inline=True) embed.set_image(url="attachment://graph.png") file = File(img, filename="graph.png") - + await interaction.followup.send(embed=embed, file=file) img.close() From 
e1792515ccfb9c5a84be5554c8ec93702e77995c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 Mar 2026 01:51:02 +0000 Subject: [PATCH 122/279] fix withdrawable_rpl --- rocketwatch/plugins/rpl/rpl.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 1b00b017..e0868c11 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -110,7 +110,7 @@ async def withdrawable_rpl(self, interaction: Interaction): } ] }, - 'rpl.legacy_stake': 1 + 'rpl_stake': "$rpl.legacy_stake" } } ])).to_list() @@ -120,7 +120,7 @@ async def withdrawable_rpl(self, interaction: Interaction): # i/10 is the ratio of the price checked to the actual RPL ETH price free_rpl_liquidity = {} - max_collateral = solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMaximumPerMinipoolStake")) + max_collateral = solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMinimumLegacyRPLStake")) current_withdrawable_rpl = 0 for i in range(1, 31): @@ -153,6 +153,8 @@ async def withdrawable_rpl(self, interaction: Interaction): # break the tuples into lists to plot x, y = zip(*list(free_rpl_liquidity.values())) + embed = Embed() + # plot the data plt.plot(x, y, color=str(embed.color)) plt.plot(rpl_eth_price, current_withdrawable_rpl, 'bo') @@ -178,7 +180,6 @@ async def withdrawable_rpl(self, interaction: Interaction): plt.close() - embed = Embed() embed.title = "Available RPL Liquidity" embed.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") From 92540db9ef43ec2d15fc65ee606f38cdc32c61c5 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 1 Mar 2026 13:27:39 +0000 Subject: [PATCH 123/279] remove unused modules --- .../plugins/chat_summary/chat_summary.py | 2 +- .../plugins/chicken_soup/chicken_soup.py | 4 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 2 + rocketwatch/plugins/defi/defi.py | 118 
------------- rocketwatch/plugins/karma/karma.py | 165 ------------------ 5 files changed, 5 insertions(+), 286 deletions(-) delete mode 100644 rocketwatch/plugins/defi/defi.py delete mode 100644 rocketwatch/plugins/karma/karma.py diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 0bd5b39b..420428d8 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -85,7 +85,7 @@ async def summarize_chat(self, ctx: Context): "----------------\n" "- Discussions between invis, langers, knoshua and more about the meaning of life.\n" "- The current status of the war in europe was discussed.\n" - "- Patches announced that he has been taking a vacation in switzerland and shared some images of his skiing.}\n" + "- Patches announced that he has been taking a vacation in Switzerland and shared some images of his skiing.}\n" "----------------\n\n" "Please begin the task now." ) diff --git a/rocketwatch/plugins/chicken_soup/chicken_soup.py b/rocketwatch/plugins/chicken_soup/chicken_soup.py index a7b2c1d5..38875ea5 100644 --- a/rocketwatch/plugins/chicken_soup/chicken_soup.py +++ b/rocketwatch/plugins/chicken_soup/chicken_soup.py @@ -1,10 +1,10 @@ +from datetime import datetime, timedelta + from discord import Interaction from discord.ext import commands from discord.app_commands import command from rocketwatch import RocketWatch -from datetime import datetime, timedelta - class ChickenSoup(commands.Cog): def __init__(self, bot: RocketWatch): diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 24fafa2d..ac083dc1 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -69,6 +69,7 @@ async def loop(self): await self.add_untracked_node_operators() await self.add_static_data_to_node_operators() await 
self.update_dynamic_node_operator_metadata() + # TODO: update megapool stats if deployed # minipool tasks await self.add_untracked_minipools() await self.add_static_data_to_minipools() @@ -76,6 +77,7 @@ async def loop(self): await self.add_static_beacon_data_to_minipools() await self.update_dynamic_minipool_metadata() await self.update_dynamic_minipool_beacon_metadata() + # TODO: populate megapool validator DB log.debug("finished db upkeep task") self.monitor.ping(state="complete", series=p_id) except Exception as err: diff --git a/rocketwatch/plugins/defi/defi.py b/rocketwatch/plugins/defi/defi.py deleted file mode 100644 index 497f2a2e..00000000 --- a/rocketwatch/plugins/defi/defi.py +++ /dev/null @@ -1,118 +0,0 @@ -import logging - -from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command - -from rocketwatch import RocketWatch -from utils import solidity -from utils.cfg import cfg -from utils.embeds import Embed, el_explorer_url -from utils.rocketpool import rp -from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak - -log = logging.getLogger("defi") -log.setLevel(cfg["log_level"]) - - -class DeFi(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - - @hybrid_command() - async def curve(self, ctx: Context): - """ - Show stats of the curve pool - """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - e = Embed() - e.title = "Curve Pool" - reth_r, wsteth_r = rp.call("curvePool.get_balances") - # token amounts - reth = solidity.to_float(reth_r) - wsteth = solidity.to_float(wsteth_r) - # token values - reth_v = solidity.to_float(rp.call("rocketTokenRETH.getEthValue", reth_r)) - wsteth_v = solidity.to_float(rp.call("wstETHToken.getStETHByWstETH", wsteth_r)) - # token shares - reth_s = reth / (reth + wsteth) - wsteth_s = wsteth / (reth + wsteth) - e.add_field( - name="rETH Locked", - value=f"`{reth:,.2f} rETH ({reth_s:.0%})`", - ) - e.add_field( - name="wstETH 
Locked", - value=f"`{wsteth:,.2f} wstETH ({wsteth_s:.0%})`", - ) - total_locked = reth_v + wsteth_v - total_locked_usd = total_locked * rp.get_eth_usdc_price() - e.add_field( - name="Total Value Locked", - value=f"`{total_locked:,.2f} ETH ({total_locked_usd:,.2f} USDC)", - inline=False, - ) - # rETH => wstETH premium - eth_to_wsteth = rp.call("curvePool.get_dy", 0, 1, rp.call("rocketTokenRETH.getRethValue", w3.toWei(1, "ether"))) - e.add_field( - name="Current rETH => wstETH Exchange (Assuming true-lsd value)", - value=f"`1 ETH worth of rETH will get you " - f"{solidity.to_float(rp.call('wstETHToken.getStETHByWstETH',eth_to_wsteth)):,.4f} ETH " - f"worth of wstETH`", - inline=False, - ) - # wstETH => rETH premium - eth_to_reth = rp.call("curvePool.get_dy", 1, 0, rp.call("wstETHToken.getWstETHByStETH", w3.toWei(1, "ether"))) - e.add_field( - name="Current wstETH => rETH Exchange (Assuming true-lsd value)", - value=f"`1 ETH worth of wstETH will get you " - f"{solidity.to_float(rp.call('rocketTokenRETH.getEthValue',eth_to_reth)):,.4f} ETH" - f" worth of rETH`", - inline=False, - ) - token_name = rp.call("curvePool.symbol") - link = el_explorer_url(rp.get_address_by_name("curvePool"), token_name) - e.add_field( - name="Contract Address", - value=link, - ) - await ctx.send(embed=e) - - @hybrid_command() - async def yearn(self, ctx: Context): - """ - Show stats of the yearn vault - """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - e = Embed() - e.title = "Yearn Pool" - deposit_limit = solidity.to_float(rp.call("yearnPool.depositLimit")) - deposited = solidity.to_float(rp.call("yearnPool.totalAssets")) - asset_name = rp.call("curvePool.symbol") - e.add_field( - name="Deposit Limit Status", - value=f"`{deposited:,.2f}/{deposit_limit:,.2f} {asset_name}`", - ) - reth_r, wsteth_r = rp.call("curvePool.get_balances") - # token values - reth_v = solidity.to_float(rp.call("rocketTokenRETH.getEthValue", reth_r)) - wsteth_v = 
solidity.to_float(rp.call("wstETHToken.getStETHByWstETH", wsteth_r)) - yearn_locked = (reth_v + wsteth_v) * (rp.call("yearnPool.totalAssets") / rp.call("curvePool.totalSupply")) - yearn_locked_usd = yearn_locked * rp.get_eth_usdc_price() - e.add_field( - name="Total Value Locked", - value=f"`{yearn_locked:,.2f} ETH ({yearn_locked_usd:,.2f} USDC)`", - inline=False - ) - token_name = rp.call("yearnPool.symbol") - link = el_explorer_url(rp.get_address_by_name("yearnPool"), token_name) - e.add_field( - name="Contract Address", - value=link, - ) - await ctx.send(embed=e) - - -async def setup(bot): - await bot.add_cog(DeFi(bot)) diff --git a/rocketwatch/plugins/karma/karma.py b/rocketwatch/plugins/karma/karma.py deleted file mode 100644 index 536fba9a..00000000 --- a/rocketwatch/plugins/karma/karma.py +++ /dev/null @@ -1,165 +0,0 @@ -import logging - -from discord import app_commands, Interaction, User, AppCommandType -from discord.app_commands.checks import cooldown -from discord.ext.commands import Cog, GroupCog -from pymongo import AsyncMongoClient, IndexModel - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed -from utils.visibility import is_hidden_weak - -log = logging.getLogger("karma") -log.setLevel(cfg["log_level"]) - - -class KarmaUtils(GroupCog, name="karma"): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - self.menus = [] - for c in [5,10]: - self.menus.append(app_commands.ContextMenu( - name=f"Give {c} Point{'s' if c != 1 else ''}", - callback=self.add_user_points, - type=AppCommandType.user, - guild_ids=[cfg["rocketpool.support.server_id"]], - extras={"amount": c} - )) - self.menus.append(app_commands.ContextMenu( - name=f"Remove {c} Point{'s' if c != 1 else ''}", - callback=self.remove_user_points, - type=AppCommandType.user, - guild_ids=[cfg["rocketpool.support.server_id"]], - extras={"amount": c} - )) - - for menu in 
self.menus: - self.bot.tree.add_command(menu) - - @Cog.listener() - async def on_ready(self): - # ensure user and issuer indexes exist - await self.db.karma.create_indexes([ - IndexModel("user"), - IndexModel("issuer") - ]) - - async def cog_unload(self) -> None: - for menu in self.menus: - self.bot.tree.remove_command(menu) - - @app_commands.guilds(cfg["rocketpool.support.server_id"]) - @cooldown(1, 10) - async def add_user_points(self, interaction: Interaction, user: User): - await interaction.response.defer(ephemeral=True) - # dissallow users from giving themselves points - if user.id == interaction.user.id: - await interaction.edit_original_response( - content="You can't give yourself points!", - ) - return - amount = interaction.command.extras["amount"] - await self.db.karma.update_one( - {"user": user.id, "issuer": interaction.user.id}, - {"$inc": {"points": amount}}, - upsert=True - ) - # create a self-deleting announcement message - await interaction.channel.send( - f"Gave {amount} `{interaction.user.global_name or interaction.user.name}`" - f" point{'s' if amount != 1 else ''} to `{user.global_name or user.name}`!", - delete_after=30 - ) - await interaction.delete_original_response() - - @app_commands.guilds(cfg["rocketpool.support.server_id"]) - @cooldown(1, 10) - async def remove_user_points(self, interaction: Interaction, user: User): - await interaction.response.defer(ephemeral=True) - # dissallow users from giving themselves points - if user.id == interaction.user.id: - await interaction.edit_original_response( - content="You can't remove points from yourself!", - ) - return - amount = interaction.command.extras["amount"] - await self.db.karma.update_one( - {"user": user.id, "issuer": interaction.user.id}, - {"$inc": {"points": -amount}}, - upsert=True - ) - # create a self-deleting announcement message - await interaction.channel.send( - f"Removed {amount} `{interaction.user.global_name or interaction.user.name}`" - f" point{'s' if amount != 1 else 
''} from `{user.global_name or user.name}`!", - delete_after=30 - ) - await interaction.delete_original_response() - - @app_commands.command(name="top") - async def top(self, interaction: Interaction): - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - # find the top karma users - top = await (await self.db.karma.aggregate([ - {"$group": {"_id": "$user", "points": {"$sum": "$points"}}}, - {"$sort": {"points": -1}}, - {"$limit": 10}, - {"$lookup": { - "from" : "karma", - "let" : {"user_id": "$_id"}, - "pipeline": [ - {"$match": {"$expr": {"$eq": ["$user", "$$user_id"]}}}, - {"$group": {"_id": "$issuer", "total": {"$sum": "$points"}}}, - {"$sort": {"total": -1}}, - {"$limit": 1} - ], - "as" : "top_issuer" - }}, - {"$project": {"_id": 1, "points": 1, "issuer": {"$arrayElemAt": ["$top_issuer._id", 0]}}} - ])).to_list(length=10) - e = Embed(title="Top 10 Karma Users") - des = "" - for i, u in enumerate(top): - # try to resolve users - user = self.bot.get_user(u["_id"]) - if not user: - user = await self.bot.fetch_user(u["_id"]) - issuer = self.bot.get_user(u["issuer"]) - if not issuer: - issuer = await self.bot.fetch_user(u["issuer"]) - des += f"`{f'#{str(i + 1)}':>3}` {user.mention} – `{u['points']}` points (most given by {issuer.mention})\n" - - e.description = des - await interaction.edit_original_response(embed=e) - - # user lookup command, defaults to caller. 
top 10 points split by issuer - @app_commands.command(name="user") - async def user(self, interaction: Interaction, user: User = None): - await interaction.response.defer(ephemeral=is_hidden_weak(interaction) or not user or user.id == interaction.user.id) - if not user: - user = interaction.user - # find the top karma users - top = await self.db.karma.find({"user": user.id}).sort("points", -1).to_list(length=10) - if not top: - await interaction.edit_original_response(content=f"`{user.global_name or user.name}` has no points!") - return - # fetch total score for user - total = await (await self.db.karma.aggregate([ - {"$match": {"user": user.id}}, - {"$group": {"_id": "$user", "points": {"$sum": "$points"}}} - ])).to_list(length=1) - e = Embed(title=f"Points held by {user.global_name or user.name}") - des = "" - if total: - des += f"**Total points: `{total[0]['points']}`**\n" - for u in top: - issuer = self.bot.get_user(u["issuer"]) or await self.bot.fetch_user(u["issuer"]) - des += f"– `{u['points']}` points received from {issuer.mention}\n" - e.description = des - await interaction.edit_original_response(embed=e) - - -async def setup(self): - await self.add_cog(KarmaUtils(self)) From 8a97cf5295ffb643713bdf153d9cfb48153e201d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 2 Mar 2026 07:42:09 +0000 Subject: [PATCH 124/279] differentiate between pending and dissolved --- rocketwatch/plugins/minipool_states/minipool_states.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py index ad3b24b4..34f55be1 100644 --- a/rocketwatch/plugins/minipool_states/minipool_states.py +++ b/rocketwatch/plugins/minipool_states/minipool_states.py @@ -29,6 +29,7 @@ async def minipool_states(self, ctx: Context): "beacon.status": {"$exists": True} }).to_list(None) data = { + "dissolved": 0, "pending": {}, "active" : {}, 
"exiting": {}, @@ -41,7 +42,10 @@ async def minipool_states(self, ctx: Context): for minipool in res: match minipool["beacon"]["status"]: case "pending_initialized": - data["pending"]["initialized"] = data["pending"].get("initialized", 0) + 1 + if minipool["status"] == "dissolved": + data["dissolved"] += 1 + else: + data["pending"]["initialized"] = data["pending"].get("initialized", 0) + 1 case "pending_queued": data["pending"]["queued"] = data["pending"].get("queued", 0) + 1 case "active_ongoing": @@ -68,7 +72,7 @@ async def minipool_states(self, ctx: Context): # collapse tree where possible for status in list(data.keys()): - if len(data[status]) == 1: + if isinstance(data[status], dict) and len(data[status]) == 1: sub_status = list(data[status].keys())[0] data[status] = data[status][sub_status] From e5d224b80aeffc3816e39812550339e147a9dcc7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 2 Mar 2026 07:52:08 +0000 Subject: [PATCH 125/279] fix user distribute instructions --- rocketwatch/plugins/user_distribute/user_distribute.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 8af4bb38..83375bfc 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -63,7 +63,7 @@ async def instructions(self, interaction: Interaction, _) -> None: actions.append(f"begin the user distribution process for **{len(self.eligible)}** minipools") embed.description += "\nThis will " + " and ".join(actions) + "." 
- embed.description += f"\nEstimated cost: **{cost_eth:,.5f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" + embed.description += f"\nEstimated cost: **{cost_eth:,.6f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" await interaction.response.send_message( embed=embed, @@ -136,9 +136,9 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: storage = w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time: int = int.from_bytes(storage, "big") elapsed_time = current_time - user_distribute_time - + if elapsed_time >= ud_window_end: - eligible.append((mp, user_distribute_time)) + eligible.append(mp) elif elapsed_time < ud_window_start: mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) From 83514aa83093dcd30072035be59cd0614ab50bec Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 2 Mar 2026 21:11:32 +0000 Subject: [PATCH 126/279] tiny refactor --- .../user_distribute/user_distribute.py | 9 ++++---- rocketwatch/utils/shared_w3.py | 22 ++++++++++--------- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 83375bfc..b0c832ef 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -56,11 +56,10 @@ async def instructions(self, interaction: Interaction, _) -> None: ) actions = [] - if self.distributable: - actions.append(f"distribute the balance of **{len(self.eligible)}** minipools") - - if self.eligible: - actions.append(f"begin the user distribution process for **{len(self.eligible)}** minipools") + if (count := len(self.distributable)) > 0: + actions.append(f"distribute the balance of **{count}** minipool{'s' if count != 1 else ''}") + if (count := len(self.eligible)) > 0: + actions.append(f"begin the user distribution process for **{count}** 
minipool{'s' if count != 1 else ''}") embed.description += "\nThis will " + " and ".join(actions) + "." embed.description += f"\nEstimated cost: **{cost_eth:,.6f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 510ce9d0..aa5c837a 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -31,25 +31,27 @@ def __init__(self, base_url: str) -> None: self.async_session = aiohttp.ClientSession(raise_for_status=True, timeout=timeout) @retry_async(tries=3, exceptions=HTTPError, delay=0.5) - async def _make_get_request_async(self, url: str): + async def _make_get_request_async(self, path: str): + url = self.base_url + path async with self.async_session.get(url) as response: return await response.json() async def get_block_header_async(self, block_id: int | str): - url = f"{self.base_url}/eth/v1/beacon/headers/{block_id}" - return await self._make_get_request_async(url) + path = f"/eth/v1/beacon/headers/{block_id}" + return await self._make_get_request_async(path) async def get_block_async(self, block_id: int | str): - url = f"{self.base_url}/eth/v2/beacon/blocks/{block_id}" - return await self._make_get_request_async(url) + path = f"/eth/v2/beacon/blocks/{block_id}" + return await self._make_get_request_async(path) async def get_validators_async(self, state_id, ids: list[int]): id_str = ','.join([str(i) for i in ids]) - url = f"{self.base_url}/eth/v1/beacon/states/{state_id}/validators?id={id_str}" - return await self._make_get_request_async(url) + path = f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" + return await self._make_get_request_async(path) - async def get_sync_committee_async(self, epoch): - url = f"{self.base_url}/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" - return await self._make_get_request_async(url) + async def get_sync_committee_async(self, epoch: int): + path = 
f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" + return await self._make_get_request_async(path) + bacon = SuperBacon(cfg["consensus_layer.endpoint"]) From 7f2adb72e4dc43258a88fd5ac646c8e07af50624 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 2 Mar 2026 21:11:42 +0000 Subject: [PATCH 127/279] fix /lottery --- rocketwatch/plugins/lottery/lottery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 5a44a6d0..b754e505 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -71,7 +71,7 @@ async def get_validators_for_sync_committee_period(self, period): '$lookup': { 'from' : 'minipools', 'localField' : 'validator', - 'foreignField': 'validator', + 'foreignField': 'validator_index', 'as' : 'entry' } }, { @@ -89,7 +89,7 @@ async def get_validators_for_sync_committee_period(self, period): }, { '$project': { '_id' : 0, - 'validator' : 1, + 'validator' : "$validator_index", 'pubkey' : 1, 'node_operator': 1 } From 4ce8ffd78ae025b2333f3fa70c3a46afd7f99679 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 20:44:32 +0000 Subject: [PATCH 128/279] refactor DB upkeep task --- .../plugins/db_upkeep_task/db_upkeep_task.py | 551 +++++++----------- rocketwatch/utils/rocketpool.py | 1 - rocketwatch/utils/shared_w3.py | 30 +- 3 files changed, 234 insertions(+), 348 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index ac083dc1..0187d560 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -1,6 +1,7 @@ import logging import time import asyncio +from collections import defaultdict import pymongo from multicall import Call @@ -23,45 +24,63 @@ log = logging.getLogger("db_upkeep_task") 
log.setLevel(cfg["log_level"]) +FAR_FUTURE_EPOCH = 2 ** 32 -def safe_to_float(_, num: int): + +def safe_to_float(_, num): try: return solidity.to_float(num) except Exception: return None -def safe_to_hex(_, b: bytes): + +def safe_to_hex(_, b): return f"0x{b.hex()}" if b else None -def safe_state_to_str(_, state: int): + +def safe_state_to_str(_, state): try: return solidity.mp_state_to_str(state) except Exception: return None -def safe_inv(_, num: int): + +def safe_inv(_, num): try: return 1 / solidity.to_float(num) except Exception: return None + def is_true(_, b): return b is True +def _parse_epoch(value): + epoch = int(value) + return epoch if epoch < FAR_FUTURE_EPOCH else None + + +def _group_multicall_results(res): + data = defaultdict(dict) + for (key, field), value in res.items(): + data[key][field] = value + return data + + class DBUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) - self.batch_size = 50 + self.batch_size = 250 self.bot.loop.create_task(self.loop()) - + async def loop(self): await self.bot.wait_until_ready() await self.check_indexes() while not self.bot.is_closed(): - p_id = time.time() + p_id = time.time() self.monitor.ping(state="run", series=p_id) try: log.debug("starting db upkeep task") @@ -69,7 +88,7 @@ async def loop(self): await self.add_untracked_node_operators() await self.add_static_data_to_node_operators() await self.update_dynamic_node_operator_metadata() - # TODO: update megapool stats if deployed + # TODO: update megapool stats if deployed # minipool tasks await self.add_untracked_minipools() await self.add_static_data_to_minipools() @@ -86,293 +105,55 @@ async def loop(self): finally: await asyncio.sleep(600) - @timerun_async - async def add_untracked_minipools(self): - # rocketMinipoolManager.getMinipoolAt(i) returns the address of the minipool at index i - mm = 
rp.get_contract_by_name("rocketMinipoolManager") - latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 - # get latest _id in minipools collection - latest_db = 0 - if res := await self.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): - latest_db = res["_id"] - # return early if we're up to date - if latest_db >= latest_rp: - log.debug("No new minipools") - return - - log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") - # batch into self.batch_size minipools at a time, between latest_id and minipool_count - for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data = await rp.multicall2([ - Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) - for i in index_batch - ]) - log.debug(f"Inserting {len(data)} new minipools into db") - await self.db.minipools.insert_many([ - {"_id": i, "address": a} - for i, a in data.items() - ]) - - log.debug("New minipools inserted") + async def check_indexes(self): + log.debug("checking indexes") + await self.db.minipools.create_index("address") + await self.db.minipools.create_index("pubkey") + await self.db.minipools.create_index("validator_index") + await self.db.node_operators.create_index("address") + log.debug("indexes checked") - @timerun_async - async def add_static_data_to_minipools(self): - m = rp.assemble_contract("rocketMinipool") - mm = rp.get_contract_by_name("rocketMinipoolManager") - lambs = [ - lambda a: (a, rp.seth_sig(m.abi, "getNodeAddress"), [((a, "node_operator"), None)]), - lambda a: (mm.address, [rp.seth_sig(mm.abi, "getMinipoolPubkey"), a], [((a, "pubkey"), safe_to_hex)]), - ] - # get all minipool addresses from db that do not have a node operator assigned - minipool_addresses = await self.db.minipools.distinct("address", {"node_operator": {"$exists": False}}) - # get node operator addresses from rp - # return early if no minipools need to be updated - if not minipool_addresses: - log.debug("No 
minipools need to be updated with static data") + async def _batch_multicall_update(self, collection, query, lambs, label=None): + addresses = await collection.distinct("address", query) + if not addresses: return - for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): - data = {} + total = len(addresses) + batch_size = self.batch_size // len(lambs) + for i, batch in enumerate(as_chunks(addresses, batch_size)): + if label: + start = i * batch_size + 1 + end = min((i + 1) * batch_size, total) + log.debug(f"Processing {label} [{start}, {end}]/{total}") res = await rp.multicall2( - [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], + [Call(*lamb(a)) for a in batch for lamb in lambs], require_success=False ) - # update data dict with results - for (address, variable_name), value in res.items(): - if address not in data: - data[address] = {} - data[address][variable_name] = value - log.debug(f"Updating {len(data)} minipools with static data") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.minipools.bulk_write(bulk, ordered=False) - log.debug("Minipools updated with static data") - - @timerun_async - async def update_dynamic_minipool_metadata(self): - m = rp.assemble_contract("rocketMinipool") - mc = rp.get_contract_by_name("multicall3") - lambs = [ - lambda a: (a, rp.seth_sig(m.abi, "getStatus"), [((a, "status"), safe_state_to_str)]), - lambda a: (a, rp.seth_sig(m.abi, "getStatusTime"), [((a, "status_time"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getVacant"), [((a, "vacant"), is_true)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeDepositBalance"), [((a, "node_deposit_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeRefundBalance"), [((a, "node_refund_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getPreMigrationBalance"), [((a, "pre_migration_balance"), safe_to_float)]), - lambda a: (a, 
rp.seth_sig(m.abi, "getNodeFee"), [((a, "node_fee"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((a, "effective_delegate"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((a, "use_latest_delegate"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getUserDistributed"), [((a, "user_distributed"), None)]), - lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) - ] - # get all minipool addresses from db - minipool_addresses = await self.db.minipools.distinct("address", {"finalized": {"$ne": True}}) - for minipool_batch in as_chunks(minipool_addresses, self.batch_size // len(lambs)): - res = await rp.multicall2( - [Call(*lamb(a)) for a in minipool_batch for lamb in lambs], - require_success=False + data = _group_multicall_results(res) + await collection.bulk_write( + [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], + ordered=False ) - # update data dict with results - data = {} - for (address, variable_name), value in res.items(): - if address not in data: - data[address] = {} - data[address][variable_name] = value - # update minipools in db - log.debug(f"Updating {len(res)} minipool attributes in db") - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.minipools.bulk_write(bulk, ordered=False) - - log.debug("Minipools updated with metadata") - - @timerun - async def add_static_deposit_data_to_minipools(self): - # get all minipool addresses and their status time from db that : - # - do not have a deposit_amount - # - are in the initialized state - # sort by status time - minipools = await self.db.minipools.find( - {"deposit_amount": {"$exists": False}, "status": "initialised"}, - {"address": 1, "_id": 0, "status_time": 1} - ).sort("status_time", pymongo.ASCENDING).to_list() - # return early if no minipools need to be updated - if not minipools: - log.debug("No minipools need to be 
updated with static deposit data") - return - nd = rp.get_contract_by_name("rocketNodeDeposit") - mm = rp.get_contract_by_name("rocketMinipoolManager") - - for minipool_batch in as_chunks(minipools, self.batch_size): - # turn status time of first and last minipool into blocks - block_start = ts_to_block(minipool_batch[0]["status_time"]) - 1 - block_end = ts_to_block(minipool_batch[-1]["status_time"]) + 1 - a = [m["address"] for m in minipool_batch] - - f_deposits = get_logs(nd.events.DepositReceived, block_start, block_end) - f_creations = get_logs(mm.events.MinipoolCreated, block_start, block_end) - events = f_deposits + f_creations - - events = sorted(events, key=lambda x: (x['blockNumber'], x['transactionIndex'], x['logIndex'] *1e-8), reverse=True) - # map to pairs of 2 - prepared_events = [] - last_addition_is_creation = False - - while events: - # get event - e = events.pop(0) - if e["event"] == "MinipoolCreated": - if not last_addition_is_creation: - prepared_events.append([e]) - else: - prepared_events[-1] = [e] - log.info(f"replacing creation even with newly found one ({prepared_events[-1]})") - elif e["event"] == "DepositReceived" and last_addition_is_creation: - prepared_events[-1].insert(0, e) - last_addition_is_creation = e["event"] == "MinipoolCreated" - - data = {} - for e in prepared_events: - assert "amount" in e[0]["args"] - assert "minipool" in e[1]["args"] - # assert that the txn hashes match - assert e[0]["transactionHash"] == e[1]["transactionHash"] - mp = str(e[1]["args"]["minipool"]).lower() - if mp not in a: - continue - amount = solidity.to_float(e[0]["args"]["amount"]) - data[mp] = {"deposit_amount": amount} - - if not data: - log.debug("No minipools need to be updated with static deposit data") - continue - - log.debug(f"Updating {len(data)} minipools with static deposit data") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.minipools.bulk_write(bulk, 
ordered=False) - - log.debug("Minipools updated with static deposit data") - - @timerun - async def add_static_beacon_data_to_minipools(self): - # get all public keys from db where no validator_index is set - public_keys = await self.db.minipools.distinct("pubkey", {"validator_index": {"$exists": False}}) - # return early if no minipools need to be updated - if not public_keys: - log.debug("No minipools need to be updated with static beacon data") - return - - # we need to do smaller bulks as the pubkey is quite long and we dont want to make the query url too long - for pubkey_batch in as_chunks(public_keys, self.batch_size): - data = {} - # get beacon data for public keys - beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] - # update data dict with results - for d in beacon_data: - data[d["validator"]["pubkey"]] = int(d["index"]) - - log.debug(f"Updating {len(data)} minipools with static beacon data") - # update minipools in db - bulk = [ - UpdateMany( - {"pubkey": a}, - {"$set": {"validator_index": d}} - ) for a, d in data.items() - ] - await self.db.minipools.bulk_write(bulk, ordered=False) - - log.debug("Minipools updated with static beacon data") - @timerun - async def update_dynamic_minipool_beacon_metadata(self): - # basically same ordeal as above, but we use the validator index to get the data to improve performance - # get all validator indexes from db - validator_indexes = await self.db.minipools.distinct("validator_index", {"beacon.status": {"$ne": "withdrawal_done"}}) - # remove None values - validator_indexes = [i for i in validator_indexes if i is not None] - for index_batch in as_chunks(validator_indexes, self.batch_size): - data = {} - # get beacon data for public keys - beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] - # update data dict with results - for d in beacon_data: - data[int(d["index"])] = { - "beacon": { - "status" : d["status"], - "balance" : 
solidity.to_float(d["balance"], 9), - "effective_balance" : solidity.to_float(d["validator"]["effective_balance"], 9), - "slashed" : d["validator"]["slashed"], - "activation_eligibility_epoch": int(d["validator"]["activation_eligibility_epoch"]) if int( - d["validator"]["activation_eligibility_epoch"]) < 2 ** 32 else None, - "activation_epoch" : int(d["validator"]["activation_epoch"]) if int( - d["validator"]["activation_epoch"]) < 2 ** 32 else None, - "exit_epoch" : int(d["validator"]["exit_epoch"]) if int( - d["validator"]["exit_epoch"]) < 2 ** 32 else None, - "withdrawable_epoch" : int(d["validator"]["withdrawable_epoch"]) if int( - d["validator"]["withdrawable_epoch"]) < 2 ** 32 else None, - }} - - log.debug(f"Updating {len(data)} minipools with dynamic beacon data") - # update minipools in db - bulk = [ - UpdateMany( - {"validator_index": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.minipools.bulk_write(bulk, ordered=False) - - log.debug("Minipools updated with dynamic beacon data") - - async def check_indexes(self): - log.debug("checking indexes") - await self.db.minipools.create_index("address") - await self.db.minipools.create_index("pubkey") - await self.db.minipools.create_index("validator_index") - await self.db.node_operators.create_index("address") - log.debug("indexes checked") + # -- Node operator tasks -- @timerun_async async def add_untracked_node_operators(self): - # rocketNodeManager.getNodeCount(i) returns the address of the node at index i nm = rp.get_contract_by_name("rocketNodeManager") latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 - # get latest _id in node_operators collection latest_db = 0 if res := await self.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] - data = {} - # return early if we're up to date - if latest_db == latest_rp: + if latest_db >= latest_rp: log.debug("No new nodes") return - # batch into self.batch_size nodes at a time, between latest_id and 
latest_rp + data = {} for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): data |= await rp.multicall2([ Call(nm.address, [rp.seth_sig(nm.abi, "getNodeAt"), i], [(i, None)]) for i in index_batch ]) - log.debug(f"Inserting {len(data)} new nodes into db") - await self.db.node_operators.insert_many([ - {"_id": i, "address": a} - for i, a in data.items() - ]) - log.debug("New nodes inserted") + await self.db.node_operators.insert_many([{"_id": i, "address": a} for i, a in data.items()]) @timerun_async async def add_static_data_to_node_operators(self): @@ -382,42 +163,11 @@ async def add_static_data_to_node_operators(self): lambda a: (df.address, [rp.seth_sig(df.abi, "getProxyAddress"), a], [((a, "fee_distributor.address"), None)]), lambda a: (mf.address, [rp.seth_sig(mf.abi, "getExpectedAddress"), a], [((a, "megapool.address"), None)]), ] - # get all minipool addresses from db that do not have a node operator assigned - node_addresses = await self.db.node_operators.distinct( - "address", - {"$or": [ - {"fee_distributor.address": {"$exists": False}}, - {"megapool.address": {"$exists": False} - }]} + await self._batch_multicall_update( + self.db.node_operators, + {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, + lambs ) - # get node operator addresses from rp - # return early if no minipools need to be updated - if not node_addresses: - log.debug("No node operators need to be updated with static data") - return - - for node_batch in as_chunks(node_addresses, self.batch_size // len(lambs)): - data = {} - res = await rp.multicall2( - [Call(*lamb(a)) for a in node_batch for lamb in lambs], - require_success=False - ) - # update data dict with results - for (address, variable_name), value in res.items(): - if address not in data: - data[address] = {} - data[address][variable_name] = value - log.debug(f"Updating {len(data)} node operators with static data") - # update minipools in db - bulk = 
[ - UpdateOne( - {"address": a}, - {"$set": d}, - ) for a, d in data.items() - ] - await self.db.node_operators.bulk_write(bulk, ordered=False) - - log.debug("Node operators updated with static data") @timerun_async async def update_dynamic_node_operator_metadata(self): @@ -441,9 +191,9 @@ async def update_dynamic_node_operator_metadata(self): lambda n: (mm.address, [rp.seth_sig(mm.abi, "getNodeStakingMinipoolCount"), n["address"]], [((n["address"], "staking_minipool_count"), None)]), lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeDepositCredit"), n["address"]], - [((n["address"], "node_credit"), safe_to_float)]), + [((n["address"], "node_credit"), safe_to_float)]), lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeEthBalance"), n["address"]], - [((n["address"], "node_eth_balance"), safe_to_float)]), + [((n["address"], "node_eth_balance"), safe_to_float)]), lambda n: (nm.address, [rp.seth_sig(nm.abi, "getFeeDistributorInitialised"), n["address"]], [((n["address"], "fee_distributor.initialized"), None)]), lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor"]["address"]], @@ -467,30 +217,171 @@ async def update_dynamic_node_operator_metadata(self): lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLastUnstakeTime"), n["address"]], [((n["address"], "rpl.last_unstake_time"), None)]) ] - # get all node operators from db, but we only care about the address and the fee_distributor.address - nodes = await self.db.node_operators.find({}, {"address": 1, "fee_distributor.address": 1, "megapool.address": 1}).to_list() - for node_batch in as_chunks(nodes, self.batch_size // len(lambs)): - data = {} + nodes = await self.db.node_operators.find( + {}, {"address": 1, "fee_distributor.address": 1, "megapool.address": 1} + ).to_list() + total = len(nodes) + batch_size = self.batch_size // len(lambs) + for i, node_batch in enumerate(as_chunks(nodes, batch_size)): + start = i * batch_size + 1 + end = min((i + 1) * batch_size, total) + 
log.debug(f"Processing node operators [{start}, {end}]/{total}") res = await rp.multicall2( - [Call(*lamb(n)) for n in node_batch for lamb in lambs], + [Call(*lamb(n)) for n in node_batch for lamb in lambs], require_success=False ) - # update data dict with results - for (address, variable_name), value in res.items(): - if address not in data: - data[address] = {} - data[address][variable_name] = value - log.debug(f"Updating {len(res)} node operator attributes in db") - # update minipools in db - bulk = [ - UpdateOne( - {"address": a}, - {"$set": d} - ) for a, d in data.items() - ] - await self.db.node_operators.bulk_write(bulk, ordered=False) - - log.debug("Node operators updated with metadata") + data = _group_multicall_results(res) + await self.db.node_operators.bulk_write( + [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], + ordered=False + ) + + # -- Minipool tasks -- + + @timerun_async + async def add_untracked_minipools(self): + mm = rp.get_contract_by_name("rocketMinipoolManager") + latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 + latest_db = 0 + if res := await self.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): + latest_db = res["_id"] + if latest_db >= latest_rp: + log.debug("No new minipools") + return + log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") + for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): + data = await rp.multicall2([ + Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) + for i in index_batch + ]) + await self.db.minipools.insert_many([{"_id": i, "address": a} for i, a in data.items()]) + + @timerun_async + async def add_static_data_to_minipools(self): + m = rp.assemble_contract("rocketMinipool") + mm = rp.get_contract_by_name("rocketMinipoolManager") + lambs = [ + lambda a: (a, rp.seth_sig(m.abi, "getNodeAddress"), [((a, "node_operator"), None)]), + lambda a: (mm.address, [rp.seth_sig(mm.abi, 
"getMinipoolPubkey"), a], [((a, "pubkey"), safe_to_hex)]), + ] + await self._batch_multicall_update( + self.db.minipools, + {"node_operator": {"$exists": False}}, + lambs + ) + + @timerun + async def add_static_deposit_data_to_minipools(self): + minipools = await self.db.minipools.find( + {"deposit_amount": {"$exists": False}, "status": "initialised"}, + {"address": 1, "_id": 0, "status_time": 1} + ).sort("status_time", pymongo.ASCENDING).to_list() + if not minipools: + return + nd = rp.get_contract_by_name("rocketNodeDeposit") + mm = rp.get_contract_by_name("rocketMinipoolManager") + + for minipool_batch in as_chunks(minipools, self.batch_size): + block_start = ts_to_block(minipool_batch[0]["status_time"]) - 1 + block_end = ts_to_block(minipool_batch[-1]["status_time"]) + 1 + log.debug(f"Processing deposit data for blocks {block_start}..{block_end}") + addresses = {m["address"] for m in minipool_batch} + + events = get_logs(nd.events.DepositReceived, block_start, block_end) \ + + get_logs(mm.events.MinipoolCreated, block_start, block_end) + events.sort(key=lambda e: (e['blockNumber'], e['transactionIndex'], e['logIndex']), reverse=True) + + # pair DepositReceived + MinipoolCreated events from same transaction + pairs = [] + last_is_creation = False + for e in events: + if e["event"] == "MinipoolCreated": + if not last_is_creation: + pairs.append([e]) + else: + pairs[-1] = [e] + log.info(f"replacing creation event with newly found one ({pairs[-1]})") + elif e["event"] == "DepositReceived" and last_is_creation: + pairs[-1].insert(0, e) + last_is_creation = e["event"] == "MinipoolCreated" + + data = {} + for pair in pairs: + assert "amount" in pair[0]["args"] + assert "minipool" in pair[1]["args"] + assert pair[0]["transactionHash"] == pair[1]["transactionHash"] + mp = str(pair[1]["args"]["minipool"]).lower() + if mp in addresses: + data[mp] = {"deposit_amount": solidity.to_float(pair[0]["args"]["amount"])} + + if not data: + continue + await 
self.db.minipools.bulk_write( + [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], + ordered=False + ) + + @timerun + async def add_static_beacon_data_to_minipools(self): + public_keys = await self.db.minipools.distinct("pubkey", {"validator_index": {"$exists": False}}) + if not public_keys: + return + for pubkey_batch in as_chunks(public_keys, self.batch_size): + beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] + data = {d["validator"]["pubkey"]: int(d["index"]) for d in beacon_data} + await self.db.minipools.bulk_write( + [UpdateMany({"pubkey": pk}, {"$set": {"validator_index": idx}}) for pk, idx in data.items()], + ordered=False + ) + + @timerun_async + async def update_dynamic_minipool_metadata(self): + m = rp.assemble_contract("rocketMinipool") + mc = rp.get_contract_by_name("multicall3") + lambs = [ + lambda a: (a, rp.seth_sig(m.abi, "getStatus"), [((a, "status"), safe_state_to_str)]), + lambda a: (a, rp.seth_sig(m.abi, "getStatusTime"), [((a, "status_time"), None)]), + lambda a: (a, rp.seth_sig(m.abi, "getVacant"), [((a, "vacant"), is_true)]), + lambda a: (a, rp.seth_sig(m.abi, "getNodeDepositBalance"), [((a, "node_deposit_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getNodeRefundBalance"), [((a, "node_refund_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getPreMigrationBalance"), [((a, "pre_migration_balance"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getNodeFee"), [((a, "node_fee"), safe_to_float)]), + lambda a: (a, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((a, "effective_delegate"), None)]), + lambda a: (a, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((a, "use_latest_delegate"), None)]), + lambda a: (a, rp.seth_sig(m.abi, "getUserDistributed"), [((a, "user_distributed"), None)]), + lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) + ] + await 
self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, lambs, label="minipools") + + @timerun + async def update_dynamic_minipool_beacon_metadata(self): + validator_indexes = await self.db.minipools.distinct( + "validator_index", {"beacon.status": {"$ne": "withdrawal_done"}} + ) + validator_indexes = [i for i in validator_indexes if i is not None] + total = len(validator_indexes) + for i, index_batch in enumerate(as_chunks(validator_indexes, self.batch_size)): + start = i * self.batch_size + 1 + end = min((i + 1) * self.batch_size, total) + log.info(f"Updating beacon chain data for minipools [{start}, {end}]/{total}") + beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] + data = {} + for d in beacon_data: + v = d["validator"] + data[int(d["index"])] = {"beacon": { + "status": d["status"], + "balance": solidity.to_float(d["balance"], 9), + "effective_balance": solidity.to_float(v["effective_balance"], 9), + "slashed": v["slashed"], + "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_epoch": _parse_epoch(v["activation_epoch"]), + "exit_epoch": _parse_epoch(v["exit_epoch"]), + "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), + }} + await self.db.minipools.bulk_write( + [UpdateMany({"validator_index": idx}, {"$set": d}) for idx, d in data.items()], + ordered=False + ) async def setup(self): diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 1a1763b4..21d34acf 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -90,7 +90,6 @@ def seth_sig(abi, function_name): return f"{function_name}({inputs})({outputs})" raise Exception(f"Function {function_name} not found in ABI") - @timerun_async async def multicall2(self, calls: list[Call], require_success=True): return await Multicall(calls, _w3=w3, gas_limit=50_000_000, require_success=require_success) diff --git a/rocketwatch/utils/shared_w3.py 
b/rocketwatch/utils/shared_w3.py index aa5c837a..92927cc6 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -3,7 +3,6 @@ import aiohttp from web3.beacon import Beacon as Bacon from aiohttp.web import HTTPError -from eth_typing import BlockIdentifier from web3 import Web3, HTTPProvider from web3.middleware import geth_poa_middleware @@ -27,31 +26,28 @@ class SuperBacon(Bacon): def __init__(self, base_url: str) -> None: super().__init__(base_url) - timeout = aiohttp.ClientTimeout(sock_connect=3.05, total=20) - self.async_session = aiohttp.ClientSession(raise_for_status=True, timeout=timeout) - + self.async_session = aiohttp.ClientSession( + raise_for_status=True, + timeout=aiohttp.ClientTimeout(sock_connect=3.05, total=20) + ) + @retry_async(tries=3, exceptions=HTTPError, delay=0.5) async def _make_get_request_async(self, path: str): - url = self.base_url + path - async with self.async_session.get(url) as response: + async with self.async_session.get(self.base_url + path) as response: return await response.json() - + async def get_block_header_async(self, block_id: int | str): - path = f"/eth/v1/beacon/headers/{block_id}" - return await self._make_get_request_async(path) + return await self._make_get_request_async(f"/eth/v1/beacon/headers/{block_id}") async def get_block_async(self, block_id: int | str): - path = f"/eth/v2/beacon/blocks/{block_id}" - return await self._make_get_request_async(path) + return await self._make_get_request_async(f"/eth/v2/beacon/blocks/{block_id}") async def get_validators_async(self, state_id, ids: list[int]): - id_str = ','.join([str(i) for i in ids]) - path = f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" - return await self._make_get_request_async(path) - + id_str = ','.join(map(str, ids)) + return await self._make_get_request_async(f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}") + async def get_sync_committee_async(self, epoch: int): - path = 
f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" - return await self._make_get_request_async(path) + return await self._make_get_request_async(f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}") bacon = SuperBacon(cfg["consensus_layer.endpoint"]) From e8b4442695111746599b12a1ea3dd30334d5fe70 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 21:57:29 +0000 Subject: [PATCH 129/279] remove web3-multicall --- rocketwatch/plugins/collateral/collateral.py | 12 +- .../plugins/constellation/constellation.py | 178 ------------------ rocketwatch/plugins/dao/dao.py | 8 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 125 ++++++------ .../plugins/deposit_pool/deposit_pool.py | 42 ++--- rocketwatch/requirements.txt | 1 - rocketwatch/utils/dao.py | 137 +++++++------- rocketwatch/utils/liquidity.py | 15 +- rocketwatch/utils/rocketpool.py | 64 +++++-- rocketwatch/utils/sea_creatures.py | 19 +- 10 files changed, 219 insertions(+), 382 deletions(-) delete mode 100644 rocketwatch/plugins/constellation/constellation.py diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 7f42b437..bef07b63 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -51,15 +51,15 @@ def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]] nodes = rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) for node_batch in as_chunks(nodes, 500): - eb16s += [r.results[0] for r in rp.multicall.aggregate( + eb16s += rp.multicall_sync([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 16 * 10**18) for node in node_batch - ).results] - eb8s += [r.results[0] for r in rp.multicall.aggregate( + ]) + eb8s += rp.multicall_sync([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 8 * 10**18) for node in node_batch - ).results] - rpl_stakes += [r.results[0] for r in 
rp.multicall.aggregate( + ]) + rpl_stakes += rp.multicall_sync([ node_staking.functions.getNodeStakedRPL(node) for node in node_batch - ).results] + ]) return { nodes[i]: { diff --git a/rocketwatch/plugins/constellation/constellation.py b/rocketwatch/plugins/constellation/constellation.py deleted file mode 100644 index 76fbf733..00000000 --- a/rocketwatch/plugins/constellation/constellation.py +++ /dev/null @@ -1,178 +0,0 @@ -import logging -import math - -from discord import Interaction -from discord.app_commands import command -from discord.ext.commands import Cog -from pymongo import AsyncMongoClient - -from rocketwatch import RocketWatch -from utils import solidity -from utils.cfg import cfg -from utils.shared_w3 import w3 -from utils.rocketpool import rp -from utils.visibility import is_hidden_weak -from utils.embeds import Embed, el_explorer_url -from utils.event_logs import get_logs - - -cog_id = "constellation" -log = logging.getLogger(cog_id) -log.setLevel(cfg["log_level"]) - - -class Constellation(Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - - async def _fetch_num_operators(self) -> int: - whitelist_contract = rp.get_contract_by_name("Constellation.Whitelist") - - if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): - last_checked_block = db_entry["block"] - num_operators = db_entry["operators"] - else: - last_checked_block = 20946650 # contract deployment - num_operators = 0 - - b_from = last_checked_block + 1 - b_to = w3.eth.get_block_number() - - num_operators += len(get_logs(whitelist_contract.events.OperatorAdded, b_from, b_to)) - num_operators -= len(get_logs(whitelist_contract.events.OperatorRemoved, b_from, b_to)) - for event_log in get_logs(whitelist_contract.events.OperatorsAdded, b_from, b_to): - num_operators += len(event_log.args.operators) - for event_log in get_logs(whitelist_contract.events.OperatorsRemoved, b_from, b_to): - 
num_operators -= len(event_log.args.operators) - - await self.db.last_checked_block.replace_one( - {"_id": cog_id}, - {"_id": cog_id, "block": b_to, "operators": num_operators}, - upsert=True - ) - - return num_operators - - @command() - async def constellation(self, interaction: Interaction): - """ - Summary of Gravita Constellation protocol stats. - """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - - supernode_contract = rp.get_contract_by_name("Constellation.SuperNodeAccount") - distributor_contract = rp.get_contract_by_name("Constellation.OperatorDistributor") - info_calls: dict[str, int] = { - res.function_name: res.results[0] for res in rp.multicall.aggregate([ - supernode_contract.functions.getNumMinipools(), - supernode_contract.functions.getEthStaked(), - supernode_contract.functions.getEthMatched(), - supernode_contract.functions.getRplStaked(), - supernode_contract.functions.bond(), - supernode_contract.functions.maxValidators(), - distributor_contract.functions.getTvlEth(), - distributor_contract.functions.getTvlRpl(), - distributor_contract.functions.minimumStakeRatio() - ]).results - } - - num_minipools: int = info_calls["getNumMinipools"] - eth_staked: int = solidity.to_int(info_calls["getEthStaked"]) - eth_matched: int = solidity.to_int(info_calls["getEthMatched"]) - rpl_staked: float = solidity.to_float(info_calls["getRplStaked"]) - eth_bond: int = solidity.to_int(info_calls["bond"]) - max_validators: int = info_calls["maxValidators"] - - # update operator count - num_operators: int = await self._fetch_num_operators() - - vault_address_eth: str = rp.get_address_by_name("Constellation.ETHVault") - vault_balance_eth = rp.call("WETH.balanceOf", vault_address_eth) - tvl_eth: float = solidity.to_float(info_calls["getTvlEth"] + vault_balance_eth) - - vault_address_rpl: str = rp.get_address_by_name("Constellation.RPLVault") - vault_balance_rpl = rp.call("rocketTokenRPL.balanceOf", vault_address_rpl) - tvl_rpl: float = 
solidity.to_float(info_calls["getTvlRpl"] + vault_balance_rpl) - - min_rpl_stake_ratio: float = solidity.to_float(info_calls["minimumStakeRatio"]) - rpl_ratio: float = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - rpl_stake_perc: float = (rpl_staked * rpl_ratio / eth_matched) if (eth_matched > 0) else 0.0 - - balance_eth: float = solidity.to_float(w3.eth.getBalance(distributor_contract.address)) - balance_rpl: float = solidity.to_float(rp.call("rocketTokenRPL.balanceOf", distributor_contract.address)) - - # number of new minipools that can be created with available liquidity - if min_rpl_stake_ratio > 0: - max_eth_matched: float = (rpl_staked + balance_rpl) * rpl_ratio / min_rpl_stake_ratio - max_minipools_rpl: float = (max_eth_matched - eth_matched) // (32 - eth_bond) - else: - max_minipools_rpl: float = math.inf - - max_minipools_eth: float = balance_eth // eth_bond - max_new_minipools = min(max_minipools_eth, max_minipools_rpl) - - # break-even time for new minipools - solo_apr: float = 0.033 - deployment_gas: int = 2_250_000 - gas_price_wei: int = w3.eth.gas_price - operator_commission: float = (0.1 + 0.04 * min(1.0, 10 * rpl_stake_perc)) / 2 - daily_income_wei: int = round((32 - eth_bond) * 1e18 * solo_apr * operator_commission / 365) - break_even_days: int = round(deployment_gas * gas_price_wei / daily_income_wei) - - embed = Embed(title="Gravita Constellation") - embed.add_field( - name="Node Address", - value=el_explorer_url(supernode_contract.address, name=" Supernode"), - inline=False - ) - embed.add_field(name="Minipools", value=num_minipools) - embed.add_field(name="Operators", value=num_operators) - embed.add_field(name="MP Limit", value=f"{max_validators} ({max_validators * num_operators:,})") - embed.add_field(name="ETH Stake", value=f"{eth_staked:,}") - embed.add_field(name="RPL Stake", value=f"{rpl_staked:,.2f}") - embed.add_field(name="RPL Bond", value=f"{rpl_stake_perc:.2%}") - - if max_minipools_eth > 0: - balance_status_eth = 
f"`{max_minipools_eth:,.0f}` pools" - else: - shortfall_eth: float = eth_bond - (balance_eth % eth_bond) - balance_status_eth = f"`-{shortfall_eth:,.2f}`" - - if max_minipools_rpl > 0: - count_fmt: str = "∞" if math.isinf(max_minipools_rpl) else f"{max_minipools_rpl:,.0f}" - balance_status_rpl = f"`{count_fmt}` pools" - else: - new_eth_matched = eth_matched + 32 - eth_bond - new_rpl_required = new_eth_matched * min_rpl_stake_ratio / rpl_ratio - shortfall_rpl: float = new_rpl_required - rpl_staked - balance_rpl - balance_status_rpl = f"`-{shortfall_rpl:,.2f}`" - - if max_new_minipools > 0: - balance_status = f"`{max_new_minipools:,.0f}` new minipool(s) can be created!" - else: - balance_status = "No new minipools can be created." - - embed.add_field( - name="Distributor Balances", - value=( - f"`{balance_eth:,.2f}` ETH ({balance_status_eth})\n" - f"`{balance_rpl:,.2f}` RPL ({balance_status_rpl})\n" - f"{balance_status}" - ), - inline=False - ) - embed.add_field(name="Gas Price", value=f"{(gas_price_wei / 1e9):,.2f} gwei") - embed.add_field(name="Break-Even", value=f"{break_even_days:,} days") - embed.add_field( - name="Protocol TVL", - value=f"{el_explorer_url(vault_address_eth, name=' xrETH')}: `{tvl_eth:,.2f}` ETH\n" - f"{el_explorer_url(vault_address_rpl, name=' xRPL')}: `{tvl_rpl:,.2f}` RPL", - inline=False - ) - - await interaction.followup.send(embed=embed) - - -async def setup(bot): - await bot.add_cog(Constellation(bot)) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index a46bec42..69faa3ed 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -214,11 +214,9 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> else: suggestions = list(range(1, num_proposals + 1))[:-26:-1] - titles: list[str] = [ - res.results[0] for res in rp.multicall.aggregate([ - dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions - ]).results - ] + titles: 
list[str] = rp.multicall_sync([ + dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions + ]) return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] @command() diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 0187d560..a3e8aec5 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -4,7 +4,6 @@ from collections import defaultdict import pymongo -from multicall import Call from cronitor import Monitor from pymongo import AsyncMongoClient, UpdateOne, UpdateMany @@ -117,7 +116,7 @@ async def _batch_multicall_update(self, collection, query, lambs, label=None): addresses = await collection.distinct("address", query) if not addresses: return - + total = len(addresses) batch_size = self.batch_size // len(lambs) for i, batch in enumerate(as_chunks(addresses, batch_size)): @@ -125,8 +124,8 @@ async def _batch_multicall_update(self, collection, query, lambs, label=None): start = i * batch_size + 1 end = min((i + 1) * batch_size, total) log.debug(f"Processing {label} [{start}, {end}]/{total}") - res = await rp.multicall2( - [Call(*lamb(a)) for a in batch for lamb in lambs], + res = await rp.multicall( + [lamb(a) for a in batch for lamb in lambs], require_success=False ) data = _group_multicall_results(res) @@ -149,8 +148,8 @@ async def add_untracked_node_operators(self): return data = {} for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data |= await rp.multicall2([ - Call(nm.address, [rp.seth_sig(nm.abi, "getNodeAt"), i], [(i, None)]) + data |= await rp.multicall([ + rp.build_call(nm, "getNodeAt", i, key=i) for i in index_batch ]) await self.db.node_operators.insert_many([{"_id": i, "address": a} for i, a in data.items()]) @@ -160,8 +159,8 @@ async def add_static_data_to_node_operators(self): df = 
rp.get_contract_by_name("rocketNodeDistributorFactory") mf = rp.get_contract_by_name("rocketMegapoolFactory") lambs = [ - lambda a: (df.address, [rp.seth_sig(df.abi, "getProxyAddress"), a], [((a, "fee_distributor.address"), None)]), - lambda a: (mf.address, [rp.seth_sig(mf.abi, "getExpectedAddress"), a], [((a, "megapool.address"), None)]), + lambda a: rp.build_call(df, "getProxyAddress", a, key=(a, "fee_distributor.address")), + lambda a: rp.build_call(mf, "getExpectedAddress", a, key=(a, "megapool.address")), ] await self._batch_multicall_update( self.db.node_operators, @@ -178,44 +177,44 @@ async def update_dynamic_node_operator_metadata(self): ns = rp.get_contract_by_name("rocketNodeStaking") mc = rp.get_contract_by_name("multicall3") lambs = [ - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getNodeWithdrawalAddress"), n["address"]], - [((n["address"], "withdrawal_address"), None)]), - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getNodeTimezoneLocation"), n["address"]], - [((n["address"], "timezone_location"), None)]), - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getSmoothingPoolRegistrationState"), n["address"]], - [((n["address"], "smoothing_pool_registration"), None)]), - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getAverageNodeFee"), n["address"]], - [((n["address"], "average_node_fee"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeETHCollateralisationRatio"), n["address"]], - [((n["address"], "effective_node_share"), safe_inv)]), - lambda n: (mm.address, [rp.seth_sig(mm.abi, "getNodeStakingMinipoolCount"), n["address"]], - [((n["address"], "staking_minipool_count"), None)]), - lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeDepositCredit"), n["address"]], - [((n["address"], "node_credit"), safe_to_float)]), - lambda n: (nd.address, [rp.seth_sig(nd.abi, "getNodeEthBalance"), n["address"]], - [((n["address"], "node_eth_balance"), safe_to_float)]), - lambda n: (nm.address, [rp.seth_sig(nm.abi, "getFeeDistributorInitialised"), 
n["address"]], - [((n["address"], "fee_distributor.initialized"), None)]), - lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["fee_distributor"]["address"]], - [((n["address"], "fee_distributor.eth_balance"), safe_to_float)]), - lambda n: (nm.address, [rp.seth_sig(mf.abi, "getMegapoolDeployed"), n["address"]], - [((n["address"], "megapool.deployed"), is_true)]), - lambda n: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), n["megapool"]["address"]], - [((n["address"], "megapool.eth_balance"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeStakedRPL"), n["address"]], - [((n["address"], "rpl.total_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLegacyStakedRPL"), n["address"]], - [((n["address"], "rpl.legacy_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeMegapoolStakedRPL"), n["address"]], - [((n["address"], "rpl.megapool_stake"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLockedRPL"), n["address"]], - [((n["address"], "rpl.locked"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeUnstakingRPL"), n["address"]], - [((n["address"], "rpl.unstaking"), safe_to_float)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeRPLStakedTime"), n["address"]], - [((n["address"], "rpl.last_stake_time"), None)]), - lambda n: (ns.address, [rp.seth_sig(ns.abi, "getNodeLastUnstakeTime"), n["address"]], - [((n["address"], "rpl.last_unstake_time"), None)]) + lambda n: rp.build_call(nm, "getNodeWithdrawalAddress", n["address"], + key=(n["address"], "withdrawal_address")), + lambda n: rp.build_call(nm, "getNodeTimezoneLocation", n["address"], + key=(n["address"], "timezone_location")), + lambda n: rp.build_call(nm, "getSmoothingPoolRegistrationState", n["address"], + key=(n["address"], "smoothing_pool_registration")), + lambda n: rp.build_call(nm, "getAverageNodeFee", n["address"], + key=(n["address"], "average_node_fee"), 
transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeETHCollateralisationRatio", n["address"], + key=(n["address"], "effective_node_share"), transform=safe_inv), + lambda n: rp.build_call(mm, "getNodeStakingMinipoolCount", n["address"], + key=(n["address"], "staking_minipool_count")), + lambda n: rp.build_call(nd, "getNodeDepositCredit", n["address"], + key=(n["address"], "node_credit"), transform=safe_to_float), + lambda n: rp.build_call(nd, "getNodeEthBalance", n["address"], + key=(n["address"], "node_eth_balance"), transform=safe_to_float), + lambda n: rp.build_call(nm, "getFeeDistributorInitialised", n["address"], + key=(n["address"], "fee_distributor.initialized")), + lambda n: rp.build_call(mc, "getEthBalance", n["fee_distributor"]["address"], + key=(n["address"], "fee_distributor.eth_balance"), transform=safe_to_float), + lambda n: rp.build_call(mf, "getMegapoolDeployed", n["address"], + target=nm.address, key=(n["address"], "megapool.deployed"), transform=is_true), + lambda n: rp.build_call(mc, "getEthBalance", n["megapool"]["address"], + key=(n["address"], "megapool.eth_balance"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeStakedRPL", n["address"], + key=(n["address"], "rpl.total_stake"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeLegacyStakedRPL", n["address"], + key=(n["address"], "rpl.legacy_stake"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeMegapoolStakedRPL", n["address"], + key=(n["address"], "rpl.megapool_stake"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeLockedRPL", n["address"], + key=(n["address"], "rpl.locked"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeUnstakingRPL", n["address"], + key=(n["address"], "rpl.unstaking"), transform=safe_to_float), + lambda n: rp.build_call(ns, "getNodeRPLStakedTime", n["address"], + key=(n["address"], "rpl.last_stake_time")), + lambda n: rp.build_call(ns, "getNodeLastUnstakeTime", n["address"], + 
key=(n["address"], "rpl.last_unstake_time")), ] nodes = await self.db.node_operators.find( {}, {"address": 1, "fee_distributor.address": 1, "megapool.address": 1} @@ -226,8 +225,8 @@ async def update_dynamic_node_operator_metadata(self): start = i * batch_size + 1 end = min((i + 1) * batch_size, total) log.debug(f"Processing node operators [{start}, {end}]/{total}") - res = await rp.multicall2( - [Call(*lamb(n)) for n in node_batch for lamb in lambs], + res = await rp.multicall( + [lamb(n) for n in node_batch for lamb in lambs], require_success=False ) data = _group_multicall_results(res) @@ -250,8 +249,8 @@ async def add_untracked_minipools(self): return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data = await rp.multicall2([ - Call(mm.address, [rp.seth_sig(mm.abi, "getMinipoolAt"), i], [(i, None)]) + data = await rp.multicall([ + rp.build_call(mm, "getMinipoolAt", i, key=i) for i in index_batch ]) await self.db.minipools.insert_many([{"_id": i, "address": a} for i, a in data.items()]) @@ -261,8 +260,8 @@ async def add_static_data_to_minipools(self): m = rp.assemble_contract("rocketMinipool") mm = rp.get_contract_by_name("rocketMinipoolManager") lambs = [ - lambda a: (a, rp.seth_sig(m.abi, "getNodeAddress"), [((a, "node_operator"), None)]), - lambda a: (mm.address, [rp.seth_sig(mm.abi, "getMinipoolPubkey"), a], [((a, "pubkey"), safe_to_hex)]), + lambda a: rp.build_call(m, "getNodeAddress", target=a, key=(a, "node_operator")), + lambda a: rp.build_call(mm, "getMinipoolPubkey", a, key=(a, "pubkey"), transform=safe_to_hex), ] await self._batch_multicall_update( self.db.minipools, @@ -339,17 +338,17 @@ async def update_dynamic_minipool_metadata(self): m = rp.assemble_contract("rocketMinipool") mc = rp.get_contract_by_name("multicall3") lambs = [ - lambda a: (a, rp.seth_sig(m.abi, "getStatus"), [((a, "status"), safe_state_to_str)]), - lambda a: 
(a, rp.seth_sig(m.abi, "getStatusTime"), [((a, "status_time"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getVacant"), [((a, "vacant"), is_true)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeDepositBalance"), [((a, "node_deposit_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeRefundBalance"), [((a, "node_refund_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getPreMigrationBalance"), [((a, "pre_migration_balance"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getNodeFee"), [((a, "node_fee"), safe_to_float)]), - lambda a: (a, rp.seth_sig(m.abi, "getEffectiveDelegate"), [((a, "effective_delegate"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getUseLatestDelegate"), [((a, "use_latest_delegate"), None)]), - lambda a: (a, rp.seth_sig(m.abi, "getUserDistributed"), [((a, "user_distributed"), None)]), - lambda a: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), a], [((a, "execution_balance"), safe_to_float)]) + lambda a: rp.build_call(m, "getStatus", target=a, key=(a, "status"), transform=safe_state_to_str), + lambda a: rp.build_call(m, "getStatusTime", target=a, key=(a, "status_time")), + lambda a: rp.build_call(m, "getVacant", target=a, key=(a, "vacant"), transform=is_true), + lambda a: rp.build_call(m, "getNodeDepositBalance", target=a, key=(a, "node_deposit_balance"), transform=safe_to_float), + lambda a: rp.build_call(m, "getNodeRefundBalance", target=a, key=(a, "node_refund_balance"), transform=safe_to_float), + lambda a: rp.build_call(m, "getPreMigrationBalance", target=a, key=(a, "pre_migration_balance"), transform=safe_to_float), + lambda a: rp.build_call(m, "getNodeFee", target=a, key=(a, "node_fee"), transform=safe_to_float), + lambda a: rp.build_call(m, "getEffectiveDelegate", target=a, key=(a, "effective_delegate")), + lambda a: rp.build_call(m, "getUseLatestDelegate", target=a, key=(a, "use_latest_delegate")), + lambda a: rp.build_call(m, "getUserDistributed", target=a, key=(a, "user_distributed")), + 
lambda a: rp.build_call(mc, "getEthBalance", a, key=(a, "execution_balance"), transform=safe_to_float), ] await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, lambs, label="minipools") diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 5dee621a..09163105 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -24,17 +24,15 @@ def __init__(self, bot: RocketWatch): @staticmethod def get_deposit_pool_stats() -> Embed: - multicall: dict[str, int] = { - res.function_name: res.results[0] for res in rp.multicall.aggregate([ - rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), - rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), - rp.get_contract_by_name("rocketDepositPool").functions.getMaximumDepositAmount(), - ]).results - } - - dp_balance = solidity.to_float(multicall["getBalance"]) - deposit_cap = solidity.to_int(multicall["getMaximumDepositPoolSize"]) - free_capacity = solidity.to_float(multicall["getMaximumDepositAmount"]) + balance_raw, max_size_raw, max_amount_raw = rp.multicall_sync([ + rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), + rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), + rp.get_contract_by_name("rocketDepositPool").functions.getMaximumDepositAmount(), + ]) + + dp_balance = solidity.to_float(balance_raw) + deposit_cap = solidity.to_int(max_size_raw) + free_capacity = solidity.to_float(max_amount_raw) if deposit_cap - dp_balance < 0.01: dp_status = "Capacity reached!" 
@@ -83,18 +81,16 @@ def get_deposit_pool_stats() -> Embed: @staticmethod def get_contract_collateral_stats() -> Embed: - multicall: dict[str, int] = { - res.function_name: res.results[0] for res in rp.multicall.aggregate([ - rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), - rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), - rp.get_contract_by_name("rocketTokenRETH").functions.getCollateralRate(), - rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork").functions.getTargetRethCollateralRate(), - ]).results - } - - total_eth_in_reth: float = multicall["totalSupply"] * multicall["getExchangeRate"] / 10**36 - collateral_rate: float = solidity.to_float(multicall["getCollateralRate"]) - collateral_rate_target: float = solidity.to_float(multicall["getTargetRethCollateralRate"]) + exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = rp.multicall_sync([ + rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), + rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), + rp.get_contract_by_name("rocketTokenRETH").functions.getCollateralRate(), + rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork").functions.getTargetRethCollateralRate(), + ]) + + total_eth_in_reth: float = total_supply * exchange_rate / 10**36 + collateral_rate: float = solidity.to_float(collateral_rate_raw) + collateral_rate_target: float = solidity.to_float(target_rate_raw) collateral_eth: float = total_eth_in_reth * collateral_rate collateral_target_eth: float = total_eth_in_reth * collateral_rate_target diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 198f8581..cf5a3722 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -15,7 +15,6 @@ matplotlib==3.7.1 scipy==1.11.3 inflect==7.3.1 wordcloud==1.9.4 -web3-multicall==0.0.7 colorama==0.4.6 seaborn==0.12.2 etherscan_labels @ 
git+https://github.com/InvisibleSymbol/etherscan-labels@7eb617d715a4dda0eabdd858106a526a3abd3394 diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 7c550ac0..90a90ea0 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -108,18 +108,14 @@ class Proposal(DAO.Proposal): def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_dao_names = [ - res.results[0] for res in rp.multicall.aggregate([ - self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) - ]).results - ] + proposal_dao_names = rp.multicall_sync([ + self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) + ]) relevant_proposals = [(i+1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] - proposal_states = [ - res.results[0] for res in rp.multicall.aggregate([ - self.proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals - ]).results - ] + proposal_states = rp.multicall_sync([ + self.proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals + ]) proposals = {state: [] for state in DefaultDAO.ProposalState} for proposal_id, state in zip(relevant_proposals, proposal_states): @@ -132,33 +128,31 @@ def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: if not (1 <= proposal_id <= num_proposals): return None - # map results of functions calls to function name - multicall: dict[str, str | bytes | int] = { - res.function_name: res.results[0] for res in rp.multicall.aggregate([ - self.proposal_contract.functions.getProposer(proposal_id), - self.proposal_contract.functions.getMessage(proposal_id), - self.proposal_contract.functions.getPayload(proposal_id), - self.proposal_contract.functions.getCreated(proposal_id), - self.proposal_contract.functions.getStart(proposal_id), - 
self.proposal_contract.functions.getEnd(proposal_id), - self.proposal_contract.functions.getExpires(proposal_id), - self.proposal_contract.functions.getVotesFor(proposal_id), - self.proposal_contract.functions.getVotesAgainst(proposal_id), - self.proposal_contract.functions.getVotesRequired(proposal_id) - ]).results - } + (proposer, message, payload, created, start, end, expires, + votes_for_raw, votes_against_raw, votes_required_raw) = rp.multicall_sync([ + self.proposal_contract.functions.getProposer(proposal_id), + self.proposal_contract.functions.getMessage(proposal_id), + self.proposal_contract.functions.getPayload(proposal_id), + self.proposal_contract.functions.getCreated(proposal_id), + self.proposal_contract.functions.getStart(proposal_id), + self.proposal_contract.functions.getEnd(proposal_id), + self.proposal_contract.functions.getExpires(proposal_id), + self.proposal_contract.functions.getVotesFor(proposal_id), + self.proposal_contract.functions.getVotesAgainst(proposal_id), + self.proposal_contract.functions.getVotesRequired(proposal_id) + ]) return DefaultDAO.Proposal( id=proposal_id, - proposer=cast(ChecksumAddress, multicall["getProposer"]), - message=multicall["getMessage"], - payload=multicall["getPayload"], - created=multicall["getCreated"], - start=multicall["getStart"], - end=multicall["getEnd"], - expires=multicall["getExpires"], - votes_for=solidity.to_int(multicall["getVotesFor"]), - votes_against=solidity.to_int(multicall["getVotesAgainst"]), - votes_required=solidity.to_float(multicall["getVotesRequired"]) + proposer=cast(ChecksumAddress, proposer), + message=message, + payload=payload, + created=created, + start=start, + end=end, + expires=expires, + votes_for=solidity.to_int(votes_for_raw), + votes_against=solidity.to_int(votes_against_raw), + votes_required=solidity.to_float(votes_required_raw) ) def _build_vote_graph(self, proposal: Proposal) -> str: @@ -223,11 +217,9 @@ def votes_total(self): def get_proposals_by_state(self) -> 
dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_states = [ - res.results[0] for res in rp.multicall.aggregate([ - self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) - ]).results - ] + proposal_states = rp.multicall_sync([ + self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) + ]) proposals = {state: [] for state in ProtocolDAO.ProposalState} for proposal_id in range(1, num_proposals + 1): @@ -241,41 +233,40 @@ def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: if not (1 <= proposal_id <= num_proposals): return None - # map results of functions calls to function name - multicall: dict[str, str | bytes | int] = { - res.function_name: res.results[0] for res in rp.multicall.aggregate([ - self.proposal_contract.functions.getProposer(proposal_id), - self.proposal_contract.functions.getMessage(proposal_id), - self.proposal_contract.functions.getPayload(proposal_id), - self.proposal_contract.functions.getCreated(proposal_id), - self.proposal_contract.functions.getStart(proposal_id), - self.proposal_contract.functions.getPhase1End(proposal_id), - self.proposal_contract.functions.getPhase2End(proposal_id), - self.proposal_contract.functions.getExpires(proposal_id), - self.proposal_contract.functions.getVotingPowerFor(proposal_id), - self.proposal_contract.functions.getVotingPowerAgainst(proposal_id), - self.proposal_contract.functions.getVotingPowerVeto(proposal_id), - self.proposal_contract.functions.getVotingPowerAbstained(proposal_id), - self.proposal_contract.functions.getVotingPowerRequired(proposal_id), - self.proposal_contract.functions.getVetoQuorum(proposal_id) - ]).results - } + (proposer, message, payload, created, start, phase1_end, phase2_end, + expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, + vp_required_raw, veto_quorum_raw) = rp.multicall_sync([ + 
self.proposal_contract.functions.getProposer(proposal_id), + self.proposal_contract.functions.getMessage(proposal_id), + self.proposal_contract.functions.getPayload(proposal_id), + self.proposal_contract.functions.getCreated(proposal_id), + self.proposal_contract.functions.getStart(proposal_id), + self.proposal_contract.functions.getPhase1End(proposal_id), + self.proposal_contract.functions.getPhase2End(proposal_id), + self.proposal_contract.functions.getExpires(proposal_id), + self.proposal_contract.functions.getVotingPowerFor(proposal_id), + self.proposal_contract.functions.getVotingPowerAgainst(proposal_id), + self.proposal_contract.functions.getVotingPowerVeto(proposal_id), + self.proposal_contract.functions.getVotingPowerAbstained(proposal_id), + self.proposal_contract.functions.getVotingPowerRequired(proposal_id), + self.proposal_contract.functions.getVetoQuorum(proposal_id) + ]) return ProtocolDAO.Proposal( id=proposal_id, - proposer=cast(ChecksumAddress, multicall["getProposer"]), - message=multicall["getMessage"], - payload=multicall["getPayload"], - created=multicall["getCreated"], - start=multicall["getStart"], - end_phase_1=multicall["getPhase1End"], - end_phase_2= multicall["getPhase2End"], - expires=multicall["getExpires"], - votes_for=solidity.to_float(multicall["getVotingPowerFor"]), - votes_against=solidity.to_float(multicall["getVotingPowerAgainst"]), - votes_veto=solidity.to_float(multicall["getVotingPowerVeto"]), - votes_abstain=solidity.to_float(multicall["getVotingPowerAbstained"]), - quorum=solidity.to_float(multicall["getVotingPowerRequired"]), - veto_quorum=solidity.to_float(multicall["getVetoQuorum"]) + proposer=cast(ChecksumAddress, proposer), + message=message, + payload=payload, + created=created, + start=start, + end_phase_1=phase1_end, + end_phase_2=phase2_end, + expires=expires, + votes_for=solidity.to_float(vp_for_raw), + votes_against=solidity.to_float(vp_against_raw), + votes_veto=solidity.to_float(vp_veto_raw), + 
votes_abstain=solidity.to_float(vp_abstain_raw), + quorum=solidity.to_float(vp_required_raw), + veto_quorum=solidity.to_float(veto_quorum_raw) ) def _build_vote_graph(self, proposal: Proposal) -> str: diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index ec7ba950..45ea1c14 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -696,22 +696,17 @@ def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: return word_position, bit_position def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: - return dict(zip(ticks, [ - res.results[1] for res in rp.multicall.aggregate( - [self.contract.functions.ticks(tick) for tick in ticks], - ).results - ])) + results = rp.multicall_sync([self.contract.functions.ticks(tick) for tick in ticks]) + return dict(zip(ticks, [r[1] for r in results])) def get_initialized_ticks(self, current_tick: int) -> list[int]: ticks = [] active_word, b = self.tick_to_word_and_bit(current_tick) word_range = list(range(active_word - 5, active_word + 5)) - bitmaps = [ - res.results[0] for res in rp.multicall.aggregate( - [self.contract.functions.tickBitmap(word) for word in word_range], - ).results - ] + bitmaps = rp.multicall_sync([ + self.contract.functions.tickBitmap(word) for word in word_range + ]) for word, tick_bitmap in zip(word_range, bitmaps): if not tick_bitmap: diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 21d34acf..e183410a 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -9,7 +9,6 @@ from multicall import Call, Multicall from multicall.constants import MULTICALL3_ADDRESSES from web3.exceptions import ContractLogicError -from web3_multicall import Multicall as Web3Multicall from utils import solidity from utils.cfg import cfg @@ -31,15 +30,22 @@ class RocketPool: def __init__(self): self.addresses = bidict() - self.multicall = Web3Multicall(w3.eth, MULTICALL3_ADDRESSES[w3.eth.chain_id]) + 
self._mc3_address = MULTICALL3_ADDRESSES[w3.eth.chain_id] + self._mc3 = None self.flush() + def _get_mc3(self): + if self._mc3 is None: + self._mc3 = self.get_contract_by_name("multicall3") + return self._mc3 + def flush(self): log.warning("FLUSHING RP CACHE") self.CONTRACT_CACHE.clear() self.ABI_CACHE.clear() self.ADDRESS_CACHE.clear() self.addresses.clear() + self._mc3 = None self._init_contract_addresses() def _init_contract_addresses(self) -> None: @@ -47,7 +53,7 @@ def _init_contract_addresses(self) -> None: for name, address in manual_addresses.items(): self.addresses[name] = address - self.addresses["multicall3"] = self.multicall.address + self.addresses["multicall3"] = self._mc3_address log.info("Indexing Rocket Pool contracts...") # generate list of all file names with the .sol extension from the rocketpool submodule @@ -90,7 +96,41 @@ def seth_sig(abi, function_name): return f"{function_name}({inputs})({outputs})" raise Exception(f"Function {function_name} not found in ABI") - async def multicall2(self, calls: list[Call], require_success=True): + @staticmethod + def _fn_to_call(fn, key): + """Convert a web3 ContractFunction to a multicall Call with integer key.""" + sig = RocketPool.seth_sig(fn.contract_abi, fn.function_identifier) + return Call(fn.address, [sig, *fn.args], [(key, None)]) + + @staticmethod + def build_call(abi_source, function_name, *args, target=None, key=None, transform=None): + """Build a multicall Call object. 
+ + Args: + abi_source: Contract object with .abi attribute + function_name: Function name to call + *args: Function arguments + target: Target address (defaults to abi_source.address) + key: Result key (defaults to function_name) + transform: Optional result transform function + """ + abi = abi_source.abi if hasattr(abi_source, 'abi') else abi_source + address = target if target is not None else abi_source.address + sig = RocketPool.seth_sig(abi, function_name) + return Call(address, [sig, *args], [(key if key is not None else function_name, transform)]) + + def multicall_sync(self, calls, require_success=True): + """Sync multicall accepting ContractFunction objects. Returns list of results.""" + mc_calls = [self._fn_to_call(fn, i) for i, fn in enumerate(calls)] + encoded = [(call.target, not require_success, call.data) for call in mc_calls] + results = self._get_mc3().functions.aggregate3(encoded).call() + return [ + Call.decode_output(data, mc_calls[i].signature, success=success) + for i, (success, data) in enumerate(results) + ] + + async def multicall(self, calls: list[Call], require_success=True): + """Async multicall accepting Call objects. 
Returns dict of keyed results.""" return await Multicall(calls, _w3=w3, gas_limit=50_000_000, require_success=require_success) @cached(cache=ADDRESS_CACHE) @@ -136,19 +176,19 @@ def get_revert_reason(tnx): return "Hidden Error" else: return None - + def get_string(self, key: str) -> str: sha3 = w3.solidity_keccak(["string"], [key]) return self.get_contract_by_name("rocketStorage").functions.getString(sha3).call() - + def get_uint(self, key: str) -> int: sha3 = w3.solidity_keccak(["string"], [key]) return self.get_contract_by_name("rocketStorage").functions.getUint(sha3).call() - + def get_protocol_version(self) -> tuple: version_string = self.get_string("protocol.version") return tuple(map(int, version_string.split("."))) - + @cached(cache=ABI_CACHE) def get_abi_by_name(self, name): return self.uncached_get_abi_by_name(name) @@ -225,16 +265,16 @@ def get_percentage_rpl_swapped(self): value = solidity.to_float(self.call("rocketTokenRPL.totalSwappedRPL")) percentage = (value / 18_000_000) * 100 return round(percentage, 2) - + def is_node(self, address: ChecksumAddress) -> bool: return self.call("rocketNodeManager.getNodeExists", address) - + def is_minipool(self, address: ChecksumAddress) -> bool: return self.call("rocketMinipoolManager.getMinipoolExists", address) - + def is_megapool(self, address: ChecksumAddress) -> bool: sha3 = w3.solidity_keccak(["string", "address"], ["megapool.exists", address]) - return self.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() + return self.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() @ttl_cache(ttl=60) def get_eth_usdc_price(self) -> float: diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index 9fb8f081..4e5793b9 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -59,17 +59,14 @@ def get_holding_for_address(address): eth_balance = solidity.to_float(w3.eth.getBalance(address)) # get ERC-20 token balance for 
this address with contextlib.suppress(Exception): - resp = rp.multicall.aggregate( - rp.get_contract_by_name(name).functions.balanceOf(address) for name in - ["rocketTokenRPL", "rocketTokenRPLFixedSupply", "rocketTokenRETH"] - ) - # add their tokens to their eth balance - for token in resp.results: - contract_name = rp.get_name_by_address(token.contract_address) - if "RPL" in contract_name: - eth_balance += solidity.to_float(token.results[0]) * price_cache["rpl_price"] - if "RETH" in contract_name: - eth_balance += solidity.to_float(token.results[0]) * price_cache["reth_price"] + rpl_balance, rplfs_balance, reth_balance = rp.multicall_sync([ + rp.get_contract_by_name("rocketTokenRPL").functions.balanceOf(address), + rp.get_contract_by_name("rocketTokenRPLFixedSupply").functions.balanceOf(address), + rp.get_contract_by_name("rocketTokenRETH").functions.balanceOf(address), + ]) + eth_balance += solidity.to_float(rpl_balance) * price_cache["rpl_price"] + eth_balance += solidity.to_float(rplfs_balance) * price_cache["rpl_price"] + eth_balance += solidity.to_float(reth_balance) * price_cache["reth_price"] # add eth they provided for minipools eth_balance += solidity.to_float(rp.call("rocketNodeStaking.getNodeETHBonded", address)) # add their staked RPL From 7fb5ca46606b70428c90826b860aa46ea4ef29b0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 22:10:05 +0000 Subject: [PATCH 130/279] small fixes --- rocketwatch/plugins/dao/dao.py | 4 ++-- rocketwatch/plugins/governance/governance.py | 13 +++++++------ rocketwatch/utils/dao.py | 18 +++++------------- rocketwatch/utils/rocketpool.py | 14 ++++---------- 4 files changed, 18 insertions(+), 31 deletions(-) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 69faa3ed..a7cd8f8e 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -40,7 +40,7 @@ def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: 
dao.ProposalState.Succeeded: [], } - for state, ids in dao.get_proposals_by_state().items(): + for state, ids in dao.get_proposal_ids_by_state().items(): if state in current_proposals: current_proposals[state].extend([dao.fetch_proposal(pid) for pid in ids]) @@ -78,7 +78,7 @@ def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: dao.ProposalState.Succeeded: [], } - for state, ids in dao.get_proposals_by_state().items(): + for state, ids in dao.get_proposal_ids_by_state().items(): if state in current_proposals: current_proposals[state].extend([dao.fetch_proposal(pid) for pid in ids]) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 5ce9bcd3..d4c86132 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -24,16 +24,16 @@ class Governance(StatusPlugin): @staticmethod def _get_active_pdao_proposals(dao: ProtocolDAO) -> list[ProtocolDAO.Proposal]: - proposals = dao.get_proposals_by_state() + proposal_ids = dao.get_proposal_ids_by_state() active_proposal_ids = [] - active_proposal_ids += proposals[dao.ProposalState.ActivePhase1] - active_proposal_ids += proposals[dao.ProposalState.ActivePhase2] + active_proposal_ids += proposal_ids[dao.ProposalState.ActivePhase1] + active_proposal_ids += proposal_ids[dao.ProposalState.ActivePhase2] return [dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] @staticmethod def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal]: - proposals = dao.get_proposals_by_state() - active_proposal_ids = proposals[dao.ProposalState.Active] + proposal_ids = dao.get_proposal_ids_by_state() + active_proposal_ids = proposal_ids[dao.ProposalState.Active] return [dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] @staticmethod @@ -76,7 +76,8 @@ async def _get_latest_forum_topics(self, days: int) -> list[Forum.Topic]: return [] async def 
get_digest(self) -> Embed: - embed = Embed(title="Governance Digest", description="") + embed = Embed(title="Governance Digest") + embed.description = "" def sanitize(text: str, max_length: int = 50) -> str: text = text.strip() diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 90a90ea0..8eb1d37b 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -33,7 +33,7 @@ class Proposal(ABC): @staticmethod @abstractmethod - def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: + def fetch_proposal(self, proposal_id: int) -> Proposal: pass @abstractmethod @@ -106,7 +106,7 @@ class Proposal(DAO.Proposal): votes_against: int votes_required: int - def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: + def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() proposal_dao_names = rp.multicall_sync([ self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) @@ -123,11 +123,7 @@ def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: return proposals - def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: - num_proposals = self.proposal_contract.functions.getTotal().call() - if not (1 <= proposal_id <= num_proposals): - return None - + def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, end, expires, votes_for_raw, votes_against_raw, votes_required_raw) = rp.multicall_sync([ self.proposal_contract.functions.getProposer(proposal_id), @@ -215,7 +211,7 @@ class Proposal(DAO.Proposal): def votes_total(self): return self.votes_for + self.votes_against + self.votes_abstain - def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: + def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() proposal_states = rp.multicall_sync([ 
self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) @@ -228,11 +224,7 @@ def get_proposals_by_state(self) -> dict[ProposalState, list[int]]: return proposals - def fetch_proposal(self, proposal_id: int) -> Optional[Proposal]: - num_proposals = self.proposal_contract.functions.getTotal().call() - if not (1 <= proposal_id <= num_proposals): - return None - + def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, phase1_end, phase2_end, expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, vp_required_raw, veto_quorum_raw) = rp.multicall_sync([ diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index e183410a..fd9949e6 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -30,30 +30,24 @@ class RocketPool: def __init__(self): self.addresses = bidict() - self._mc3_address = MULTICALL3_ADDRESSES[w3.eth.chain_id] - self._mc3 = None self.flush() - def _get_mc3(self): - if self._mc3 is None: - self._mc3 = self.get_contract_by_name("multicall3") - return self._mc3 - def flush(self): log.warning("FLUSHING RP CACHE") self.CONTRACT_CACHE.clear() self.ABI_CACHE.clear() self.ADDRESS_CACHE.clear() self.addresses.clear() - self._mc3 = None self._init_contract_addresses() + def _init_contract_addresses(self) -> None: manual_addresses = cfg["rocketpool.manual_addresses"] for name, address in manual_addresses.items(): self.addresses[name] = address - self.addresses["multicall3"] = self._mc3_address + self.addresses["multicall3"] = w3.to_checksum_address(MULTICALL3_ADDRESSES[w3.eth.chain_id]) + self._multicall = self.get_contract_by_name("multicall3") log.info("Indexing Rocket Pool contracts...") # generate list of all file names with the .sol extension from the rocketpool submodule @@ -123,7 +117,7 @@ def multicall_sync(self, calls, require_success=True): """Sync multicall accepting ContractFunction objects. 
Returns list of results.""" mc_calls = [self._fn_to_call(fn, i) for i, fn in enumerate(calls)] encoded = [(call.target, not require_success, call.data) for call in mc_calls] - results = self._get_mc3().functions.aggregate3(encoded).call() + results = self._multicall.functions.aggregate3(encoded).call() return [ Call.decode_output(data, mc_calls[i].signature, success=success) for i, (success, data) in enumerate(results) From 83e8caf694ed64d5f9f2901fd635776573fea96c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 22:21:03 +0000 Subject: [PATCH 131/279] clean up dependencies --- rocketwatch/requirements.txt | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index cf5a3722..c2efba78 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -5,42 +5,27 @@ humanize==4.6.0 termplotlib==0.3.9 cachetools==5.3.3 bidict==0.22.1 -tinydb==4.7.1 requests==2.32.3 uptime==3.0.1 discord.py==2.5.2 config==0.5.1 pytz==2022.7.1 matplotlib==3.7.1 -scipy==1.11.3 inflect==7.3.1 -wordcloud==1.9.4 colorama==0.4.6 seaborn==0.12.2 etherscan_labels @ git+https://github.com/InvisibleSymbol/etherscan-labels@7eb617d715a4dda0eabdd858106a526a3abd3394 cronitor==4.6.0 -circuitbreaker==1.4.0 retry-async==0.1.4 -checksumdir==1.2.0 multicall==0.11.0 dice==3.1.2 -openai==1.10.0 -transformers==4.48.0 -schedule==1.2.2 -suntimes==1.1.2 -icalendar==5.0.13 regex==2023.8.8 tiktoken==0.5.2 anthropic==0.18.1 -HomeAssistant-API==4.2.2.post2 -bs4==0.0.2 -pydantic==2.8.2 -pydantic_core==2.20.1 pymongo==4.15.3 graphql_query==1.4.0 pillow==11.1.0 aiohttp==3.11.12 -eth-account==0.5.9 numpy==1.26.4 beautifulsoup4==4.13.3 eth-typing==2.2.1 From c19990d1e89b787c515849899fbd4f5fe234c85c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 22:34:17 +0000 Subject: [PATCH 132/279] add global megapool data to DB --- 
.../plugins/db_upkeep_task/db_upkeep_task.py | 79 +++++++++++++++---- 1 file changed, 63 insertions(+), 16 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index a3e8aec5..e7ef1101 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -85,16 +85,16 @@ async def loop(self): log.debug("starting db upkeep task") # node tasks await self.add_untracked_node_operators() - await self.add_static_data_to_node_operators() - await self.update_dynamic_node_operator_metadata() - # TODO: update megapool stats if deployed + await self.add_static_node_operator_data() + await self.update_dynamic_node_operator_data() + await self.update_dynamic_megapool_data() # minipool tasks await self.add_untracked_minipools() - await self.add_static_data_to_minipools() - await self.add_static_deposit_data_to_minipools() - await self.add_static_beacon_data_to_minipools() - await self.update_dynamic_minipool_metadata() - await self.update_dynamic_minipool_beacon_metadata() + await self.add_static_minipool_data() + await self.add_static_minipool_deposit_data() + await self.add_static_minipool_beacon_data() + await self.update_dynamic_minipool_data() + await self.update_dynamic_minipool_beacon_data() # TODO: populate megapool validator DB log.debug("finished db upkeep task") self.monitor.ping(state="complete", series=p_id) @@ -155,7 +155,7 @@ async def add_untracked_node_operators(self): await self.db.node_operators.insert_many([{"_id": i, "address": a} for i, a in data.items()]) @timerun_async - async def add_static_data_to_node_operators(self): + async def add_static_node_operator_data(self): df = rp.get_contract_by_name("rocketNodeDistributorFactory") mf = rp.get_contract_by_name("rocketMegapoolFactory") lambs = [ @@ -169,7 +169,7 @@ async def add_static_data_to_node_operators(self): ) @timerun_async - async def 
update_dynamic_node_operator_metadata(self): + async def update_dynamic_node_operator_data(self): mf = rp.get_contract_by_name("rocketMegapoolFactory") nd = rp.get_contract_by_name("rocketNodeDeposit") nm = rp.get_contract_by_name("rocketNodeManager") @@ -198,7 +198,7 @@ async def update_dynamic_node_operator_metadata(self): lambda n: rp.build_call(mc, "getEthBalance", n["fee_distributor"]["address"], key=(n["address"], "fee_distributor.eth_balance"), transform=safe_to_float), lambda n: rp.build_call(mf, "getMegapoolDeployed", n["address"], - target=nm.address, key=(n["address"], "megapool.deployed"), transform=is_true), + key=(n["address"], "megapool.deployed")), lambda n: rp.build_call(mc, "getEthBalance", n["megapool"]["address"], key=(n["address"], "megapool.eth_balance"), transform=safe_to_float), lambda n: rp.build_call(ns, "getNodeStakedRPL", n["address"], @@ -235,6 +235,53 @@ async def update_dynamic_node_operator_metadata(self): ordered=False ) + @timerun_async + async def update_dynamic_megapool_data(self): + mp = rp.assemble_contract("rocketMegapoolDelegate") + lambs = [ + lambda n: rp.build_call(mp, "getValidatorCount", + target=n["megapool"]["address"], key=(n["address"], "megapool.validator_count")), + lambda n: rp.build_call(mp, "getActiveValidatorCount", + target=n["megapool"]["address"], key=(n["address"], "megapool.active_validator_count")), + lambda n: rp.build_call(mp, "getExitingValidatorCount", + target=n["megapool"]["address"], key=(n["address"], "megapool.exiting_validator_count")), + lambda n: rp.build_call(mp, "getLockedValidatorCount", + target=n["megapool"]["address"], key=(n["address"], "megapool.locked_validator_count")), + lambda n: rp.build_call(mp, "getNodeBond", + target=n["megapool"]["address"], key=(n["address"], "megapool.node_bond"), transform=safe_to_float), + lambda n: rp.build_call(mp, "getUserCapital", + target=n["megapool"]["address"], key=(n["address"], "megapool.user_capital"), transform=safe_to_float), + lambda n: 
rp.build_call(mp, "getDebt", + target=n["megapool"]["address"], key=(n["address"], "megapool.debt"), transform=safe_to_float), + lambda n: rp.build_call(mp, "getRefundValue", + target=n["megapool"]["address"], key=(n["address"], "megapool.refund_value"), transform=safe_to_float), + lambda n: rp.build_call(mp, "getPendingRewards", + target=n["megapool"]["address"], key=(n["address"], "megapool.pending_rewards"), transform=safe_to_float), + lambda n: rp.build_call(mp, "getLastDistributionTime", + target=n["megapool"]["address"], key=(n["address"], "megapool.last_distribution_time")), + ] + nodes = await self.db.node_operators.find( + {"megapool.deployed": True}, {"address": 1, "megapool.address": 1} + ).to_list() + if not nodes: + return + + total = len(nodes) + batch_size = self.batch_size // len(lambs) + for i, node_batch in enumerate(as_chunks(nodes, batch_size)): + start = i * batch_size + 1 + end = min((i + 1) * batch_size, total) + log.debug(f"Processing megapools [{start}, {end}]/{total}") + res = await rp.multicall( + [lamb(n) for n in node_batch for lamb in lambs], + require_success=False + ) + data = _group_multicall_results(res) + await self.db.node_operators.bulk_write( + [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], + ordered=False + ) + # -- Minipool tasks -- @timerun_async @@ -256,7 +303,7 @@ async def add_untracked_minipools(self): await self.db.minipools.insert_many([{"_id": i, "address": a} for i, a in data.items()]) @timerun_async - async def add_static_data_to_minipools(self): + async def add_static_minipool_data(self): m = rp.assemble_contract("rocketMinipool") mm = rp.get_contract_by_name("rocketMinipoolManager") lambs = [ @@ -270,7 +317,7 @@ async def add_static_data_to_minipools(self): ) @timerun - async def add_static_deposit_data_to_minipools(self): + async def add_static_minipool_deposit_data(self): minipools = await self.db.minipools.find( {"deposit_amount": {"$exists": False}, "status": "initialised"}, 
{"address": 1, "_id": 0, "status_time": 1} @@ -321,7 +368,7 @@ async def add_static_deposit_data_to_minipools(self): ) @timerun - async def add_static_beacon_data_to_minipools(self): + async def add_static_minipool_beacon_data(self): public_keys = await self.db.minipools.distinct("pubkey", {"validator_index": {"$exists": False}}) if not public_keys: return @@ -334,7 +381,7 @@ async def add_static_beacon_data_to_minipools(self): ) @timerun_async - async def update_dynamic_minipool_metadata(self): + async def update_dynamic_minipool_data(self): m = rp.assemble_contract("rocketMinipool") mc = rp.get_contract_by_name("multicall3") lambs = [ @@ -353,7 +400,7 @@ async def update_dynamic_minipool_metadata(self): await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, lambs, label="minipools") @timerun - async def update_dynamic_minipool_beacon_metadata(self): + async def update_dynamic_minipool_beacon_data(self): validator_indexes = await self.db.minipools.distinct( "validator_index", {"beacon.status": {"$ne": "withdrawal_done"}} ) From dde7553120ae84768c5eaee50368e4eb883c8cf8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 5 Mar 2026 23:12:10 +0000 Subject: [PATCH 133/279] add megapool_validator collection --- .../plugins/db_upkeep_task/db_upkeep_task.py | 193 +++++++++++++++++- 1 file changed, 186 insertions(+), 7 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index e7ef1101..ab6cd9ce 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -51,15 +51,54 @@ def safe_inv(_, num): return None -def is_true(_, b): - return b is True - - def _parse_epoch(value): epoch = int(value) return epoch if epoch < FAR_FUTURE_EPOCH else None +def _derive_validator_status(info): + if info[9]: # dissolved + return "dissolved" + if info[5]: # exited + return "exited" + 
if info[6]: # inQueue + return "in_queue" + if info[7]: # inPrestake + return "prestaked" + if info[11]: # locked + return "locked" + if info[10]: # exiting + return "exiting" + if info[4]: # staked + return "staking" + return "unknown" + + +def _unpack_validator_info(_, info): + if info is None: + return None + return { + "status": _derive_validator_status(info), + "express_used": info[8], + "assignment_time": info[0], + "requested_bond": info[2] / 1000, # milliether to ETH + "deposit_value": info[3] / 1000, # milliether to ETH + "exit_balance": solidity.to_float(info[12], 9), # gwei to ETH + } + + +def _unpack_validator_info_dynamic(_, info): + if info is None: + return None + return { + "status": _derive_validator_status(info), + "assignment_time": info[0], + "requested_bond": info[2] / 1000, + "deposit_value": info[3] / 1000, + "exit_balance": solidity.to_float(info[12], 9), + } + + def _group_multicall_results(res): data = defaultdict(dict) for (key, field), value in res.items(): @@ -95,7 +134,11 @@ async def loop(self): await self.add_static_minipool_beacon_data() await self.update_dynamic_minipool_data() await self.update_dynamic_minipool_beacon_data() - # TODO: populate megapool validator DB + # megapool validator tasks + await self.add_untracked_megapool_validators() + await self.add_static_megapool_validator_beacon_data() + await self.update_dynamic_megapool_validator_data() + await self.update_dynamic_megapool_validator_beacon_data() log.debug("finished db upkeep task") self.monitor.ping(state="complete", series=p_id) except Exception as err: @@ -106,10 +149,16 @@ async def loop(self): async def check_indexes(self): log.debug("checking indexes") + await self.db.node_operators.create_index("address") + await self.db.node_operators.create_index("megapool.address") await self.db.minipools.create_index("address") await self.db.minipools.create_index("pubkey") await self.db.minipools.create_index("validator_index") - await 
self.db.node_operators.create_index("address") + await self.db.megapool_validators.create_index( + [("megapool", pymongo.ASCENDING), ("validator_id", pymongo.ASCENDING)], unique=True + ) + await self.db.megapool_validators.create_index("pubkey") + await self.db.megapool_validators.create_index("validator_index") log.debug("indexes checked") async def _batch_multicall_update(self, collection, query, lambs, label=None): @@ -387,7 +436,8 @@ async def update_dynamic_minipool_data(self): lambs = [ lambda a: rp.build_call(m, "getStatus", target=a, key=(a, "status"), transform=safe_state_to_str), lambda a: rp.build_call(m, "getStatusTime", target=a, key=(a, "status_time")), - lambda a: rp.build_call(m, "getVacant", target=a, key=(a, "vacant"), transform=is_true), + lambda a: rp.build_call(m, "getVacant", target=a, key=(a, "vacant")), + lambda a: rp.build_call(m, "getFinalised", target=a, key=(a, "finalized")), lambda a: rp.build_call(m, "getNodeDepositBalance", target=a, key=(a, "node_deposit_balance"), transform=safe_to_float), lambda a: rp.build_call(m, "getNodeRefundBalance", target=a, key=(a, "node_refund_balance"), transform=safe_to_float), lambda a: rp.build_call(m, "getPreMigrationBalance", target=a, key=(a, "pre_migration_balance"), transform=safe_to_float), @@ -430,5 +480,134 @@ async def update_dynamic_minipool_beacon_data(self): ) + # -- Megapool validator tasks -- + + @timerun_async + async def add_untracked_megapool_validators(self): + mp = rp.assemble_contract("rocketMegapoolDelegate") + # get deployed megapools with their on-chain validator count + nodes = await self.db.node_operators.find( + {"megapool.deployed": True, "megapool.validator_count": {"$gt": 0}}, + {"address": 1, "megapool.address": 1, "megapool.validator_count": 1} + ).to_list() + if not nodes: + return + + for node in nodes: + megapool_addr = node["megapool"]["address"] + on_chain_count = node["megapool"]["validator_count"] + db_count = await 
self.db.megapool_validators.count_documents({"megapool": megapool_addr}) + if db_count >= on_chain_count: + continue + + new_ids = list(range(db_count, on_chain_count)) + log.debug(f"Adding {len(new_ids)} new validators for megapool {megapool_addr}") + + for id_batch in as_chunks(new_ids, self.batch_size // 2): + # fetch pubkey + validator info for each new validator + calls = [] + for vid in id_batch: + calls.append(rp.build_call(mp, "getValidatorPubkey", vid, + target=megapool_addr, key=(vid, "pubkey"), transform=safe_to_hex)) + calls.append(rp.build_call(mp, "getValidatorInfo", vid, + target=megapool_addr, key=(vid, "info"), transform=_unpack_validator_info)) + res = await rp.multicall(calls, require_success=False) + grouped = _group_multicall_results(res) + + docs = [] + for vid, data in grouped.items(): + doc = { + "megapool": megapool_addr, + "node_operator": node["address"], + "validator_id": vid, + "pubkey": data.get("pubkey"), + } + info = data.get("info") + if info: + doc.update(info) + docs.append(doc) + if docs: + await self.db.megapool_validators.insert_many(docs, ordered=False) + + @timerun + async def add_static_megapool_validator_beacon_data(self): + public_keys = await self.db.megapool_validators.distinct( + "pubkey", {"validator_index": {"$exists": False}, "pubkey": {"$ne": None}} + ) + if not public_keys: + return + + for pubkey_batch in as_chunks(public_keys, self.batch_size): + beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] + data = {d["validator"]["pubkey"]: int(d["index"]) for d in beacon_data} + if data: + await self.db.megapool_validators.bulk_write( + [UpdateMany({"pubkey": pk}, {"$set": {"validator_index": idx}}) for pk, idx in data.items()], + ordered=False + ) + + @timerun_async + async def update_dynamic_megapool_validator_data(self): + mp = rp.assemble_contract("rocketMegapoolDelegate") + validators = await self.db.megapool_validators.find( + {"status": {"$nin": ["exited", "dissolved"]}}, + 
{"megapool": 1, "validator_id": 1} + ).to_list() + if not validators: + return + + total = len(validators) + for i, batch in enumerate(as_chunks(validators, self.batch_size)): + start = i * self.batch_size + 1 + end = min((i + 1) * self.batch_size, total) + log.debug(f"Processing megapool validators [{start}, {end}]/{total}") + calls = [ + rp.build_call(mp, "getValidatorInfo", v["validator_id"], + target=v["megapool"], key=j, transform=_unpack_validator_info_dynamic) + for j, v in enumerate(batch) + ] + res = await rp.multicall(calls, require_success=False) + ops = [] + for j, v in enumerate(batch): + info = res.get(j) + if info is not None: + ops.append(UpdateOne({"_id": v["_id"]}, {"$set": info})) + if ops: + await self.db.megapool_validators.bulk_write(ops, ordered=False) + + @timerun + async def update_dynamic_megapool_validator_beacon_data(self): + validator_indexes = await self.db.megapool_validators.distinct( + "validator_index", {"beacon.status": {"$ne": "withdrawal_done"}} + ) + validator_indexes = [i for i in validator_indexes if i is not None] + if not validator_indexes: + return + total = len(validator_indexes) + for i, index_batch in enumerate(as_chunks(validator_indexes, self.batch_size)): + start = i * self.batch_size + 1 + end = min((i + 1) * self.batch_size, total) + log.debug(f"Updating beacon data for megapool validators [{start}, {end}]/{total}") + beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] + data = {} + for d in beacon_data: + v = d["validator"] + data[int(d["index"])] = {"beacon": { + "status": d["status"], + "balance": solidity.to_float(d["balance"], 9), + "effective_balance": solidity.to_float(v["effective_balance"], 9), + "slashed": v["slashed"], + "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_epoch": _parse_epoch(v["activation_epoch"]), + "exit_epoch": _parse_epoch(v["exit_epoch"]), + "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), + }} + 
if data: + await self.db.megapool_validators.bulk_write( + [UpdateMany({"validator_index": idx}, {"$set": d}) for idx, d in data.items()], + ordered=False + ) + + async def setup(self): await self.add_cog(DBUpkeepTask(self)) From a88dcf03db1319806767d6d2a15e780640a357a5 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 00:08:02 +0000 Subject: [PATCH 134/279] use pubkey for queries in upkeep task --- .../plugins/db_upkeep_task/db_upkeep_task.py | 117 +++++++----------- 1 file changed, 47 insertions(+), 70 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index ab6cd9ce..a5546d6a 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -131,12 +131,10 @@ async def loop(self): await self.add_untracked_minipools() await self.add_static_minipool_data() await self.add_static_minipool_deposit_data() - await self.add_static_minipool_beacon_data() await self.update_dynamic_minipool_data() await self.update_dynamic_minipool_beacon_data() # megapool validator tasks await self.add_untracked_megapool_validators() - await self.add_static_megapool_validator_beacon_data() await self.update_dynamic_megapool_validator_data() await self.update_dynamic_megapool_validator_beacon_data() log.debug("finished db upkeep task") @@ -154,11 +152,13 @@ async def check_indexes(self): await self.db.minipools.create_index("address") await self.db.minipools.create_index("pubkey") await self.db.minipools.create_index("validator_index") + await self.db.minipools.create_index("beacon.status") await self.db.megapool_validators.create_index( [("megapool", pymongo.ASCENDING), ("validator_id", pymongo.ASCENDING)], unique=True ) await self.db.megapool_validators.create_index("pubkey") await self.db.megapool_validators.create_index("validator_index") + await 
self.db.megapool_validators.create_index("beacon.status") log.debug("indexes checked") async def _batch_multicall_update(self, collection, query, lambs, label=None): @@ -416,19 +416,6 @@ async def add_static_minipool_deposit_data(self): ordered=False ) - @timerun - async def add_static_minipool_beacon_data(self): - public_keys = await self.db.minipools.distinct("pubkey", {"validator_index": {"$exists": False}}) - if not public_keys: - return - for pubkey_batch in as_chunks(public_keys, self.batch_size): - beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] - data = {d["validator"]["pubkey"]: int(d["index"]) for d in beacon_data} - await self.db.minipools.bulk_write( - [UpdateMany({"pubkey": pk}, {"$set": {"validator_index": idx}}) for pk, idx in data.items()], - ordered=False - ) - @timerun_async async def update_dynamic_minipool_data(self): m = rp.assemble_contract("rocketMinipool") @@ -451,33 +438,37 @@ async def update_dynamic_minipool_data(self): @timerun async def update_dynamic_minipool_beacon_data(self): - validator_indexes = await self.db.minipools.distinct( - "validator_index", {"beacon.status": {"$ne": "withdrawal_done"}} + pubkeys = await self.db.minipools.distinct( + "pubkey", {"beacon.status": {"$ne": "withdrawal_done"}} ) - validator_indexes = [i for i in validator_indexes if i is not None] - total = len(validator_indexes) - for i, index_batch in enumerate(as_chunks(validator_indexes, self.batch_size)): + pubkeys = [pk for pk in pubkeys if pk is not None] + total = len(pubkeys) + for i, pubkey_batch in enumerate(as_chunks(pubkeys, self.batch_size)): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) log.info(f"Updating beacon chain data for minipools [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] + beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] data = {} for d in beacon_data: v = d["validator"] - 
data[int(d["index"])] = {"beacon": { - "status": d["status"], - "balance": solidity.to_float(d["balance"], 9), - "effective_balance": solidity.to_float(v["effective_balance"], 9), - "slashed": v["slashed"], - "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), - "activation_epoch": _parse_epoch(v["activation_epoch"]), - "exit_epoch": _parse_epoch(v["exit_epoch"]), - "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), - }} - await self.db.minipools.bulk_write( - [UpdateMany({"validator_index": idx}, {"$set": d}) for idx, d in data.items()], - ordered=False - ) + data[v["pubkey"]] = { + "validator_index": int(d["index"]), + "beacon": { + "status": d["status"], + "balance": solidity.to_float(d["balance"], 9), + "effective_balance": solidity.to_float(v["effective_balance"], 9), + "slashed": v["slashed"], + "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_epoch": _parse_epoch(v["activation_epoch"]), + "exit_epoch": _parse_epoch(v["exit_epoch"]), + "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), + }, + } + if data: + await self.db.minipools.bulk_write( + [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], + ordered=False + ) # -- Megapool validator tasks -- @@ -529,23 +520,6 @@ async def add_untracked_megapool_validators(self): if docs: await self.db.megapool_validators.insert_many(docs, ordered=False) - @timerun - async def add_static_megapool_validator_beacon_data(self): - public_keys = await self.db.megapool_validators.distinct( - "pubkey", {"validator_index": {"$exists": False}, "pubkey": {"$ne": None}} - ) - if not public_keys: - return - - for pubkey_batch in as_chunks(public_keys, self.batch_size): - beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] - data = {d["validator"]["pubkey"]: int(d["index"]) for d in beacon_data} - if data: - await self.db.megapool_validators.bulk_write( - [UpdateMany({"pubkey": pk}, {"$set": 
{"validator_index": idx}}) for pk, idx in data.items()], - ordered=False - ) - @timerun_async async def update_dynamic_megapool_validator_data(self): mp = rp.assemble_contract("rocketMegapoolDelegate") @@ -577,34 +551,37 @@ async def update_dynamic_megapool_validator_data(self): @timerun async def update_dynamic_megapool_validator_beacon_data(self): - validator_indexes = await self.db.megapool_validators.distinct( - "validator_index", {"beacon.status": {"$ne": "withdrawal_done"}} + pubkeys = await self.db.megapool_validators.distinct( + "pubkey", {"beacon.status": {"$ne": "withdrawal_done"}} ) - validator_indexes = [i for i in validator_indexes if i is not None] - if not validator_indexes: + pubkeys = [pk for pk in pubkeys if pk is not None] + if not pubkeys: return - total = len(validator_indexes) - for i, index_batch in enumerate(as_chunks(validator_indexes, self.batch_size)): + total = len(pubkeys) + for i, pubkey_batch in enumerate(as_chunks(pubkeys, self.batch_size)): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) log.debug(f"Updating beacon data for megapool validators [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_async("head", ids=index_batch))["data"] + beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] data = {} for d in beacon_data: v = d["validator"] - data[int(d["index"])] = {"beacon": { - "status": d["status"], - "balance": solidity.to_float(d["balance"], 9), - "effective_balance": solidity.to_float(v["effective_balance"], 9), - "slashed": v["slashed"], - "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), - "activation_epoch": _parse_epoch(v["activation_epoch"]), - "exit_epoch": _parse_epoch(v["exit_epoch"]), - "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), - }} + data[v["pubkey"]] = { + "validator_index": int(d["index"]), + "beacon": { + "status": d["status"], + "balance": solidity.to_float(d["balance"], 9), + 
"effective_balance": solidity.to_float(v["effective_balance"], 9), + "slashed": v["slashed"], + "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_epoch": _parse_epoch(v["activation_epoch"]), + "exit_epoch": _parse_epoch(v["exit_epoch"]), + "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), + }, + } if data: await self.db.megapool_validators.bulk_write( - [UpdateMany({"validator_index": idx}, {"$set": d}) for idx, d in data.items()], + [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], ordered=False ) From 008d0796660824fb2165c954d686e360bfac819e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 00:08:38 +0000 Subject: [PATCH 135/279] minipool_states -> validator_states --- .../minipool_states/minipool_states.py | 148 ------------ .../validator_states/validator_states.py | 211 ++++++++++++++++++ 2 files changed, 211 insertions(+), 148 deletions(-) delete mode 100644 rocketwatch/plugins/minipool_states/minipool_states.py create mode 100644 rocketwatch/plugins/validator_states/validator_states.py diff --git a/rocketwatch/plugins/minipool_states/minipool_states.py b/rocketwatch/plugins/minipool_states/minipool_states.py deleted file mode 100644 index 34f55be1..00000000 --- a/rocketwatch/plugins/minipool_states/minipool_states.py +++ /dev/null @@ -1,148 +0,0 @@ -import logging - -from discord.ext import commands -from discord.ext.commands import hybrid_command, Context -from pymongo import AsyncMongoClient - -from rocketwatch import RocketWatch -from utils.cfg import cfg -from utils.embeds import Embed, el_explorer_url -from utils.readable import render_tree_legacy -from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak - -log = logging.getLogger("beacon_states") -log.setLevel(cfg["log_level"]) - - -class MinipoolStates(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = 
AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") - - @hybrid_command() - async def minipool_states(self, ctx: Context): - """Show minipool counts by beacon chain and contract status""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - # fetch from db - res = await self.db.minipools.find({ - "beacon.status": {"$exists": True} - }).to_list(None) - data = { - "dissolved": 0, - "pending": {}, - "active" : {}, - "exiting": {}, - "exited" : {}, - "withdrawn": {}, - "closed": {} - } - exiting_valis = [] - withdrawn_valis = [] - for minipool in res: - match minipool["beacon"]["status"]: - case "pending_initialized": - if minipool["status"] == "dissolved": - data["dissolved"] += 1 - else: - data["pending"]["initialized"] = data["pending"].get("initialized", 0) + 1 - case "pending_queued": - data["pending"]["queued"] = data["pending"].get("queued", 0) + 1 - case "active_ongoing": - data["active"]["ongoing"] = data["active"].get("ongoing", 0) + 1 - case "active_exiting": - data["exiting"]["voluntarily"] = data["exiting"].get("voluntarily", 0) + 1 - exiting_valis.append(minipool) - case "active_slashed": - data["exiting"]["slashed"] = data["exiting"].get("slashed", 0) + 1 - exiting_valis.append(minipool) - case "exited_unslashed" | "exited_slashed" | "withdrawal_possible": - status_2 = "slashed" if minipool["beacon"]["slashed"] else "voluntarily" - data["exited"][status_2] = data["exited"].get(status_2, 0) + 1 - exiting_valis.append(minipool) - case "withdrawal_done": - status_2 = "slashed" if minipool["beacon"]["slashed"] else "unslashed" - if minipool["execution_balance"] > 0: - data["withdrawn"][status_2] = data["withdrawn"].get(status_2, 0) + 1 - withdrawn_valis.append(minipool) - else: - data["closed"][status_2] = data["closed"].get(status_2, 0) + 1 - case _: - logging.warning(f"Unknown status {minipool['status']}") - - # collapse tree where possible - for status in list(data.keys()): - if isinstance(data[status], dict) and len(data[status]) == 1: - 
sub_status = list(data[status].keys())[0] - data[status] = data[status][sub_status] - - embed = Embed(title="Minipool States", color=0x00ff00) - description = "```\n" - # render dict as a tree-like structure - description += render_tree_legacy(data, "Minipools") - - total_listed_valis = len(exiting_valis) + len(withdrawn_valis) - - if total_listed_valis == 0: - description += "```" - elif total_listed_valis < 24: - description += "\n" - if len(exiting_valis) > 0: - description += "\n--- Exiting Minipools ---\n\n" - valis = sorted([v["validator_index"] for v in exiting_valis]) - description += ", ".join([str(v) for v in valis]) - if len(withdrawn_valis) > 0: - description += "\n--- Withdrawn Minipools ---\n\n" - valis = sorted([v["validator_index"] for v in withdrawn_valis]) - description += ", ".join([str(v) for v in valis]) - description += "```" - else: - description += "```" - - node_operators = [] - for valis in (exiting_valis, withdrawn_valis): - valis_no = {} - # dedupe, add count of validators with matching node operator - for v in valis: - valis_no[v["node_operator"]] = valis_no.get(v["node_operator"], 0) + 1 - # turn into list - valis_no = list(valis_no.items()) - # sort by count - valis_no.sort(key=lambda x: x[1], reverse=True) - node_operators.append(valis_no) - - exiting_node_operators, withdrawn_node_operators = node_operators - max_total_list_length = 16 - - if len(exiting_node_operators) + len(withdrawn_node_operators) <= max_total_list_length: - num_exiting = len(exiting_node_operators) - num_withdrawn = len(withdrawn_node_operators) - elif len(exiting_node_operators) >= len(withdrawn_node_operators): - num_withdrawn = min(len(withdrawn_node_operators), max_total_list_length // 2) - num_exiting = max_total_list_length - num_withdrawn - else: - num_exiting = min(len(exiting_node_operators), max_total_list_length // 2) - num_withdrawn = max_total_list_length - num_exiting - - if num_exiting > 0: - description += "\n**Exiting Node Operators**\n" - 
description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) - if remaining_no := exiting_node_operators[num_exiting:]: - num_remaining_valis = sum([c for _, c in remaining_no]) - description += f", and {len(remaining_no)} more ({num_remaining_valis})" - description += "\n" - if num_withdrawn > 0: - description += "\n**Withdrawn Node Operators**\n" - description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) - if remaining_no := withdrawn_node_operators[num_withdrawn:]: - num_remaining_valis = sum([c for _, c in remaining_no]) - description += f", and {len(remaining_no)} more ({num_remaining_valis})" - description += "\n" - - - embed.description = description - await ctx.send(embed=embed) - - -async def setup(self): - await self.add_cog(MinipoolStates(self)) diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py new file mode 100644 index 00000000..06f04b5f --- /dev/null +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -0,0 +1,211 @@ +import logging + +from discord import Interaction +from discord.ext import commands +from discord.app_commands import command +from pymongo import AsyncMongoClient + +from rocketwatch import RocketWatch +from utils.cfg import cfg +from utils.embeds import Embed, el_explorer_url +from utils.readable import render_tree_legacy +from utils.shared_w3 import w3 +from utils.visibility import is_hidden_weak + +log = logging.getLogger("validator_states") +log.setLevel(cfg["log_level"]) + + +_BEACON_PENDING = {"in_queue": "unassigned", "prestaked": "prestaked", "staking": "staked"} + +def _classify_beacon_validator(beacon, contract_status): + """Classify a validator by beacon status. 
Returns (status, sub_status).""" + match beacon["status"]: + case "pending_initialized": + if contract_status == "dissolved": + return "dissolved", None + else: + return "pending", _BEACON_PENDING[contract_status] + case "pending_queued": + return "pending", "queued" + case "active_ongoing": + return "active", "ongoing" + case "active_exiting": + return "exiting", "voluntarily" + case "active_slashed": + return "exiting", "slashed" + case "exited_unslashed" | "exited_slashed" | "withdrawal_possible": + sub = "slashed" if beacon["slashed"] else "voluntarily" + return "exited", sub + case "withdrawal_done": + sub = "slashed" if beacon["slashed"] else "unslashed" + return "withdrawn", sub + case _: + log.warning(f"Unknown beacon status {beacon['status']}") + return None, None + + +def _empty_state_tree(): + return { + "dissolved": 0, + "pending": {}, + "active": {}, + "exiting": {}, + "exited": {}, + "withdrawn": {}, + "closed": {} + } + +def _classify_collection(docs, done_fn): + """Classify docs into state tree. 
+ + Args: + docs: list of DB documents, with or without beacon data + done_fn: function that takes a doc and returns True if its lifecycle is complete + (used to distinguish withdrawn vs closed for withdrawal_done validators) + """ + data = _empty_state_tree() + exiting_valis = [] + withdrawn_valis = [] + + for doc in docs: + beacon = doc.get("beacon") + contract_status = doc.get("status", "") + + if beacon is None: + sub = _BEACON_PENDING.get(contract_status) + if sub: + data["pending"][sub] = data["pending"].get(sub, 0) + 1 + elif contract_status == "dissolved": + data["dissolved"] += 1 + continue + + category, sub = _classify_beacon_validator(beacon, contract_status) + if category is None: + continue + if category == "withdrawn" and done_fn(doc): + category = "closed" + if category == "dissolved": + data["dissolved"] += 1 + else: + data[category][sub] = data[category].get(sub, 0) + 1 + if category in ("exiting", "exited"): + exiting_valis.append(doc) + elif category == "withdrawn": + withdrawn_valis.append(doc) + + return data, exiting_valis, withdrawn_valis + + +def _collapse_tree(data: dict) -> dict: + collapsed_data = {} + for status in data.keys(): + if isinstance(data[status], dict) and len(data[status]) == 1: + sub_status = list(data[status].keys())[0] + collapsed_data[status] = data[status][sub_status] + else: + collapsed_data[status] = data[status] + return collapsed_data + + +def _get_node_operator(doc): + return doc.get("node_operator", "") + + +class ValidatorStates(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + + @command() + async def validator_states(self, interaction: Interaction): + """Show validator counts by beacon chain and contract status""" + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + + minipools = await self.db.minipools.find( + {"beacon.status": {"$exists": True}}, + {"beacon": 1, "status": 1, "finalized": 1, 
"node_operator": 1, "validator_index": 1} + ).to_list(None) + megapool_vals = await self.db.megapool_validators.find( + {}, {"beacon": 1, "status": 1, "node_operator": 1, "validator_index": 1} + ).to_list(None) + + mp_data, mp_exiting, mp_withdrawn = _classify_collection( + minipools, lambda d: d.get("finalized", False) + ) + mg_data, mg_exiting, mg_withdrawn = _classify_collection( + megapool_vals, lambda d: d.get("status") == "exited" + ) + + tree = { + "minipools": _collapse_tree(mp_data), + "megapools": _collapse_tree(mg_data), + } + + embed = Embed(title="Validator States", color=0x00ff00) + description = "```\n" + description += render_tree_legacy(tree, "Validators") + + exiting_valis = mp_exiting + mg_exiting + withdrawn_valis = mp_withdrawn + mg_withdrawn + total_listed_valis = len(exiting_valis) + len(withdrawn_valis) + + if total_listed_valis == 0: + description += "```" + elif total_listed_valis < 24: + description += "\n" + if exiting_valis: + description += "\n--- Exiting Validators ---\n\n" + valis = sorted([v["validator_index"] for v in exiting_valis]) + description += ", ".join([str(v) for v in valis]) + if withdrawn_valis: + description += "\n--- Withdrawn Validators ---\n\n" + valis = sorted([v["validator_index"] for v in withdrawn_valis]) + description += ", ".join([str(v) for v in valis]) + description += "```" + else: + description += "```" + + node_operators = [] + for valis in (exiting_valis, withdrawn_valis): + valis_no = {} + for v in valis: + no = _get_node_operator(v) + valis_no[no] = valis_no.get(no, 0) + 1 + valis_no = sorted(valis_no.items(), key=lambda x: x[1], reverse=True) + node_operators.append(valis_no) + + exiting_node_operators, withdrawn_node_operators = node_operators + max_total_list_length = 16 + + if len(exiting_node_operators) + len(withdrawn_node_operators) <= max_total_list_length: + num_exiting = len(exiting_node_operators) + num_withdrawn = len(withdrawn_node_operators) + elif len(exiting_node_operators) >= 
len(withdrawn_node_operators): + num_withdrawn = min(len(withdrawn_node_operators), max_total_list_length // 2) + num_exiting = max_total_list_length - num_withdrawn + else: + num_exiting = min(len(exiting_node_operators), max_total_list_length // 2) + num_withdrawn = max_total_list_length - num_exiting + + if num_exiting > 0: + description += "\n**Exiting Node Operators**\n" + description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) + if remaining_no := exiting_node_operators[num_exiting:]: + num_remaining_valis = sum([c for _, c in remaining_no]) + description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += "\n" + if num_withdrawn > 0: + description += "\n**Withdrawn Node Operators**\n" + description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) + if remaining_no := withdrawn_node_operators[num_withdrawn:]: + num_remaining_valis = sum([c for _, c in remaining_no]) + description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += "\n" + + embed.description = description + await interaction.followup.send(embed=embed) + + +async def setup(self): + await self.add_cog(ValidatorStates(self)) From 1c76bf0fcf993627081fff4042d183b4bac2e673 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 00:08:45 +0000 Subject: [PATCH 136/279] basic reward simulation fixes --- rocketwatch/plugins/rewards/rewards.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 63135a53..37eace0f 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -151,9 +151,8 @@ async def simulate_rewards( data_block: int = rewards.data_block reward_start_block = ts_to_block(rewards.start_time) - rpl_min: float = 
solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMinimumPerMinipoolStake", block=data_block)) rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) - actual_borrowed_eth = solidity.to_float(rp.call("rocketNodeStaking.getNodeETHMatched", address, block=data_block)) + actual_borrowed_eth = solidity.to_float(rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block)) actual_rpl_stake = solidity.to_float(rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) inflation_rate: int = rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) @@ -169,9 +168,7 @@ async def simulate_rewards( def node_weight(_stake: float, _borrowed_eth: float) -> float: rpl_value = _stake * rpl_ratio collateral_ratio = (rpl_value / _borrowed_eth) if _borrowed_eth > 0 else 0 - if collateral_ratio < rpl_min: - return 0.0 - elif collateral_ratio <= 0.15: + if collateral_ratio <= 0.15: return 100 * rpl_value else: return (13.6137 + 2 * np.log(100 * collateral_ratio - 13)) * _borrowed_eth From a028c31f6ac172fe5de1c5c6bbe26400153ec7a3 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 00:16:56 +0000 Subject: [PATCH 137/279] fix render bug --- rocketwatch/utils/readable.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index bfdb8996..3687ef84 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -71,10 +71,8 @@ def render_branch(_data: dict[str, dict | int]) -> tuple[list, list, int]: _values = [] count = 0 - for i, (state, sub_data) in enumerate(_data.items()): - if not sub_data: - continue - + _data = {k: v for k, v in _data.items() if v} + for i, (state, sub_data) in enumerate(_data.items()): link = "├" if (i != len(_data) - 1) else "└" _strings.append(f" {link}{state.title()}: ") From 594525ea5bfd27e4f04c8927ea179c3d77d7ef4b Mon Sep 17 00:00:00 
2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 00:23:38 +0000 Subject: [PATCH 138/279] include megapools in bot status --- rocketwatch/plugins/activity/activity.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 97c8d2a7..1cbe365f 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -3,10 +3,10 @@ from cronitor import Monitor from discord import Activity, ActivityType from discord.ext import commands, tasks +from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg -from utils.rocketpool import rp log = logging.getLogger("rich_activity") log.setLevel(cfg["log_level"]) @@ -15,6 +15,7 @@ class RichActivity(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("update-activity", api_key=cfg["other.secrets.cronitor"]) self.task.start() @@ -25,12 +26,18 @@ async def cog_unload(self): async def task(self): self.monitor.ping() log.debug("Updating Discord activity") - - minipool_count = rp.call("rocketMinipoolManager.getActiveMinipoolCount") + + minipool_count = await self.db.minipools.count_documents( + {"beacon.status": "active_ongoing"} + ) + megapool_count = await self.db.megapool_validators.count_documents( + {"beacon.status": "active_ongoing"} + ) + validator_count = minipool_count + megapool_count await self.bot.change_presence( activity=Activity( type=ActivityType.watching, - name=f"{minipool_count:,} minipools" + name=f"{validator_count:,} active validators" ) ) From 777ebd2bc55d7df38c9c955725ff86123ad8efaf Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 08:39:22 +0000 Subject: [PATCH 139/279] update to web3 v7 --- rocketwatch/main.cfg.sample | 2 +- rocketwatch/plugins/apr/apr.py | 
4 +- rocketwatch/plugins/collateral/collateral.py | 6 +- rocketwatch/plugins/dao/dao.py | 2 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 325 +++++++----------- rocketwatch/plugins/debug/debug.py | 2 +- .../plugins/deposit_pool/deposit_pool.py | 4 +- rocketwatch/plugins/event_core/event_core.py | 2 +- rocketwatch/plugins/events/events.json | 17 - rocketwatch/plugins/events/events.py | 60 ++-- rocketwatch/plugins/governance/governance.py | 2 +- rocketwatch/plugins/proposals/proposals.py | 3 +- rocketwatch/plugins/random/random.py | 8 +- .../plugins/transactions/functions.json | 5 - .../plugins/transactions/transactions.py | 3 +- rocketwatch/plugins/tvl/tvl.py | 4 +- .../validator_states/validator_states.py | 6 +- rocketwatch/requirements.txt | 9 +- rocketwatch/utils/dao.py | 10 +- rocketwatch/utils/embeds.py | 10 +- rocketwatch/utils/event_logs.py | 15 +- rocketwatch/utils/liquidity.py | 4 +- rocketwatch/utils/rocketpool.py | 101 +++--- rocketwatch/utils/sea_creatures.py | 6 +- rocketwatch/utils/shared_w3.py | 9 +- 25 files changed, 270 insertions(+), 349 deletions(-) diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample index 8baf0ec1..b6c99b0f 100644 --- a/rocketwatch/main.cfg.sample +++ b/rocketwatch/main.cfg.sample @@ -49,12 +49,12 @@ rocketpool: { rocketStorage: "0x1d8f8f00cfa6758d7bE78336684788Fb0ee0Fa46" rocketSignerRegistry: "0xc1062617d10Ae99E09D941b60746182A87eAB38F" rocketExitArbitrage: "0x2631618408497d27D455aBA9c99A6f61eF305559" + multicall3: "0xcA11bde05977b3631167028862bE2a173976CA11" AirSwap: "0x4572f2554421Bd64Bef1c22c8a81840E8D496BeA" yearnPool: "0x5c0A86A32c129538D62C106Eb8115a8b02358d57" curvePool: "0x447Ddd4960d9fdBF6af9a790560d0AF76795CB08" wstETHToken: "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0" unstETH: "0x889edC2eDab5f40e902b864aD4d7AdE8E412F9B1" - rocketDepositPoolQueue: "0xD95C1B65255Eb69303c0159c656976389F8dA225" ConstellationDirectory: "0x4343743dBc46F67D3340b45286D8cdC13c8575DE" LUSD: 
"0x5f98805A4E8be255a32880FDeC7F6728C6568bA0" BalancerVault: "0xBA12222222228d8Ba445958a75a0704d566BF2C8" diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 9a04370f..228f6dfd 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -54,14 +54,14 @@ async def task(self): # get latest block update from the db latest_db_block = await self.db.reth_apr.find_one(sort=[("block", -1)]) latest_db_block = 0 if latest_db_block is None else latest_db_block["block"] - cursor_block = historical_w3.eth.getBlock("latest")["number"] + cursor_block = historical_w3.eth.get_block("latest")["number"] while True: # get address of rocketNetworkBalances contract at cursor block address = rp.uncached_get_address_by_name("rocketNetworkBalances", block=cursor_block) balance_block = rp.call("rocketNetworkBalances.getBalancesBlock", block=cursor_block, address=address) if balance_block == latest_db_block: break - block_time = w3.eth.getBlock(balance_block)["timestamp"] + block_time = w3.eth.get_block(balance_block)["timestamp"] # abort if the blocktime is older than 120 days if block_time < (datetime.now().timestamp() - 120 * 24 * 60 * 60): break diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index bef07b63..7996f0cb 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -51,13 +51,13 @@ def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]] nodes = rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) for node_batch in as_chunks(nodes, 500): - eb16s += rp.multicall_sync([ + eb16s += rp.multicall([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 16 * 10**18) for node in node_batch ]) - eb8s += rp.multicall_sync([ + eb8s += rp.multicall([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 8 * 10**18) for node in node_batch ]) - rpl_stakes += rp.multicall_sync([ 
+ rpl_stakes += rp.multicall([ node_staking.functions.getNodeStakedRPL(node) for node in node_batch ]) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index a7cd8f8e..ef3b71cd 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -214,7 +214,7 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> else: suggestions = list(range(1, num_proposals + 1))[:-26:-1] - titles: list[str] = rp.multicall_sync([ + titles: list[str] = rp.multicall([ dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions ]) return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index a5546d6a..0b6254dd 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -2,6 +2,7 @@ import time import asyncio from collections import defaultdict +from datetime import timedelta import pymongo from cronitor import Monitor @@ -15,7 +16,7 @@ from utils.cfg import cfg from utils.block_time import ts_to_block from utils.rocketpool import rp -from utils.shared_w3 import bacon +from utils.shared_w3 import w3, bacon from utils.time_debug import timerun, timerun_async from utils.event_logs import get_logs @@ -23,38 +24,34 @@ log = logging.getLogger("db_upkeep_task") log.setLevel(cfg["log_level"]) -FAR_FUTURE_EPOCH = 2 ** 32 +def is_true(v) -> bool: + return v is True -def safe_to_float(_, num): +def safe_to_float(num): try: return solidity.to_float(num) except Exception: return None - -def safe_to_hex(_, b): +def safe_to_hex(b): return f"0x{b.hex()}" if b else None - -def safe_state_to_str(_, state): +def safe_state_to_str(state): try: return solidity.mp_state_to_str(state) except Exception: return None - -def safe_inv(_, num): +def safe_inv(num): try: return 1 / 
solidity.to_float(num) except Exception: return None - def _parse_epoch(value): epoch = int(value) - return epoch if epoch < FAR_FUTURE_EPOCH else None - + return epoch if epoch < 2 ** 32 else None def _derive_validator_status(info): if info[9]: # dissolved @@ -73,8 +70,7 @@ def _derive_validator_status(info): return "staking" return "unknown" - -def _unpack_validator_info(_, info): +def _unpack_validator_info(info): if info is None: return None return { @@ -86,8 +82,7 @@ def _unpack_validator_info(_, info): "exit_balance": solidity.to_float(info[12], 9), # gwei to ETH } - -def _unpack_validator_info_dynamic(_, info): +def _unpack_validator_info_dynamic(info): if info is None: return None return { @@ -99,19 +94,13 @@ def _unpack_validator_info_dynamic(_, info): } -def _group_multicall_results(res): - data = defaultdict(dict) - for (key, field), value in res.items(): - data[key][field] = value - return data - - class DBUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - self.monitor = Monitor("node-task", api_key=cfg["other.secrets.cronitor"]) + self.monitor = Monitor("db-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 250 + self.cooldown = timedelta(minutes=10) self.bot.loop.create_task(self.loop()) async def loop(self): @@ -122,7 +111,7 @@ async def loop(self): self.monitor.ping(state="run", series=p_id) try: log.debug("starting db upkeep task") - # node tasks + # node operator tasks await self.add_untracked_node_operators() await self.add_static_node_operator_data() await self.update_dynamic_node_operator_data() @@ -140,10 +129,10 @@ async def loop(self): log.debug("finished db upkeep task") self.monitor.ping(state="complete", series=p_id) except Exception as err: - await self.bot.report_error(err) self.monitor.ping(state="fail", series=p_id) + await self.bot.report_error(err) finally: - await asyncio.sleep(600) + await 
asyncio.sleep(self.cooldown.total_seconds()) async def check_indexes(self): log.debug("checking indexes") @@ -161,25 +150,36 @@ async def check_indexes(self): await self.db.megapool_validators.create_index("beacon.status") log.debug("indexes checked") - async def _batch_multicall_update(self, collection, query, lambs, label=None): - addresses = await collection.distinct("address", query) - if not addresses: + async def _batch_multicall_update(self, collection, query, lamb, label=None, projection=None): + if projection is not None: + items = await collection.find(query, projection).to_list() + def get_addr(d): return d["address"] + else: + items = await collection.distinct("address", query) + def get_addr(a): return a + + if not items: return - total = len(addresses) - batch_size = self.batch_size // len(lambs) - for i, batch in enumerate(as_chunks(addresses, batch_size)): + total = len(items) + batch_size = self.batch_size // len(lamb(items[0])) + for i, batch in enumerate(as_chunks(items, batch_size)): if label: start = i * batch_size + 1 end = min((i + 1) * batch_size, total) log.debug(f"Processing {label} [{start}, {end}]/{total}") - res = await rp.multicall( - [lamb(a) for a in batch for lamb in lambs], - require_success=False - ) - data = _group_multicall_results(res) + # lamb(item) returns a list of (fn, require_success, transform, field) + expanded = [(get_addr(item), *t) for item in batch for t in lamb(item)] + calls = [(e[1], e[2]) for e in expanded] + results = await rp.multicall_async(calls) + updates = defaultdict(dict) + for e, value in zip(expanded, results): + addr, transform, field = e[0], e[3], e[4] + if transform is not None and value is not None: + value = transform(value) + updates[addr][field] = value await collection.bulk_write( - [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], + [UpdateOne({"address": addr}, {"$set": d}) for addr, d in updates.items()], ordered=False ) @@ -197,24 +197,22 @@ async def 
add_untracked_node_operators(self): return data = {} for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data |= await rp.multicall([ - rp.build_call(nm, "getNodeAt", i, key=i) - for i in index_batch - ]) - await self.db.node_operators.insert_many([{"_id": i, "address": a} for i, a in data.items()]) + results = await rp.multicall_async([nm.functions.getNodeAt(i) for i in index_batch]) + data |= dict(zip(index_batch, results)) + await self.db.node_operators.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()]) @timerun_async async def add_static_node_operator_data(self): df = rp.get_contract_by_name("rocketNodeDistributorFactory") mf = rp.get_contract_by_name("rocketMegapoolFactory") - lambs = [ - lambda a: rp.build_call(df, "getProxyAddress", a, key=(a, "fee_distributor.address")), - lambda a: rp.build_call(mf, "getExpectedAddress", a, key=(a, "megapool.address")), + def get_calls(a): return [ + (df.functions.getProxyAddress(a), True, w3.to_checksum_address, "fee_distributor.address"), + (mf.functions.getExpectedAddress(a), True, w3.to_checksum_address, "megapool.address"), ] await self._batch_multicall_update( self.db.node_operators, {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, - lambs + get_calls ) @timerun_async @@ -225,111 +223,51 @@ async def update_dynamic_node_operator_data(self): mm = rp.get_contract_by_name("rocketMinipoolManager") ns = rp.get_contract_by_name("rocketNodeStaking") mc = rp.get_contract_by_name("multicall3") - lambs = [ - lambda n: rp.build_call(nm, "getNodeWithdrawalAddress", n["address"], - key=(n["address"], "withdrawal_address")), - lambda n: rp.build_call(nm, "getNodeTimezoneLocation", n["address"], - key=(n["address"], "timezone_location")), - lambda n: rp.build_call(nm, "getSmoothingPoolRegistrationState", n["address"], - key=(n["address"], "smoothing_pool_registration")), - lambda n: rp.build_call(nm, 
"getAverageNodeFee", n["address"], - key=(n["address"], "average_node_fee"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeETHCollateralisationRatio", n["address"], - key=(n["address"], "effective_node_share"), transform=safe_inv), - lambda n: rp.build_call(mm, "getNodeStakingMinipoolCount", n["address"], - key=(n["address"], "staking_minipool_count")), - lambda n: rp.build_call(nd, "getNodeDepositCredit", n["address"], - key=(n["address"], "node_credit"), transform=safe_to_float), - lambda n: rp.build_call(nd, "getNodeEthBalance", n["address"], - key=(n["address"], "node_eth_balance"), transform=safe_to_float), - lambda n: rp.build_call(nm, "getFeeDistributorInitialised", n["address"], - key=(n["address"], "fee_distributor.initialized")), - lambda n: rp.build_call(mc, "getEthBalance", n["fee_distributor"]["address"], - key=(n["address"], "fee_distributor.eth_balance"), transform=safe_to_float), - lambda n: rp.build_call(mf, "getMegapoolDeployed", n["address"], - key=(n["address"], "megapool.deployed")), - lambda n: rp.build_call(mc, "getEthBalance", n["megapool"]["address"], - key=(n["address"], "megapool.eth_balance"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeStakedRPL", n["address"], - key=(n["address"], "rpl.total_stake"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeLegacyStakedRPL", n["address"], - key=(n["address"], "rpl.legacy_stake"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeMegapoolStakedRPL", n["address"], - key=(n["address"], "rpl.megapool_stake"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeLockedRPL", n["address"], - key=(n["address"], "rpl.locked"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeUnstakingRPL", n["address"], - key=(n["address"], "rpl.unstaking"), transform=safe_to_float), - lambda n: rp.build_call(ns, "getNodeRPLStakedTime", n["address"], - key=(n["address"], "rpl.last_stake_time")), - lambda n: rp.build_call(ns, 
"getNodeLastUnstakeTime", n["address"], - key=(n["address"], "rpl.last_unstake_time")), + def get_calls(n): return [ + (nm.functions.getNodeWithdrawalAddress(n["address"]), True, w3.to_checksum_address, "withdrawal_address"), + (nm.functions.getNodeTimezoneLocation(n["address"]), True, None, "timezone_location"), + (nm.functions.getSmoothingPoolRegistrationState(n["address"]), True, None, "smoothing_pool_registration"), + (nm.functions.getAverageNodeFee(n["address"]), True, safe_to_float, "average_node_fee"), + (ns.functions.getNodeETHCollateralisationRatio(n["address"]), True, safe_inv, "effective_node_share"), + (mm.functions.getNodeStakingMinipoolCount(n["address"]), True, None, "staking_minipool_count"), + (nd.functions.getNodeDepositCredit(n["address"]), True, safe_to_float, "node_credit"), + (nd.functions.getNodeEthBalance(n["address"]), True, safe_to_float, "node_eth_balance"), + (nm.functions.getFeeDistributorInitialised(n["address"]), True, None, "fee_distributor.initialized"), + (mc.functions.getEthBalance(n["fee_distributor"]["address"]), True, safe_to_float, "fee_distributor.eth_balance"), + (mf.functions.getMegapoolDeployed(n["address"]), True, None, "megapool.deployed"), + (mc.functions.getEthBalance(n["megapool"]["address"]), True, safe_to_float, "megapool.eth_balance"), + (ns.functions.getNodeStakedRPL(n["address"]), True, safe_to_float, "rpl.total_stake"), + (ns.functions.getNodeLegacyStakedRPL(n["address"]), True, safe_to_float, "rpl.legacy_stake"), + (ns.functions.getNodeMegapoolStakedRPL(n["address"]), True, safe_to_float, "rpl.megapool_stake"), + (ns.functions.getNodeLockedRPL(n["address"]), True, safe_to_float, "rpl.locked"), + (ns.functions.getNodeUnstakingRPL(n["address"]), True, safe_to_float, "rpl.unstaking"), + (ns.functions.getNodeRPLStakedTime(n["address"]), True, None, "rpl.last_stake_time"), + (ns.functions.getNodeLastUnstakeTime(n["address"]), True, None, "rpl.last_unstake_time"), ] - nodes = await self.db.node_operators.find( - {}, 
{"address": 1, "fee_distributor.address": 1, "megapool.address": 1} - ).to_list() - total = len(nodes) - batch_size = self.batch_size // len(lambs) - for i, node_batch in enumerate(as_chunks(nodes, batch_size)): - start = i * batch_size + 1 - end = min((i + 1) * batch_size, total) - log.debug(f"Processing node operators [{start}, {end}]/{total}") - res = await rp.multicall( - [lamb(n) for n in node_batch for lamb in lambs], - require_success=False - ) - data = _group_multicall_results(res) - await self.db.node_operators.bulk_write( - [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], - ordered=False - ) + await self._batch_multicall_update( + self.db.node_operators, {}, get_calls, label="node operators", + projection={"address": 1, "fee_distributor.address": 1, "megapool.address": 1} + ) @timerun_async async def update_dynamic_megapool_data(self): - mp = rp.assemble_contract("rocketMegapoolDelegate") - lambs = [ - lambda n: rp.build_call(mp, "getValidatorCount", - target=n["megapool"]["address"], key=(n["address"], "megapool.validator_count")), - lambda n: rp.build_call(mp, "getActiveValidatorCount", - target=n["megapool"]["address"], key=(n["address"], "megapool.active_validator_count")), - lambda n: rp.build_call(mp, "getExitingValidatorCount", - target=n["megapool"]["address"], key=(n["address"], "megapool.exiting_validator_count")), - lambda n: rp.build_call(mp, "getLockedValidatorCount", - target=n["megapool"]["address"], key=(n["address"], "megapool.locked_validator_count")), - lambda n: rp.build_call(mp, "getNodeBond", - target=n["megapool"]["address"], key=(n["address"], "megapool.node_bond"), transform=safe_to_float), - lambda n: rp.build_call(mp, "getUserCapital", - target=n["megapool"]["address"], key=(n["address"], "megapool.user_capital"), transform=safe_to_float), - lambda n: rp.build_call(mp, "getDebt", - target=n["megapool"]["address"], key=(n["address"], "megapool.debt"), transform=safe_to_float), - lambda n: rp.build_call(mp, 
"getRefundValue", - target=n["megapool"]["address"], key=(n["address"], "megapool.refund_value"), transform=safe_to_float), - lambda n: rp.build_call(mp, "getPendingRewards", - target=n["megapool"]["address"], key=(n["address"], "megapool.pending_rewards"), transform=safe_to_float), - lambda n: rp.build_call(mp, "getLastDistributionTime", - target=n["megapool"]["address"], key=(n["address"], "megapool.last_distribution_time")), + def mp_at(addr): return rp.assemble_contract("rocketMegapoolDelegate", address=addr) + def lamb(n): return [ + (mp_at(n["megapool"]["address"]).functions.getValidatorCount(), True, None, "megapool.validator_count"), + (mp_at(n["megapool"]["address"]).functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), + (mp_at(n["megapool"]["address"]).functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), + (mp_at(n["megapool"]["address"]).functions.getDebt(), True, safe_to_float, "megapool.debt"), + (mp_at(n["megapool"]["address"]).functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), + (mp_at(n["megapool"]["address"]).functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), + (mp_at(n["megapool"]["address"]).functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), ] - nodes = await self.db.node_operators.find( - {"megapool.deployed": True}, {"address": 1, "megapool.address": 1} - ).to_list() - if not nodes: - return - - total = len(nodes) - batch_size = self.batch_size // len(lambs) - for i, node_batch in enumerate(as_chunks(nodes, batch_size)): - start = i * batch_size + 1 - end = min((i + 
1) * batch_size, total) - log.debug(f"Processing megapools [{start}, {end}]/{total}") - res = await rp.multicall( - [lamb(n) for n in node_batch for lamb in lambs], - require_success=False - ) - data = _group_multicall_results(res) - await self.db.node_operators.bulk_write( - [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], - ordered=False - ) + await self._batch_multicall_update( + self.db.node_operators, {"megapool.deployed": True}, lamb, label="megapools", + projection={"address": 1, "megapool.address": 1} + ) # -- Minipool tasks -- @@ -345,24 +283,20 @@ async def add_untracked_minipools(self): return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - data = await rp.multicall([ - rp.build_call(mm, "getMinipoolAt", i, key=i) - for i in index_batch - ]) - await self.db.minipools.insert_many([{"_id": i, "address": a} for i, a in data.items()]) + results = await rp.multicall_async([mm.functions.getMinipoolAt(i) for i in index_batch]) + await self.db.minipools.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)]) @timerun_async async def add_static_minipool_data(self): - m = rp.assemble_contract("rocketMinipool") mm = rp.get_contract_by_name("rocketMinipoolManager") - lambs = [ - lambda a: rp.build_call(m, "getNodeAddress", target=a, key=(a, "node_operator")), - lambda a: rp.build_call(mm, "getMinipoolPubkey", a, key=(a, "pubkey"), transform=safe_to_hex), + def lamb(a): return [ + (rp.assemble_contract("rocketMinipool", address=a).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), + (mm.functions.getMinipoolPubkey(a), True, safe_to_hex, "pubkey"), ] await self._batch_multicall_update( self.db.minipools, {"node_operator": {"$exists": False}}, - lambs + lamb ) @timerun @@ -418,23 +352,24 @@ async def add_static_minipool_deposit_data(self): @timerun_async 
async def update_dynamic_minipool_data(self): - m = rp.assemble_contract("rocketMinipool") mc = rp.get_contract_by_name("multicall3") - lambs = [ - lambda a: rp.build_call(m, "getStatus", target=a, key=(a, "status"), transform=safe_state_to_str), - lambda a: rp.build_call(m, "getStatusTime", target=a, key=(a, "status_time")), - lambda a: rp.build_call(m, "getVacant", target=a, key=(a, "vacant")), - lambda a: rp.build_call(m, "getFinalised", target=a, key=(a, "finalized")), - lambda a: rp.build_call(m, "getNodeDepositBalance", target=a, key=(a, "node_deposit_balance"), transform=safe_to_float), - lambda a: rp.build_call(m, "getNodeRefundBalance", target=a, key=(a, "node_refund_balance"), transform=safe_to_float), - lambda a: rp.build_call(m, "getPreMigrationBalance", target=a, key=(a, "pre_migration_balance"), transform=safe_to_float), - lambda a: rp.build_call(m, "getNodeFee", target=a, key=(a, "node_fee"), transform=safe_to_float), - lambda a: rp.build_call(m, "getEffectiveDelegate", target=a, key=(a, "effective_delegate")), - lambda a: rp.build_call(m, "getUseLatestDelegate", target=a, key=(a, "use_latest_delegate")), - lambda a: rp.build_call(m, "getUserDistributed", target=a, key=(a, "user_distributed")), - lambda a: rp.build_call(mc, "getEthBalance", a, key=(a, "execution_balance"), transform=safe_to_float), - ] - await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, lambs, label="minipools") + def get_calls(addr): + minipool_contract = rp.assemble_contract("rocketMinipool", address=addr) + return [ + (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), + (minipool_contract.functions.getStatusTime(), True, None, "status_time"), + (minipool_contract.functions.getVacant(), False, is_true, "vacant"), + (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), + (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), + 
(minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), + (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), + (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), + (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), + (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), + (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), + (mc.functions.getEthBalance(addr), True, safe_to_float, "execution_balance"), + ] + await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, get_calls, label="minipools") @timerun async def update_dynamic_minipool_beacon_data(self): @@ -475,7 +410,6 @@ async def update_dynamic_minipool_beacon_data(self): @timerun_async async def add_untracked_megapool_validators(self): - mp = rp.assemble_contract("rocketMegapoolDelegate") # get deployed megapools with their on-chain validator count nodes = await self.db.node_operators.find( {"megapool.deployed": True, "megapool.validator_count": {"$gt": 0}}, @@ -494,26 +428,29 @@ async def add_untracked_megapool_validators(self): new_ids = list(range(db_count, on_chain_count)) log.debug(f"Adding {len(new_ids)} new validators for megapool {megapool_addr}") + megapool_contract = rp.assemble_contract("rocketMegapoolDelegate", address=megapool_addr) for id_batch in as_chunks(new_ids, self.batch_size // 2): - # fetch pubkey + validator info for each new validator - calls = [] - for vid in id_batch: - calls.append(rp.build_call(mp, "getValidatorPubkey", vid, - target=megapool_addr, key=(vid, "pubkey"), transform=safe_to_hex)) - calls.append(rp.build_call(mp, "getValidatorInfo", vid, - target=megapool_addr, key=(vid, "info"), transform=_unpack_validator_info)) - res = await rp.multicall(calls, require_success=False) - grouped = 
_group_multicall_results(res) + fns = [ + fn + for vid in id_batch + for fn in [ + megapool_contract.functions.getValidatorPubkey(vid), + megapool_contract.functions.getValidatorInfo(vid), + ] + ] + results = await rp.multicall_async(fns) docs = [] - for vid, data in grouped.items(): + for i, vid in enumerate(id_batch): + pubkey_raw = results[i * 2] + info_raw = results[i * 2 + 1] doc = { "megapool": megapool_addr, "node_operator": node["address"], "validator_id": vid, - "pubkey": data.get("pubkey"), + "pubkey": safe_to_hex(pubkey_raw) if pubkey_raw is not None else None, } - info = data.get("info") + info = _unpack_validator_info(info_raw) if info: doc.update(info) docs.append(doc) @@ -522,7 +459,6 @@ async def add_untracked_megapool_validators(self): @timerun_async async def update_dynamic_megapool_validator_data(self): - mp = rp.assemble_contract("rocketMegapoolDelegate") validators = await self.db.megapool_validators.find( {"status": {"$nin": ["exited", "dissolved"]}}, {"megapool": 1, "validator_id": 1} @@ -535,15 +471,14 @@ async def update_dynamic_megapool_validator_data(self): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) log.debug(f"Processing megapool validators [{start}, {end}]/{total}") - calls = [ - rp.build_call(mp, "getValidatorInfo", v["validator_id"], - target=v["megapool"], key=j, transform=_unpack_validator_info_dynamic) - for j, v in enumerate(batch) + fns = [ + rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"]).functions.getValidatorInfo(v["validator_id"]) + for v in batch ] - res = await rp.multicall(calls, require_success=False) + results = await rp.multicall_async(fns) ops = [] - for j, v in enumerate(batch): - info = res.get(j) + for v, info_raw in zip(batch, results): + info = _unpack_validator_info_dynamic(info_raw) if info_raw is not None else None if info is not None: ops.append(UpdateOne({"_id": v["_id"]}, {"$set": info})) if ops: diff --git a/rocketwatch/plugins/debug/debug.py 
b/rocketwatch/plugins/debug/debug.py index 8954e6bd..33b81977 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -150,7 +150,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): Try to return the revert reason of a transaction. """ await interaction.response.defer(ephemeral=True) - transaction_receipt = w3.eth.getTransaction(tnx_hash) + transaction_receipt = w3.eth.get_transaction(tnx_hash) if revert_reason := rp.get_revert_reason(transaction_receipt): await interaction.followup.send(content=f"```Revert reason: {revert_reason}```") else: diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 09163105..38312977 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -24,7 +24,7 @@ def __init__(self, bot: RocketWatch): @staticmethod def get_deposit_pool_stats() -> Embed: - balance_raw, max_size_raw, max_amount_raw = rp.multicall_sync([ + balance_raw, max_size_raw, max_amount_raw = rp.multicall([ rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), rp.get_contract_by_name("rocketDepositPool").functions.getMaximumDepositAmount(), @@ -81,7 +81,7 @@ def get_deposit_pool_stats() -> Embed: @staticmethod def get_contract_collateral_stats() -> Embed: - exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = rp.multicall_sync([ + exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = rp.multicall([ rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), rp.get_contract_by_name("rocketTokenRETH").functions.getCollateralRate(), diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index af8af6e8..88d830f0 100644 
--- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -61,8 +61,8 @@ async def task(self) -> None: await self.on_success() self.monitor.ping(state="complete", series=p_id) except Exception as error: - await self.on_error(error) self.monitor.ping(state="fail", series=p_id) + await self.on_error(error) @task.before_loop async def before_loop(self) -> None: diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index 9b41f974..568423be 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -234,23 +234,6 @@ } ] }, - { - "contract_name": "rocketMinipoolBondReducer", - "events": [ - { - "event_name": "CancelReductionVoted", - "name": "minipool_vote_against_bond_reduction_event" - }, - { - "event_name": "ReductionCancelled", - "name": "minipool_bond_reduction_cancelled_event" - }, - { - "event_name": "BeginBondReduction", - "name": "minipool_bond_reduction_started_event" - } - ] - }, { "contract_name": "rocketDAOProtocol", "events": [ diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 9f6ea4ca..0afbecb9 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -9,10 +9,9 @@ from discord.app_commands import command, guilds from eth_typing.evm import ChecksumAddress, BlockNumber from hexbytes import HexBytes -from web3._utils.filters import Filter from web3.datastructures import MutableAttributeDict as aDict -from web3.exceptions import ABIEventFunctionNotFound -from web3.types import LogReceipt, EventData, FilterParams +from web3.logs import DISCARD +from web3.types import LogReceipt, EventData from rocketwatch import RocketWatch from utils import solidity @@ -29,7 +28,7 @@ log.setLevel(cfg["log_level"]) -PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Filter] +PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], 
list[LogReceipt | EventData]] class Events(EventPlugin): def __init__(self, bot: RocketWatch): @@ -63,8 +62,10 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: event_name = event["event_name"] try: log.info(f"Adding filter for {contract_name}.{event_name}") - topic = contract.events[event_name].build_filter().topics[0] - except ABIEventFunctionNotFound as e: + event_abi = contract.events[event_name].abi + input_types = ','.join(i['type'] for i in event_abi['inputs']) + topic = w3.keccak(text=f"{event_name}({input_types})").hex() + except Exception as e: log.exception(e) log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") continue @@ -74,20 +75,19 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: topic_map[topic] = event_name if addresses: - def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> Filter: - filter_params: FilterParams = { + def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[LogReceipt]: + return w3.eth.get_logs({ "address" : list(addresses), "topics" : [list(aggregated_topics)], "fromBlock": _from, "toBlock" : _to - } - return w3.eth.filter(filter_params) + }) partial_filters.append(build_direct_filter) # generate filters for global events for group in config["global"]: try: - contract = rp.assemble_contract(name=group["contract_name"]) + contract = rp.get_contract_by_name(name=group["contract_name"]) except Exception as e: log.warning(f"Failed to get contract {group['contract_name']}: {e}") continue @@ -96,12 +96,17 @@ def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"] event_map[event["event_name"]] = event["name"] def super_builder(_contract, _event) -> PartialFilter: # this is needed to pin nonlocal variables - def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> Filter: - return _contract.events[_event["event_name"]].createFilter( - 
fromBlock=_from, - toBlock=_to, - argument_filters=_event.get("filter", {}) - ) + def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[EventData]: + event_cls = _contract.events[_event["event_name"]] + event_abi = event_cls.abi + input_types = ','.join(i['type'] for i in event_abi['inputs']) + topic0 = w3.keccak(text=f"{_event['event_name']}({input_types})").hex() + raw_logs = w3.eth.get_logs({ + "topics" : [topic0], + "fromBlock": _from, + "toBlock" : _to, + }) + return [event_cls().process_log(raw_log) for raw_log in raw_logs] return build_topic_filter partial_filters.append(super_builder(contract, event)) @@ -126,7 +131,7 @@ async def trigger_event( } event_obj = aDict({ "event": event, - "transactionHash": aDict({"hex": lambda: '0x0000000000000000000000000000000000000000'}), + "transactionHash": aDict({"hex": lambda: '0' * 64}), "blockNumber": block_number, "args": aDict(default_args | json.loads(json_args)) }) @@ -153,7 +158,8 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): # get direct events for event_log in logs: - if ("topics" in event_log) and (event_log["topics"][0].hex() in self.topic_map): + topics = event_log.get("topics", []) + if topics and (topics[0].hex() in self.topic_map): filtered_events.append(event_log) # get global events @@ -164,7 +170,7 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): contract = rp.assemble_contract(name=group["contract_name"]) for event in group["events"]: event = contract.events[event["event_name"]]() - rich_logs = event.process_receipt(receipt) + rich_logs = event.process_receipt(receipt, errors=DISCARD) filtered_events.extend(rich_logs) responses, _ = self.process_events(filtered_events) @@ -183,7 +189,7 @@ def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> lis events = [] for pf in self._partial_filters: - events.extend(pf(from_block, to_block).get_all_entries()) + events.extend(pf(from_block, to_block)) 
messages, contract_upgrade_block = self.process_events(events) if not contract_upgrade_block: @@ -228,7 +234,7 @@ def hash_args(_args: aDict) -> None: # default event path contract = rp.get_contract_by_address(event.address) contract_event = self.topic_map[event.topics[0].hex()] - topics = [w3.toHex(t) for t in event.topics] + topics = [w3.to_hex(t) for t in event.topics] _event = aDict(contract.events[contract_event]().process_log(event)) _event.topics = topics _event.args = aDict(_event.args) @@ -323,7 +329,7 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: if full_event_name == "unstETH.WithdrawalRequested": contract = rp.get_contract_by_address(event["address"]) - _event = aDict(contract.events[event_name]().processLog(event)) + _event = aDict(contract.events[event_name]().process_log(event)) # sum up the amount of stETH withdrawn in this transaction if amount := tx_aggregates.get(full_event_name, 0): events.remove(event) @@ -336,8 +342,8 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: if prev_event := tx_aggregates.get(full_event_name, None): # only keep largest rETH transfer contract = rp.get_contract_by_address(event["address"]) - _event = aDict(contract.events[event_name]().processLog(event)) - _prev_event = aDict(contract.events[event_name]().processLog(event)) + _event = aDict(contract.events[event_name]().process_log(event)) + _prev_event = aDict(contract.events[event_name]().process_log(event)) if _prev_event["args"]["value"] > _event["args"]["value"]: events.remove(event) event = prev_event @@ -471,7 +477,7 @@ def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: match args.types[i]: case 0: # SettingType.UINT256 - value = w3.toInt(value_raw) + value = w3.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) @@ -585,7 +591,7 @@ def share_repr(percentage: float) -> str: args.caller = receipt["from"] # add transaction hash and block number to args - args.transactionHash 
= event.transactionHash.hex() + args.transactionHash = "0x" + event.transactionHash.hex() args.blockNumber = event.blockNumber # add proposal message manually if the event contains a proposal diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index d4c86132..5ebce7c1 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -42,7 +42,7 @@ def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: to_block = ts_to_block(proposal.created) + 1 log.info(f"Looking for proposal {proposal} in [{from_block},{to_block}]") - for receipt in dao.proposal_contract.events.ProposalAdded().get_logs(fromBlock=from_block, toBlock=to_block): + for receipt in dao.proposal_contract.events.ProposalAdded().get_logs(from_block=from_block, to_block=to_block): log.info(f"Found receipt {receipt}") if receipt.args.proposalID == proposal.id: return receipt.transactionHash.hex() diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 1afc8716..0c749251 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -117,6 +117,7 @@ def __init__(self, bot: RocketWatch): self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("proposals-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 100 + self.cooldown = timedelta(minutes=5) self.bot.loop.create_task(self.loop()) async def loop(self): @@ -135,7 +136,7 @@ async def loop(self): await self.bot.report_error(err) self.monitor.ping(state="fail", series=p_id) finally: - await asyncio.sleep(300) + await asyncio.sleep(self.cooldown.total_seconds()) async def check_indexes(self): await self.bot.wait_until_ready() diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 526e5bee..7c92c30e 100644 --- a/rocketwatch/plugins/random/random.py +++ 
b/rocketwatch/plugins/random/random.py @@ -287,10 +287,8 @@ async def odao_challenges(self, ctx: Context): await ctx.defer(ephemeral=is_hidden_weak(ctx)) c = rp.get_contract_by_name("rocketDAONodeTrustedActions") # get challenges made - events = c.events["ActionChallengeMade"].createFilter( - fromBlock=w3.eth.get_block("latest").number - 7 * 24 * 60 * 60 // 12) - # get all events - events = events.get_all_entries() + events = list(c.events["ActionChallengeMade"].get_logs( + from_block=w3.eth.get_block("latest").number - 7 * 24 * 60 * 60 // 12)) # remove all events of nodes that aren't challenged anymore for event in events: if not rp.call("rocketDAONodeTrusted.getMemberIsChallenged", event.args.nodeChallengedAddress): @@ -298,7 +296,7 @@ async def odao_challenges(self, ctx: Context): # sort by block number events.sort(key=lambda x: x.blockNumber) if not events: - await ctx.send("no active challenges found") + await ctx.send("No active challenges found") return e = Embed(title="Active oDAO Challenges") e.description = "" diff --git a/rocketwatch/plugins/transactions/functions.json b/rocketwatch/plugins/transactions/functions.json index c82eea82..94a805df 100644 --- a/rocketwatch/plugins/transactions/functions.json +++ b/rocketwatch/plugins/transactions/functions.json @@ -52,11 +52,6 @@ "deposit": "minipool_failed_deposit", "depositWithCredit": "minipool_failed_deposit" }, - "rocketDepositPoolQueue": { - "clearHalfQueue": "deposit_pool_queue_clear_partial", - "clearQueue": "deposit_pool_queue_clear_full", - "clearQueueUpTo": "deposit_pool_queue_clear_partial" - }, "rocketUpgradeOneDotOne": { "execute": "redstone_upgrade_triggered" }, diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 185042d8..0a2c704a 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -3,7 +3,6 @@ import warnings import web3.exceptions -import humanize from 
datetime import timedelta from discord import Interaction from discord.app_commands import command, guilds @@ -192,7 +191,7 @@ def share_repr(percentage: float) -> str: match args.types[i]: case 0: # SettingType.UINT256 - value = w3.toInt(value_raw) + value = w3.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 7091163d..20387d75 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -267,10 +267,10 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Extra Collateral: This is ETH stored in the rETH contract from Minipools that have been withdrawn from. # This value has a cap - read the above comment for more information about that. data["Total ETH Locked"]["rETH Collateral"]["Extra Collateral"]["_val"] = solidity.to_float( - w3.eth.getBalance(rp.get_address_by_name("rocketTokenRETH"))) + w3.eth.get_balance(rp.get_address_by_name("rocketTokenRETH"))) # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. 
- smoothie_balance = solidity.to_float(w3.eth.getBalance(rp.get_address_by_name("rocketSmoothingPool"))) + smoothie_balance = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) tmp = await (await self.db.node_operators.aggregate([ { '$match': { diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 06f04b5f..0cfc1713 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -108,10 +108,6 @@ def _collapse_tree(data: dict) -> dict: return collapsed_data -def _get_node_operator(doc): - return doc.get("node_operator", "") - - class ValidatorStates(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot @@ -170,7 +166,7 @@ async def validator_states(self, interaction: Interaction): for valis in (exiting_valis, withdrawn_valis): valis_no = {} for v in valis: - no = _get_node_operator(v) + no = v["node_operator"] valis_no[no] = valis_no.get(no, 0) + 1 valis_no = sorted(valis_no.items(), key=lambda x: x[1], reverse=True) node_operators.append(valis_no) diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index c2efba78..fefd1d24 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -1,6 +1,6 @@ psutil==5.9.8 python_i18n==0.3.9 -web3==5.31.4 +web3>=7.0.0,<8.0.0 humanize==4.6.0 termplotlib==0.3.9 cachetools==5.3.3 @@ -17,7 +17,6 @@ seaborn==0.12.2 etherscan_labels @ git+https://github.com/InvisibleSymbol/etherscan-labels@7eb617d715a4dda0eabdd858106a526a3abd3394 cronitor==4.6.0 retry-async==0.1.4 -multicall==0.11.0 dice==3.1.2 regex==2023.8.8 tiktoken==0.5.2 @@ -28,8 +27,8 @@ pillow==11.1.0 aiohttp==3.11.12 numpy==1.26.4 beautifulsoup4==4.13.3 -eth-typing==2.2.1 -hexbytes==0.3.1 -eth-utils==1.10.0 +eth-typing==5.2.1 +hexbytes==1.3.1 +eth-utils==5.3.1 tabulate==0.9.0 anyascii==0.3.3 diff --git a/rocketwatch/utils/dao.py 
b/rocketwatch/utils/dao.py index 8eb1d37b..fe66ed39 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -108,12 +108,12 @@ class Proposal(DAO.Proposal): def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_dao_names = rp.multicall_sync([ + proposal_dao_names = rp.multicall([ self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) relevant_proposals = [(i+1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] - proposal_states = rp.multicall_sync([ + proposal_states = rp.multicall([ self.proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals ]) @@ -125,7 +125,7 @@ def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, end, expires, - votes_for_raw, votes_against_raw, votes_required_raw) = rp.multicall_sync([ + votes_for_raw, votes_against_raw, votes_required_raw) = rp.multicall([ self.proposal_contract.functions.getProposer(proposal_id), self.proposal_contract.functions.getMessage(proposal_id), self.proposal_contract.functions.getPayload(proposal_id), @@ -213,7 +213,7 @@ def votes_total(self): def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_states = rp.multicall_sync([ + proposal_states = rp.multicall([ self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) @@ -227,7 +227,7 @@ def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, phase1_end, phase2_end, expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, - vp_required_raw, veto_quorum_raw) = 
rp.multicall_sync([ + vp_required_raw, veto_quorum_raw) = rp.multicall([ self.proposal_contract.functions.getProposer(proposal_id), self.proposal_contract.functions.getMessage(proposal_id), self.proposal_contract.functions.getPayload(proposal_id), diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 8f20c839..b8aed390 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -97,7 +97,7 @@ def el_explorer_url( name_fmt: Optional[Callable[[str], str]] = None, block="latest" ): - if w3.isAddress(target): + if w3.is_address(target): # sanitize address target = w3.to_checksum_address(target) url = f"{cfg['execution_layer.explorer']}/address/{target}" @@ -180,7 +180,7 @@ def el_explorer_url( else: name = f"{discord.utils.remove_markdown(n, ignore_links=False)}*" else: - # transaction_hash + # transaction hash url = f"{cfg['execution_layer.explorer']}/tx/{target}" if not name: @@ -215,7 +215,7 @@ def prepare_args(args): if str(arg_value).startswith("0x"): prefix = "" - if w3.isAddress(arg_value): + if w3.is_address(arg_value): # get rocketpool related holdings value for this address address = w3.to_checksum_address(arg_value) prefix = get_sea_creature_for_address(address) @@ -227,7 +227,7 @@ def prepare_args(args): args[arg_key] = f"[ORDER](https://explorer.cow.fi/orders/{arg_value})" else: args[arg_key] = el_explorer_url(arg_value, prefix=prefix) - args[f'{arg_key}_clean'] = el_explorer_url(arg_value) + args[f"{arg_key}_clean"] = el_explorer_url(arg_value) if len(arg_value) == 66: args[f'{arg_key}_small'] = el_explorer_url(arg_value, name="[tnx]") if "from" in args: @@ -468,7 +468,7 @@ def assemble(args) -> Embed: # show block number el_explorer = cfg["execution_layer.explorer"] - if "blockNumber" in args: + if "block_number" in args: e.add_field(name="Block Number", value=f"[{args.blockNumber}]({el_explorer}/block/{args.blockNumber})") diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 
f72176ba..0172e44e 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -1,8 +1,9 @@ import logging from typing import Optional, Any -from eth_typing import BlockNumber -from web3.contract import ContractEvent, LogReceipt +from eth_typing import BlockNumber +from web3.contract.contract import ContractEvent +from web3.types import LogReceipt from utils.cfg import cfg @@ -11,7 +12,7 @@ def get_logs( - event: ContractEvent, + event: ContractEvent, from_block: BlockNumber, to_block: BlockNumber, arg_filters: Optional[dict[str, Any]] = None @@ -28,11 +29,11 @@ def get_logs( logs = [] while from_block <= end_block: - logs += event.create_filter( - fromBlock=from_block, - toBlock=min(to_block, end_block), + logs += event.get_logs( + from_block=from_block, + to_block=min(to_block, end_block), argument_filters=arg_filters - ).get_all_entries() + ) from_block = to_block + 1 to_block = from_block + chunk_size diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 45ea1c14..0dd38f22 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -696,7 +696,7 @@ def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: return word_position, bit_position def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: - results = rp.multicall_sync([self.contract.functions.ticks(tick) for tick in ticks]) + results = rp.multicall([self.contract.functions.ticks(tick) for tick in ticks]) return dict(zip(ticks, [r[1] for r in results])) def get_initialized_ticks(self, current_tick: int) -> list[int]: @@ -704,7 +704,7 @@ def get_initialized_ticks(self, current_tick: int) -> list[int]: active_word, b = self.tick_to_word_and_bit(current_tick) word_range = list(range(active_word - 5, active_word + 5)) - bitmaps = rp.multicall_sync([ + bitmaps = rp.multicall([ self.contract.functions.tickBitmap(word) for word in word_range ]) diff --git a/rocketwatch/utils/rocketpool.py 
b/rocketwatch/utils/rocketpool.py index fd9949e6..6d14dd81 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -1,20 +1,19 @@ +import eth_abi import logging import os from pathlib import Path +from typing import Any from bidict import bidict from eth_typing import BlockIdentifier, ChecksumAddress from cachetools import cached, FIFOCache from cachetools.func import ttl_cache -from multicall import Call, Multicall -from multicall.constants import MULTICALL3_ADDRESSES from web3.exceptions import ContractLogicError from utils import solidity from utils.cfg import cfg from utils.readable import decode_abi -from utils.shared_w3 import w3, mainnet_w3, historical_w3 -from utils.time_debug import timerun_async +from utils.shared_w3 import w3, w3_async, mainnet_w3, historical_w3 log = logging.getLogger("rocketpool") log.setLevel(cfg["log_level"]) @@ -23,6 +22,7 @@ class NoAddressFound(Exception): pass + class RocketPool: ADDRESS_CACHE = FIFOCache(maxsize=2048) ABI_CACHE = FIFOCache(maxsize=2048) @@ -46,8 +46,11 @@ def _init_contract_addresses(self) -> None: for name, address in manual_addresses.items(): self.addresses[name] = address - self.addresses["multicall3"] = w3.to_checksum_address(MULTICALL3_ADDRESSES[w3.eth.chain_id]) self._multicall = self.get_contract_by_name("multicall3") + self._multicall_async = w3_async.eth.contract( + address=self._multicall.address, + abi=self._multicall.abi + ) log.info("Indexing Rocket Pool contracts...") # generate list of all file names with the .sol extension from the rocketpool submodule @@ -75,57 +78,58 @@ def _init_contract_addresses(self) -> None: log.warning("Failed to find address for Constellation contracts") @staticmethod - def seth_sig(abi, function_name): - # also handle tuple outputs, so `example(unit256)((unit256,unit256))` for example - for item in abi: - if item.get("name") == function_name: - inputs = ','.join([i['type'] for i in item['inputs']]) - outputs = [] - for o in item['outputs']: 
- if o['type'] == 'tuple': - outputs.append(f"({','.join([i['type'] for i in o['components']])})") - else: - outputs.append(o['type']) - outputs = ','.join(outputs) - return f"{function_name}({inputs})({outputs})" - raise Exception(f"Function {function_name} not found in ABI") + def _abi_type_str(output: dict) -> str: + """Convert a single ABI output entry to an eth_abi type string, handling tuples.""" + t = output["type"] + if "tuple" in t: + inner = ",".join(RocketPool._abi_type_str(c) for c in output["components"]) + suffix = t[5:] # captures "", "[]", "[N]", etc. + return f"({inner}){suffix}" + return t @staticmethod - def _fn_to_call(fn, key): - """Convert a web3 ContractFunction to a multicall Call with integer key.""" - sig = RocketPool.seth_sig(fn.contract_abi, fn.function_identifier) - return Call(fn.address, [sig, *fn.args], [(key, None)]) + def _decode_fn_output(fn, data: bytes) -> Any: + """Decode raw ABI output bytes for a ContractFunction.""" + outputs = fn.abi["outputs"] + if not outputs: + return None + types = [RocketPool._abi_type_str(o) for o in outputs] + decoded = eth_abi.decode(types, data) + return decoded[0] if len(decoded) == 1 else decoded @staticmethod - def build_call(abi_source, function_name, *args, target=None, key=None, transform=None): - """Build a multicall Call object. 
- - Args: - abi_source: Contract object with .abi attribute - function_name: Function name to call - *args: Function arguments - target: Target address (defaults to abi_source.address) - key: Result key (defaults to function_name) - transform: Optional result transform function - """ - abi = abi_source.abi if hasattr(abi_source, 'abi') else abi_source - address = target if target is not None else abi_source.address - sig = RocketPool.seth_sig(abi, function_name) - return Call(address, [sig, *args], [(key if key is not None else function_name, transform)]) - - def multicall_sync(self, calls, require_success=True): - """Sync multicall accepting ContractFunction objects. Returns list of results.""" - mc_calls = [self._fn_to_call(fn, i) for i, fn in enumerate(calls)] - encoded = [(call.target, not require_success, call.data) for call in mc_calls] + def _normalize_calls(calls, default_require_success): + """Normalize calls to (fn, allow_failure) pairs. Each call may be a + plain ContractFunction or a (ContractFunction, require_success) tuple.""" + fns, flags = [], [] + for call in calls: + if isinstance(call, tuple): + fn, req = call + else: + fn, req = call, default_require_success + fns.append(fn) + flags.append(not req) + return fns, flags + + def multicall(self, calls, require_success=True) -> list: + """Sync multicall accepting ContractFunction objects or (fn, require_success) tuples.""" + fns, flags = self._normalize_calls(calls, require_success) + encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] results = self._multicall.functions.aggregate3(encoded).call() return [ - Call.decode_output(data, mc_calls[i].signature, success=success) + RocketPool._decode_fn_output(fns[i], data) if success else None for i, (success, data) in enumerate(results) ] - async def multicall(self, calls: list[Call], require_success=True): - """Async multicall accepting Call objects. 
Returns dict of keyed results.""" - return await Multicall(calls, _w3=w3, gas_limit=50_000_000, require_success=require_success) + async def multicall_async(self, calls, require_success=True) -> list: + """Async multicall accepting ContractFunction objects or (fn, require_success) tuples.""" + fns, flags = self._normalize_calls(calls, require_success) + encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] + results = await self._multicall_async.functions.aggregate3(encoded).call() + return [ + RocketPool._decode_fn_output(fns[i], data) if success else None + for i, (success, data) in enumerate(results) + ] @cached(cache=ADDRESS_CACHE) def get_address_by_name(self, name): @@ -138,7 +142,7 @@ def uncached_get_address_by_name(self, name, block="latest"): log.debug(f"Retrieving address for {name} Contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.address", name]) address = self.get_contract_by_name("rocketStorage", historical=block != "latest").functions.getAddress(sha3).call(block_identifier=block) - if not w3.toInt(hexstr=address): + if not w3.to_int(hexstr=address): raise NoAddressFound(f"No address found for {name} Contract") self.addresses[name] = address log.debug(f"Retrieved address for {name} Contract: {address}") @@ -241,6 +245,7 @@ def get_function(self, path, *args, historical=False, address=None, mainnet=Fals if not address: address = self.get_address_by_name(name) contract = self.assemble_contract(name, address, historical, mainnet) + args = tuple(w3.to_checksum_address(a) if isinstance(a, str) and w3.is_address(a) else a for a in args) return contract.functions[function](*args) def call(self, path, *args, block: BlockIdentifier = "latest", address=None, mainnet=False): diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index 4e5793b9..0e734898 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -50,16 +50,16 @@ def 
get_sea_creature_for_holdings(holdings): def get_holding_for_address(address): if cfg["rocketpool.chain"] != "mainnet": return 0 - if price_cache["block"] != (b := w3.eth.blockNumber): + if price_cache["block"] != (b := w3.eth.block_number): price_cache["rpl_price"] = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) price_cache["reth_price"] = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate")) price_cache["block"] = b # get their eth balance - eth_balance = solidity.to_float(w3.eth.getBalance(address)) + eth_balance = solidity.to_float(w3.eth.get_balance(address)) # get ERC-20 token balance for this address with contextlib.suppress(Exception): - rpl_balance, rplfs_balance, reth_balance = rp.multicall_sync([ + rpl_balance, rplfs_balance, reth_balance = rp.multicall([ rp.get_contract_by_name("rocketTokenRPL").functions.balanceOf(address), rp.get_contract_by_name("rocketTokenRPLFixedSupply").functions.balanceOf(address), rp.get_contract_by_name("rocketTokenRETH").functions.balanceOf(address), diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 92927cc6..7880aae8 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -3,8 +3,9 @@ import aiohttp from web3.beacon import Beacon as Bacon from aiohttp.web import HTTPError -from web3 import Web3, HTTPProvider -from web3.middleware import geth_poa_middleware +from web3 import Web3, AsyncWeb3, HTTPProvider +from web3.providers import AsyncHTTPProvider +from web3.middleware import ExtraDataToPOAMiddleware from utils.cfg import cfg from utils.retry import retry_async @@ -13,11 +14,13 @@ log.setLevel(cfg["log_level"]) w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) +w3_async = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) mainnet_w3 = w3 if cfg['rocketpool.chain'] != "mainnet": mainnet_w3 = 
Web3(HTTPProvider(cfg['execution_layer.endpoint.mainnet'])) - w3.middleware_onion.inject(geth_poa_middleware, layer=0) + w3.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0) + w3_async.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0) historical_w3 = None if "archive" in cfg['execution_layer.endpoint'].keys(): From 6099b67601333b3abd0aef290f751a82c7b2af89 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 09:34:14 +0000 Subject: [PATCH 140/279] refactor _batch_multicall_update --- .../plugins/db_upkeep_task/db_upkeep_task.py | 80 ++++++++++--------- 1 file changed, 43 insertions(+), 37 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 0b6254dd..53eedb0d 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -2,11 +2,15 @@ import time import asyncio from collections import defaultdict +from collections.abc import Callable from datetime import timedelta +from typing import Any, Optional import pymongo from cronitor import Monitor from pymongo import AsyncMongoClient, UpdateOne, UpdateMany +from pymongo.asynchronous.collection import AsyncCollection +from web3.contract.contract import ContractFunction from discord.ext import commands from discord.utils import as_chunks @@ -150,26 +154,27 @@ async def check_indexes(self): await self.db.megapool_validators.create_index("beacon.status") log.debug("indexes checked") - async def _batch_multicall_update(self, collection, query, lamb, label=None, projection=None): - if projection is not None: - items = await collection.find(query, projection).to_list() - def get_addr(d): return d["address"] - else: - items = await collection.distinct("address", query) - def get_addr(a): return a - + async def _batch_multicall_update( + self, + collection: AsyncCollection, + query: dict[str, Any], + call_fn: 
Callable[[dict[str, Any]], list[tuple]], + projection: dict[str, Any], + label: Optional[str], + ) -> None: + items = await collection.find(query, projection).to_list() if not items: return total = len(items) - batch_size = self.batch_size // len(lamb(items[0])) + batch_size = self.batch_size // len(call_fn(items[0])) for i, batch in enumerate(as_chunks(items, batch_size)): if label: start = i * batch_size + 1 end = min((i + 1) * batch_size, total) log.debug(f"Processing {label} [{start}, {end}]/{total}") - # lamb(item) returns a list of (fn, require_success, transform, field) - expanded = [(get_addr(item), *t) for item in batch for t in lamb(item)] + # call_fn(item) returns a list of (fn, require_success, transform, field) + expanded = [(item["address"], *t) for item in batch for t in call_fn(item)] calls = [(e[1], e[2]) for e in expanded] results = await rp.multicall_async(calls) updates = defaultdict(dict) @@ -205,14 +210,14 @@ async def add_untracked_node_operators(self): async def add_static_node_operator_data(self): df = rp.get_contract_by_name("rocketNodeDistributorFactory") mf = rp.get_contract_by_name("rocketMegapoolFactory") - def get_calls(a): return [ - (df.functions.getProxyAddress(a), True, w3.to_checksum_address, "fee_distributor.address"), - (mf.functions.getExpectedAddress(a), True, w3.to_checksum_address, "megapool.address"), + def get_calls(n): return [ + (df.functions.getProxyAddress(n["address"]), True, w3.to_checksum_address, "fee_distributor.address"), + (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), ] await self._batch_multicall_update( self.db.node_operators, {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, - get_calls + get_calls, {"address": 1}, label="node operators" ) @timerun_async @@ -252,7 +257,7 @@ def get_calls(n): return [ @timerun_async async def update_dynamic_megapool_data(self): def mp_at(addr): return 
rp.assemble_contract("rocketMegapoolDelegate", address=addr) - def lamb(n): return [ + def get_calls(n): return [ (mp_at(n["megapool"]["address"]).functions.getValidatorCount(), True, None, "megapool.validator_count"), (mp_at(n["megapool"]["address"]).functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), (mp_at(n["megapool"]["address"]).functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), @@ -265,8 +270,9 @@ def lamb(n): return [ (mp_at(n["megapool"]["address"]).functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), ] await self._batch_multicall_update( - self.db.node_operators, {"megapool.deployed": True}, lamb, label="megapools", - projection={"address": 1, "megapool.address": 1} + self.db.node_operators, {"megapool.deployed": True}, + get_calls, {"address": 1, "megapool.address": 1}, + label="megapools" ) # -- Minipool tasks -- @@ -289,14 +295,14 @@ async def add_untracked_minipools(self): @timerun_async async def add_static_minipool_data(self): mm = rp.get_contract_by_name("rocketMinipoolManager") - def lamb(a): return [ - (rp.assemble_contract("rocketMinipool", address=a).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), - (mm.functions.getMinipoolPubkey(a), True, safe_to_hex, "pubkey"), + def lamb(n): return [ + (rp.assemble_contract("rocketMinipool", address=n["address"]).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), + (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), ] await self._batch_multicall_update( self.db.minipools, {"node_operator": {"$exists": False}}, - lamb + lamb, {"address": 1}, label="minipools" ) @timerun @@ -353,23 +359,23 @@ async def add_static_minipool_deposit_data(self): @timerun_async async def update_dynamic_minipool_data(self): mc = rp.get_contract_by_name("multicall3") - def get_calls(addr): - minipool_contract = rp.assemble_contract("rocketMinipool", 
address=addr) + def get_calls(n): + minipool_contract = rp.assemble_contract("rocketMinipool", address=n["address"]) return [ - (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), - (minipool_contract.functions.getStatusTime(), True, None, "status_time"), - (minipool_contract.functions.getVacant(), False, is_true, "vacant"), - (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), - (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), - (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), - (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), - (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), - (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), - (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), - (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), - (mc.functions.getEthBalance(addr), True, safe_to_float, "execution_balance"), + (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), + (minipool_contract.functions.getStatusTime(), True, None, "status_time"), + (minipool_contract.functions.getVacant(), False, is_true, "vacant"), + (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), + (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), + (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), + (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), + (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), + (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), + 
(minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), + (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), + (mc.functions.getEthBalance(n["address"]), True, safe_to_float, "execution_balance"), ] - await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, get_calls, label="minipools") + await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools") @timerun async def update_dynamic_minipool_beacon_data(self): From 79eebd263619ea37a635721854777937cf792393 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 09:34:21 +0000 Subject: [PATCH 141/279] add renovate config --- .github/renovate.json | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index bbca9d87..8c5841f2 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -1,23 +1,20 @@ { + "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:base" + "config:recommended" ], - "patch": { - "automerge": true - }, - "pin": { - "automerge": true - }, - "rollback": { - "automerge": true - }, - "docker-compose": { - "automerge": false - }, - "docker": { - "automerge": false - }, + "forkProcessing": "enabled", "prCreation": "not-pending", "rollbackPrs": true, - "stabilityDays": 3 + "stabilityDays": 3, + "packageRules": [ + { + "matchUpdateTypes": ["patch", "pin", "rollback"], + "automerge": true + }, + { + "matchManagers": ["dockerfile", "docker-compose"], + "automerge": false + } + ] } From e6ecda5f75763946048c40f07ebad1d994b3f386 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 09:42:20 +0000 Subject: [PATCH 142/279] match upstream requests version --- rocketwatch/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index fefd1d24..cef48d15 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -5,7 +5,7 @@ humanize==4.6.0 termplotlib==0.3.9 cachetools==5.3.3 bidict==0.22.1 -requests==2.32.3 +requests==2.32.4 uptime==3.0.1 discord.py==2.5.2 config==0.5.1 From ac7813425d8b69c0cbde645a9372d0dec61a1160 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 10:22:33 +0000 Subject: [PATCH 143/279] update to mongodb v7 --- compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yaml b/compose.yaml index 037f64eb..4887f35a 100644 --- a/compose.yaml +++ b/compose.yaml @@ -17,7 +17,7 @@ services: com.centurylinklabs.watchtower.enable: true mongodb: - image: mongo:6.0.5 + image: mongo:7.0 volumes: - ./mongodb:/data/db restart: unless-stopped From a026a3ed3e545d30c887073bb2933818474bb215 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 10:34:18 +0000 Subject: [PATCH 144/279] update to mongodb v8 --- compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yaml b/compose.yaml index 4887f35a..0b8e8ea8 100644 --- a/compose.yaml +++ b/compose.yaml @@ -17,7 +17,7 @@ services: com.centurylinklabs.watchtower.enable: true mongodb: - image: mongo:7.0 + image: mongo:8.0 volumes: - ./mongodb:/data/db restart: unless-stopped From bbcf30124c4e5c721e4a46a0fb871be8b39e15af Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 10:50:26 +0000 Subject: [PATCH 145/279] handle deleted status message --- rocketwatch/plugins/event_core/event_core.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 88d830f0..63b30872 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ 
b/rocketwatch/plugins/event_core/event_core.py @@ -11,6 +11,7 @@ import pymongo from cronitor import Monitor +import discord from discord.ext import commands, tasks from eth_typing import BlockIdentifier, BlockNumber from pymongo import AsyncMongoClient @@ -284,13 +285,18 @@ async def _replace_or_add_status( if embed and prev_status and (prev_status["channel_id"] == target_channel_id): log.debug(f"Replacing existing status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(target_channel_id) - msg = await channel.fetch_message(prev_status["message_id"]) - await msg.edit(embed=embed) - await self.db.state_messages.update_one( - prev_status, - {"$set": {"sent_at": datetime.now(), "state": str(self.state)}} - ) - return + try: + msg = await channel.fetch_message(prev_status["message_id"]) + await msg.edit(embed=embed) + await self.db.state_messages.update_one( + prev_status, + {"$set": {"sent_at": datetime.now(), "state": str(self.state)}} + ) + return + except discord.errors.NotFound: + log.warning("Could not fetch status, removing DB entry") + await self.db.state_messages.delete_one(prev_status) + prev_status = None if prev_status: log.debug(f"Deleting status message for channel {target_channel}") From 4bc189e01a4943af7e0287a3c7c9d66962dd94cc Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 10:50:35 +0000 Subject: [PATCH 146/279] bump mongodb to v8.2.5 --- compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yaml b/compose.yaml index 0b8e8ea8..659ff0fe 100644 --- a/compose.yaml +++ b/compose.yaml @@ -17,7 +17,7 @@ services: com.centurylinklabs.watchtower.enable: true mongodb: - image: mongo:8.0 + image: mongo:8.2.5 volumes: - ./mongodb:/data/db restart: unless-stopped From ba73d20c1166ae76a4f43005507cff1f2ebaf18c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 11:03:12 +0000 Subject: [PATCH 147/279] update 
GitHub actions --- .github/workflows/codeql-analysis.yml | 22 +++------------------- .github/workflows/docker-ci.yml | 6 +++--- 2 files changed, 6 insertions(+), 22 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index f8e03d00..f9132943 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,11 +39,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2.2.6 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -51,21 +51,5 @@ jobs: # Prefix the list here with "+" to use these queries and those in the config file. # queries: ./path/to/local/query, your-org/your-repo/queries@main - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2.2.6 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2.2.6 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index bd004837..bdfa134b 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -18,14 +18,14 @@ jobs: with: submodules: recursive - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: haloooloolo password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: ./rocketwatch push: true From d88f81ee578b1b2297c5cb418795395e6b520646 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 11:07:31 +0000 Subject: [PATCH 148/279] more actions version bumps --- .github/workflows/codeql-analysis.yml | 6 +++--- .github/workflows/docker-ci.yml | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index f9132943..cc70e62a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,11 +39,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@v4 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. 
@@ -52,4 +52,4 @@ jobs: # queries: ./path/to/local/query, your-org/your-repo/queries@main - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@v4 diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index bdfa134b..70a0adc1 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -14,18 +14,18 @@ jobs: git config --global url."https://github.com/".insteadOf git@github.com: git config --global url."https://github.com/".insteadOf ssh://git@github.com/ - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: submodules: recursive - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Login to DockerHub - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: username: haloooloolo password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 with: context: ./rocketwatch push: true From 99a5707711f9e70b9ca76ab1191bae872fb5de1a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 11:22:28 +0000 Subject: [PATCH 149/279] keep stale delegate data if necessary --- rocketwatch/utils/embeds.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index b8aed390..c7663af3 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -76,18 +76,23 @@ async def resolve_ens(ctx, node_address): return None, None +_pdao_delegates: dict[str, str] = {} + @ttl_cache(ttl=900) def get_pdao_delegates() -> dict[str, str]: + global _pdao_delegates + @retry(tries=3, delay=1) def _get_delegates() -> dict[str, str]: response = requests.get("https://delegates.rocketpool.net/api/delegates") return {delegate["nodeAddress"]: delegate["name"] for delegate in response.json()} try: - return 
_get_delegates() + _pdao_delegates = _get_delegates() except Exception: log.warning("Failed to fetch pDAO delegates.") - return {} + + return _pdao_delegates def el_explorer_url( From 46b52a98638a277cd7971c2e4fa7a65fbbe14123 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 11:22:45 +0000 Subject: [PATCH 150/279] update minor dependency versions --- compose.yaml | 2 +- rocketwatch/requirements.txt | 32 ++++++++++++++++---------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/compose.yaml b/compose.yaml index 659ff0fe..09d80491 100644 --- a/compose.yaml +++ b/compose.yaml @@ -31,7 +31,7 @@ services: - "127.0.0.1:27017:27017" watchtower: - image: nickfedor/watchtower + image: nickfedor/watchtower:latest volumes: - /var/run/docker.sock:/var/run/docker.sock command: --interval 30 diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index cef48d15..41d0fa4b 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -1,34 +1,34 @@ psutil==5.9.8 python_i18n==0.3.9 web3>=7.0.0,<8.0.0 -humanize==4.6.0 +humanize==4.15.0 termplotlib==0.3.9 -cachetools==5.3.3 -bidict==0.22.1 -requests==2.32.4 +cachetools==5.5.2 +bidict==0.23.1 +requests==2.32.5 uptime==3.0.1 -discord.py==2.5.2 +discord.py==2.7.0 config==0.5.1 pytz==2022.7.1 -matplotlib==3.7.1 -inflect==7.3.1 +matplotlib==3.10.8 +inflect==7.5.0 colorama==0.4.6 -seaborn==0.12.2 +seaborn==0.13.2 etherscan_labels @ git+https://github.com/InvisibleSymbol/etherscan-labels@7eb617d715a4dda0eabdd858106a526a3abd3394 -cronitor==4.6.0 +cronitor==4.9.0 retry-async==0.1.4 dice==3.1.2 -regex==2023.8.8 -tiktoken==0.5.2 -anthropic==0.18.1 -pymongo==4.15.3 +regex==2023.12.25 +tiktoken==0.12.0 +anthropic==0.84.0 +pymongo==4.16.0 graphql_query==1.4.0 pillow==11.1.0 -aiohttp==3.11.12 +aiohttp==3.13.3 numpy==1.26.4 -beautifulsoup4==4.13.3 +beautifulsoup4==4.14.3 eth-typing==5.2.1 hexbytes==1.3.1 eth-utils==5.3.1 -tabulate==0.9.0 
+tabulate==0.10.0 anyascii==0.3.3 From 254738fe4aa123b8fec1b6cb59057c6dabdb76a6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 12:23:56 +0000 Subject: [PATCH 151/279] update remaining dependencies --- rocketwatch/plugins/collateral/collateral.py | 2 +- .../minipool_distribution.py | 2 +- rocketwatch/plugins/random/random.py | 15 +-------------- rocketwatch/requirements.txt | 18 +++++++++--------- 4 files changed, 12 insertions(+), 25 deletions(-) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 7996f0cb..f359395f 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -29,7 +29,7 @@ def get_percentiles(percentiles, counts): for p in percentiles: - yield p, np.percentile(counts, p, interpolation='nearest') + yield p, np.percentile(counts, p, method='nearest') async def collateral_distribution_raw(ctx: Context, distribution): diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 4602e1de..1ab77d39 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -23,7 +23,7 @@ def get_percentiles(percentiles, counts): for p in percentiles: - yield p, np.percentile(counts, p, interpolation='nearest') + yield p, np.percentile(counts, p, method='nearest') async def minipool_distribution_raw(ctx: Context, distribution): diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 7c92c30e..a9084b38 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -34,20 +34,7 @@ def __init__(self, bot: RocketWatch): @hybrid_command() async def dice(self, ctx: Context, dice_string: str = "1d6"): await ctx.defer(ephemeral=is_hidden_weak(ctx)) - try: - result = 
dice.roll(dice_string) - except dice.exceptions.DiceException as e: - await ctx.send(f"Dice Error:\n```{e}```") - return - except dice.exceptions.DiceFatalException as e: - await ctx.send(f"Dice Fatal Error:\n```{e}```") - return - except dice.exceptions.ParseException as e: - await ctx.send(f"Dice Parse Error:\n```{e}```") - return - except dice.exceptions.ParseFatalException as e: - await ctx.send(f"Dice Parse Fatal Error:\n```{e}```") - return + result = dice.roll(dice_string) e = Embed() e.title = f"🎲 {dice_string}" if len(str(result)) >= 2000: diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 41d0fa4b..c9f464d4 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -1,31 +1,31 @@ -psutil==5.9.8 +psutil==7.2.2 python_i18n==0.3.9 web3>=7.0.0,<8.0.0 humanize==4.15.0 termplotlib==0.3.9 -cachetools==5.5.2 +cachetools==7.0.2 bidict==0.23.1 requests==2.32.5 uptime==3.0.1 -discord.py==2.7.0 +discord.py==2.7.1 config==0.5.1 -pytz==2022.7.1 +pytz==2026.1.post1 matplotlib==3.10.8 inflect==7.5.0 colorama==0.4.6 seaborn==0.13.2 -etherscan_labels @ git+https://github.com/InvisibleSymbol/etherscan-labels@7eb617d715a4dda0eabdd858106a526a3abd3394 +etherscan_labels @ git+https://github.com/haloooloolo/etherscan-labels cronitor==4.9.0 retry-async==0.1.4 -dice==3.1.2 -regex==2023.12.25 +dice==4.0.0 +regex==2026.2.28 tiktoken==0.12.0 anthropic==0.84.0 pymongo==4.16.0 graphql_query==1.4.0 -pillow==11.1.0 +pillow==11.3.0 aiohttp==3.13.3 -numpy==1.26.4 +numpy==2.4.2 beautifulsoup4==4.14.3 eth-typing==5.2.1 hexbytes==1.3.1 From ad9d6c4af92551e659a5232d25e6f4bd5db3e76a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 12:24:10 +0000 Subject: [PATCH 152/279] update to Python v3.14.2 --- rocketwatch/Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rocketwatch/Dockerfile b/rocketwatch/Dockerfile index 01e8839a..2efb301d 100644 --- a/rocketwatch/Dockerfile +++ 
b/rocketwatch/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -FROM python:3.10.8 +FROM python:3.14.2 COPY requirements.txt requirements.txt RUN pip install --upgrade pip @@ -7,6 +7,5 @@ RUN pip install -r requirements.txt COPY . /app ENV PYTHONUNBUFFERED=1 -ENV MULTICALL_PROCESSES=11 WORKDIR /app CMD [ "python", "." ] From ed3e7cf04f5fc9b16b33b9c9408ccffe8e87a377 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 12:28:51 +0000 Subject: [PATCH 153/279] more minor dependency bumps --- rocketwatch/Dockerfile | 2 +- rocketwatch/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/Dockerfile b/rocketwatch/Dockerfile index 2efb301d..f2490cf1 100644 --- a/rocketwatch/Dockerfile +++ b/rocketwatch/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -FROM python:3.14.2 +FROM python:3.14.3 COPY requirements.txt requirements.txt RUN pip install --upgrade pip diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index c9f464d4..8ee9b3e7 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -3,7 +3,7 @@ python_i18n==0.3.9 web3>=7.0.0,<8.0.0 humanize==4.15.0 termplotlib==0.3.9 -cachetools==7.0.2 +cachetools==7.0.3 bidict==0.23.1 requests==2.32.5 uptime==3.0.1 @@ -23,7 +23,7 @@ tiktoken==0.12.0 anthropic==0.84.0 pymongo==4.16.0 graphql_query==1.4.0 -pillow==11.3.0 +pillow==12.1.1 aiohttp==3.13.3 numpy==2.4.2 beautifulsoup4==4.14.3 From 0892b68b950162c20dc964a68cb2f5928ed18aca Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 13:15:32 +0000 Subject: [PATCH 154/279] megapool delegate command --- .../plugins/db_upkeep_task/db_upkeep_task.py | 50 +++++---- .../delegate_contracts/delegate_contracts.py | 101 ++++++++++++++++++ .../minipool_delegates/minipool_delegates.py | 69 ------------ 3 files changed, 129 insertions(+), 91 deletions(-) create mode 100644 
rocketwatch/plugins/delegate_contracts/delegate_contracts.py delete mode 100644 rocketwatch/plugins/minipool_delegates/minipool_delegates.py diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 53eedb0d..be146e66 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -257,17 +257,21 @@ def get_calls(n): return [ @timerun_async async def update_dynamic_megapool_data(self): def mp_at(addr): return rp.assemble_contract("rocketMegapoolDelegate", address=addr) + def proxy_at(addr): return rp.assemble_contract("rocketMegapoolProxy", address=addr) def get_calls(n): return [ - (mp_at(n["megapool"]["address"]).functions.getValidatorCount(), True, None, "megapool.validator_count"), - (mp_at(n["megapool"]["address"]).functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), - (mp_at(n["megapool"]["address"]).functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), - (mp_at(n["megapool"]["address"]).functions.getDebt(), True, safe_to_float, "megapool.debt"), - (mp_at(n["megapool"]["address"]).functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), - (mp_at(n["megapool"]["address"]).functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), - (mp_at(n["megapool"]["address"]).functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), + (mp_at(n["megapool"]["address"]).functions.getValidatorCount(), True, None, "megapool.validator_count"), + 
(mp_at(n["megapool"]["address"]).functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), + (mp_at(n["megapool"]["address"]).functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), + (mp_at(n["megapool"]["address"]).functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), + (mp_at(n["megapool"]["address"]).functions.getDebt(), True, safe_to_float, "megapool.debt"), + (mp_at(n["megapool"]["address"]).functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), + (mp_at(n["megapool"]["address"]).functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), + (mp_at(n["megapool"]["address"]).functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), + (proxy_at(n["megapool"]["address"]).functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), + (proxy_at(n["megapool"]["address"]).functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), + (proxy_at(n["megapool"]["address"]).functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), ] await self._batch_multicall_update( self.db.node_operators, {"megapool.deployed": True}, @@ -362,18 +366,20 @@ async def update_dynamic_minipool_data(self): def get_calls(n): minipool_contract = rp.assemble_contract("rocketMinipool", address=n["address"]) return [ - (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), - (minipool_contract.functions.getStatusTime(), True, None, "status_time"), - (minipool_contract.functions.getVacant(), False, is_true, "vacant"), - (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), - 
(minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), - (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), - (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), - (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), - (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), - (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), - (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), - (mc.functions.getEthBalance(n["address"]), True, safe_to_float, "execution_balance"), + (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), + (minipool_contract.functions.getStatusTime(), True, None, "status_time"), + (minipool_contract.functions.getVacant(), False, is_true, "vacant"), + (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), + (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), + (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), + (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), + (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), + (minipool_contract.functions.getDelegate(), True, w3.to_checksum_address, "delegate"), + (minipool_contract.functions.getPreviousDelegate(), False, w3.to_checksum_address, "previous_delegate"), + (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), + (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), + (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), + (mc.functions.getEthBalance(n["address"]), True, 
safe_to_float, "execution_balance"), ] await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools") diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py new file mode 100644 index 00000000..27945023 --- /dev/null +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -0,0 +1,101 @@ +import logging + +from pymongo import AsyncMongoClient +from pymongo.asynchronous.collection import AsyncCollection + +from discord import Interaction +from discord.ext import commands +from discord.app_commands import command + +from rocketwatch import RocketWatch +from utils.embeds import Embed, el_explorer_url +from utils.readable import s_hex +from utils.shared_w3 import w3 +from utils.cfg import cfg +from utils.rocketpool import rp + +log = logging.getLogger("delegate_contracts") +log.setLevel(cfg["log_level"]) + + +class DelegateContracts(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + + async def _delegate_stats( + self, + collection: AsyncCollection, + match_filter: dict, + delegate_field: str, + use_latest_field: str, + latest_contract: str, + title: str, + ) -> Embed: + distribution_stats = await (await collection.aggregate([ + {"$match": match_filter}, + {"$group": {"_id": f"${delegate_field}", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ])).to_list() + + use_latest_counts = {True: 0, False: 0} + for d in await (await collection.aggregate([ + {"$match": match_filter}, + {"$group": {"_id": f"${use_latest_field}", "count": {"$sum": 1}}}, + ])).to_list(): + use_latest_counts[bool(d["_id"])] = d["count"] + + e = Embed() + e.title = title + s = "\u00A0" * 4 + desc = "**Effective Delegate Distribution:**\n" + c_sum = sum(d["count"] for d in distribution_stats) + # refresh cached address + 
rp.uncached_get_address_by_name(latest_contract) + latest_addr = rp.get_address_by_name(latest_contract) + for d in distribution_stats: + a = w3.to_checksum_address(d["_id"]) + name = s_hex(a) + if a == latest_addr: + name += " (Latest)" + desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" + desc += "\n" + desc += "**Use Latest Delegate:**\n" + c_sum = sum(use_latest_counts.values()) + for value, label in [(True, "Yes"), (False, "No")]: + count = use_latest_counts[value] + desc += f"{s}**{label}**: {count:,} ({count / c_sum * 100:.2f}%)\n" + e.description = desc + return e + + @command() + async def minipool_delegates(self, interaction: Interaction): + """Show stats for minipool delegate contract adoption""" + await interaction.response.defer() + e = await self._delegate_stats( + collection=self.db.minipools, + match_filter={"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}}, + delegate_field="effective_delegate", + use_latest_field="use_latest_delegate", + latest_contract="rocketMinipoolDelegate", + title="Minipool Delegate Stats", + ) + await interaction.followup.send(embed=e) + + @command() + async def megapool_delegates(self, interaction: Interaction): + """Show stats for megapool delegate contract adoption""" + await interaction.response.defer() + e = await self._delegate_stats( + collection=self.db.node_operators, + match_filter={"megapool.active_validator_count": {"$gt": 0}}, + delegate_field="megapool.effective_delegate", + use_latest_field="megapool.use_latest_delegate", + latest_contract="rocketMegapoolDelegate", + title="Megapool Delegate Stats", + ) + await interaction.followup.send(embed=e) + + +async def setup(self): + await self.add_cog(DelegateContracts(self)) diff --git a/rocketwatch/plugins/minipool_delegates/minipool_delegates.py b/rocketwatch/plugins/minipool_delegates/minipool_delegates.py deleted file mode 100644 index d78193a1..00000000 --- 
a/rocketwatch/plugins/minipool_delegates/minipool_delegates.py +++ /dev/null @@ -1,69 +0,0 @@ -import logging - -from pymongo import AsyncMongoClient - -from discord import Interaction -from discord.ext import commands -from discord.app_commands import command - -from rocketwatch import RocketWatch -from utils.embeds import Embed, el_explorer_url -from utils.readable import s_hex -from utils.shared_w3 import w3 -from utils.cfg import cfg -from utils.rocketpool import rp - -log = logging.getLogger("minipool_delegates") -log.setLevel(cfg["log_level"]) - - -class MinipoolDelegates(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - - @command() - async def minipool_delegates(self, interaction: Interaction): - """Show stats for minipool delegate adoption""" - await interaction.response.defer() - # only consider active minipools - minipool_filter = {"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}} - # we want to show the distribution of minipools that are using each delegate - distribution_stats = await (await self.db.minipools.aggregate([ - {"$match": minipool_filter}, - {"$group": {"_id": "$effective_delegate", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}}, - ])).to_list() - # and the percentage of minipools that are using the useLatestDelegate flag - use_latest_delegate_stats = await (await self.db.minipools.aggregate([ - {"$match": minipool_filter}, - {"$group": {"_id": "$use_latest_delegate", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}}, - ])).to_list() - e = Embed() - e.title = "Minipool Delegate Stats" - desc = "**Effective Delegate Distribution:**\n" - c_sum = sum(d['count'] for d in distribution_stats) - s = "\u00A0" * 4 - # latest delegate acording to rp - rp.uncached_get_address_by_name("rocketMinipoolDelegate") - for d in distribution_stats: - # I HATE THE CHECKSUMMED ADDRESS REQUIREMENTS I HATE THEM SO MUCH - a = 
w3.to_checksum_address(d['_id']) - name = s_hex(a) - if a == rp.get_address_by_name("rocketMinipoolDelegate"): - name += " (Latest)" - desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" - desc += "\n" - desc += "**Use Latest Delegate:**\n" - c_sum = sum(d['count'] for d in use_latest_delegate_stats) - for d in use_latest_delegate_stats: - # true = yes, false = no - d['_id'] = "Yes" if d['_id'] else "No" - desc += f"{s}**{d['_id']}**: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" - e.description = desc - await interaction.followup.send(embed=e) - - -async def setup(self): - await self.add_cog(MinipoolDelegates(self)) From 3ee793e6d43d8515f4717ae732cc6be3612be7fd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 17:32:46 +0000 Subject: [PATCH 155/279] move DB instance into RocketWatch instance --- rocketwatch/plugins/activity/activity.py | 6 +- rocketwatch/plugins/apr/apr.py | 14 ++-- .../plugins/beacon_events/beacon_events.py | 71 +++++++++---------- .../plugins/chat_summary/chat_summary.py | 6 +- .../plugins/commissions/commissions.py | 5 +- rocketwatch/plugins/cow_orders/cow_orders.py | 32 +++++---- .../plugins/db_upkeep_task/db_upkeep_task.py | 63 ++++++++-------- rocketwatch/plugins/debug/debug.py | 10 ++- .../delegate_contracts/delegate_contracts.py | 6 +- .../plugins/deposit_pool/deposit_pool.py | 2 - .../plugins/detect_scam/detect_scam.py | 40 +++++------ rocketwatch/plugins/event_core/event_core.py | 56 +++++++-------- rocketwatch/plugins/events/events.py | 69 +++++++++--------- .../fee_distribution/fee_distribution.py | 4 +- rocketwatch/plugins/lottery/lottery.py | 34 ++++----- rocketwatch/plugins/metrics/metrics.py | 8 +-- rocketwatch/plugins/milestones/milestones.py | 16 ++--- .../minipool_distribution.py | 10 ++- .../pinned_messages/pinned_messages.py | 20 +++--- rocketwatch/plugins/proposals/proposals.py | 33 +++++---- rocketwatch/plugins/random/random.py | 4 +- 
rocketwatch/plugins/rocksolid/rocksolid.py | 14 ++-- rocketwatch/plugins/rpl/rpl.py | 6 +- .../plugins/scam_warning/scam_warning.py | 6 +- rocketwatch/plugins/snapshot/snapshot.py | 56 ++++++++------- .../plugins/support_utils/support_utils.py | 37 +++++----- .../plugins/transactions/transactions.py | 46 ++++++------ rocketwatch/plugins/tvl/tvl.py | 15 ++-- .../user_distribute/user_distribute.py | 5 +- .../validator_states/validator_states.py | 6 +- rocketwatch/rocketwatch.py | 3 + rocketwatch/strings/embeds.en.json | 6 +- rocketwatch/utils/event.py | 8 +-- rocketwatch/utils/shared_w3.py | 3 + 34 files changed, 332 insertions(+), 388 deletions(-) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 1cbe365f..601f524e 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -3,7 +3,6 @@ from cronitor import Monitor from discord import Activity, ActivityType from discord.ext import commands, tasks -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -15,7 +14,6 @@ class RichActivity(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("update-activity", api_key=cfg["other.secrets.cronitor"]) self.task.start() @@ -27,10 +25,10 @@ async def task(self): self.monitor.ping() log.debug("Updating Discord activity") - minipool_count = await self.db.minipools.count_documents( + minipool_count = await self.bot.db.minipools.count_documents( {"beacon.status": "active_ongoing"} ) - megapool_count = await self.db.megapool_validators.count_documents( + megapool_count = await self.bot.db.megapool_validators.count_documents( {"beacon.status": "active_ongoing"} ) validator_count = minipool_count + megapool_count diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 228f6dfd..dedc3644 100644 --- 
a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -9,7 +9,6 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib.dates import DateFormatter -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -43,7 +42,6 @@ def get_duration(d1, d2): class APR(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.task.start() async def cog_unload(self): @@ -52,7 +50,7 @@ async def cog_unload(self): @tasks.loop(seconds=60) async def task(self): # get latest block update from the db - latest_db_block = await self.db.reth_apr.find_one(sort=[("block", -1)]) + latest_db_block = await self.bot.db.reth_apr.find_one(sort=[("block", -1)]) latest_db_block = 0 if latest_db_block is None else latest_db_block["block"] cursor_block = historical_w3.eth.get_block("latest")["number"] while True: @@ -68,7 +66,7 @@ async def task(self): reth_ratio = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate", block=cursor_block)) effectiveness = solidity.to_float( rp.call("rocketNetworkBalances.getETHUtilizationRate", block=cursor_block, address=address)) - await self.db.reth_apr.insert_one({ + await self.bot.db.reth_apr.insert_one({ "block" : balance_block, "time" : block_time, "value" : reth_ratio, @@ -93,13 +91,13 @@ async def reth_apr(self, ctx: Context): e.description = "For some comparisons against other LST: [dune dashboard](https://dune.com/rp_community/lst-comparison)" # get the last 30 datapoints - datapoints = await self.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) + datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) if len(datapoints) == 0: e.description = "No data available yet." 
return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.db.minipools.aggregate([ + tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', @@ -264,13 +262,13 @@ async def node_apr(self, ctx: Context): "The solid line is the protocol average." # get the last 30 datapoints - datapoints = await self.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) + datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) if len(datapoints) == 0: e.description = "No data available yet." return await ctx.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.db.minipools.aggregate([ + tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { 'beacon.status' : 'active_ongoing', diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 5bffd126..d25f9b90 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,7 +1,6 @@ import logging from typing import Optional, cast -import pymongo import requests import eth_utils from eth_typing import BlockNumber @@ -13,11 +12,11 @@ from utils.embeds import assemble, prepare_args from utils.readable import cl_explorer_url from utils.rocketpool import rp -from utils.shared_w3 import bacon, w3 +from utils.shared_w3 import bacon, w3_async from utils.solidity import date_to_beacon_block, beacon_block_to_date from utils.event import EventPlugin, Event from utils.block_time import ts_to_block -from utils.retry import retry +from utils.retry import retry_async log = logging.getLogger("beacon_events") log.setLevel(cfg["log_level"]) @@ -26,46 +25,45 @@ class BeaconEvents(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - self.db = 
pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch self.finality_delay_threshold = 3 - def _get_new_events(self) -> list[Event]: + async def _get_new_events(self) -> list[Event]: from_block = self.last_served_block + 1 - self.lookback_distance - return self.get_past_events(from_block, self._pending_block) + return await self.get_past_events(from_block, self._pending_block) - def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: - from_slot = max(0, date_to_beacon_block(w3.eth.get_block(from_block - 1).timestamp) + 1) - to_slot = date_to_beacon_block(w3.eth.get_block(to_block).timestamp) + async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + from_slot = max(0, date_to_beacon_block((await w3_async.eth.get_block(from_block - 1)).timestamp) + 1) + to_slot = date_to_beacon_block((await w3_async.eth.get_block(to_block)).timestamp) log.info(f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]") events: list[Event] = [] for slot_number in range(from_slot, to_slot-1): - events.extend(self._get_events_for_slot(slot_number, check_finality=False)) + events.extend(await self._get_events_for_slot(slot_number, check_finality=False)) # quite expensive and only really makes sense to check toward the head of the chain - events.extend(self._get_events_for_slot(to_slot, check_finality=True)) + events.extend(await self._get_events_for_slot(to_slot, check_finality=True)) log.debug("Finished checking beacon chain events") return events - def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: + async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: try: log.debug(f"Checking slot {slot_number}") - beacon_block = bacon.get_block(slot_number)["data"]["message"] + beacon_block = (await bacon.get_block_async(slot_number))["data"]["message"] except requests.exceptions.HTTPError: log.error(f"Beacon block 
{slot_number} not found, skipping.") return [] - events = self._get_slashings(beacon_block) - if proposal_event := self._get_proposal(beacon_block): + events = await self._get_slashings(beacon_block) + if proposal_event := await self._get_proposal(beacon_block): events.append(proposal_event) - if check_finality and (finality_delay_event := self._check_finality(beacon_block)): + if check_finality and (finality_delay_event := await self._check_finality(beacon_block)): events.append(finality_delay_event) return events - def _get_slashings(self, beacon_block: dict) -> list[Event]: + async def _get_slashings(self, beacon_block: dict) -> list[Event]: slot = int(beacon_block["slot"]) timestamp = beacon_block_to_date(slot) slashings = [] @@ -75,35 +73,36 @@ def _get_slashings(self, beacon_block: dict) -> list[Event]: att_2 = set(slash["attestation_2"]["attesting_indices"]) slashings.extend({ "slashing_type": "Attestation", - "minipool" : index, + "validator" : index, "slasher" : beacon_block["proposer_index"], "timestamp" : timestamp } for index in att_1.intersection(att_2)) slashings.extend({ "slashing_type": "Proposal", - "minipool" : slash["signed_header_1"]["message"]["proposer_index"], + "validator" : slash["signed_header_1"]["message"]["proposer_index"], "slasher" : beacon_block["proposer_index"], "timestamp" : timestamp } for slash in beacon_block["body"]["proposer_slashings"]) events = [] for slash in slashings: - minipool = self.db.minipools.find_one({"validator_index": int(slash["minipool"])}) - if not minipool: - log.info(f"Skipping slashing of unknown validator {slash['minipool']}") + minipool = await self.bot.db.minipools.find_one({"validator_index": int(slash["validator"])}) + megapool = await self.bot.db.megapool_validators.find_one({"validator_index": int(slash["validator"])}) + if not (minipool or megapool): + log.info(f"Skipping slashing of unknown validator {slash['validator']}") continue - + unique_id = ( - f"slash-{slash['minipool']}" + 
f"slash-{slash['validator']}" f":slasher-{slash['slasher']}" f":slashing-type-{slash['slashing_type']}" f":{timestamp}" ) - slash["minipool"] = cl_explorer_url(slash["minipool"]) + slash["validator"] = cl_explorer_url(slash["validator"]) slash["slasher"] = cl_explorer_url(slash["slasher"]) - slash["node_operator"] = minipool["node_operator"] - slash["event_name"] = "minipool_slash_event" + slash["node_operator"] = (minipool or megapool)["node_operator"] + slash["event_name"] = "validator_slash_event" args = prepare_args(aDict(slash)) if embed := assemble(args): @@ -117,14 +116,14 @@ def _get_slashings(self, beacon_block: dict) -> list[Event]: return events - @retry(tries=5, delay=10, backoff=2, max_delay=30) - def _get_proposal(self, beacon_block: dict) -> Optional[Event]: + @retry_async(tries=5, delay=10, backoff=2, max_delay=30) + async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block return None validator_index = int(beacon_block["proposer_index"]) - if not (minipool := self.db.minipools.find_one({"validator_index": validator_index})): + if not (minipool := await self.bot.db.minipools.find_one({"validator_index": validator_index})): # not proposed by a minipool return None @@ -173,8 +172,8 @@ def _get_proposal(self, beacon_block: dict) -> Optional[Event]: if eth_utils.is_same_address(fee_recipient, rp.get_address_by_name("rocketSmoothingPool")): args["event_name"] = "mev_proposal_smoothie_event" - args["smoothie_amount"] = w3.eth.get_balance( - w3.to_checksum_address(fee_recipient), block_identifier=block_number + args["smoothie_amount"] = await w3_async.eth.get_balance( + w3_async.to_checksum_address(fee_recipient), block_identifier=block_number ) else: args["event_name"] = "mev_proposal_event" @@ -191,14 +190,14 @@ def _get_proposal(self, beacon_block: dict) -> Optional[Event]: block_number=block_number ) - def _check_finality(self, beacon_block: dict) -> 
Optional[Event]: + async def _check_finality(self, beacon_block: dict) -> Optional[Event]: slot_number = int(beacon_block["slot"]) epoch_number = slot_number // 32 timestamp = beacon_block_to_date(slot_number) try: # calculate finality delay - finality_checkpoint = bacon.get_finality_checkpoint(state_id=str(slot_number)) + finality_checkpoint = await bacon.get_finality_checkpoint_async(state_id=str(slot_number)) last_finalized_epoch = int(finality_checkpoint["data"]["finalized"]["epoch"]) finality_delay = epoch_number - last_finalized_epoch except requests.exceptions.HTTPError: @@ -206,10 +205,10 @@ def _check_finality(self, beacon_block: dict) -> Optional[Event]: return None # latest finality delay from db - delay_entry = self.db.finality_checkpoints.find_one({"epoch": epoch_number - 1}) + delay_entry = await self.bot.db.finality_checkpoints.find_one({"epoch": epoch_number - 1}) prev_finality_delay = delay_entry["finality_delay"] if delay_entry else 0 - self.db.finality_checkpoints.update_one( + await self.bot.db.finality_checkpoints.update_one( {"epoch": epoch_number}, {"$set": {"finality_delay": finality_delay}}, upsert=True diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 420428d8..b041bcd8 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -11,7 +11,6 @@ from discord.ext import commands from discord.ext.commands import Context, is_owner from discord.ext.commands import hybrid_command -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -27,7 +26,6 @@ def __init__(self, bot: RocketWatch): self.client = anthropic.AsyncAnthropic(api_key=cfg["other.secrets.anthropic"]) # log all possible engines self.tokenizer = tiktoken.encoding_for_model("gpt-4-turbo") - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @classmethod def message_to_text(cls, message, index): @@ -59,7 
+57,7 @@ def message_to_text(cls, message, index): @is_owner() async def summarize_chat(self, ctx: Context): await ctx.defer(ephemeral=True) - last_ts = await self.db["last_summary"].find_one({"channel_id": ctx.channel.id}) + last_ts = await self.bot.db["last_summary"].find_one({"channel_id": ctx.channel.id}) # ratelimit if last_ts and (datetime.now(timezone.utc) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): await ctx.send("You can only summarize once every 6 hours.", ephemeral=True) @@ -125,7 +123,7 @@ async def summarize_chat(self, ctx: Context): await ctx.send("done", ephemeral=True) await msg.edit(embeds=es, attachments=[f]) # save the timestamp of the last summary - await self.db["last_summary"].update_one({"channel_id": ctx.channel.id}, {"$set": {"timestamp": datetime.now(timezone.utc)}}, upsert=True) + await self.bot.db["last_summary"].update_one({"channel_id": ctx.channel.id}, {"$set": {"timestamp": datetime.now(timezone.utc)}}, upsert=True) # a function that generates the prompt for the model by taking an array of messages, a prefix and a suffix def generate_prompt(self, messages, prefix, suffix): diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index f7df5df4..cec67ed2 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -8,7 +8,6 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command from matplotlib import pyplot as plt -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -22,8 +21,6 @@ class Commissions(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - # connect to local mongodb - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @hybrid_command() async def commission_history(self, ctx: Context): @@ -34,7 +31,7 @@ async def commission_history(self, ctx: Context): e = 
Embed(title='Commission History') - minipools = await self.db.minipools.find().sort("validator_index", 1).to_list(None) + minipools = await self.bot.db.minipools.find().sort("validator_index", 1).to_list(None) # create dot chart of minipools # x-axis: validator # y-axis: node_fee diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 7a6f8e48..aa6b24fd 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -1,7 +1,6 @@ import logging from datetime import datetime, timedelta -import pymongo import requests from datetime import timezone from web3.datastructures import MutableAttributeDict as aDict @@ -26,14 +25,8 @@ class CowOrders(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot, timedelta(minutes=5)) self.state = "OK" - self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch - # create the cow_orders collection if it doesn't exist - # limit the collection to 10000 entries - # create an index on order_uid - if "cow_orders" not in self.db.list_collection_names(): - self.db.create_collection("cow_orders", capped=True, size=10_000) - self.collection = self.db.cow_orders - self.collection.create_index("order_uid", unique=True) + self.collection = bot.db.cow_orders + self._did_setup = False self.tokens = [ str(rp.get_address_by_name("rocketTokenRPL")).lower(), @@ -52,13 +45,22 @@ async def cow(self, interaction: Interaction, tnx: str): embed = Embed(description = f"[cow explorer]({url})") await interaction.followup.send(embed=embed) - def _get_new_events(self) -> list[Event]: + async def _setup_collection(self): + if self._did_setup: + return + if "cow_orders" not in await self.bot.db.list_collection_names(): + await self.bot.db.create_collection("cow_orders", capped=True, size=10_000) + await self.collection.create_index("order_uid", unique=True) + self._did_setup = True + + async def _get_new_events(self) -> list[Event]: + await 
self._setup_collection() if self.state == "RUNNING": log.error("Cow Orders plugin was interrupted while running. Re-initializing...") self.__init__(self.bot) self.state = "RUNNING" try: - result = self.check_for_new_events() + result = await self.check_for_new_events() self.state = "OK" except Exception as e: log.error(f"Error while checking for new Cow Orders: {e}") @@ -67,7 +69,7 @@ def _get_new_events(self) -> list[Event]: return result # noinspection PyTypeChecker - def check_for_new_events(self): + async def check_for_new_events(self): log.info("Checking Cow Orders") payload = [] @@ -131,7 +133,7 @@ def check_for_new_events(self): # efficiently check if the orders are already in the database order_uids = [order["uid"] for order in cow_orders] existing_orders = self.collection.find({"order_uid": {"$in": order_uids}}) - existing_order_uids = [order["order_uid"] for order in existing_orders] + existing_order_uids = [order["order_uid"] async for order in existing_orders] # filter all orders that are already in the database cow_orders = [order for order in cow_orders if order["uid"] not in existing_order_uids] @@ -221,11 +223,11 @@ def check_for_new_events(self): unique_id=f"cow_order_found_{order['uid']}" )) # don't emit if the db collection is empty - this is to prevent the bot from spamming the channel with stale data - if not self.collection.count_documents({}): + if not await self.collection.count_documents({}): payload = [] # insert all new orders into the database - self.collection.insert_many([{"order_uid": order["uid"]} for order in cow_orders]) + await self.collection.insert_many([{"order_uid": order["uid"]} for order in cow_orders]) log.debug("Finished Checking Cow Orders") return payload diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index be146e66..9c57c6e7 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py 
@@ -8,7 +8,7 @@ import pymongo from cronitor import Monitor -from pymongo import AsyncMongoClient, UpdateOne, UpdateMany +from pymongo import UpdateOne, UpdateMany from pymongo.asynchronous.collection import AsyncCollection from web3.contract.contract import ContractFunction @@ -101,7 +101,6 @@ def _unpack_validator_info_dynamic(info): class DBUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("db-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 250 self.cooldown = timedelta(minutes=10) @@ -140,18 +139,18 @@ async def loop(self): async def check_indexes(self): log.debug("checking indexes") - await self.db.node_operators.create_index("address") - await self.db.node_operators.create_index("megapool.address") - await self.db.minipools.create_index("address") - await self.db.minipools.create_index("pubkey") - await self.db.minipools.create_index("validator_index") - await self.db.minipools.create_index("beacon.status") - await self.db.megapool_validators.create_index( + await self.bot.db.node_operators.create_index("address") + await self.bot.db.node_operators.create_index("megapool.address") + await self.bot.db.minipools.create_index("address") + await self.bot.db.minipools.create_index("pubkey") + await self.bot.db.minipools.create_index("validator_index") + await self.bot.db.minipools.create_index("beacon.status") + await self.bot.db.megapool_validators.create_index( [("megapool", pymongo.ASCENDING), ("validator_id", pymongo.ASCENDING)], unique=True ) - await self.db.megapool_validators.create_index("pubkey") - await self.db.megapool_validators.create_index("validator_index") - await self.db.megapool_validators.create_index("beacon.status") + await self.bot.db.megapool_validators.create_index("pubkey") + await self.bot.db.megapool_validators.create_index("validator_index") + await self.bot.db.megapool_validators.create_index("beacon.status") 
log.debug("indexes checked") async def _batch_multicall_update( @@ -195,7 +194,7 @@ async def add_untracked_node_operators(self): nm = rp.get_contract_by_name("rocketNodeManager") latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 latest_db = 0 - if res := await self.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.bot.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] if latest_db >= latest_rp: log.debug("No new nodes") @@ -204,7 +203,7 @@ async def add_untracked_node_operators(self): for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall_async([nm.functions.getNodeAt(i) for i in index_batch]) data |= dict(zip(index_batch, results)) - await self.db.node_operators.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()]) + await self.bot.db.node_operators.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()]) @timerun_async async def add_static_node_operator_data(self): @@ -215,7 +214,7 @@ def get_calls(n): return [ (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), ] await self._batch_multicall_update( - self.db.node_operators, + self.bot.db.node_operators, {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, get_calls, {"address": 1}, label="node operators" ) @@ -250,7 +249,7 @@ def get_calls(n): return [ (ns.functions.getNodeLastUnstakeTime(n["address"]), True, None, "rpl.last_unstake_time"), ] await self._batch_multicall_update( - self.db.node_operators, {}, get_calls, label="node operators", + self.bot.db.node_operators, {}, get_calls, label="node operators", projection={"address": 1, "fee_distributor.address": 1, "megapool.address": 1} ) @@ -274,7 +273,7 @@ def get_calls(n): return [ (proxy_at(n["megapool"]["address"]).functions.getUseLatestDelegate(), True, None, 
"megapool.use_latest_delegate"), ] await self._batch_multicall_update( - self.db.node_operators, {"megapool.deployed": True}, + self.bot.db.node_operators, {"megapool.deployed": True}, get_calls, {"address": 1, "megapool.address": 1}, label="megapools" ) @@ -286,7 +285,7 @@ async def add_untracked_minipools(self): mm = rp.get_contract_by_name("rocketMinipoolManager") latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 latest_db = 0 - if res := await self.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.bot.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] if latest_db >= latest_rp: log.debug("No new minipools") @@ -294,7 +293,7 @@ async def add_untracked_minipools(self): log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall_async([mm.functions.getMinipoolAt(i) for i in index_batch]) - await self.db.minipools.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)]) + await self.bot.db.minipools.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)]) @timerun_async async def add_static_minipool_data(self): @@ -304,14 +303,14 @@ def lamb(n): return [ (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), ] await self._batch_multicall_update( - self.db.minipools, + self.bot.db.minipools, {"node_operator": {"$exists": False}}, lamb, {"address": 1}, label="minipools" ) @timerun async def add_static_minipool_deposit_data(self): - minipools = await self.db.minipools.find( + minipools = await self.bot.db.minipools.find( {"deposit_amount": {"$exists": False}, "status": "initialised"}, {"address": 1, "_id": 0, "status_time": 1} ).sort("status_time", pymongo.ASCENDING).to_list() @@ -355,7 +354,7 @@ async def add_static_minipool_deposit_data(self): 
if not data: continue - await self.db.minipools.bulk_write( + await self.bot.db.minipools.bulk_write( [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], ordered=False ) @@ -381,11 +380,11 @@ def get_calls(n): (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), (mc.functions.getEthBalance(n["address"]), True, safe_to_float, "execution_balance"), ] - await self._batch_multicall_update(self.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools") + await self._batch_multicall_update(self.bot.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools") @timerun async def update_dynamic_minipool_beacon_data(self): - pubkeys = await self.db.minipools.distinct( + pubkeys = await self.bot.db.minipools.distinct( "pubkey", {"beacon.status": {"$ne": "withdrawal_done"}} ) pubkeys = [pk for pk in pubkeys if pk is not None] @@ -412,7 +411,7 @@ async def update_dynamic_minipool_beacon_data(self): }, } if data: - await self.db.minipools.bulk_write( + await self.bot.db.minipools.bulk_write( [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], ordered=False ) @@ -423,7 +422,7 @@ async def update_dynamic_minipool_beacon_data(self): @timerun_async async def add_untracked_megapool_validators(self): # get deployed megapools with their on-chain validator count - nodes = await self.db.node_operators.find( + nodes = await self.bot.db.node_operators.find( {"megapool.deployed": True, "megapool.validator_count": {"$gt": 0}}, {"address": 1, "megapool.address": 1, "megapool.validator_count": 1} ).to_list() @@ -433,7 +432,7 @@ async def add_untracked_megapool_validators(self): for node in nodes: megapool_addr = node["megapool"]["address"] on_chain_count = node["megapool"]["validator_count"] - db_count = await self.db.megapool_validators.count_documents({"megapool": megapool_addr}) + db_count = await self.bot.db.megapool_validators.count_documents({"megapool": 
megapool_addr}) if db_count >= on_chain_count: continue @@ -467,11 +466,11 @@ async def add_untracked_megapool_validators(self): doc.update(info) docs.append(doc) if docs: - await self.db.megapool_validators.insert_many(docs, ordered=False) + await self.bot.db.megapool_validators.insert_many(docs, ordered=False) @timerun_async async def update_dynamic_megapool_validator_data(self): - validators = await self.db.megapool_validators.find( + validators = await self.bot.db.megapool_validators.find( {"status": {"$nin": ["exited", "dissolved"]}}, {"megapool": 1, "validator_id": 1} ).to_list() @@ -494,11 +493,11 @@ async def update_dynamic_megapool_validator_data(self): if info is not None: ops.append(UpdateOne({"_id": v["_id"]}, {"$set": info})) if ops: - await self.db.megapool_validators.bulk_write(ops, ordered=False) + await self.bot.db.megapool_validators.bulk_write(ops, ordered=False) @timerun async def update_dynamic_megapool_validator_beacon_data(self): - pubkeys = await self.db.megapool_validators.distinct( + pubkeys = await self.bot.db.megapool_validators.distinct( "pubkey", {"beacon.status": {"$ne": "withdrawal_done"}} ) pubkeys = [pk for pk in pubkeys if pk is not None] @@ -527,7 +526,7 @@ async def update_dynamic_megapool_validator_beacon_data(self): }, } if data: - await self.db.megapool_validators.bulk_write( + await self.bot.db.megapool_validators.bulk_write( [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], ordered=False ) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 33b81977..a5fb5434 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -10,7 +10,6 @@ from discord import File, Interaction from discord.app_commands import Choice, command, guilds, describe from discord.ext.commands import Cog, is_owner -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -29,7 +28,6 @@ class Debug(Cog): def __init__(self, 
bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.contract_names = [] self.function_names = [] @@ -167,7 +165,7 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) if not confirm: await interaction.followup.send("Not running. Set `confirm` to `true` to run.") return - await self.db.minipools.drop() + await self.bot.db.minipools.drop() await interaction.followup.send(content="Done") @command() @@ -230,7 +228,7 @@ async def restore_support_template(self, interaction: Interaction, template_name user = await self.bot.get_or_fetch_user(user_id) - await self.db.support_bot_dumps.insert_one( + await self.bot.db.support_bot_dumps.insert_one( { "ts" : datetime.fromtimestamp(ts, tz=timezone.utc), "template": template_name, @@ -245,7 +243,7 @@ async def restore_support_template(self, interaction: Interaction, template_name } } ) - await self.db.support_bot.insert_one( + await self.bot.db.support_bot.insert_one( {"_id": template_name, "title": template_title, "description": template_description} ) @@ -273,7 +271,7 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): for event in events: channel_candidates = [value for key, value in channels.items() if event.event_name.startswith(key)] channel_id = channel_candidates[0] if channel_candidates else channels["default"] - await self.db.event_queue.insert_one({ + await self.bot.db.event_queue.insert_one({ "_id": event.unique_id, "embed": pickle.dumps(event.embed), "topic": event.topic, diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index 27945023..a12bcac9 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -1,6 +1,5 @@ import logging -from pymongo import AsyncMongoClient from pymongo.asynchronous.collection import AsyncCollection from discord 
import Interaction @@ -21,7 +20,6 @@ class DelegateContracts(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch async def _delegate_stats( self, @@ -73,7 +71,7 @@ async def minipool_delegates(self, interaction: Interaction): """Show stats for minipool delegate contract adoption""" await interaction.response.defer() e = await self._delegate_stats( - collection=self.db.minipools, + collection=self.bot.db.minipools, match_filter={"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}}, delegate_field="effective_delegate", use_latest_field="use_latest_delegate", @@ -87,7 +85,7 @@ async def megapool_delegates(self, interaction: Interaction): """Show stats for megapool delegate contract adoption""" await interaction.response.defer() e = await self._delegate_stats( - collection=self.db.node_operators, + collection=self.bot.db.node_operators, match_filter={"megapool.active_validator_count": {"$gt": 0}}, delegate_field="megapool.effective_delegate", use_latest_field="megapool.use_latest_delegate", diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 38312977..69839e76 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -2,7 +2,6 @@ from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from plugins.queue.queue import Queue @@ -20,7 +19,6 @@ class DepositPool(StatusPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @staticmethod def get_deposit_pool_stats() -> Embed: diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index cbeb233c..3451b8d4 100644 --- 
a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -32,7 +32,6 @@ ) from discord.ext.commands import Cog from discord.app_commands import command, guilds, ContextMenu -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -108,14 +107,13 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: await interaction.message.delete() async with self.plugin._update_lock: - report = await self.plugin.db.scam_reports.find_one(db_filter) + report = await self.plugin.bot.db.scam_reports.find_one(db_filter) await self.plugin._update_report(report, f"This has been marked as safe by {user_repr}.") - await self.plugin.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None}}) + await self.plugin.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None}}) await interaction.response.send_message(content="Warning removed!", ephemeral=True) def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self._report_lock = asyncio.Lock() self._update_lock = asyncio.Lock() @@ -172,7 +170,7 @@ async def _generate_message_report(self, message: Message, reason: str) -> Optio return None async with self._report_lock: - if await self.db.scam_reports.find_one({"type": "message", "message_id": message.id}): + if await self.bot.db.scam_reports.find_one({"type": "message", "message_id": message.id}): log.info(f"Found existing report for message {message.id} in database") return None @@ -197,7 +195,7 @@ async def _generate_message_report(self, message: Message, reason: str) -> Optio with io.StringIO(text) as f: contents = File(f, filename="original_message.txt") - await self.db.scam_reports.insert_one({ + await self.bot.db.scam_reports.insert_one({ "type" : "message", "guild_id" : message.guild.id, "channel_id" : message.channel.id, @@ -219,7 +217,7 @@ async def 
_generate_thread_report(self, thread: Thread, reason: str) -> Optional return None async with self._report_lock: - if await self.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): + if await self.bot.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): log.info(f"Found existing report for thread {thread.id} in database") return None @@ -241,7 +239,7 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional "\n" "Please review and take appropriate action." ) - await self.db.scam_reports.insert_one({ + await self.bot.db.scam_reports.insert_one({ "type" : "thread", "guild_id" : thread.guild.id, "channel_id" : thread.id, @@ -271,7 +269,7 @@ async def report_message(self, message: Message, reason: str) -> None: report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await report_channel.send(embed=report, file=contents) - await self.db.scam_reports.update_one( + await self.bot.db.scam_reports.update_one( {"message_id": message.id}, {"$set": {"warning_id": warning_msg.id if warning_msg else None, "report_id": report_msg.id}} ) @@ -296,7 +294,7 @@ async def manual_message_report(self, interaction: Interaction, message: Message report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await report_channel.send(embed=report, file=contents) - await self.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"report_id": report_msg.id}}) + await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"report_id": report_msg.id}}) moderator = await self.bot.get_or_fetch_user(cfg["rocketpool.support.moderator_id"]) view = self.RemovalVoteView(self, message) @@ -306,7 +304,7 @@ async def manual_message_report(self, interaction: Interaction, message: Message view=view, mention_author=False ) - await self.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"warning_id": 
warning_msg.id}}) + await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"warning_id": warning_msg.id}}) await interaction.followup.send(content="Thanks for reporting!") def _markdown_link_trick(self, message: Message) -> Optional[str]: @@ -496,7 +494,7 @@ async def on_raw_bulk_message_delete(self, event: RawBulkMessageDeleteEvent) -> async def _on_message_delete(self, message_id: int) -> None: db_filter = {"type": "message", "message_id": message_id, "removed": False} - if not (report := await self.db.scam_reports.find_one(db_filter)): + if not (report := await self.bot.db.scam_reports.find_one(db_filter)): return channel = await self.bot.get_or_fetch_channel(report["channel_id"]) @@ -505,17 +503,17 @@ async def _on_message_delete(self, message_id: int) -> None: await message.delete() await self._update_report(report, "Original message has been deleted.") - await self.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) + await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) @Cog.listener() async def on_member_ban(self, guild: Guild, user: User) -> None: async with self._update_lock: - reports = await self.db.scam_reports.find( + reports = await self.bot.db.scam_reports.find( {"guild_id": guild.id, "user_id": user.id, "user_banned": False} ).to_list(None) for report in reports: await self._update_report(report, "User has been banned.") - await self.db.scam_reports.update_one(report, {"$set": {"user_banned": True}}) + await self.bot.db.scam_reports.update_one(report, {"$set": {"user_banned": True}}) async def _update_report(self, report: dict, note: str) -> None: report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) @@ -544,7 +542,7 @@ async def report_thread(self, thread: Thread, reason: str) -> None: report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await 
report_channel.send(embed=report) - await self.db.scam_reports.update_one( + await self.bot.db.scam_reports.update_one( {"channel_id": thread.id, "message_id": None}, {"$set": {"warning_id": warning_msg.id if warning_msg else None, "report_id": report_msg.id}} ) @@ -575,9 +573,9 @@ async def on_raw_thread_update(self, event: RawThreadUpdateEvent) -> None: async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: async with self._update_lock: db_filter = {"type": "thread", "channel_id": event.thread_id, "removed": False} - if report := await self.db.scam_reports.find_one(db_filter): + if report := await self.bot.db.scam_reports.find_one(db_filter): await self._update_report(report, "Thread has been deleted.") - await self.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) + await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) @command() @guilds(cfg["rocketpool.support.server_id"]) @@ -603,7 +601,7 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await report_channel.send(embed=report) - await self.db.scam_reports.update_one( + await self.bot.db.scam_reports.update_one( {"guild_id": user.guild.id, "user_id": user.id, "channel_id": None, "message_id": None}, {"$set": {"report_id": report_msg.id}} ) @@ -614,7 +612,7 @@ async def _generate_user_report(self, user: Member, reason: str) -> Optional[Emb return None async with self._report_lock: - if await self.db.scam_reports.find_one( + if await self.bot.db.scam_reports.find_one( {"type": "user", "guild_id": user.guild.id, "user_id": user.id} ): log.info(f"Found existing report for user {user.id} in database") @@ -633,7 +631,7 @@ async def _generate_user_report(self, user: Member, reason: str) -> Optional[Emb ) report.set_thumbnail(url=user.display_avatar.url) - await 
self.db.scam_reports.insert_one({ + await self.bot.db.scam_reports.insert_one({ "type" : "user", "guild_id" : user.guild.id, "user_id" : user.id, diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 63b30872..2be63696 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -3,10 +3,8 @@ import asyncio import logging -from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timedelta from enum import Enum -from functools import partial from typing import Optional, Any import pymongo @@ -14,7 +12,6 @@ import discord from discord.ext import commands, tasks from eth_typing import BlockIdentifier, BlockNumber -from pymongo import AsyncMongoClient from web3.datastructures import MutableAttributeDict from rocketwatch import RocketWatch @@ -23,7 +20,7 @@ from utils.cfg import cfg from utils.embeds import assemble, Embed from utils.event import EventPlugin -from utils.shared_w3 import w3 +from utils.shared_w3 import w3_async log = logging.getLogger("event_core") log.setLevel(cfg["log_level"]) @@ -41,7 +38,6 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.state = self.State.OK self.channels = cfg["discord.channels"] - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.head_block: BlockIdentifier = cfg["events.genesis"] self.block_batch_size = cfg["events.block_batch_size"] self.monitor = Monitor("gather-new-events", api_key=cfg["other.secrets.cronitor"]) @@ -89,7 +85,7 @@ async def gather_new_events(self) -> None: log.info("Gathering messages from submodules") log.debug(f"{self.head_block = }") - latest_block = w3.eth.get_block_number() + latest_block = await w3_async.eth.get_block_number() submodules = [cog for cog in self.bot.cogs.values() if isinstance(cog, EventPlugin)] log.debug(f"Running {len(submodules)} submodules") @@ -97,18 +93,18 @@ async def gather_new_events(self) -> None: # already caught up to head, 
just fetch new events target_block = "latest" to_block = latest_block - gather_fns = [sm.get_new_events for sm in submodules] + coroutines = [sm.get_new_events() for sm in submodules] # prevent losing state if process is interrupted before updating db self.head_block = cfg["events.genesis"] else: # behind chain head, let's see how far - last_event_entry = await self.db.event_queue.find().sort( + last_event_entry = await self.bot.db.event_queue.find().sort( "block_number", pymongo.DESCENDING ).limit(1).to_list(None) if last_event_entry: self.head_block = max(self.head_block, last_event_entry[0]["block_number"]) - last_checked_entry = await self.db.last_checked_block.find_one({"_id": "events"}) + last_checked_entry = await self.bot.db.last_checked_block.find_one({"_id": "events"}) if last_checked_entry: self.head_block = max(self.head_block, last_checked_entry["block"]) @@ -128,26 +124,22 @@ async def gather_new_events(self) -> None: log.info(f"Checking block range [{from_block}, {to_block}]") - gather_fns = [] + coroutines = [] for sm in submodules: - fn = partial(sm.get_past_events, from_block=from_block, to_block=to_block) - gather_fns.append(fn) + coroutines.append(sm.get_past_events(from_block=from_block, to_block=to_block)) if target_block == "latest": sm.start_tracking(to_block + 1) log.debug(f"{target_block = }") - with ThreadPoolExecutor() as executor: - loop = asyncio.get_running_loop() - futures = [loop.run_in_executor(executor, gather_fn) for gather_fn in gather_fns] - results = await asyncio.gather(*futures) + results = await asyncio.gather(*coroutines) channels = cfg["discord.channels"] events: list[dict[str, Any]] = [] for result in results: for event in result: - if await self.db.event_queue.find_one({"_id": event.unique_id}): + if await self.bot.db.event_queue.find_one({"_id": event.unique_id}): log.debug(f"Event {event} already exists, skipping") continue @@ -170,10 +162,10 @@ async def gather_new_events(self) -> None: log.info(f"{len(events)} new 
events gathered, updating DB") if events: - await self.db.event_queue.insert_many(events) + await self.bot.db.event_queue.insert_many(events) self.head_block = target_block - await self.db.last_checked_block.replace_one( + await self.bot.db.last_checked_block.replace_one( {"_id": "events"}, {"_id": "events", "block": to_block}, upsert=True @@ -182,7 +174,7 @@ async def gather_new_events(self) -> None: async def process_event_queue(self) -> None: log.debug("Processing events in queue") # get all channels with unprocessed events - channels = await self.db.event_queue.distinct("channel_id", {"message_id": None}) + channels = await self.bot.db.event_queue.distinct("channel_id", {"message_id": None}) if not channels: log.debug("No pending events in queue") return @@ -196,17 +188,17 @@ def try_load(_entry: dict, _key: str) -> Optional[Any]: return None for channel_id in channels: - db_events: list[dict] = await self.db.event_queue.find( + db_events: list[dict] = await self.bot.db.event_queue.find( {"channel_id": channel_id, "message_id": None} ).sort("score", pymongo.ASCENDING).to_list(None) log.debug(f"Found {len(db_events)} events for channel {channel_id}.") channel = await self.bot.get_or_fetch_channel(channel_id) - for state_message in await self.db.state_messages.find({"channel_id": channel_id}).to_list(None): + for state_message in await self.bot.db.state_messages.find({"channel_id": channel_id}).to_list(None): msg = await channel.fetch_message(state_message["message_id"]) await msg.delete() - await self.db.state_messages.delete_one({"channel_id": channel_id}) + await self.bot.db.state_messages.delete_one({"channel_id": channel_id}) for event_entry in db_events: embed: Optional[Embed] = try_load(event_entry, "embed") @@ -225,7 +217,7 @@ def try_load(_entry: dict, _key: str) -> Optional[Any]: # post event message msg = await channel.send(embed=embed, files=files) # add message id to event - await self.db.event_queue.update_one( + await 
self.bot.db.event_queue.update_one( {"_id": event_entry["_id"]}, {"$set": {"message_id": msg.id}} ) @@ -234,7 +226,7 @@ def try_load(_entry: dict, _key: str) -> Optional[Any]: async def update_status_messages(self) -> None: configs = cfg.get("events.status_message", {}) - for state_message in (await self.db.state_messages.find().to_list()): + for state_message in (await self.bot.db.state_messages.find().to_list()): if state_message["_id"] not in configs: log.debug(f"No config for state message ID {state_message['_id']}, removing message") await self._replace_or_add_status("", None, state_message) @@ -244,7 +236,7 @@ async def update_status_messages(self) -> None: await self._update_status_message(channel_name, config) async def _update_status_message(self, channel_name: str, config: dict) -> None: - state_message = await self.db.state_messages.find_one({"_id": channel_name}) + state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) if state_message: age = datetime.now() - state_message["sent_at"] cooldown = timedelta(seconds=config["cooldown"]) @@ -252,7 +244,7 @@ async def _update_status_message(self, channel_name: str, config: dict) -> None: log.debug(f"State message for {channel_name} not past cooldown: {age} < {cooldown}") return - if not (embed := await generate_template_embed(self.db, "announcement")): + if not (embed := await generate_template_embed(self.bot.db, "announcement")): try: plugin: StatusPlugin = self.bot.cogs.get(config["plugin"]) embed = await plugin.get_status() @@ -270,7 +262,7 @@ async def _update_status_message(self, channel_name: str, config: dict) -> None: async def show_service_interrupt(self) -> None: embed = assemble(MutableAttributeDict({"event_name": "service_interrupted"})) for channel_name in cfg.get("events.status_message", {}).keys(): - state_message = await self.db.state_messages.find_one({"_id": channel_name}) + state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) if (not 
state_message) or (state_message["state"] != str(self.state.ERROR)): await self._replace_or_add_status(channel_name, embed, state_message) @@ -288,14 +280,14 @@ async def _replace_or_add_status( try: msg = await channel.fetch_message(prev_status["message_id"]) await msg.edit(embed=embed) - await self.db.state_messages.update_one( + await self.bot.db.state_messages.update_one( prev_status, {"$set": {"sent_at": datetime.now(), "state": str(self.state)}} ) return except discord.errors.NotFound: log.warning("Could not fetch status, removing DB entry") - await self.db.state_messages.delete_one(prev_status) + await self.bot.db.state_messages.delete_one(prev_status) prev_status = None if prev_status: @@ -303,13 +295,13 @@ async def _replace_or_add_status( channel = await self.bot.get_or_fetch_channel(prev_status["channel_id"]) msg = await channel.fetch_message(prev_status["message_id"]) await msg.delete() - await self.db.state_messages.delete_one(prev_status) + await self.bot.db.state_messages.delete_one(prev_status) if embed: log.debug(f"Creating new status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(target_channel_id) msg = await channel.send(embed=embed, silent=True) - await self.db.state_messages.insert_one({ + await self.bot.db.state_messages.insert_one({ "_id" : target_channel, "channel_id": target_channel_id, "message_id": msg.id, diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 0afbecb9..eef4c09c 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -20,7 +20,7 @@ from utils.embeds import assemble, prepare_args, el_explorer_url, Embed from utils.event import EventPlugin, Event from utils.rocketpool import rp, NoAddressFound -from utils.shared_w3 import w3, bacon +from utils.shared_w3 import w3_async, bacon from utils.solidity import SUBMISSION_KEYS from utils.block_time import block_to_ts @@ -28,7 +28,8 @@ log.setLevel(cfg["log_level"]) 
-PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], list[LogReceipt | EventData]] +from collections.abc import Coroutine +PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Coroutine[None, None, list[LogReceipt | EventData]]] class Events(EventPlugin): def __init__(self, bot: RocketWatch): @@ -64,7 +65,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: log.info(f"Adding filter for {contract_name}.{event_name}") event_abi = contract.events[event_name].abi input_types = ','.join(i['type'] for i in event_abi['inputs']) - topic = w3.keccak(text=f"{event_name}({input_types})").hex() + topic = w3_async.keccak(text=f"{event_name}({input_types})").hex() except Exception as e: log.exception(e) log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") @@ -75,8 +76,8 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: topic_map[topic] = event_name if addresses: - def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[LogReceipt]: - return w3.eth.get_logs({ + async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[LogReceipt]: + return await w3_async.eth.get_logs({ "address" : list(addresses), "topics" : [list(aggregated_topics)], "fromBlock": _from, @@ -96,12 +97,12 @@ def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"] event_map[event["event_name"]] = event["name"] def super_builder(_contract, _event) -> PartialFilter: # this is needed to pin nonlocal variables - def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[EventData]: + async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[EventData]: event_cls = _contract.events[_event["event_name"]] event_abi = event_cls.abi input_types = ','.join(i['type'] for i in event_abi['inputs']) - topic0 = 
w3.keccak(text=f"{_event['event_name']}({input_types})").hex() - raw_logs = w3.eth.get_logs({ + topic0 = w3_async.keccak(text=f"{_event['event_name']}({input_types})").hex() + raw_logs = await w3_async.eth.get_logs({ "topics" : [topic0], "fromBlock": _from, "toBlock" : _to, @@ -141,7 +142,7 @@ async def trigger_event( if not (event_name := self.event_map.get(event, None)): event_name = self.event_map[f"{contract}.{event}"] - if embed := self.handle_event(event_name, event_obj): + if embed := await self.handle_event(event_name, event_obj): await interaction.followup.send(embed=embed) else: await interaction.followup.send(content="No events triggered.") @@ -151,7 +152,7 @@ async def trigger_event( @is_owner() async def replay_events(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() - receipt = w3.eth.get_transaction_receipt(tx_hash) + receipt = await w3_async.eth.get_transaction_receipt(tx_hash) logs: list[LogReceipt] = receipt.logs filtered_events: list[LogReceipt | EventData] = [] @@ -173,25 +174,25 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): rich_logs = event.process_receipt(receipt, errors=DISCARD) filtered_events.extend(rich_logs) - responses, _ = self.process_events(filtered_events) + responses, _ = await self.process_events(filtered_events) if responses: await interaction.followup.send(embeds=[response.embed for response in responses]) else: await interaction.followup.send(content="No events found.") - def _get_new_events(self) -> list[Event]: + async def _get_new_events(self) -> list[Event]: from_block = self.last_served_block + 1 - self.lookback_distance - return self.get_past_events(from_block, self._pending_block) + return await self.get_past_events(from_block, self._pending_block) - def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: log.debug(f"Fetching events 
in [{from_block}, {to_block}]") log.debug(f"Using {len(self._partial_filters)} filters") events = [] for pf in self._partial_filters: - events.extend(pf(from_block, to_block)) + events.extend(await pf(from_block, to_block)) - messages, contract_upgrade_block = self.process_events(events) + messages, contract_upgrade_block = await self.process_events(events) if not contract_upgrade_block: return messages @@ -201,13 +202,13 @@ def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> lis try: rp.flush() self.__init__(self.bot) - return messages + self.get_past_events(contract_upgrade_block + 1, to_block) + return messages + await self.get_past_events(contract_upgrade_block + 1, to_block) except Exception as err: # rollback to pre upgrade config if this goes wrong self._partial_filters, self.event_map, self.topic_map = old_config raise err - def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], Optional[BlockNumber]]: + async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], Optional[BlockNumber]]: events.sort(key=lambda e: (e.blockNumber, e.logIndex)) messages = [] upgrade_block = None @@ -234,7 +235,7 @@ def hash_args(_args: aDict) -> None: # default event path contract = rp.get_contract_by_address(event.address) contract_event = self.topic_map[event.topics[0].hex()] - topics = [w3.to_hex(t) for t in event.topics] + topics = [w3_async.to_hex(t) for t in event.topics] _event = aDict(contract.events[contract_event]().process_log(event)) _event.topics = topics _event.args = aDict(_event.args) @@ -248,7 +249,7 @@ def hash_args(_args: aDict) -> None: event = _event if event_name := self.event_map.get(f"{n}.{event.event}"): - embed = self.handle_event(event_name, event) + embed = await self.handle_event(event_name, event) event_name = event.args.get("event_name", event_name) else: log.warning(f"Skipping unknown event {n}.{event.event}") @@ -261,7 +262,7 @@ def hash_args(_args: aDict) -> 
None: # deposit/exit event path event.args = aDict(event.args) hash_args(event.args) - embed = self.handle_global_event(event_name, event) + embed = await self.handle_global_event(event_name, event) event_name = event.args.get("event_name", event_name) if (event_name is None) or (embed is None): @@ -362,7 +363,7 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: events.remove(vote_event) elif full_event_name == "MinipoolPrestaked": for assign_event in events_by_name.get("rocketDepositPool.DepositAssigned", []).copy(): - assigned_minipool = w3.to_checksum_address(assign_event["topics"][1][-20:]) + assigned_minipool = w3_async.to_checksum_address(assign_event["topics"][1][-20:]) if event["address"] == assigned_minipool: events_by_name["rocketDepositPool.DepositAssigned"].remove(assign_event) events.remove(assign_event) @@ -393,8 +394,8 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: return events - def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: - receipt = w3.eth.get_transaction_receipt(event.transactionHash) + async def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: + receipt = await w3_async.eth.get_transaction_receipt(event.transactionHash) is_minipool_event = rp.is_minipool(event.address) or rp.is_minipool(receipt.to) is_megapool_event = rp.is_megapool(event.address) or rp.is_megapool(receipt.to) @@ -447,9 +448,9 @@ def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: event.args.megapool = event.address event.args.node = rp.call("rocketMegapoolDelegate.getNodeAddress", address=event.address) - return self.handle_event(event_name, event) + return await self.handle_event(event_name, event) - def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: + async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: args = aDict(event.args) if "negative_rETH_ratio_update_event" in event_name: @@ 
-477,13 +478,13 @@ def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: match args.types[i]: case 0: # SettingType.UINT256 - value = w3.to_int(value_raw) + value = w3_async.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3.to_checksum_address(value_raw) + value = w3_async.to_checksum_address(value_raw) case _: value = "???" description_parts.append( @@ -531,7 +532,7 @@ def share_repr(percentage: float) -> str: elif args.newLimit < args.oldLimit: event_name = event_name.replace("change", "decrease") elif event_name == "cs_operator_added_event": - args.address = w3.eth.get_transaction_receipt(event.transactionHash)["from"] + args.address = await w3_async.eth.get_transaction_receipt(event.transactionHash)["from"] elif event_name == "cs_rpl_treasury_fee_change_event": args.oldFee = 100 * solidity.to_float(args.oldFee) args.newFee = 100 * solidity.to_float(args.newFee) @@ -558,8 +559,8 @@ def share_repr(percentage: float) -> str: rpl = rp.get_address_by_name("rocketTokenRPL") if args.signerToken != rpl and args.senderToken != rpl: return None - args.seller = w3.to_checksum_address(f"0x{event.topics[2][-40:]}") - args.buyer = w3.to_checksum_address(f"0x{event.topics[3][-40:]}") + args.seller = w3_async.to_checksum_address(f"0x{event.topics[2][-40:]}") + args.buyer = w3_async.to_checksum_address(f"0x{event.topics[3][-40:]}") # token names s = rp.assemble_contract(name="ERC20", address=args.signerToken) args.sellToken = s.functions.symbol().call() @@ -585,7 +586,7 @@ def share_repr(percentage: float) -> str: receipt = None if cfg["rocketpool.chain"] == "mainnet": - receipt = w3.eth.get_transaction_receipt(event.transactionHash) + receipt = await w3_async.eth.get_transaction_receipt(event.transactionHash) args.tnx_fee = receipt["gasUsed"] * receipt["effectiveGasPrice"] args.tnx_fee_usd = round(rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) args.caller = receipt["from"] @@ -757,7 +758,7 
@@ def share_repr(percentage: float) -> str: # get the transaction receipt args.depositAmount = rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber) user_deposit = args.depositAmount - receipt = w3.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] ee = rp.get_contract_by_name("rocketNodeDeposit").events.DepositReceived() with warnings.catch_warnings(): @@ -775,7 +776,7 @@ def share_repr(percentage: float) -> str: warnings.simplefilter("ignore") processed_logs = e.process_receipt(receipt) - deposit_contract = bytes(w3.solidity_keccak(["string"], ["rocketNodeDeposit"])) + deposit_contract = bytes(w3_async.solidity_keccak(["string"], ["rocketNodeDeposit"])) for withdraw_event in processed_logs: # event.logindex 44, withdraw_event.logindex 50, rough distance like that # reminder order is different than the previous example diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 5ba8845f..e1994145 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -5,7 +5,6 @@ from discord import Interaction, File from discord.ext import commands from discord.app_commands import command -from pymongo import AsyncMongoClient from matplotlib import pyplot as plt from rocketwatch import RocketWatch @@ -21,7 +20,6 @@ class FeeDistribution(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @command() async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", "pie"]): @@ -37,7 +35,7 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", fig, axs = plt.subplots(1, 2) for i, bond in enumerate([8, 16]): - result = await self.db.minipools.aggregate([ + result = 
await self.bot.db.minipools.aggregate([ { "$match": { "node_deposit_balance": bond, diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index b754e505..4ab75033 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -2,7 +2,7 @@ from discord.ext import commands from discord.ext.commands import hybrid_command, Context -from pymongo import AsyncMongoClient, InsertOne +from pymongo import InsertOne from rocketwatch import RocketWatch from utils.cfg import cfg @@ -17,11 +17,9 @@ log.setLevel(cfg["log_level"]) -class LotteryBase: - def __init__(self): - # connect to local mongodb - self.client = AsyncMongoClient(cfg["mongodb.uri"]) - self.db = self.client.get_database("rocketwatch") +class Lottery(commands.Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot self.did_check = False async def _check_indexes(self): @@ -29,7 +27,7 @@ async def _check_indexes(self): return log.debug("Checking indexes") for period in ["latest", "next"]: - col = self.db[f"sync_committee_{period}"] + col = self.bot.db[f"sync_committee_{period}"] await col.create_index("validator", unique=True) await col.create_index("index", unique=True) self.did_check = True @@ -44,14 +42,14 @@ async def load_sync_committee(self, period): if period == "next": sync_period += 1 data = (await bacon.get_sync_committee_async(sync_period * 256))["data"] - await self.db.sync_committee_stats.replace_one({"period": period}, + await self.bot.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, "end_epoch" : (sync_period + 1) * 256, "sync_period": sync_period * 256, }, upsert=True) validators = data["validators"] - col = self.db[f"sync_committee_{period}"] + col = self.bot.db[f"sync_committee_{period}"] # get unique validators from collection validators_in_db = await col.distinct("validator") if set(validators) == set(validators_in_db): @@ -60,13 +58,13 @@ async def 
load_sync_committee(self, period): InsertOne({"index": i, "validator": int(validator)}) for i, validator in enumerate(validators) ] - async with self.client.start_session() as session: + async with self.bot.db.client.start_session() as session: async with await session.start_transaction(): await col.delete_many({}) await col.bulk_write(payload) async def get_validators_for_sync_committee_period(self, period): - data = await self.db[f"sync_committee_{period}"].aggregate([ + data = await self.bot.db[f"sync_committee_{period}"].aggregate([ { '$lookup': { 'from' : 'minipools', @@ -106,7 +104,7 @@ async def generate_sync_committee_description(self, period): await self.load_sync_committee(period) validators = await self.get_validators_for_sync_committee_period(period) # get stats about the current period - stats = await self.db.sync_committee_stats.find_one({"period": period}) + stats = await self.bot.db.sync_committee_stats.find_one({"period": period}) perc = len(validators) / 512 description = f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" start_timestamp = BEACON_START_DATE + (stats['start_epoch'] * BEACON_EPOCH_LENGTH) @@ -131,14 +129,6 @@ async def generate_sync_committee_description(self, period): node_operators]) return description - -lottery = LotteryBase() - - -class Lottery(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - @hybrid_command() async def lottery(self, ctx: Context): """ @@ -146,8 +136,8 @@ async def lottery(self, ctx: Context): """ await ctx.defer(ephemeral=is_hidden(ctx)) embeds = [ - Embed(title="Current sync committee:", description=await lottery.generate_sync_committee_description("latest")), - Embed(title="Next sync committee:", description=await lottery.generate_sync_committee_description("next")) + Embed(title="Current sync committee:", description=await self.generate_sync_committee_description("latest")), + Embed(title="Next sync committee:", description=await 
self.generate_sync_committee_description("next")) ] await ctx.send(embeds=embeds) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 799e615f..73486ac7 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -3,7 +3,6 @@ from datetime import datetime, timedelta from io import BytesIO -from pymongo import AsyncMongoClient from bson import SON from cachetools import TTLCache from discord import File @@ -24,8 +23,7 @@ class Metrics(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - self.collection = self.db.command_metrics + self.collection = self.bot.db.command_metrics @hybrid_command() async def metrics(self, ctx: Context): @@ -40,7 +38,7 @@ async def metrics(self, ctx: Context): start = datetime.utcnow() - timedelta(days=7) # get the total number of processed events from the event_queue in the last 7 days - total_events_processed = await self.db.event_queue.count_documents({'time_seen': {'$gte': start}}) + total_events_processed = await self.bot.db.event_queue.count_documents({'time_seen': {'$gte': start}}) desc += f"Total Events Processed:\n\t{total_events_processed}\n\n" # get the total number of handled commands in the last 7 days @@ -108,7 +106,7 @@ async def metrics_chart(self, ctx): '$sort': SON([('_id.year', 1), ('_id.month', 1)]) } ])).to_list(None) - event_emission = await (await self.db.event_queue.aggregate([ + event_emission = await (await self.bot.db.event_queue.aggregate([ { '$group': { '_id' : { diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 5d39ae36..4dee313c 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -1,7 +1,6 @@ import json import logging -import pymongo from web3.datastructures import MutableAttributeDict as aDict from rocketwatch import 
RocketWatch @@ -18,32 +17,31 @@ class Milestones(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch - self.collection = self.db.milestones + self.collection = bot.db.milestones self.state = "OK" with open("./plugins/milestones/milestones.json") as f: self.milestones = json.load(f) - def _get_new_events(self) -> list[Event]: + async def _get_new_events(self) -> list[Event]: if self.state == "RUNNING": log.error("Milestones plugin was interrupted while running. Re-initializing...") self.__init__(self.bot) self.state = "RUNNING" - result = self.check_for_new_events() + result = await self.check_for_new_events() self.state = "OK" return result # noinspection PyTypeChecker - def check_for_new_events(self): + async def check_for_new_events(self): log.info("Checking Milestones") payload = [] for milestone in self.milestones: milestone = aDict(milestone) - state = self.collection.find_one({"_id": milestone["id"]}) + state = await self.collection.find_one({"_id": milestone["id"]}) value = getattr(rp, milestone.function)(*milestone.args) if milestone.formatter: @@ -60,7 +58,7 @@ def check_for_new_events(self): else: log.debug( f"First time we have processed Milestones for milestone {milestone.id}. Adding it to the Database.") - self.collection.insert_one({"_id": milestone["id"], "current_goal": latest_goal}) + await self.collection.insert_one({"_id": milestone["id"], "current_goal": latest_goal}) previous_milestone = milestone.min if previous_milestone < latest_goal: log.info(f"Goal for milestone {milestone.id} has increased. 
Triggering Milestone!") @@ -76,7 +74,7 @@ def check_for_new_events(self): unique_id=f"{milestone.id}:{latest_goal}", )) # update the current goal in collection - self.collection.update_one({"_id": milestone["id"]}, {"$set": {"current_goal": latest_goal}}) + await self.collection.update_one({"_id": milestone["id"]}, {"$set": {"current_goal": latest_goal}}) log.debug("Finished Checking Milestones") return payload diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 1ab77d39..9bafc733 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -5,7 +5,6 @@ import inflect import matplotlib.pyplot as plt import numpy as np -import pymongo from discord import File from discord.app_commands import describe from discord.ext import commands @@ -41,9 +40,8 @@ async def minipool_distribution_raw(ctx: Context, distribution): class MinipoolDistribution(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = pymongo.MongoClient(cfg["mongodb.uri"]).rocketwatch - def get_minipool_counts_per_node(self): + async def get_minipool_counts_per_node(self): # get an array for minipool counts per node from db using aggregation # example: [0,0,1,2,3,3,3] # 2 nodes have 0 minipools @@ -71,7 +69,7 @@ def get_minipool_counts_per_node(self): } } ] - return [x["count"] for x in self.db.minipools.aggregate(pipeline)] + return [x["count"] async for x in self.bot.db.minipools.aggregate(pipeline)] @hybrid_command() @describe(raw="Show the raw Distribution Data") @@ -83,7 +81,7 @@ async def minipool_distribution(self, e = Embed() # Get the minipool distribution - counts = self.get_minipool_counts_per_node() + counts = await self.get_minipool_counts_per_node() # Converts the array of counts, eg [ 0, 0, 0, 1, 1, 2 ], to a list of tuples # where the first item is the number of 
minipools and the second item is the # number of nodes, eg [ (0, 3), (1, 2), (2, 1) ] @@ -138,7 +136,7 @@ async def node_gini(self, ctx: Context, raw: bool = False): e = Embed() e.title = "Validator Share of Largest Nodes" - minipool_counts = np.array(self.get_minipool_counts_per_node()) + minipool_counts = np.array(await self.get_minipool_counts_per_node()) # sort descending minipool_counts[::-1].sort() diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index d413422b..376b88ed 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -1,7 +1,6 @@ import logging from datetime import datetime, timedelta -from pymongo import AsyncMongoClient from discord.app_commands import guilds from discord.ext import commands, tasks from discord.ext.commands import hybrid_command, is_owner @@ -17,7 +16,6 @@ class PinnedMessages(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch if not self.run_loop.is_running() and bot.is_ready(): self.run_loop.start() @@ -31,11 +29,11 @@ async def on_ready(self): @tasks.loop(seconds=60.0) async def run_loop(self): # get all pinned messages in db - messages = await self.db.pinned_messages.find().to_list() + messages = await self.bot.db.pinned_messages.find().to_list() for message in messages: # if it's older than 6 hours and not disabled, mark as disabled if message["created_at"] + timedelta(hours=6) < datetime.utcnow() and not message["disabled"]: - await self.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) + await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) message["disabled"] = True try: # check if it's marked as disabled but not cleaned_up @@ -47,7 +45,7 @@ async def run_loop(self): # delete message await msg.delete() # mark as 
cleaned_up - await self.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"cleaned_up": True}}) + await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"cleaned_up": True}}) elif not message["disabled"]: # delete and resend message channel = self.bot.get_channel(message["channel_id"]) @@ -64,7 +62,7 @@ async def run_loop(self): e.description = message["content"] e.set_footer(text="This message has been pinned by Invis. Will be automatically removed if not updated within 6 hours.") m = await channel.send(embed=e) - await self.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"message_id": m.id}}) + await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"message_id": m.id}}) except Exception as err: await self.bot.report_error(err) @@ -79,17 +77,17 @@ async def pin(self, ctx, channel_id, title, description): await ctx.send("Channel not found") return # check if we already have a pinned message - message = await self.db.pinned_messages.find_one({"channel_id": channel.id}) + message = await self.bot.db.pinned_messages.find_one({"channel_id": channel.id}) if message: # update message - await self.db.pinned_messages.update_one({"_id": message["_id"]}, { + await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, { "$set": {"title" : title, "content": description, "disabled": False, "cleaned_up": False, "message_id": None, "created_at": datetime.utcnow()}}) # rest is done by the run_loop await ctx.send("Updated pinned message") return # create new message - await self.db.pinned_messages.insert_one( + await self.bot.db.pinned_messages.insert_one( {"channel_id": channel.id, "message_id": None, "title": title, "content": description, "disabled": False, "cleaned_up": False, "created_at": datetime.utcnow()}) # rest is done by the run_loop @@ -106,7 +104,7 @@ async def unpin(self, ctx, channel_id): await ctx.send("Channel not found") return # check if we already have a pinned message - 
message = await self.db.pinned_messages.find_one({"channel_id": channel.id}) + message = await self.bot.db.pinned_messages.find_one({"channel_id": channel.id}) if not message: await ctx.send("No pinned message found") return @@ -115,7 +113,7 @@ async def unpin(self, ctx, channel_id): await ctx.send("Pinned message already disabled") return # soft delete - await self.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) + await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) # rest is done by the run_loop await ctx.send("Disabled pinned message") diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 0c749251..5418cc83 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -12,7 +12,7 @@ from discord.app_commands import command, describe from discord.utils import as_chunks from matplotlib import pyplot as plt -from pymongo import AsyncMongoClient, ASCENDING, DESCENDING +from pymongo import ASCENDING, DESCENDING from cronitor import Monitor from rocketwatch import RocketWatch @@ -114,7 +114,6 @@ def parse_proposal(beacon_block: dict) -> dict: class Proposals(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch self.monitor = Monitor("proposals-task", api_key=cfg["other.secrets.cronitor"]) self.batch_size = 100 self.cooldown = timedelta(minutes=5) @@ -141,14 +140,14 @@ async def loop(self): async def check_indexes(self): await self.bot.wait_until_ready() try: - await self.db.proposals.create_index("validator") - await self.db.proposals.create_index("slot", unique=True) - await self.db.proposals.create_index([("validator", ASCENDING), ("slot", DESCENDING)]) + await self.bot.db.proposals.create_index("validator") + await self.bot.db.proposals.create_index("slot", unique=True) + await 
self.bot.db.proposals.create_index([("validator", ASCENDING), ("slot", DESCENDING)]) except Exception as e: log.warning(f"Could not create indexes: {e}") async def fetch_proposals(self): - if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): + if db_entry := (await self.bot.db.last_checked_block.find_one({"_id": cog_id})): last_checked_slot = db_entry["slot"] else: last_checked_slot = 4700012 # last slot before merge @@ -157,7 +156,7 @@ async def fetch_proposals(self): for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) - await self.db.last_checked_block.replace_one({"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True) + await self.bot.db.last_checked_block.replace_one({"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True) async def fetch_proposal(self, slot: int) -> None: try: @@ -169,12 +168,12 @@ async def fetch_proposal(self, slot: int) -> None: raise e validator_index = int(beacon_header["proposer_index"]) - if not (minipool := (await self.db.minipools.find_one({"validator_index": validator_index}))): + if not (minipool := (await self.bot.db.minipools.find_one({"validator_index": validator_index}))): return None beacon_block = (await bacon.get_block_async(slot))["data"]["message"] proposal_data = parse_proposal(beacon_block) - await self.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) + await self.bot.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) async def create_minipool_proposal_view(self): log.info("creating minipool proposal view") @@ -222,8 +221,8 @@ async def create_minipool_proposal_view(self): } } ] - await self.db.minipool_proposals.drop() - await self.db.create_collection("minipool_proposals", viewOn="minipools", pipeline=pipeline) + await self.bot.db.minipool_proposals.drop() + 
await self.bot.db.create_collection("minipool_proposals", viewOn="minipools", pipeline=pipeline) @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): @@ -253,7 +252,7 @@ async def gather_attribute(self, attribute, remove_allnodes=False): if remove_allnodes: pipeline.insert(0, match_stage) - distribution = await (await self.db.minipool_proposals.aggregate(pipeline)).to_list() + distribution = await (await self.bot.db.minipool_proposals.aggregate(pipeline)).to_list() if remove_allnodes: d = {'remove_from_total': {'count': 0, 'validator_count': 0}} @@ -286,7 +285,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): ) # get proposals # limit to 6 months - proposals = await self.db.proposals.find( + proposals = await self.bot.db.proposals.find( { "version": {"$exists": 1}, "slot" : {"$gt": date_to_beacon_block((datetime.now() - timedelta(days=days)).timestamp())} @@ -296,7 +295,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): # get version used after max_slot - look_back # and have at least 10 occurrences start_slot = max_slot - look_back - recent_versions = await (await self.db.proposals.aggregate([ + recent_versions = await (await self.bot.db.proposals.aggregate([ { '$match': { 'slot' : { @@ -396,7 +395,7 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = minipools = sorted(minipools, key=lambda x: x[1]) # get total minipool count from rocketpool - unobserved_minipools = len(await self.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("_id")) - sum(d[1] for d in minipools) + unobserved_minipools = len(await self.bot.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("_id")) - sum(d[1] for d in minipools) if "remove_from_total" in data: unobserved_minipools -= data["remove_from_total"]["validator_count"] minipools.insert(0, ("No proposals yet", unobserved_minipools)) @@ -412,7 +411,7 @@ 
async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = node_operators = sorted(node_operators, key=lambda x: x[1]) # get total node operator count from rp - unobserved_node_operators = len(await self.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("node_operator")) - sum(d[1] for d in node_operators) + unobserved_node_operators = len(await self.bot.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("node_operator")) - sum(d[1] for d in node_operators) if "remove_from_total" in data: unobserved_node_operators -= data["remove_from_total"]["count"] node_operators.insert(0, ("No proposals yet", unobserved_node_operators)) @@ -512,7 +511,7 @@ async def client_combo_ranking(self, interaction: Interaction, remove_allnodes: """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) # aggregate [consensus, execution] pair counts - client_pairs = await (await self.db.minipool_proposals.aggregate([ + client_pairs = await (await self.bot.db.minipool_proposals.aggregate([ { "$match": { "latest_proposal.consensus_client": {"$ne": "Unknown"}, diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index a9084b38..dff5e409 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -10,7 +10,6 @@ from discord.ext import commands from discord.ext.commands import Context from discord.ext.commands import hybrid_command -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -29,7 +28,6 @@ class Random(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @hybrid_command() async def dice(self, ctx: Context, dice_string: str = "1d6"): @@ -168,7 +166,7 @@ async def smoothie(self, ctx: Context): e = Embed(title="Smoothing Pool") smoothie_eth = 
solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) - data = await (await self.db.minipools.aggregate([ + data = await (await self.bot.db.minipools.aggregate([ { '$match': { 'beacon.status': { diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index a0ee3ee3..0b0d1de7 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -9,7 +9,7 @@ from discord import Interaction from discord.app_commands import command from discord.ext.commands import Cog -from pymongo import AsyncMongoClient, InsertOne +from pymongo import InsertOne from rocketwatch import RocketWatch from utils import solidity @@ -30,14 +30,12 @@ class RockSolid(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.client = AsyncMongoClient(cfg["mongodb.uri"]) - self.db = self.client.rocketwatch self.deployment_block = 23237366 async def _fetch_asset_updates(self) -> list[tuple[int, float]]: vault_contract = rp.get_contract_by_name("RockSolidVault") - if db_entry := (await self.db.last_checked_block.find_one({"_id": cog_id})): + if db_entry := (await self.bot.db.last_checked_block.find_one({"_id": cog_id})): last_checked_block = db_entry["block"] else: last_checked_block = self.deployment_block @@ -47,7 +45,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: updates = [] - async for doc in self.db.rocksolid.find({}): + async for doc in self.bot.db.rocksolid.find({}): updates.append((doc["time"], doc["assets"])) db_operations = [] @@ -57,11 +55,11 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: updates.append((ts, assets)) db_operations.append(InsertOne({"time": ts, "assets": assets})) - async with self.client.start_session() as session: + async with self.bot.db.client.start_session() as session: async with await session.start_transaction(): if db_operations: - await self.db.rocksolid.bulk_write(db_operations) - await 
self.db.last_checked_block.replace_one( + await self.bot.db.rocksolid.bulk_write(db_operations) + await self.bot.db.last_checked_block.replace_one( {"_id": cog_id}, {"_id": cog_id, "block": b_to}, upsert=True diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index e0868c11..601be2de 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -5,7 +5,6 @@ from discord import File, Interaction from discord.ext import commands from discord.app_commands import command -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils import solidity @@ -21,7 +20,6 @@ class RPL(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @command() async def staked_rpl(self, interaction: Interaction): @@ -34,7 +32,7 @@ async def staked_rpl(self, interaction: Interaction): legacy_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) megapool_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) staked_rpl = legacy_staked_rpl + megapool_staked_rpl - unstaking_rpl = (await (await self.db.node_operators.aggregate([ + unstaking_rpl = (await (await self.bot.db.node_operators.aggregate([ { '$group': { '_id' : 'out', @@ -92,7 +90,7 @@ async def withdrawable_rpl(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - data = await (await self.db.node_operators.aggregate([ + data = await (await self.bot.db.node_operators.aggregate([ { '$match': { 'staking_minipool_count': { diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index 91dd7eb7..8650483f 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -3,7 +3,6 @@ from discord import errors from discord.ext import commands -from pymongo import 
AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -17,7 +16,6 @@ class ScamWarning(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self.channel_ids = set(cfg["rocketpool.dm_warning.channels"]) self.inactivity_cooldown = timedelta(days=90) self.failure_cooldown = timedelta(days=1) @@ -80,7 +78,7 @@ async def on_message(self, message) -> None: return msg_time = message.created_at.replace(tzinfo=None) - db_entry = (await self.db.scam_warning.find_one({"_id": message.author.id})) or {} + db_entry = (await self.bot.db.scam_warning.find_one({"_id": message.author.id})) or {} cooldown_end = datetime.fromtimestamp(0) if last_failure_time := db_entry.get("last_failure"): @@ -97,7 +95,7 @@ async def on_message(self, message) -> None: log.info(f"Unable to DM {message.author}, skipping warning.") last_failure_time = msg_time - await self.db.scam_warning.replace_one( + await self.bot.db.scam_warning.replace_one( {"_id": message.author.id}, {"_id": message.author.id, "last_message": msg_time, "last_failure": last_failure_time}, upsert=True diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 715f3486..735c3b57 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -5,14 +5,14 @@ from datetime import datetime, timedelta import regex -import requests +import aiohttp import termplotlib as tpl from discord import Interaction from discord.app_commands import command from web3.constants import ADDRESS_ZERO from eth_typing import ChecksumAddress, BlockNumber from graphql_query import Operation, Query, Argument -from pymongo import MongoClient, InsertOne, UpdateOne, DeleteOne, DESCENDING +from pymongo import InsertOne, UpdateOne, DeleteOne, DESCENDING from rocketwatch import RocketWatch from utils.cfg import cfg @@ -23,7 +23,7 @@ from utils.event import 
EventPlugin, Event from utils.visibility import is_hidden_weak from utils.block_time import ts_to_block -from utils.retry import retry +from utils.retry import retry_async log = logging.getLogger("snapshot") log.setLevel(cfg["log_level"]) @@ -32,16 +32,17 @@ class Snapshot(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot, timedelta(minutes=2)) - client = MongoClient(cfg["mongodb.uri"]).rocketwatch - self.proposal_db = client.snapshot_proposals - self.vote_db = client.snapshot_votes + self.proposal_db = bot.db.snapshot_proposals + self.vote_db = bot.db.snapshot_votes @staticmethod - @retry(tries=3, delay=1) - def _query_api(query: Query) -> list[dict] | Optional[dict]: + @retry_async(tries=3, delay=1) + async def _query_api(query: Query) -> list[dict] | Optional[dict]: query_json = {"query": Operation(type="query", queries=[query]).render()} log.debug(f"Snapshot query: {query_json}") - response = requests.get("https://hub.snapshot.org/graphql", json=query_json).json() + async with aiohttp.ClientSession() as session: + async with session.get("https://hub.snapshot.org/graphql", json=query_json) as resp: + response = await resp.json() if "errors" in response: raise Exception(response["errors"]) return response["data"][query.name] @@ -413,17 +414,17 @@ def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: ) @staticmethod - def fetch_proposal(proposal_id: str) -> Optional[Proposal]: + async def fetch_proposal(proposal_id: str) -> Optional[Proposal]: query = Query( name="proposal", arguments=[Argument(name="id", value=f"\"{proposal_id}\"")], fields=["id", "title", "choices", "start", "end", "scores", "quorum"] ) - response: Optional[dict] = Snapshot._query_api(query) + response: Optional[dict] = await Snapshot._query_api(query) return Snapshot.Proposal(**response) if response else None @staticmethod - def fetch_proposals( + async def fetch_proposals( state: Proposal.State, *, reverse: bool = False, @@ -447,11 +448,11 @@ 
def fetch_proposals( ], fields=["id", "title", "choices", "start", "end", "scores", "quorum"] ) - response: list[dict] = Snapshot._query_api(query) + response: list[dict] = await Snapshot._query_api(query) return [Snapshot.Proposal(**d) for d in response] @staticmethod - def fetch_votes( + async def fetch_votes( proposal: Proposal, *, created_after: int = 0, @@ -476,10 +477,10 @@ def fetch_votes( ], fields=["id", "voter", "created", "vp", "choice", "reason"] ) - response: list[dict] = Snapshot._query_api(query) + response: list[dict] = await Snapshot._query_api(query) return [Snapshot.Vote(proposal=proposal, **d) for d in response] - def _get_new_events(self) -> list[Event]: + async def _get_new_events(self) -> list[Event]: now = datetime.now() events: list[Event] = [] @@ -487,19 +488,19 @@ def _get_new_events(self) -> list[Event]: vote_db_changes: list[InsertOne] = [] known_active_proposals: dict[str, dict] = {} - for stored_proposal in self.proposal_db.find(): + async for stored_proposal in self.proposal_db.find(): if stored_proposal["end"] >= now.timestamp(): known_active_proposals[stored_proposal["_id"]] = stored_proposal else: # stored proposal ended, emit event and delete from DB log.info(f"Found expired proposal: {stored_proposal}") # recover full proposal - if proposal := self.fetch_proposal(stored_proposal["_id"]): + if proposal := await self.fetch_proposal(stored_proposal["_id"]): event = proposal.create_end_event() proposal_db_changes.append(DeleteOne(stored_proposal)) events.append(event) - active_proposals = self.fetch_proposals("active") + active_proposals = await self.fetch_proposals("active") for proposal in active_proposals: log.debug(f"Processing proposal {proposal}") if proposal.id not in known_active_proposals: @@ -525,20 +526,21 @@ def _get_new_events(self) -> list[Event]: events.append(event) try: - last_vote_ts = self.vote_db.find( + last_vote_entry = await self.vote_db.find( {"proposal_id": proposal.id} - ).sort({"created": 
DESCENDING}).limit(1)[0]["created"] + ).sort({"created": DESCENDING}).limit(1).to_list() + last_vote_ts = last_vote_entry[0]["created"] except IndexError: last_vote_ts = 0 - current_votes: list[Snapshot.Vote] = self.fetch_votes(proposal, created_after=last_vote_ts) + current_votes: list[Snapshot.Vote] = await self.fetch_votes(proposal, created_after=last_vote_ts) for vote in current_votes: log.debug(f"Processing vote {vote}") try: - stored_vote = self.vote_db.find( + stored_vote = (await self.vote_db.find( {"proposal_id": proposal.id, "voter": vote.voter} - ).sort({"created": DESCENDING}).limit(1)[0] + ).sort({"created": DESCENDING}).limit(1).to_list())[0] prev_vote = Snapshot.Vote( id=stored_vote["_id"], proposal=proposal, @@ -568,10 +570,10 @@ def _get_new_events(self) -> list[Event]: vote_db_changes.append(db_update) if proposal_db_changes: - self.proposal_db.bulk_write(proposal_db_changes) + await self.proposal_db.bulk_write(proposal_db_changes) if vote_db_changes: - self.vote_db.bulk_write(vote_db_changes) + await self.vote_db.bulk_write(vote_db_changes) return events @@ -583,7 +585,7 @@ async def snapshot_votes(self, interaction: Interaction): embed = Embed(title="Snapshot Proposals") embed.set_author(name="🔗 Data from snapshot.org", url="https://vote.rocketpool.net") - proposals = self.fetch_proposals("active", reverse=True)[::-1] + proposals = (await self.fetch_proposals("active", reverse=True))[::-1] if not proposals: embed.description = "No active proposals." 
return await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index f5e7e8f7..5ee0ae9d 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -6,7 +6,6 @@ from discord import app_commands, ui, Interaction, TextStyle, ButtonStyle, File, User from discord.app_commands import Group, Choice, choices from discord.ext.commands import Cog, GroupCog -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -34,7 +33,7 @@ async def generate_template_embed(db, template_name: str): # Define a simple View that gives us a counter button class AdminView(ui.View): - def __init__(self, db: AsyncMongoClient, template_name: str): + def __init__(self, db, template_name: str): super().__init__() self.db = db self.template_name = template_name @@ -172,18 +171,17 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): class SupportGlobal(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @app_commands.command(name="use") async def _use(self, interaction: Interaction, name: str, mention: User | None): - await _use(self.db, interaction, name, mention) + await _use(self.bot.db, interaction, name, mention) @_use.autocomplete("name") async def match_template(self, interaction: Interaction, current: str): return [ Choice( name=c["_id"], value=c["_id"] - ) for c in await self.db.support_bot.find( + ) for c in await self.bot.db.support_bot.find( {"_id": {"$regex": current, "$options": "i"}} ).to_list(25) ] @@ -191,14 +189,13 @@ async def match_template(self, interaction: Interaction, current: str): class SupportUtils(GroupCog, name="support"): subgroup = Group( - name='template', + name='template', description='various templates used by active support members', 
guild_ids=[cfg["rocketpool.support.server_id"]] ) def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @subgroup.command() async def add(self, interaction: Interaction, name: str): @@ -208,7 +205,7 @@ async def add(self, interaction: Interaction, name: str): return await interaction.response.defer(ephemeral=True) # check if the template already exists in the db - if await self.db.support_bot.find_one({"_id": name}): + if await self.bot.db.support_bot.find_one({"_id": name}): await interaction.edit_original_response( embed=Embed( title="Error", @@ -217,15 +214,15 @@ async def add(self, interaction: Interaction, name: str): ) return # create the template in the db - await self.db.support_bot.insert_one( + await self.bot.db.support_bot.insert_one( {"_id": name, "title": "Insert Title here", "description": "Insert Description here"} ) content = ( f"This is a preview of the `{name}` template.\n" f"You can change it using the `Edit` button." ) - embed = await generate_template_embed(self.db, name) - await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) + embed = await generate_template_embed(self.bot.db, name) + await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.bot.db, name)) @subgroup.command() async def edit(self, interaction: Interaction, name: str): @@ -235,7 +232,7 @@ async def edit(self, interaction: Interaction, name: str): return await interaction.response.defer(ephemeral=True) # check if the template exists in the db - template = await self.db.support_bot.find_one({"_id": name}) + template = await self.bot.db.support_bot.find_one({"_id": name}) if not template: await interaction.edit_original_response( @@ -245,13 +242,13 @@ async def edit(self, interaction: Interaction, name: str): ), ) return - + content = ( f"This is a preview of the `{name}` template.\n" f"You can change it using the `Edit` button." 
) - embed = await generate_template_embed(self.db, name) - await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.db, name)) + embed = await generate_template_embed(self.bot.db, name) + await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.bot.db, name)) @subgroup.command() async def remove(self, interaction: Interaction, name: str): @@ -261,7 +258,7 @@ async def remove(self, interaction: Interaction, name: str): return await interaction.response.defer(ephemeral=True) # check if the template exists in the db - template = await self.db.support_bot.find_one({"_id": name}) + template = await self.bot.db.support_bot.find_one({"_id": name}) if not template: await interaction.edit_original_response( embed=Embed( @@ -271,7 +268,7 @@ async def remove(self, interaction: Interaction, name: str): ) return # remove the template from the db - await self.db.support_bot.delete_one({"_id": name}) + await self.bot.db.support_bot.delete_one({"_id": name}) await interaction.edit_original_response( embed=Embed( title="Success", @@ -289,7 +286,7 @@ async def remove(self, interaction: Interaction, name: str): async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection - templates = await (await self.db.support_bot.aggregate([ + templates = await (await self.bot.db.support_bot.aggregate([ { "$lookup": { "from": "support_bot_dumps", @@ -324,7 +321,7 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): @subgroup.command() async def use(self, interaction: Interaction, name: str, mention: User | None): - await _use(self.db, interaction, name, mention) + await _use(self.bot.db, interaction, name, mention) @edit.autocomplete("name") @remove.autocomplete("name") @@ -334,7 +331,7 @@ async def match_template(self, interaction: Interaction, 
current: str): Choice( name=c["_id"], value=c["_id"] - ) for c in await self.db.support_bot.find( + ) for c in await self.bot.db.support_bot.find( { "_id": { "$regex": current, diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 0a2c704a..d991e5a5 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -3,7 +3,6 @@ import warnings import web3.exceptions -from datetime import timedelta from discord import Interaction from discord.app_commands import command, guilds from discord.ext.commands import is_owner @@ -17,7 +16,7 @@ from utils.embeds import assemble, prepare_args, el_explorer_url, Embed from utils.event import EventPlugin, Event from utils.rocketpool import rp -from utils.shared_w3 import w3 +from utils.shared_w3 import w3_async log = logging.getLogger("transactions") log.setLevel(cfg["log_level"]) @@ -71,7 +70,7 @@ async def trigger_tx( return event_name = self.function_map[contract][function] - if embeds := self.create_embeds(event_name, event_obj): + if embeds := await self.create_embeds(event_name, event_obj): await interaction.followup.send(embeds=embeds) else: await interaction.followup.send(content="No events triggered.") @@ -81,35 +80,35 @@ async def trigger_tx( @is_owner() async def replay_tx(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() - tnx = w3.eth.get_transaction(tx_hash) - block = w3.eth.get_block(tnx.blockHash) + tnx = await w3_async.eth.get_transaction(tx_hash) + block = await w3_async.eth.get_block(tnx.blockHash) - responses: list[Event] = self.process_transaction(block, tnx, tnx.to, tnx.input) + responses: list[Event] = await self.process_transaction(block, tnx, tnx.to, tnx.input) if responses: await interaction.followup.send(embeds=[response.embed for response in responses]) else: await interaction.followup.send(content="No events found.") - def _get_new_events(self) -> 
list[Event]: + async def _get_new_events(self) -> list[Event]: old_addresses = self.addresses try: from_block = self.last_served_block + 1 - self.lookback_distance - return self.get_past_events(from_block, self._pending_block) + return await self.get_past_events(from_block, self._pending_block) except Exception as err: # rollback in case of contract upgrade self.addresses = old_addresses raise err - def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: events = [] for block in range(from_block, to_block): - events.extend(self.get_events_for_block(block)) + events.extend(await self.get_events_for_block(block)) return events - def get_events_for_block(self, block_number: BlockIdentifier) -> list[Event]: + async def get_events_for_block(self, block_number: BlockIdentifier) -> list[Event]: log.debug(f"Checking block {block_number}") try: - block = w3.eth.get_block(block_number, full_transactions=True) + block = await w3_async.eth.get_block(block_number, full_transactions=True) except web3.exceptions.BlockNotFound: log.error(f"Skipping block {block_number} as it can't be found") return [] @@ -117,7 +116,7 @@ def get_events_for_block(self, block_number: BlockIdentifier) -> list[Event]: events = [] for tnx in block.transactions: if "to" in tnx: - events.extend(self.process_transaction(block, tnx, tnx.to, tnx.input)) + events.extend(await self.process_transaction(block, tnx, tnx.to, tnx.input)) else: log.debug(( f"Skipping transaction {tnx.hash.hex()} as it has no `to` parameter. 
" @@ -126,8 +125,7 @@ def get_events_for_block(self, block_number: BlockIdentifier) -> list[Event]: return events - @staticmethod - def create_embeds(event_name: str, event: aDict) -> list[Embed]: + async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: # prepare args args = aDict(event.args) @@ -143,18 +141,18 @@ def create_embeds(event_name: str, event: aDict) -> list[Embed]: if "odao_disable" in event_name and not args.confirmDisableBootstrapMode: return [] elif event_name == "pdao_set_delegate": - receipt = w3.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) args.delegator = receipt["from"] args.delegate = args.get("delegate") or args.get("newDelegate") args.votingPower = solidity.to_float(rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber)) if (args.votingPower < 50) or (args.delegate == args.delegator): return [] elif "failed_deposit" in event_name: - receipt = w3.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] args.burnedValue = solidity.to_float(event.gasPrice * receipt.gasUsed) elif "deposit_pool_queue" in event_name: - receipt = w3.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] event = rp.get_contract_by_name("rocketMinipoolQueue").events.MinipoolDequeued() # get the amount of dequeues that happened in this transaction using the event logs @@ -191,13 +189,13 @@ def share_repr(percentage: float) -> str: match args.types[i]: case 0: # SettingType.UINT256 - value = w3.to_int(value_raw) + value = w3_async.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3.to_checksum_address(value_raw) + value = w3_async.to_checksum_address(value_raw) case _: value = "???" 
description_parts.append( @@ -255,13 +253,13 @@ def share_repr(percentage: float) -> str: args = prepare_args(args) return [assemble(args)] - def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Event]: + async def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Event]: if contract_address not in self.addresses: return [] contract_name = rp.get_name_by_address(contract_address) # get receipt and check if the transaction reverted using status attribute - receipt = w3.eth.get_transaction_receipt(tnx.hash) + receipt = await w3_async.eth.get_transaction_receipt(tnx.hash) if contract_name == "rocketNodeDeposit" and receipt.status: log.info(f"Skipping successful node deposit {tnx.hash.hex()}") return [] @@ -319,9 +317,9 @@ def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Ev event.args["proposal_body"] = dao.build_proposal_body(proposal, include_proposer=False) dao_address = dao.contract.address - responses = self.process_transaction(block, tnx, dao_address, payload) + responses = await self.process_transaction(block, tnx, dao_address, payload) - embeds = self.create_embeds(event_name, event) + embeds = await self.create_embeds(event_name, event) new_responses = [] for embed in embeds: diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 20387d75..c00a5e6d 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -2,8 +2,6 @@ import humanize from colorama import Style -from pymongo import AsyncMongoClient - from discord import Interaction from discord.ext.commands import Cog from discord.app_commands import command, describe @@ -52,7 +50,6 @@ def split_rewards_logic(balance, node_share, commission, force_base=False): class TVL(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") @command() @describe(show_all="Also show entries with 0 value") @@ -125,7 
+122,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Minipools that are flagged as initialised have the following applied to them: # - They have 1 ETH staked on the beacon chain. # - They have not yet received 31 ETH from the Deposit Pool. - tmp = await (await self.db.minipools.aggregate([ + tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { 'status': 'initialised', @@ -148,7 +145,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have deposited 1 ETH to the Beacon Chain. # - They have 31 ETH from the Deposit Pool in their contract waiting to be staked as well. # - They are currently in the scrubbing process (should be 12 hours) or have not yet initiated the second phase. - tmp = await (await self.db.minipools.aggregate([ + tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { 'status': 'prelaunch', @@ -177,7 +174,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. # - The 31 ETH that was waiting in their address was moved back to the Deposit Pool (This can cause the Deposit Pool # to grow beyond its Cap, check the bellow comment for information about that). - tmp = await (await self.db.minipools.aggregate([ + tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { 'status': 'dissolved', @@ -203,7 +200,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "execution_balance"] # Staking Minipools: - minipools = await self.db.minipools.find({ + minipools = await self.bot.db.minipools.find({ 'status': {"$nin": ["initialised", "prelaunch", "dissolved"]}, 'node_deposit_balance': {"$exists": True}, }).to_list(None) @@ -271,7 +268,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. 
smoothie_balance = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) - tmp = await (await self.db.node_operators.aggregate([ + tmp = await (await self.bot.db.node_operators.aggregate([ { '$match': { 'smoothing_pool_registration': True, @@ -364,7 +361,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) # create _value string for each branch. the _value is the sum of all _val or _val values in the children - tmp = await (await self.db.node_operators.aggregate([ + tmp = await (await self.bot.db.node_operators.aggregate([ { '$match': { 'fee_distributor.eth_balance': { diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index b0c832ef..bdf3a453 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -7,7 +7,7 @@ from discord import ui, ButtonStyle, Interaction from discord.ext import commands, tasks from discord.app_commands import command -from pymongo import AsyncMongoClient, ASCENDING +from pymongo import ASCENDING from rocketwatch import RocketWatch from utils.rocketpool import rp @@ -74,7 +74,6 @@ async def instructions(self, interaction: Interaction, _) -> None: class UserDistribute(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).get_database("rocketwatch") self.task.start() async def cog_unload(self): @@ -115,7 +114,7 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: current_epoch = int(head["data"]["header"]["message"]["slot"]) // 32 threshold_epoch = current_epoch - 5000 - minipools = await self.db.minipools.find({ + minipools = await self.bot.db.minipools.find({ "user_distributed": False, "status": "staking", "execution_balance": {"$gte": 8}, diff --git 
a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 0cfc1713..8ee6cb73 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -3,7 +3,6 @@ from discord import Interaction from discord.ext import commands from discord.app_commands import command -from pymongo import AsyncMongoClient from rocketwatch import RocketWatch from utils.cfg import cfg @@ -111,18 +110,17 @@ def _collapse_tree(data: dict) -> dict: class ValidatorStates(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch @command() async def validator_states(self, interaction: Interaction): """Show validator counts by beacon chain and contract status""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - minipools = await self.db.minipools.find( + minipools = await self.bot.db.minipools.find( {"beacon.status": {"$exists": True}}, {"beacon": 1, "status": 1, "finalized": 1, "node_operator": 1, "validator_index": 1} ).to_list(None) - megapool_vals = await self.db.megapool_validators.find( + megapool_vals = await self.bot.db.megapool_validators.find( {}, {"beacon": 1, "status": 1, "node_operator": 1, "validator_index": 1} ).to_list(None) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 08740787..3e7adad7 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -18,6 +18,8 @@ from discord.ext.commands import Bot, Context from discord.app_commands import CommandTree, AppCommandError +from pymongo import AsyncMongoClient + from utils.cfg import cfg from utils.retry import retry_async @@ -34,6 +36,7 @@ async def on_error(self, interaction: Interaction, error: AppCommandError) -> No def __init__(self, intents: Intents) -> None: super().__init__(command_prefix=(), tree_cls=self.RWCommandTree, intents=intents) + self.db = 
AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch
 
     async def _load_plugins(self):
         chain = cfg["rocketpool.chain"]
diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json
index 2e068d34..af93387f 100644
--- a/rocketwatch/strings/embeds.en.json
+++ b/rocketwatch/strings/embeds.en.json
@@ -431,9 +431,9 @@
     "title": ":fire: Failed Validator Deposit",
     "description": ":fire_engine: %{node} burned **%{burnedValue} ETH** trying to create a validator! :fire_engine:"
   },
-  "minipool_slash_event": {
-    "title": ":rotating_light: Minipool Slashed",
-    "description": "Minipool %{minipool} has been slashed by %{slasher}"
+  "validator_slash_event": {
+    "title": ":rotating_light: Validator Slashed",
+    "description": "Validator %{validator} has been slashed by %{slasher}"
   },
   "validator_deposit_event": {
     "description_small": ":construction_site: %{from} created a validator with a **%{amount} ETH** bond!"
diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py
index ee16c4dd..2d2b8fa1 100644
--- a/rocketwatch/utils/event.py
+++ b/rocketwatch/utils/event.py
@@ -40,20 +40,20 @@ def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)):
     def start_tracking(self, block: BlockNumber) -> None:
         self.last_served_block = block - 1
 
-    def get_new_events(self) -> list[Event]:
+    async def get_new_events(self) -> list[Event]:
         now = datetime.now()
         if (now - self._last_run) < self.rate_limit:
             return []
 
         self._last_run = now
         self._pending_block = w3.eth.get_block_number()
-        events = self._get_new_events()
+        events = await self._get_new_events()
         self.last_served_block = self._pending_block
         return events
 
     @abstractmethod
-    def _get_new_events(self) -> list[Event]:
+    async def _get_new_events(self) -> list[Event]:
         pass
 
-    def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]:
+    async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]:
        return []
diff --git a/rocketwatch/utils/shared_w3.py 
b/rocketwatch/utils/shared_w3.py index 7880aae8..b0b17671 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -51,6 +51,9 @@ async def get_validators_async(self, state_id, ids: list[int]): async def get_sync_committee_async(self, epoch: int): return await self._make_get_request_async(f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}") + + async def get_finality_checkpoint_async(self, state_id): + return await self._make_get_request_async(f"/eth/v1/beacon/states/{state_id}/finality_checkpoints") bacon = SuperBacon(cfg["consensus_layer.endpoint"]) From ba8cd29aa2837f6a18a8b3c846383c5b91853273 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 17:44:50 +0000 Subject: [PATCH 156/279] make governance related plugins async --- rocketwatch/plugins/governance/governance.py | 4 +- rocketwatch/plugins/rpips/rpips.py | 71 +++++++++----------- rocketwatch/requirements.txt | 1 + 3 files changed, 35 insertions(+), 41 deletions(-) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 5ebce7c1..65c1174b 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -51,7 +51,7 @@ def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: async def _get_active_snapshot_proposals(self) -> list[Snapshot.Proposal]: try: - return Snapshot.fetch_proposals("active", reverse=True) + return await Snapshot.fetch_proposals("active", reverse=True) except Exception as e: await self.bot.report_error(e) return [] @@ -59,7 +59,7 @@ async def _get_active_snapshot_proposals(self) -> list[Snapshot.Proposal]: async def _get_draft_rpips(self) -> list[RPIPs.RPIP]: try: statuses = {"Draft", "Review"} - return [rpip for rpip in RPIPs.get_all_rpips() if (rpip.status in statuses)][::-1] + return [rpip for rpip in await RPIPs.get_all_rpips() if (rpip.status in statuses)][::-1] except Exception as e: 
await self.bot.report_error(e) return [] diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 2b9604ba..78cca06d 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -1,9 +1,8 @@ import logging -import requests -from typing import Optional, Any +import aiohttp +from aiocache import cached from bs4 import BeautifulSoup -from cachetools.func import ttl_cache from discord import Interaction from discord.ext.commands import Cog @@ -12,7 +11,7 @@ from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed -from utils.retry import retry +from utils.retry import retry_async log = logging.getLogger("rpips") log.setLevel(cfg["log_level"]) @@ -30,38 +29,31 @@ async def rpip(self, interaction: Interaction, name: str): embed = Embed() embed.set_author(name="🔗 Data from rpips.rocketpool.net", url="https://rpips.rocketpool.net") - rpips_by_name: dict[str, RPIPs.RPIP] = {rpip.full_title: rpip for rpip in self.get_all_rpips()} + rpips_by_name: dict[str, RPIPs.RPIP] = {rpip.full_title: rpip for rpip in await self.get_all_rpips()} if rpip := rpips_by_name.get(name): + details = await rpip.fetch_details() embed.title = name embed.url = rpip.url - embed.description = rpip.description + embed.description = details["description"] - if len(rpip.authors) == 1: - embed.add_field(name="Author", value=rpip.authors[0]) + authors = details["authors"] + if len(authors) == 1: + embed.add_field(name="Author", value=authors[0]) else: - embed.add_field(name="Authors", value=", ".join(rpip.authors)) + embed.add_field(name="Authors", value=", ".join(authors)) embed.add_field(name="Status", value=rpip.status) - embed.add_field(name="Created", value=rpip.created) - embed.add_field(name="Discussion Link", value=rpip.discussion, inline=False) + embed.add_field(name="Created", value=details["created"]) + embed.add_field(name="Discussion Link", value=details["discussion"], inline=False) else: 
embed.description = "No matching RPIPs." await interaction.followup.send(embed=embed) class RPIP: - __slots__ = ( - "title", - "number", - "status", - "type", - "authors", - "created", - "discussion", - "description" - ) - - def __init__(self, title: str, number: int, status:str): + __slots__ = ("title", "number", "status") + + def __init__(self, title: str, number: int, status: str): self.title = title self.number = number self.status = status @@ -69,10 +61,14 @@ def __init__(self, title: str, number: int, status:str): def __str__(self) -> str: return self.full_title - @ttl_cache(ttl=300) - @retry(tries=3, delay=1) - def __fetch_data(self) -> dict[str, Optional[str | list[str]]]: - soup = BeautifulSoup(requests.get(self.url).text, "html.parser") + @cached(ttl=300, key_builder=lambda _, rpip: rpip.number) + @retry_async(tries=3, delay=1) + async def fetch_details(self) -> dict: + async with aiohttp.ClientSession() as session: + async with session.get(self.url) as resp: + html = await resp.text() + + soup = BeautifulSoup(html, "html.parser") metadata = {} for field in soup.main.find("table", {"class": "rpip-preamble"}).find_all("tr"): @@ -100,26 +96,23 @@ def full_title(self) -> str: def url(self) -> str: return f"https://rpips.rocketpool.net/RPIPs/RPIP-{self.number}" - def __getattr__(self, key: str) -> Any: - try: - return self.__fetch_data()[key] or "N/A" - except KeyError: - raise AttributeError(f"RPIP has no attribute '{key}'") - @rpip.autocomplete("name") async def _get_rpip_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: choices = [] - for rpip in self.get_all_rpips(): + for rpip in await self.get_all_rpips(): if current.lower() in (name := rpip.full_title).lower(): choices.append(Choice(name=name, value=name)) return choices[:-26:-1] @staticmethod - @ttl_cache(ttl=60) - @retry(tries=3, delay=1) - def get_all_rpips() -> list['RPIPs.RPIP']: - html_doc = requests.get("https://rpips.rocketpool.net/all").text - soup = 
BeautifulSoup(html_doc, "html.parser") + @cached(ttl=60) + @retry_async(tries=3, delay=1) + async def get_all_rpips() -> list['RPIPs.RPIP']: + async with aiohttp.ClientSession() as session: + async with session.get("https://rpips.rocketpool.net/all") as resp: + html = await resp.text() + + soup = BeautifulSoup(html, "html.parser") rpips: list['RPIPs.RPIP'] = [] for row in soup.table.find_all("tr", recursive=False): diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 8ee9b3e7..0573c580 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -25,6 +25,7 @@ pymongo==4.16.0 graphql_query==1.4.0 pillow==12.1.1 aiohttp==3.13.3 +aiocache==0.12.3 numpy==2.4.2 beautifulsoup4==4.14.3 eth-typing==5.2.1 From cb9f5576bf0d446688d33ff536ef00fe42b7ca94 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 17:58:40 +0000 Subject: [PATCH 157/279] use aiohttp in beacon_events --- .../plugins/beacon_events/beacon_events.py | 28 +++++++++++-------- rocketwatch/utils/shared_w3.py | 4 +-- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index d25f9b90..f2d4532b 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,7 +1,7 @@ import logging from typing import Optional, cast -import requests +import aiohttp import eth_utils from eth_typing import BlockNumber from web3.datastructures import MutableAttributeDict as aDict @@ -50,9 +50,13 @@ async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) try: log.debug(f"Checking slot {slot_number}") beacon_block = (await bacon.get_block_async(slot_number))["data"]["message"] - except requests.exceptions.HTTPError: - log.error(f"Beacon block {slot_number} not found, skipping.") - return [] + except aiohttp.ClientResponseError as e: + if e.status == 
404: + log.error(f"Beacon block {slot_number} not found, skipping.") + return [] + else: + raise e + events = await self._get_slashings(beacon_block) if proposal_event := await self._get_proposal(beacon_block): @@ -138,16 +142,16 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: # fetch from beaconcha.in because beacon node is unaware of MEV bribes endpoint = f"https://beaconcha.in/api/v1/execution/block/{block_number}" - response = requests.get(endpoint, headers={"apikey": api_key}) - - if response.status_code != 200: - log.warning(f"Error code {response.status_code} from {endpoint}") - return None + async with aiohttp.ClientSession() as session: + async with session.get(endpoint, headers={"apikey": api_key}) as resp: + if resp.status != 200: + log.warning(f"Error code {resp.status} from {endpoint}") + return None + response_body = await resp.json() - response_body = response.json() log.debug(f"{response_body = }") - proposal_data = response.json()["data"][0] + proposal_data = response_body["data"][0] log.debug(f"{proposal_data = }") block_reward_eth = solidity.to_float(proposal_data["producerReward"]) @@ -200,7 +204,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: finality_checkpoint = await bacon.get_finality_checkpoint_async(state_id=str(slot_number)) last_finalized_epoch = int(finality_checkpoint["data"]["finalized"]["epoch"]) finality_delay = epoch_number - last_finalized_epoch - except requests.exceptions.HTTPError: + except aiohttp.ClientResponseError: log.exception("Failed to get finality checkpoints") return None diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index b0b17671..8c91c44e 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -2,7 +2,7 @@ import aiohttp from web3.beacon import Beacon as Bacon -from aiohttp.web import HTTPError +from aiohttp import ClientResponseError from web3 import Web3, AsyncWeb3, HTTPProvider from web3.providers 
import AsyncHTTPProvider from web3.middleware import ExtraDataToPOAMiddleware @@ -34,7 +34,7 @@ def __init__(self, base_url: str) -> None: timeout=aiohttp.ClientTimeout(sock_connect=3.05, total=20) ) - @retry_async(tries=3, exceptions=HTTPError, delay=0.5) + @retry_async(tries=3, exceptions=ClientResponseError, delay=0.5) async def _make_get_request_async(self, path: str): async with self.async_session.get(self.base_url + path) as response: return await response.json() From ac57b6c2f47ace6531f58bd68d438d7def7237c9 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 18:12:24 +0000 Subject: [PATCH 158/279] minimal bacon --- .../plugins/beacon_events/beacon_events.py | 16 ++++---- rocketwatch/plugins/cow_orders/cow_orders.py | 2 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 5 +-- rocketwatch/plugins/events/events.py | 4 +- rocketwatch/plugins/lottery/lottery.py | 4 +- rocketwatch/plugins/proposals/proposals.py | 6 +-- rocketwatch/plugins/random/random.py | 2 +- .../plugins/transactions/transactions.py | 4 +- .../user_distribute/user_distribute.py | 2 +- rocketwatch/utils/embeds.py | 4 +- rocketwatch/utils/readable.py | 4 +- rocketwatch/utils/shared_w3.py | 40 ++++--------------- 12 files changed, 34 insertions(+), 59 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index f2d4532b..8dee9365 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -49,7 +49,7 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: try: log.debug(f"Checking slot {slot_number}") - beacon_block = (await bacon.get_block_async(slot_number))["data"]["message"] + beacon_block = (await bacon.get_block(str(slot_number)))["data"]["message"] except aiohttp.ClientResponseError as e: if 
e.status == 404: log.error(f"Beacon block {slot_number} not found, skipping.") @@ -103,12 +103,12 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: f":slashing-type-{slash['slashing_type']}" f":{timestamp}" ) - slash["validator"] = cl_explorer_url(slash["validator"]) - slash["slasher"] = cl_explorer_url(slash["slasher"]) + slash["validator"] = await cl_explorer_url(slash["validator"]) + slash["slasher"] = await cl_explorer_url(slash["slasher"]) slash["node_operator"] = (minipool or megapool)["node_operator"] slash["event_name"] = "validator_slash_event" - args = prepare_args(aDict(slash)) + args = await prepare_args(aDict(slash)) if embed := assemble(args): events.append(Event( topic="beacon_events", @@ -182,7 +182,7 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: else: args["event_name"] = "mev_proposal_event" - args = prepare_args(aDict(args)) + args = await prepare_args(aDict(args)) if not (embed := assemble(args)): return None @@ -201,7 +201,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: try: # calculate finality delay - finality_checkpoint = await bacon.get_finality_checkpoint_async(state_id=str(slot_number)) + finality_checkpoint = await bacon.get_finality_checkpoint(str(slot_number)) last_finalized_epoch = int(finality_checkpoint["data"]["finalized"]["epoch"]) finality_delay = epoch_number - last_finalized_epoch except aiohttp.ClientResponseError: @@ -228,7 +228,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: "timestamp": timestamp, "epoch": epoch_number } - args = prepare_args(aDict(args)) + args = await prepare_args(aDict(args)) if not (embed := assemble(args)): return None @@ -250,7 +250,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: "timestamp" : timestamp, "epoch" : epoch_number } - args = prepare_args(aDict(args)) + args = await prepare_args(aDict(args)) if not (embed := assemble(args)): return None diff --git 
a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index aa6b24fd..257a67ab 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -213,7 +213,7 @@ async def check_for_new_events(self): data["timestamp"] = int(created.timestamp()) - data = prepare_args(data) + data = await prepare_args(data) embed = assemble(data) payload.append(Event( embed=embed, diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 9c57c6e7..4e2fdf67 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -10,7 +10,6 @@ from cronitor import Monitor from pymongo import UpdateOne, UpdateMany from pymongo.asynchronous.collection import AsyncCollection -from web3.contract.contract import ContractFunction from discord.ext import commands from discord.utils import as_chunks @@ -393,7 +392,7 @@ async def update_dynamic_minipool_beacon_data(self): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) log.info(f"Updating beacon chain data for minipools [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] + beacon_data = (await bacon.get_validators_by_ids("head", ids=pubkey_batch))["data"] data = {} for d in beacon_data: v = d["validator"] @@ -508,7 +507,7 @@ async def update_dynamic_megapool_validator_beacon_data(self): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) log.debug(f"Updating beacon data for megapool validators [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_async("head", ids=pubkey_batch))["data"] + beacon_data = (await bacon.get_validators_by_ids("head", ids=pubkey_batch))["data"] data = {} for d in beacon_data: v = d["validator"] diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py 
index eef4c09c..ed3dc012 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -830,7 +830,7 @@ def share_repr(percentage: float) -> str: # 4. the migration could have timed out, the oDAO will scrub minipools after they have passed half of the migration window # get pubkey from minipool contract pubkey = rp.call("rocketMinipoolManager.getMinipoolPubkey", args.minipool).hex() - vali_info = bacon.get_validator(f"0x{pubkey}")["data"] + vali_info = (await bacon.get_validator(f"0x{pubkey}"))["data"] reason = "joe fucking up (unknown reason)" if vali_info: # check for #1 @@ -863,7 +863,7 @@ def share_repr(percentage: float) -> str: args.timestamp = block_to_ts(receipt["blockNumber"]) args.event_name = event_name - args = prepare_args(args) + args = await prepare_args(args) event.args = args return assemble(args) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 4ab75033..2692b4d5 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -37,11 +37,11 @@ async def _check_indexes(self): async def load_sync_committee(self, period): assert period in ["latest", "next"] await self._check_indexes() - h = await bacon.get_block_async("head") + h = await bacon.get_block("head") sync_period = int(h['data']['message']['slot']) // 32 // 256 if period == "next": sync_period += 1 - data = (await bacon.get_sync_committee_async(sync_period * 256))["data"] + data = (await bacon.get_epoch_sync_committees(str(sync_period * 256)))["data"] await self.bot.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 5418cc83..fea15ee1 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -152,7 +152,7 @@ async def fetch_proposals(self): else: 
last_checked_slot = 4700012 # last slot before merge - latest_slot = int((await bacon.get_block_header_async("finalized"))["data"]["header"]["message"]["slot"]) + latest_slot = int((await bacon.get_block_header("finalized"))["data"]["header"]["message"]["slot"]) for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) @@ -160,7 +160,7 @@ async def fetch_proposals(self): async def fetch_proposal(self, slot: int) -> None: try: - beacon_header = (await bacon.get_block_header_async(slot))["data"]["header"]["message"] + beacon_header = (await bacon.get_block_header(str(slot)))["data"]["header"]["message"] except ClientResponseError as e: if e.status == 404: return None @@ -171,7 +171,7 @@ async def fetch_proposal(self, slot: int) -> None: if not (minipool := (await self.bot.db.minipools.find_one({"validator_index": validator_index}))): return None - beacon_block = (await bacon.get_block_async(slot))["data"]["message"] + beacon_block = (await bacon.get_block(str(slot)))["data"]["message"] proposal_data = parse_proposal(beacon_block) await self.bot.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index dff5e409..4272491c 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -106,7 +106,7 @@ async def dev_time(self, ctx: Context): e.add_field(name="Coordinated Universal Time", value=f"{dev_time.strftime(time_format)}\n" f"`{binary_day} (0x{uint_day:04x})`") - b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_block_header_async("head"))["data"]["header"]["message"]["slot"])) + b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_block_header("head"))["data"]["header"]["message"]["slot"])) e.add_field(name="Beacon Time", value=f"Day 
{b[0]}, {b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index d991e5a5..74b53eef 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -245,12 +245,12 @@ def share_repr(percentage: float) -> str: else: args.contract_validity = f"The contract is valid for {periods_left} more periods." - embed = assemble(prepare_args(args)) + embed = assemble(await prepare_args(args)) embeds.append(embed) return embeds - args = prepare_args(args) + args = await prepare_args(args) return [assemble(args)] async def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Event]: diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index bdf3a453..1a1bbd7e 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -110,7 +110,7 @@ async def on_task_error(self, err: Exception): await self.bot.report_error(err) async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: - head = await bacon.get_block_header_async("head") + head = await bacon.get_block_header("head") current_epoch = int(head["data"]["header"]["message"]["slot"]) // 32 threshold_epoch = current_epoch - 5000 diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index c7663af3..cb48a9e8 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -197,7 +197,7 @@ def el_explorer_url( prefix = "" return f"{prefix}[{name}]({url})" -def prepare_args(args): +async def prepare_args(args): for arg_key, arg_value in list(args.items()): # store raw value args[f"{arg_key}_raw"] = arg_value @@ -227,7 +227,7 @@ def prepare_args(args): # handle validators if arg_key == "pubkey": - args[arg_key] = 
cl_explorer_url(arg_value) + args[arg_key] = await cl_explorer_url(arg_value) elif arg_key == "cow_uid": args[arg_key] = f"[ORDER](https://explorer.cow.fi/orders/{arg_value})" else: diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 3687ef84..c1a74aac 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -47,11 +47,11 @@ def s_hex(string): return string[:10] -def cl_explorer_url(target, name=None): +async def cl_explorer_url(target, name=None): # if name is none, and it has the correct length for a validator pubkey, try to lookup the validator index if not name and isinstance(target, str) and len(target) == 98: with contextlib.suppress(Exception): - if v := bacon.get_validator(target)["data"]["index"]: + if v := (await bacon.get_validator(target))["data"]["index"]: name = f"#{v}" if not name and isinstance(target, str): name = s_hex(target) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 8c91c44e..68b74ae4 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -1,14 +1,12 @@ import logging +from typing import Dict, Any -import aiohttp -from web3.beacon import Beacon as Bacon -from aiohttp import ClientResponseError +from web3.beacon import AsyncBeacon from web3 import Web3, AsyncWeb3, HTTPProvider from web3.providers import AsyncHTTPProvider from web3.middleware import ExtraDataToPOAMiddleware from utils.cfg import cfg -from utils.retry import retry_async log = logging.getLogger("shared_w3") log.setLevel(cfg["log_level"]) @@ -26,34 +24,12 @@ if "archive" in cfg['execution_layer.endpoint'].keys(): historical_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.archive'])) -class SuperBacon(Bacon): - def __init__(self, base_url: str) -> None: - super().__init__(base_url) - self.async_session = aiohttp.ClientSession( - raise_for_status=True, - timeout=aiohttp.ClientTimeout(sock_connect=3.05, total=20) - ) - - @retry_async(tries=3, 
exceptions=ClientResponseError, delay=0.5) - async def _make_get_request_async(self, path: str): - async with self.async_session.get(self.base_url + path) as response: - return await response.json() - - async def get_block_header_async(self, block_id: int | str): - return await self._make_get_request_async(f"/eth/v1/beacon/headers/{block_id}") - - async def get_block_async(self, block_id: int | str): - return await self._make_get_request_async(f"/eth/v2/beacon/blocks/{block_id}") - async def get_validators_async(self, state_id, ids: list[int]): +class Bacon(AsyncBeacon): + async def get_validators_by_ids(self, state_id: str, ids: list[int]) -> Dict[str, Any]: id_str = ','.join(map(str, ids)) - return await self._make_get_request_async(f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}") - - async def get_sync_committee_async(self, epoch: int): - return await self._make_get_request_async(f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}") - - async def get_finality_checkpoint_async(self, state_id): - return await self._make_get_request_async(f"/eth/v1/beacon/states/{state_id}/finality_checkpoints") - + return await self._async_make_get_request( + f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" + ) -bacon = SuperBacon(cfg["consensus_layer.endpoint"]) +bacon = Bacon(cfg["consensus_layer.endpoint"]) From 7a4b0f32e5e3091b47ea54c2e316f465e3c69f3d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 18:21:45 +0000 Subject: [PATCH 159/279] remove POA middleware --- rocketwatch/plugins/lottery/lottery.py | 2 +- rocketwatch/utils/shared_w3.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 2692b4d5..b490f1c0 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -41,7 +41,7 @@ async def load_sync_committee(self, period): sync_period = 
int(h['data']['message']['slot']) // 32 // 256 if period == "next": sync_period += 1 - data = (await bacon.get_epoch_sync_committees(str(sync_period * 256)))["data"] + data = (await bacon.get_sync_committee(sync_period * 256))["data"] await self.bot.db.sync_committee_stats.replace_one({"period": period}, {"period" : period, "start_epoch": sync_period * 256, diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 68b74ae4..4aced91a 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -4,7 +4,6 @@ from web3.beacon import AsyncBeacon from web3 import Web3, AsyncWeb3, HTTPProvider from web3.providers import AsyncHTTPProvider -from web3.middleware import ExtraDataToPOAMiddleware from utils.cfg import cfg @@ -17,8 +16,6 @@ if cfg['rocketpool.chain'] != "mainnet": mainnet_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.mainnet'])) - w3.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0) - w3_async.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0) historical_w3 = None if "archive" in cfg['execution_layer.endpoint'].keys(): @@ -32,4 +29,9 @@ async def get_validators_by_ids(self, state_id: str, ids: list[int]) -> Dict[str f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" ) + async def get_sync_committee(self, epoch: int) -> Dict[str, Any]: + return await self._async_make_get_request( + f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" + ) + bacon = Bacon(cfg["consensus_layer.endpoint"]) From 56c1e225312f5902065888cc1f8d25223a242945 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 18:27:51 +0000 Subject: [PATCH 160/279] migrate forum to slash commands --- rocketwatch/plugins/forum/forum.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 4b84a78e..948275a7 100644 --- a/rocketwatch/plugins/forum/forum.py +++ 
b/rocketwatch/plugins/forum/forum.py @@ -4,10 +4,9 @@ from typing import Optional, Literal, cast import aiohttp +from discord import Interaction from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command -from discord.app_commands import Choice +from discord.app_commands import command, Choice from rocketwatch import RocketWatch from utils.cfg import cfg @@ -118,19 +117,20 @@ async def get_top_users(period: Period, order_by: UserMetric) -> list[User]: )) return users - @hybrid_command() + @command() async def top_forum_posts( self, - ctx: Context, + interaction: Interaction, period: Period = "monthly" ) -> None: """Get the most popular topics from the forum""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) if isinstance(period, Choice): period: Forum.Period = cast(Forum.Period, period.value) - embed = Embed(title=f"Top Forum Posts ({period})", description="") + embed = Embed(title=f"Top Forum Posts ({period})") + embed.description = "" if topics := await self.get_popular_topics(period): for i, topic in enumerate(topics[:10], start=1): @@ -142,22 +142,20 @@ async def top_forum_posts( else: embed.description = "No topics found." 
- await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) - @hybrid_command() + @command() async def top_forum_users( self, - ctx: Context, + interaction: Interaction, period: Period = "monthly", order_by: UserMetric = "likes_received" ) -> None: """Get the most active forum users""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - embed = Embed( - title=f"Top Forum Users ({period})", - description="" - ) + embed = Embed(title=f"Top Forum Users ({period})") + embed.description = "" users = await self.get_top_users(period, order_by) if users: @@ -169,7 +167,7 @@ async def top_forum_users( else: embed.description = "No users found." - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) async def setup(bot): From 7c329e1ad9f841ac037338e6ef34d25160cc18d7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 19:39:50 +0000 Subject: [PATCH 161/279] async deposit pool stats --- .../plugins/beacon_events/beacon_events.py | 22 +++++------ .../plugins/deposit_pool/deposit_pool.py | 32 ++++++++-------- rocketwatch/plugins/queue/queue.py | 38 +++++++++---------- rocketwatch/rocketwatch.py | 3 +- rocketwatch/strings/embeds.en.json | 4 +- 5 files changed, 48 insertions(+), 51 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 8dee9365..f7a44746 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -125,10 +125,16 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block return None + + if not (api_key := cfg["consensus_layer.beaconcha_secret"]): + log.warning("Missing beaconcha.in API key") + return None validator_index = int(beacon_block["proposer_index"]) - if not 
(minipool := await self.bot.db.minipools.find_one({"validator_index": validator_index})): - # not proposed by a minipool + minipool = await self.bot.db.minipools.find_one({"validator_index": validator_index}) + megapool = await self.bot.db.megapool_validators.find_one({"validator_index": validator_index}) + if not (minipool or megapool): + # not proposed by RP validator return None log.info(f"Validator {validator_index} proposed a block") @@ -136,21 +142,13 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: timestamp = int(payload["timestamp"]) block_number = cast(BlockNumber, int(payload["block_number"])) - if not (api_key := cfg["consensus_layer.beaconcha_secret"]): - log.warning("Missing beaconcha.in API key") - return None - # fetch from beaconcha.in because beacon node is unaware of MEV bribes endpoint = f"https://beaconcha.in/api/v1/execution/block/{block_number}" async with aiohttp.ClientSession() as session: async with session.get(endpoint, headers={"apikey": api_key}) as resp: - if resp.status != 200: - log.warning(f"Error code {resp.status} from {endpoint}") - return None response_body = await resp.json() log.debug(f"{response_body = }") - proposal_data = response_body["data"][0] log.debug(f"{proposal_data = }") @@ -167,8 +165,8 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: fee_recipient = proposal_data["feeRecipient"] args = { - "node_operator": minipool["node_operator"], - "minipool": minipool["address"], + "node_operator": (minipool or megapool)["node_operator"], + "validator": await cl_explorer_url(validator_index), "slot": int(beacon_block["slot"]), "reward_amount": block_reward_eth, "timestamp": timestamp diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 69839e76..a024e5a1 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -1,7 +1,7 @@ import logging -from 
discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from rocketwatch import RocketWatch from plugins.queue.queue import Queue @@ -21,7 +21,7 @@ def __init__(self, bot: RocketWatch): super().__init__(bot) @staticmethod - def get_deposit_pool_stats() -> Embed: + async def get_deposit_pool_stats() -> Embed: balance_raw, max_size_raw, max_amount_raw = rp.multicall([ rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), @@ -43,8 +43,8 @@ def get_deposit_pool_stats() -> Embed: embed.add_field(name="Status", value=dp_status, inline=False) display_limit = 2 - exp_queue_length, exp_queue_content = Queue.get_express_queue(display_limit) - std_queue_length, std_queue_content = Queue.get_standard_queue(display_limit) + exp_queue_length, exp_queue_content = await Queue.get_express_queue(display_limit) + std_queue_length, std_queue_content = await Queue.get_standard_queue(display_limit) total_queue_length = exp_queue_length + std_queue_length if (total_queue_length) > 0: embed.description = "" @@ -78,7 +78,7 @@ def get_deposit_pool_stats() -> Embed: return embed @staticmethod - def get_contract_collateral_stats() -> Embed: + async def get_contract_collateral_stats() -> Embed: exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = rp.multicall([ rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), @@ -108,22 +108,22 @@ def get_contract_collateral_stats() -> Embed: return Embed(title="rETH Extra Collateral", description=description) - @hybrid_command() - async def deposit_pool(self, ctx: Context) -> None: + @command() + async def deposit_pool(self, interaction: Interaction) -> None: """Show the current deposit pool status""" - await 
ctx.defer(ephemeral=is_hidden_weak(ctx)) - await ctx.send(embed=self.get_deposit_pool_stats()) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.followup.send(embed=await self.get_deposit_pool_stats()) - @hybrid_command() - async def reth_extra_collateral(self, ctx: Context) -> None: + @command() + async def reth_extra_collateral(self, interaction: Interaction) -> None: """Show the amount of tokens held in the rETH contract for exit liquidity""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) - await ctx.send(embed=self.get_contract_collateral_stats()) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.followup.send(embed=await self.get_contract_collateral_stats()) async def get_status(self) -> Embed: embed = Embed(title=":rocket: Live Protocol Status") - dp_embed = self.get_deposit_pool_stats() + dp_embed = await self.get_deposit_pool_stats() embed.description = dp_embed.description dp_fields = {field.name: field for field in dp_embed.fields} @@ -136,7 +136,7 @@ async def get_status(self) -> Embed: if field := dp_fields.get("Status"): embed.add_field(name="Deposits", value=field.value, inline=False) - collateral_embed = self.get_contract_collateral_stats() + collateral_embed = await self.get_contract_collateral_stats() embed.add_field(name="Withdrawals", value=collateral_embed.description, inline=False) if cfg["rocketpool.chain"] != "mainnet": diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index ccb08153..09d370dc 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -17,7 +17,7 @@ from utils.embeds import el_explorer_url from utils.rocketpool import rp from utils.visibility import is_hidden_weak -from utils.shared_w3 import w3 +from utils.shared_w3 import w3_async from utils.views import PageView log = logging.getLogger("queue") @@ -51,8 +51,8 @@ def __init__(self, lane: Literal["combined", "standard", 
"express"]): def _title(self) -> str: return self.queue_name - async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: - queue_length, queue_content = self.content_loader( + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: + queue_length, queue_content = await self.content_loader( limit=(to_idx - from_idx + 1), start=from_idx ) return queue_length, queue_content @@ -74,14 +74,14 @@ def __format_queue_entry(entry: 'Queue.Entry') -> str: return f"{node_label} #`{entry.validator_id}`" @staticmethod - def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: + async def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the standard queue""" - return Queue._get_queue("deposit.queue.standard", limit, start) - + return await Queue._get_queue("deposit.queue.standard", limit, start) + @staticmethod - def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: + async def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the express queue""" - return Queue._get_queue("deposit.queue.express", limit, start) + return await Queue._get_queue("deposit.queue.express", limit, start) @staticmethod def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: @@ -90,15 +90,15 @@ def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: Block return [Queue.Entry(*entry) for entry in raw_entries][start:] @staticmethod - def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: + async def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: if limit <= 0: return 0, "" - + list_contract = rp.get_contract_by_name("linkedListStorage") - queue_namespace = bytes(w3.solidity_keccak(["string"], [namespace])) - + queue_namespace = bytes(w3_async.solidity_keccak(["string"], [namespace])) + start = 
max(start, 0) - latest_block = w3.eth.get_block_number() + latest_block = await w3_async.eth.get_block_number() q_len = list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) if start >= q_len: @@ -134,16 +134,16 @@ def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_st return num_express, num_standard @staticmethod - def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: + async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the combined queue (express + standard)""" - - latest_block = w3.eth.get_block_number() + + latest_block = await w3_async.eth.get_block_number() express_queue_rate = rp.call("rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block) queue_index = rp.call("rocketDepositPool.getQueueIndex", block=latest_block) - + list_contract = rp.get_contract_by_name("linkedListStorage") - exp_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.express"])) - std_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.standard"])) + exp_namespace = bytes(w3_async.solidity_keccak(["string"], ["deposit.queue.express"])) + std_namespace = bytes(w3_async.solidity_keccak(["string"], ["deposit.queue.standard"])) express_queue_length = list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) standard_queue_length = list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 3e7adad7..c8352988 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -30,9 +30,8 @@ class RocketWatch(Bot): class RWCommandTree(CommandTree): async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: - bot: RocketWatch = self.client ctx = await Context.from_interaction(interaction) - await bot.on_command_error(ctx, error) + await 
self.client.on_command_error(ctx, error) def __init__(self, intents: Intents) -> None: super().__init__(command_prefix=(), tree_cls=self.RWCommandTree, intents=intents) diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index af93387f..1f3713c5 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -417,11 +417,11 @@ }, "mev_proposal_event": { "title": ":moneybag: Large Minipool Proposal", - "description": "Minipool %{minipool} has proposed a block worth **%{reward_amount} ETH**!" + "description": "Validator %{validator} has proposed a block worth **%{reward_amount} ETH**!" }, "mev_proposal_smoothie_event": { "title": ":cup_with_straw: Large Smoothing Pool Proposal", - "description": "Minipool %{minipool} has proposed a block worth **%{reward_amount} ETH**!" + "description": "Validator %{validator} has proposed a block worth **%{reward_amount} ETH**!" }, "minipool_vacancy_prepared_event": { "title": ":link: Solo Migration Initiated", From c1a3f0532c634c0c66f659abe8f7094e3ef8313c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 20:03:37 +0000 Subject: [PATCH 162/279] move everything to async rp.multicall --- rocketwatch/plugins/collateral/collateral.py | 16 ++++----- rocketwatch/plugins/dao/dao.py | 22 ++++++------ .../plugins/db_upkeep_task/db_upkeep_task.py | 10 +++--- .../plugins/deposit_pool/deposit_pool.py | 4 +-- rocketwatch/plugins/events/events.py | 4 +-- rocketwatch/plugins/governance/governance.py | 18 +++++----- rocketwatch/plugins/random/random.py | 4 +-- .../plugins/transactions/transactions.py | 2 +- rocketwatch/plugins/wall/wall.py | 6 ++-- rocketwatch/utils/dao.py | 20 +++++------ rocketwatch/utils/embeds.py | 2 +- rocketwatch/utils/liquidity.py | 34 +++++++++---------- rocketwatch/utils/rocketpool.py | 14 ++------ rocketwatch/utils/sea_creatures.py | 17 ++++------ 14 files changed, 80 insertions(+), 93 deletions(-) diff --git 
a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index f359395f..b8ebdf1a 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -44,20 +44,20 @@ async def collateral_distribution_raw(ctx: Context, distribution): await ctx.send(embed=e) -def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]]: +async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]]: node_staking = rp.get_contract_by_name("rocketNodeStaking") minipool_manager = rp.get_contract_by_name("rocketMinipoolManager") eb16s, eb8s, rpl_stakes = [], [], [] nodes = rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) for node_batch in as_chunks(nodes, 500): - eb16s += rp.multicall([ + eb16s += await rp.multicall([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 16 * 10**18) for node in node_batch ]) - eb8s += rp.multicall([ + eb8s += await rp.multicall([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 8 * 10**18) for node in node_batch ]) - rpl_stakes += rp.multicall([ + rpl_stakes += await rp.multicall([ node_staking.functions.getNodeStakedRPL(node) for node in node_batch ]) @@ -70,9 +70,9 @@ def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]] } -def get_average_collateral_percentage_per_node(collateral_cap: Optional[int], bonded: bool): +async def get_average_collateral_percentage_per_node(collateral_cap: Optional[int], bonded: bool): # get stakes for each node - stakes = list(get_node_minipools_and_collateral().values()) + stakes = list(await get_node_minipools_and_collateral().values()) # get the current rpl price rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) @@ -130,7 +130,7 @@ async def node_tvl_vs_collateral(self, return rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - data = get_node_minipools_and_collateral() + data = 
await get_node_minipools_and_collateral() # Calculate each node's tvl and collateral and add it to the data def node_tvl(node): @@ -236,7 +236,7 @@ async def collateral_distribution(self, """ await ctx.defer(ephemeral=is_hidden_weak(ctx)) - data = get_average_collateral_percentage_per_node(collateral_cap, bonded) + data = await get_average_collateral_percentage_per_node(collateral_cap, bonded) distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] counts = sum(([collateral] * num_nodes for collateral, num_nodes in distribution), []) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index ef3b71cd..87263de3 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -33,16 +33,16 @@ def __init__(self, bot: RocketWatch): self.bot = bot @staticmethod - def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: + async def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: current_proposals: dict[DefaultDAO.ProposalState, list[DefaultDAO.Proposal]] = { dao.ProposalState.Pending: [], dao.ProposalState.Active: [], dao.ProposalState.Succeeded: [], } - for state, ids in dao.get_proposal_ids_by_state().items(): + for state, ids in (await dao.get_proposal_ids_by_state()).items(): if state in current_proposals: - current_proposals[state].extend([dao.fetch_proposal(pid) for pid in ids]) + current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) return Embed( title=f"{dao.display_name} Proposals", @@ -70,7 +70,7 @@ def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: ) @staticmethod - def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: + async def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: current_proposals: dict[ProtocolDAO.ProposalState, list[ProtocolDAO.Proposal]] = { dao.ProposalState.Pending: [], dao.ProposalState.ActivePhase1: [], @@ -78,9 +78,9 @@ def get_pdao_votes_embed(dao: ProtocolDAO, 
full: bool) -> Embed: dao.ProposalState.Succeeded: [], } - for state, ids in dao.get_proposal_ids_by_state().items(): + for state, ids in (await dao.get_proposal_ids_by_state()).items(): if state in current_proposals: - current_proposals[state].extend([dao.fetch_proposal(pid) for pid in ids]) + current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) return Embed( title="pDAO Proposals", @@ -128,13 +128,13 @@ async def dao_votes( match dao_name: case "pDAO": dao = ProtocolDAO() - embed = self.get_pdao_votes_embed(dao, full) + embed = await self.get_pdao_votes_embed(dao, full) case "oDAO": dao = OracleDAO() - embed = self.get_dao_votes_embed(dao, full) + embed = await self.get_dao_votes_embed(dao, full) case "Security Council": dao = SecurityCouncil() - embed = self.get_dao_votes_embed(dao, full) + embed = await self.get_dao_votes_embed(dao, full) case _: raise ValueError(f"Invalid DAO name: {dao_name}") @@ -214,7 +214,7 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> else: suggestions = list(range(1, num_proposals + 1))[:-26:-1] - titles: list[str] = rp.multicall([ + titles: list[str] = await rp.multicall([ dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions ]) return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] @@ -225,7 +225,7 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> async def voter_list(self, interaction: Interaction, proposal: int) -> None: """Show the list of voters for a pDAO proposal""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - if not (proposal := ProtocolDAO().fetch_proposal(proposal)): + if not (proposal := await ProtocolDAO().fetch_proposal(proposal)): return await interaction.followup.send("Invalid proposal ID.") view = OnchainDAO.VoterPageView(proposal) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py 
b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 4e2fdf67..4f24851b 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -174,7 +174,7 @@ async def _batch_multicall_update( # call_fn(item) returns a list of (fn, require_success, transform, field) expanded = [(item["address"], *t) for item in batch for t in call_fn(item)] calls = [(e[1], e[2]) for e in expanded] - results = await rp.multicall_async(calls) + results = await rp.multicall(calls) updates = defaultdict(dict) for e, value in zip(expanded, results): addr, transform, field = e[0], e[3], e[4] @@ -200,7 +200,7 @@ async def add_untracked_node_operators(self): return data = {} for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - results = await rp.multicall_async([nm.functions.getNodeAt(i) for i in index_batch]) + results = await rp.multicall([nm.functions.getNodeAt(i) for i in index_batch]) data |= dict(zip(index_batch, results)) await self.bot.db.node_operators.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()]) @@ -291,7 +291,7 @@ async def add_untracked_minipools(self): return log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - results = await rp.multicall_async([mm.functions.getMinipoolAt(i) for i in index_batch]) + results = await rp.multicall([mm.functions.getMinipoolAt(i) for i in index_batch]) await self.bot.db.minipools.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)]) @timerun_async @@ -448,7 +448,7 @@ async def add_untracked_megapool_validators(self): megapool_contract.functions.getValidatorInfo(vid), ] ] - results = await rp.multicall_async(fns) + results = await rp.multicall(fns) docs = [] for i, vid in enumerate(id_batch): @@ -485,7 +485,7 @@ async def 
update_dynamic_megapool_validator_data(self): rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"]).functions.getValidatorInfo(v["validator_id"]) for v in batch ] - results = await rp.multicall_async(fns) + results = await rp.multicall(fns) ops = [] for v, info_raw in zip(batch, results): info = _unpack_validator_info_dynamic(info_raw) if info_raw is not None else None diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index a024e5a1..27a25111 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -22,7 +22,7 @@ def __init__(self, bot: RocketWatch): @staticmethod async def get_deposit_pool_stats() -> Embed: - balance_raw, max_size_raw, max_amount_raw = rp.multicall([ + balance_raw, max_size_raw, max_amount_raw = await rp.multicall([ rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), rp.get_contract_by_name("rocketDepositPool").functions.getMaximumDepositAmount(), @@ -79,7 +79,7 @@ async def get_deposit_pool_stats() -> Embed: @staticmethod async def get_contract_collateral_stats() -> Embed: - exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = rp.multicall([ + exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = await rp.multicall([ rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), rp.get_contract_by_name("rocketTokenRETH").functions.getCollateralRate(), diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index ed3dc012..9393f7d4 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -630,7 +630,7 @@ def share_repr(percentage: float) -> str: return None dao = ProtocolDAO() - proposal = 
dao.fetch_proposal(proposal_id) + proposal = await dao.fetch_proposal(proposal_id) args.proposal_body = dao.build_proposal_body( proposal, include_proposer=False, @@ -652,7 +652,7 @@ def share_repr(percentage: float) -> str: }[dao_name]) dao = DefaultDAO(dao_name) - proposal = dao.fetch_proposal(proposal_id) + proposal = await dao.fetch_proposal(proposal_id) args.proposal_body = dao.build_proposal_body( proposal, include_proposer=False, diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 65c1174b..0c85343a 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -23,18 +23,18 @@ class Governance(StatusPlugin): @staticmethod - def _get_active_pdao_proposals(dao: ProtocolDAO) -> list[ProtocolDAO.Proposal]: - proposal_ids = dao.get_proposal_ids_by_state() + async def _get_active_pdao_proposals(dao: ProtocolDAO) -> list[ProtocolDAO.Proposal]: + proposal_ids = await dao.get_proposal_ids_by_state() active_proposal_ids = [] active_proposal_ids += proposal_ids[dao.ProposalState.ActivePhase1] active_proposal_ids += proposal_ids[dao.ProposalState.ActivePhase2] - return [dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] + return [await dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] @staticmethod - def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal]: - proposal_ids = dao.get_proposal_ids_by_state() + async def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal]: + proposal_ids = await dao.get_proposal_ids_by_state() active_proposal_ids = proposal_ids[dao.ProposalState.Active] - return [dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] + return [await dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] @staticmethod def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: @@ 
-100,7 +100,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: # --------- SECURITY COUNCIL --------- # dao = SecurityCouncil() - if proposals := self._get_active_dao_proposals(dao): + if proposals := await self._get_active_dao_proposals(dao): embed.description += "### Security Council\n" embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) @@ -108,7 +108,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: # --------- ORACLE DAO --------- # dao = OracleDAO() - if proposals := self._get_active_dao_proposals(dao): + if proposals := await self._get_active_dao_proposals(dao): embed.description += "### Oracle DAO\n" embed.description += "- **Active on-chain proposals**\n" embed.description += print_proposals(dao, proposals) @@ -118,7 +118,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: section_content = "" dao = ProtocolDAO() - if proposals := self._get_active_pdao_proposals(dao): + if proposals := await self._get_active_pdao_proposals(dao): section_content += "- **Active on-chain proposals**\n" section_content += print_proposals(dao, proposals) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 4272491c..8ee496cf 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -134,7 +134,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): e.description = "Invalid address" await ctx.send(embed=e) return - creature = get_sea_creature_for_address(address) + creature = await get_sea_creature_for_address(address) if not creature: e.description = f"No sea creature for {address}" else: @@ -142,7 +142,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): required_holding = [h for h, c in sea_creatures.items() if c == creature[0]][0] e.add_field(name="Visualization", value=el_explorer_url(address, prefix=creature), inline=False) 
e.add_field(name="Required holding for emoji", value=f"{required_holding * len(creature)} ETH", inline=False) - holding = get_holding_for_address(address) + holding = await get_holding_for_address(address) e.add_field(name="Actual Holding", value=f"{holding:.0f} ETH", inline=False) else: e.title = "Possible Sea Creatures" diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 74b53eef..1c847ab7 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -313,7 +313,7 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l payload = rp.call("rocketDAOProposal.getPayload", proposal_id) event.args["executor"] = event["from"] - proposal = dao.fetch_proposal(proposal_id) + proposal = await dao.fetch_proposal(proposal_id) event.args["proposal_body"] = dao.build_proposal_body(proposal, include_proposer=False) dao_address = dao.contract.address diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 9de4116b..a5b153b8 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -101,11 +101,11 @@ async def _get_cex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[CEX, return OrderedDict(sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True)) @timerun - def _get_dex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[DEX, np.ndarray]: + async def _get_dex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[DEX, np.ndarray]: depth: dict[DEX, np.ndarray] = {} liquidity: dict[DEX, float] = {} for dex in self.dex: - if pools := dex.get_liquidity(): + if pools := await dex.get_liquidity(): depth[dex], liquidity[dex] = self._get_market_depth_and_liquidity(pools, x, rpl_usd) return OrderedDict(sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True)) @@ -276,7 +276,7 @@ async def on_fail() -> None: try: if sources != "CEX": - dex_data 
= self._get_dex_data(x, rpl_usd) + dex_data = await self._get_dex_data(x, rpl_usd) source_desc.append(f"{len(dex_data)} DEX") if sources != "DEX": cex_data = await self._get_cex_data(x, rpl_usd) diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index fe66ed39..66fd820a 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -33,7 +33,7 @@ class Proposal(ABC): @staticmethod @abstractmethod - def fetch_proposal(self, proposal_id: int) -> Proposal: + async def fetch_proposal(self, proposal_id: int) -> Proposal: pass @abstractmethod @@ -106,14 +106,14 @@ class Proposal(DAO.Proposal): votes_against: int votes_required: int - def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: + async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_dao_names = rp.multicall([ + proposal_dao_names = await rp.multicall([ self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) relevant_proposals = [(i+1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] - proposal_states = rp.multicall([ + proposal_states = await rp.multicall([ self.proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals ]) @@ -123,9 +123,9 @@ def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: return proposals - def fetch_proposal(self, proposal_id: int) -> Proposal: + async def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, end, expires, - votes_for_raw, votes_against_raw, votes_required_raw) = rp.multicall([ + votes_for_raw, votes_against_raw, votes_required_raw) = await rp.multicall([ self.proposal_contract.functions.getProposer(proposal_id), self.proposal_contract.functions.getMessage(proposal_id), self.proposal_contract.functions.getPayload(proposal_id), @@ -211,9 +211,9 @@ class 
Proposal(DAO.Proposal): def votes_total(self): return self.votes_for + self.votes_against + self.votes_abstain - def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: + async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: num_proposals = self.proposal_contract.functions.getTotal().call() - proposal_states = rp.multicall([ + proposal_states = await rp.multicall([ self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) @@ -224,10 +224,10 @@ def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: return proposals - def fetch_proposal(self, proposal_id: int) -> Proposal: + async def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, phase1_end, phase2_end, expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, - vp_required_raw, veto_quorum_raw) = rp.multicall([ + vp_required_raw, veto_quorum_raw) = await rp.multicall([ self.proposal_contract.functions.getProposer(proposal_id), self.proposal_contract.functions.getMessage(proposal_id), self.proposal_contract.functions.getPayload(proposal_id), diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index cb48a9e8..d90a1da3 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -223,7 +223,7 @@ async def prepare_args(args): if w3.is_address(arg_value): # get rocketpool related holdings value for this address address = w3.to_checksum_address(arg_value) - prefix = get_sea_creature_for_address(address) + prefix = await get_sea_creature_for_address(address) # handle validators if arg_key == "pubkey": diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 0dd38f22..2ab474e8 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -90,7 +90,7 @@ async def _get_order_book( async def _get_liquidity(self, market: Market, session: aiohttp.ClientSession) -> Optional[Liquidity]: 
bids, asks = await self._get_order_book(market, session) if not (bids and asks): - log.warning(f"Empty order book") + log.warning("Empty order book") return None bid_prices = np.array(list(bids.keys())) @@ -607,16 +607,16 @@ def get_normalized_price(self) -> float: pass @abstractmethod - def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Optional[Liquidity]: pass def __init__(self, pools: list[LiquidityPool]): self.pools = pools - def get_liquidity(self) -> dict[LiquidityPool, Liquidity]: + async def get_liquidity(self) -> dict[LiquidityPool, Liquidity]: pools = {} for pool in self.pools: - if liq := pool.get_liquidity(): + if liq := await pool.get_liquidity(): pools[pool] = liq return pools @@ -637,7 +637,7 @@ def get_price(self) -> float: def get_normalized_price(self) -> float: return self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) - def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Optional[Liquidity]: balance_0, balance_1 = self.vault.functions.getPoolTokens(self.id).call()[1] if (balance_0 == 0) or (balance_1 == 0): log.warning("Empty token balances") @@ -695,16 +695,16 @@ def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: bit_position = compressed % UniswapV3.TICK_WORD_SIZE return word_position, bit_position - def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: - results = rp.multicall([self.contract.functions.ticks(tick) for tick in ticks]) + async def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: + results = await rp.multicall([self.contract.functions.ticks(tick) for tick in ticks]) return dict(zip(ticks, [r[1] for r in results])) - def get_initialized_ticks(self, current_tick: int) -> list[int]: + async def get_initialized_ticks(self, current_tick: int) -> list[int]: ticks = [] active_word, b = self.tick_to_word_and_bit(current_tick) word_range = list(range(active_word - 5, active_word + 5)) - bitmaps = 
rp.multicall([ + bitmaps = await rp.multicall([ self.contract.functions.tickBitmap(word) for word in word_range ]) @@ -738,13 +738,13 @@ def get_price(self) -> float: def get_normalized_price(self) -> float: return self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) - def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Optional[Liquidity]: price = self.get_price() initial_liquidity = self.contract.functions.liquidity().call() calculated_tick = UniswapV3.price_to_tick(price) current_tick = int(calculated_tick) - ticks = self.get_initialized_ticks(current_tick) + ticks = await self.get_initialized_ticks(current_tick) if not ticks: log.warning("No liquidity found") @@ -752,12 +752,12 @@ def get_liquidity(self) -> Optional[Liquidity]: log.debug(f"Found {len(ticks)} initialized ticks!") - def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: + async def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: cumulative_liquidity = 0 last_tick = calculated_tick active_liquidity = initial_liquidity - net_liquidity: dict[int, int] = self.get_ticks_net_liquidity(_ticks) + net_liquidity: dict[int, int] = await self.get_ticks_net_liquidity(_ticks) liquidity = [] # assume liquidity in token 0 for now @@ -776,12 +776,12 @@ def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: return liquidity ask_ticks = [t for t in reversed(ticks) if t <= current_tick] + [UniswapV3.MIN_TICK] - ask_liquidity = [0] + get_cumulative_liquidity(ask_ticks) - ask_ticks = [calculated_tick] + ask_ticks + ask_liquidity = [0] + await get_cumulative_liquidity(ask_ticks) + ask_ticks.insert(0, calculated_tick) bid_ticks = [t for t in ticks if t > current_tick] + [UniswapV3.MAX_TICK] - bid_liquidity = [0] + get_cumulative_liquidity(bid_ticks) - bid_ticks = [calculated_tick] + bid_ticks + bid_liquidity = [0] + await get_cumulative_liquidity(bid_ticks) + bid_ticks.insert(0, calculated_tick) balance_norm = 10 ** (self.token_1.decimals - 
self.token_0.decimals) diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 6d14dd81..c917466c 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -111,18 +111,8 @@ def _normalize_calls(calls, default_require_success): flags.append(not req) return fns, flags - def multicall(self, calls, require_success=True) -> list: - """Sync multicall accepting ContractFunction objects or (fn, require_success) tuples.""" - fns, flags = self._normalize_calls(calls, require_success) - encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] - results = self._multicall.functions.aggregate3(encoded).call() - return [ - RocketPool._decode_fn_output(fns[i], data) if success else None - for i, (success, data) in enumerate(results) - ] - - async def multicall_async(self, calls, require_success=True) -> list: - """Async multicall accepting ContractFunction objects or (fn, require_success) tuples.""" + async def multicall(self, calls, require_success=True) -> list: + """Multicall accepting ContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] results = await self._multicall_async.functions.aggregate3(encoded).call() diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index 0e734898..10e51a55 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -2,7 +2,7 @@ from utils import solidity from utils.cfg import cfg from utils.rocketpool import rp -from utils.shared_w3 import w3 +from utils.shared_w3 import w3_async price_cache = { "block" : 0, @@ -47,19 +47,17 @@ def get_sea_creature_for_holdings(holdings): return next((sea_creature for holding_value, sea_creature in sea_creatures.items() if holdings >= holding_value), '') -def get_holding_for_address(address): - if 
cfg["rocketpool.chain"] != "mainnet": - return 0 - if price_cache["block"] != (b := w3.eth.block_number): +async def get_holding_for_address(address): + if price_cache["block"] != (b := await w3_async.eth.get_block_number()): price_cache["rpl_price"] = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) price_cache["reth_price"] = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate")) price_cache["block"] = b # get their eth balance - eth_balance = solidity.to_float(w3.eth.get_balance(address)) + eth_balance = solidity.to_float(await w3_async.eth.get_balance(address)) # get ERC-20 token balance for this address with contextlib.suppress(Exception): - rpl_balance, rplfs_balance, reth_balance = rp.multicall([ + rpl_balance, rplfs_balance, reth_balance = await rp.multicall([ rp.get_contract_by_name("rocketTokenRPL").functions.balanceOf(address), rp.get_contract_by_name("rocketTokenRPLFixedSupply").functions.balanceOf(address), rp.get_contract_by_name("rocketTokenRETH").functions.balanceOf(address), @@ -75,6 +73,5 @@ def get_holding_for_address(address): return eth_balance -def get_sea_creature_for_address(address): - # return the sea creature for the given holdings - return get_sea_creature_for_holdings(get_holding_for_address(address)) +async def get_sea_creature_for_address(address): + return get_sea_creature_for_holdings(await get_holding_for_address(address)) From a36e6e84bcd17efb199d29d730d0d04ea50fd3e8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 21:16:17 +0000 Subject: [PATCH 163/279] fully migrate to AsyncWeb3 --- rocketwatch/plugins/about/about.py | 2 +- rocketwatch/plugins/apr/apr.py | 14 +- .../plugins/beacon_events/beacon_events.py | 26 +-- rocketwatch/plugins/collateral/collateral.py | 12 +- rocketwatch/plugins/cow_orders/cow_orders.py | 35 ++-- rocketwatch/plugins/dao/dao.py | 121 +++++------ .../plugins/db_upkeep_task/db_upkeep_task.py | 94 +++++---- rocketwatch/plugins/debug/debug.py | 
28 +-- .../delegate_contracts/delegate_contracts.py | 6 +- .../plugins/deposit_pool/deposit_pool.py | 18 +- rocketwatch/plugins/event_core/event_core.py | 6 +- rocketwatch/plugins/events/events.py | 192 +++++++++--------- rocketwatch/plugins/governance/governance.py | 16 +- rocketwatch/plugins/lottery/lottery.py | 2 +- rocketwatch/plugins/milestones/milestones.py | 4 +- rocketwatch/plugins/queue/queue.py | 79 ++++--- rocketwatch/plugins/random/random.py | 30 +-- rocketwatch/plugins/rewards/rewards.py | 22 +- rocketwatch/plugins/rocksolid/rocksolid.py | 48 ++--- rocketwatch/plugins/rpl/rpl.py | 10 +- rocketwatch/plugins/snapshot/snapshot.py | 24 +-- .../plugins/transactions/transactions.py | 70 ++++--- rocketwatch/plugins/tvl/tvl.py | 26 +-- .../user_distribute/user_distribute.py | 16 +- .../validator_states/validator_states.py | 4 +- rocketwatch/plugins/wall/wall.py | 27 ++- rocketwatch/rocketwatch.py | 2 + rocketwatch/utils/block_time.py | 28 +-- rocketwatch/utils/cached_ens.py | 28 ++- rocketwatch/utils/dao.py | 91 +++++---- rocketwatch/utils/embeds.py | 62 +++--- rocketwatch/utils/etherscan.py | 2 +- rocketwatch/utils/event.py | 13 +- rocketwatch/utils/liquidity.py | 95 ++++++--- rocketwatch/utils/rocketpool.py | 178 ++++++++-------- rocketwatch/utils/sea_creatures.py | 23 ++- rocketwatch/utils/shared_w3.py | 13 +- 37 files changed, 785 insertions(+), 682 deletions(-) diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index f9e2c74f..83b0ba72 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -59,7 +59,7 @@ async def about(self, ctx: Context): f"{humanize.intcomma(sum(guild.member_count for guild in g))} Members reached!", inline=False) - address = el_explorer_url(cfg["rocketpool.manual_addresses.rocketStorage"]) + address = await el_explorer_url(cfg["rocketpool.manual_addresses.rocketStorage"]) e.add_field(name="Storage Contract", value=address) e.add_field(name="Chain", 
value=cfg["rocketpool.chain"].capitalize()) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index dedc3644..fab378e8 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -15,7 +15,7 @@ from utils.cfg import cfg from utils.embeds import Embed from utils.rocketpool import rp -from utils.shared_w3 import w3, historical_w3 +from utils.shared_w3 import w3, w3_archive from utils.visibility import is_hidden_weak log = logging.getLogger("apr") @@ -52,20 +52,20 @@ async def task(self): # get latest block update from the db latest_db_block = await self.bot.db.reth_apr.find_one(sort=[("block", -1)]) latest_db_block = 0 if latest_db_block is None else latest_db_block["block"] - cursor_block = historical_w3.eth.get_block("latest")["number"] + cursor_block = (await w3_archive.eth.get_block("latest"))["number"] while True: # get address of rocketNetworkBalances contract at cursor block - address = rp.uncached_get_address_by_name("rocketNetworkBalances", block=cursor_block) - balance_block = rp.call("rocketNetworkBalances.getBalancesBlock", block=cursor_block, address=address) + address = await rp.uncached_get_address_by_name("rocketNetworkBalances", block=cursor_block) + balance_block = await rp.call("rocketNetworkBalances.getBalancesBlock", block=cursor_block, address=address) if balance_block == latest_db_block: break - block_time = w3.eth.get_block(balance_block)["timestamp"] + block_time = (await w3.eth.get_block(balance_block))["timestamp"] # abort if the blocktime is older than 120 days if block_time < (datetime.now().timestamp() - 120 * 24 * 60 * 60): break - reth_ratio = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate", block=cursor_block)) + reth_ratio = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate", block=cursor_block)) effectiveness = solidity.to_float( - rp.call("rocketNetworkBalances.getETHUtilizationRate", block=cursor_block, address=address)) + await 
rp.call("rocketNetworkBalances.getETHUtilizationRate", block=cursor_block, address=address)) await self.bot.db.reth_apr.insert_one({ "block" : balance_block, "time" : block_time, diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index f7a44746..3372ba36 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -12,7 +12,7 @@ from utils.embeds import assemble, prepare_args from utils.readable import cl_explorer_url from utils.rocketpool import rp -from utils.shared_w3 import bacon, w3_async +from utils.shared_w3 import bacon, w3 from utils.solidity import date_to_beacon_block, beacon_block_to_date from utils.event import EventPlugin, Event from utils.block_time import ts_to_block @@ -32,8 +32,8 @@ async def _get_new_events(self) -> list[Event]: return await self.get_past_events(from_block, self._pending_block) async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: - from_slot = max(0, date_to_beacon_block((await w3_async.eth.get_block(from_block - 1)).timestamp) + 1) - to_slot = date_to_beacon_block((await w3_async.eth.get_block(to_block)).timestamp) + from_slot = max(0, date_to_beacon_block((await w3.eth.get_block(from_block - 1)).timestamp) + 1) + to_slot = date_to_beacon_block((await w3.eth.get_block(to_block)).timestamp) log.info(f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]") events: list[Event] = [] @@ -109,13 +109,13 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: slash["event_name"] = "validator_slash_event" args = await prepare_args(aDict(slash)) - if embed := assemble(args): + if embed := await assemble(args): events.append(Event( topic="beacon_events", embed=embed, event_name=slash["event_name"], unique_id=unique_id, - block_number=ts_to_block(timestamp), + block_number=await ts_to_block(timestamp), )) return events @@ -172,16 
+172,16 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: "timestamp": timestamp } - if eth_utils.is_same_address(fee_recipient, rp.get_address_by_name("rocketSmoothingPool")): + if eth_utils.is_same_address(fee_recipient, await rp.get_address_by_name("rocketSmoothingPool")): args["event_name"] = "mev_proposal_smoothie_event" - args["smoothie_amount"] = await w3_async.eth.get_balance( - w3_async.to_checksum_address(fee_recipient), block_identifier=block_number + args["smoothie_amount"] = await w3.eth.get_balance( + w3.to_checksum_address(fee_recipient), block_identifier=block_number ) else: args["event_name"] = "mev_proposal_event" args = await prepare_args(aDict(args)) - if not (embed := assemble(args)): + if not (embed := await assemble(args)): return None return Event( @@ -227,7 +227,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: "epoch": epoch_number } args = await prepare_args(aDict(args)) - if not (embed := assemble(args)): + if not (embed := await assemble(args)): return None event = Event( @@ -235,7 +235,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: embed=embed, event_name=event_name, unique_id=f"finality_delay_recover:{epoch_number}", - block_number=ts_to_block(timestamp) + block_number=await ts_to_block(timestamp) ) return event @@ -249,7 +249,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: "epoch" : epoch_number } args = await prepare_args(aDict(args)) - if not (embed := assemble(args)): + if not (embed := await assemble(args)): return None return Event( @@ -257,7 +257,7 @@ async def _check_finality(self, beacon_block: dict) -> Optional[Event]: embed=embed, event_name=event_name, unique_id=f"{epoch_number}:finality_delay", - block_number=ts_to_block(timestamp) + block_number=await ts_to_block(timestamp) ) return None diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 
b8ebdf1a..2835b3cd 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -45,11 +45,11 @@ async def collateral_distribution_raw(ctx: Context, distribution): async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]]: - node_staking = rp.get_contract_by_name("rocketNodeStaking") - minipool_manager = rp.get_contract_by_name("rocketMinipoolManager") + node_staking = await rp.get_contract_by_name("rocketNodeStaking") + minipool_manager = await rp.get_contract_by_name("rocketMinipoolManager") eb16s, eb8s, rpl_stakes = [], [], [] - nodes = rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) + nodes = await rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) for node_batch in as_chunks(nodes, 500): eb16s += await rp.multicall([ minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 16 * 10**18) for node in node_batch @@ -72,9 +72,9 @@ async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, async def get_average_collateral_percentage_per_node(collateral_cap: Optional[int], bonded: bool): # get stakes for each node - stakes = list(await get_node_minipools_and_collateral().values()) + stakes = list((await get_node_minipools_and_collateral()).values()) # get the current rpl price - rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) node_collaterals = [] for node in stakes: @@ -129,7 +129,7 @@ async def node_tvl_vs_collateral(self, if display_name is None: return - rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) data = await get_node_minipools_and_collateral() # Calculate each node's tvl and collateral and add it to the data diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 
257a67ab..5da75d1d 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -27,11 +27,7 @@ def __init__(self, bot: RocketWatch): self.state = "OK" self.collection = bot.db.cow_orders self._did_setup = False - - self.tokens = [ - str(rp.get_address_by_name("rocketTokenRPL")).lower(), - str(rp.get_address_by_name("rocketTokenRETH")).lower() - ] + self.tokens = None @command() async def cow(self, interaction: Interaction, tnx: str): @@ -53,7 +49,15 @@ async def _setup_collection(self): await self.collection.create_index("order_uid", unique=True) self._did_setup = True + async def _ensure_tokens(self): + if self.tokens is None: + self.tokens = [ + str(await rp.get_address_by_name("rocketTokenRPL")).lower(), + str(await rp.get_address_by_name("rocketTokenRETH")).lower() + ] + async def _get_new_events(self) -> list[Event]: + await self._ensure_tokens() await self._setup_collection() if self.state == "RUNNING": log.error("Cow Orders plugin was interrupted while running. 
Re-initializing...") @@ -141,10 +145,11 @@ async def check_for_new_events(self): if not cow_orders: return [] # get rpl price in dai - rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - reth_ratio = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate")) - rpl_price = rpl_ratio * rp.get_eth_usdc_price() - reth_price = reth_ratio * rp.get_eth_usdc_price() + rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + reth_ratio = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) + eth_usdc_price = await rp.get_eth_usdc_price() + rpl_price = rpl_ratio * eth_usdc_price + reth_price = reth_ratio * eth_usdc_price # generate payloads for order in cow_orders: @@ -161,9 +166,9 @@ async def check_for_new_events(self): data["ratio"] = int(order["sellAmount"]) / int(order["buyAmount"]) # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["sellAmount"])) - s = rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) + s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) try: - decimals = s.functions.decimals().call() + decimals = await s.functions.decimals().call() except: pass data["otherAmount"] = solidity.to_float(int(order["buyAmount"]), decimals) @@ -172,16 +177,16 @@ async def check_for_new_events(self): data["event_name"] = f"cow_order_buy_{token}_found" # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["buyAmount"])) - s = rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) + s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) try: - decimals = s.functions.decimals().call() + decimals = await s.functions.decimals().call() except: pass data["otherAmount"] = solidity.to_float(int(order["sellAmount"]), decimals) # our/other ratio data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] 
try: - data["otherToken"] = s.functions.symbol().call() + data["otherToken"] = await s.functions.symbol().call() except: data["otherToken"] = "UNKWN" if s.address == w3.to_checksum_address("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"): @@ -214,7 +219,7 @@ async def check_for_new_events(self): data = await prepare_args(data) - embed = assemble(data) + embed = await assemble(data) payload.append(Event( embed=embed, topic="cow_orders", diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 87263de3..dab90ffe 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -44,29 +44,20 @@ async def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: if state in current_proposals: current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) + parts = [] + for proposal in current_proposals[dao.ProposalState.Pending]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Pending\n```{body}```Voting starts , ends .") + for proposal in current_proposals[dao.ProposalState.Active]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Active\n```{body}```Voting ends .") + for proposal in current_proposals[dao.ProposalState.Succeeded]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") + return Embed( title=f"{dao.display_name} Proposals", - description="\n\n".join( - [ - ( - f"**Proposal #{proposal.id}** - Pending\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full)}```" - f"Voting starts , ends ." 
- ) for proposal in current_proposals[dao.ProposalState.Pending] - ] + [ - ( - f"**Proposal #{proposal.id}** - Active\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full)}```" - f"Voting ends ." - ) for proposal in current_proposals[dao.ProposalState.Active] - ] + [ - ( - f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full)}```" - f"Expires ." - ) for proposal in current_proposals[dao.ProposalState.Succeeded] - ] - ) or "No active proposals." + description="\n\n".join(parts) or "No active proposals." ) @staticmethod @@ -82,35 +73,23 @@ async def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: if state in current_proposals: current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) + parts = [] + for proposal in current_proposals[dao.ProposalState.Pending]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Pending\n```{body}```Voting starts , ends .") + for proposal in current_proposals[dao.ProposalState.ActivePhase1]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Active (Phase 1)\n```{body}```Next phase , voting ends .") + for proposal in current_proposals[dao.ProposalState.ActivePhase2]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Active (Phase 2)\n```{body}```Voting ends .") + for proposal in current_proposals[dao.ProposalState.Succeeded]: + body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) + parts.append(f"**Proposal #{proposal.id}** - Succeeded (Not Yet 
Executed)\n```{body}```Expires .") + return Embed( title="pDAO Proposals", - description="\n\n".join( - [ - ( - f"**Proposal #{proposal.id}** - Pending\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full)}```" - f"Voting starts , ends ." - ) for proposal in current_proposals[dao.ProposalState.Pending] - ] + [ - ( - f"**Proposal #{proposal.id}** - Active (Phase 1)\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full)}```" - f"Next phase , voting ends ." - ) for proposal in current_proposals[dao.ProposalState.ActivePhase1] - ] + [ - ( - f"**Proposal #{proposal.id}** - Active (Phase 2)\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full)}```" - f"Voting ends ." - ) for proposal in current_proposals[dao.ProposalState.ActivePhase2] - ] + [ - ( - f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n" - f"```{dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full)}```" - f"Expires ." - ) for proposal in current_proposals[dao.ProposalState.Succeeded] - ] - ) or "No active proposals." + description="\n\n".join(parts) or "No active proposals." 
) @command() @@ -151,46 +130,53 @@ class VoterPageView(PageView): def __init__(self, proposal: ProtocolDAO.Proposal): super().__init__(page_size=25) self.proposal = proposal - self._voter_list = self._get_voter_list(proposal) - - def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['OnchainDAO.Vote']: + self._voter_list = None + + async def _ensure_voter_list(self): + if self._voter_list is not None: + return + self._voter_list = await self._get_voter_list(self.proposal) + + async def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['OnchainDAO.Vote']: voters: dict[ChecksumAddress, OnchainDAO.Vote] = {} dao = ProtocolDAO() - + proposal_contract = await dao._get_proposal_contract() + for vote_log in get_logs( - dao.proposal_contract.events.ProposalVoted, - ts_to_block(proposal.start) - 1, - ts_to_block(proposal.end_phase_2) + 1, + proposal_contract.events.ProposalVoted, + await ts_to_block(proposal.start) - 1, + await ts_to_block(proposal.end_phase_2) + 1, {"proposalID": proposal.id} ): vote = OnchainDAO.Vote( - vote_log.args.voter, + vote_log.args.voter, vote_log.args.direction, solidity.to_float(vote_log.args.votingPower), vote_log.args.time ) voters[vote.voter] = vote - + for override_log in get_logs( - dao.proposal_contract.events.ProposalVoteOverridden, - ts_to_block(proposal.end_phase_1) - 1, - ts_to_block(proposal.end_phase_2) + 1, + proposal_contract.events.ProposalVoteOverridden, + await ts_to_block(proposal.end_phase_1) - 1, + await ts_to_block(proposal.end_phase_2) + 1, {"proposalID": proposal.id} ): voting_power = solidity.to_float(override_log.args.votingPower) voters[override_log.args.delegate].voting_power -= voting_power - + return sorted(voters.values(), key=attrgetter("voting_power"), reverse=True) @property def _title(self) -> str: return f"pDAO Proposal #{self.proposal.id} - Voter List" - async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: + async def _load_content(self, from_idx: int, to_idx: 
int) -> tuple[int, str]: + await self._ensure_voter_list() headers = ["#", "Voter", "Choice", "Weight"] data = [] for i, voter in enumerate(self._voter_list[from_idx:(to_idx + 1)], start=from_idx): - name = el_explorer_url(voter.voter, prefix=-1).split("[")[1].split("]")[0] + name = (await el_explorer_url(voter.voter, prefix=-1)).split("[")[1].split("]")[0] vote = ["", "Abstain", "For", "Against", "Veto"][voter.direction] voting_power = f"{voter.voting_power:,.2f}" data.append([i+1, name, vote, voting_power]) @@ -203,8 +189,9 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: async def _get_recent_proposals(self, interaction: Interaction, current: str) -> list[Choice[int]]: dao = ProtocolDAO() - num_proposals = dao.proposal_contract.functions.getTotal().call() - + proposal_contract = await dao._get_proposal_contract() + num_proposals = await proposal_contract.functions.getTotal().call() + if current: try: suggestions = [int(current)] @@ -213,9 +200,9 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> return [] else: suggestions = list(range(1, num_proposals + 1))[:-26:-1] - + titles: list[str] = await rp.multicall([ - dao.proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions + proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions ]) return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 4f24851b..f8ce81fd 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -165,14 +165,21 @@ async def _batch_multicall_update( return total = len(items) - batch_size = self.batch_size // len(call_fn(items[0])) + first_calls = await call_fn(items[0]) if asyncio.iscoroutinefunction(call_fn) else call_fn(items[0]) + batch_size = 
self.batch_size // len(first_calls) for i, batch in enumerate(as_chunks(items, batch_size)): if label: start = i * batch_size + 1 end = min((i + 1) * batch_size, total) log.debug(f"Processing {label} [{start}, {end}]/{total}") # call_fn(item) returns a list of (fn, require_success, transform, field) - expanded = [(item["address"], *t) for item in batch for t in call_fn(item)] + if asyncio.iscoroutinefunction(call_fn): + expanded = [] + for item in batch: + for t in await call_fn(item): + expanded.append((item["address"], *t)) + else: + expanded = [(item["address"], *t) for item in batch for t in call_fn(item)] calls = [(e[1], e[2]) for e in expanded] results = await rp.multicall(calls) updates = defaultdict(dict) @@ -190,8 +197,8 @@ async def _batch_multicall_update( @timerun_async async def add_untracked_node_operators(self): - nm = rp.get_contract_by_name("rocketNodeManager") - latest_rp = rp.call("rocketNodeManager.getNodeCount") - 1 + nm = await rp.get_contract_by_name("rocketNodeManager") + latest_rp = await rp.call("rocketNodeManager.getNodeCount") - 1 latest_db = 0 if res := await self.bot.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] @@ -206,8 +213,8 @@ async def add_untracked_node_operators(self): @timerun_async async def add_static_node_operator_data(self): - df = rp.get_contract_by_name("rocketNodeDistributorFactory") - mf = rp.get_contract_by_name("rocketMegapoolFactory") + df = await rp.get_contract_by_name("rocketNodeDistributorFactory") + mf = await rp.get_contract_by_name("rocketMegapoolFactory") def get_calls(n): return [ (df.functions.getProxyAddress(n["address"]), True, w3.to_checksum_address, "fee_distributor.address"), (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), @@ -220,12 +227,12 @@ def get_calls(n): return [ @timerun_async async def update_dynamic_node_operator_data(self): - mf = rp.get_contract_by_name("rocketMegapoolFactory") - nd = 
rp.get_contract_by_name("rocketNodeDeposit") - nm = rp.get_contract_by_name("rocketNodeManager") - mm = rp.get_contract_by_name("rocketMinipoolManager") - ns = rp.get_contract_by_name("rocketNodeStaking") - mc = rp.get_contract_by_name("multicall3") + mf = await rp.get_contract_by_name("rocketMegapoolFactory") + nd = await rp.get_contract_by_name("rocketNodeDeposit") + nm = await rp.get_contract_by_name("rocketNodeManager") + mm = await rp.get_contract_by_name("rocketMinipoolManager") + ns = await rp.get_contract_by_name("rocketNodeStaking") + mc = await rp.get_contract_by_name("multicall3") def get_calls(n): return [ (nm.functions.getNodeWithdrawalAddress(n["address"]), True, w3.to_checksum_address, "withdrawal_address"), (nm.functions.getNodeTimezoneLocation(n["address"]), True, None, "timezone_location"), @@ -254,22 +261,23 @@ def get_calls(n): return [ @timerun_async async def update_dynamic_megapool_data(self): - def mp_at(addr): return rp.assemble_contract("rocketMegapoolDelegate", address=addr) - def proxy_at(addr): return rp.assemble_contract("rocketMegapoolProxy", address=addr) - def get_calls(n): return [ - (mp_at(n["megapool"]["address"]).functions.getValidatorCount(), True, None, "megapool.validator_count"), - (mp_at(n["megapool"]["address"]).functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), - (mp_at(n["megapool"]["address"]).functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), - (mp_at(n["megapool"]["address"]).functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), - (mp_at(n["megapool"]["address"]).functions.getDebt(), True, safe_to_float, "megapool.debt"), - (mp_at(n["megapool"]["address"]).functions.getRefundValue(), True, safe_to_float, 
"megapool.refund_value"), - (mp_at(n["megapool"]["address"]).functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), - (mp_at(n["megapool"]["address"]).functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), - (proxy_at(n["megapool"]["address"]).functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), - (proxy_at(n["megapool"]["address"]).functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), - (proxy_at(n["megapool"]["address"]).functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), + async def get_calls(n): + mp = await rp.assemble_contract("rocketMegapoolDelegate", address=n["megapool"]["address"]) + proxy = await rp.assemble_contract("rocketMegapoolProxy", address=n["megapool"]["address"]) + return [ + (mp.functions.getValidatorCount(), True, None, "megapool.validator_count"), + (mp.functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), + (mp.functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), + (mp.functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), + (mp.functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), + (mp.functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), + (mp.functions.getDebt(), True, safe_to_float, "megapool.debt"), + (mp.functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), + (mp.functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), + (mp.functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), + (proxy.functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), + (proxy.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), + (proxy.functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), ] await 
self._batch_multicall_update( self.bot.db.node_operators, {"megapool.deployed": True}, @@ -281,8 +289,8 @@ def get_calls(n): return [ @timerun_async async def add_untracked_minipools(self): - mm = rp.get_contract_by_name("rocketMinipoolManager") - latest_rp = rp.call("rocketMinipoolManager.getMinipoolCount") - 1 + mm = await rp.get_contract_by_name("rocketMinipoolManager") + latest_rp = await rp.call("rocketMinipoolManager.getMinipoolCount") - 1 latest_db = 0 if res := await self.bot.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): latest_db = res["_id"] @@ -296,10 +304,10 @@ async def add_untracked_minipools(self): @timerun_async async def add_static_minipool_data(self): - mm = rp.get_contract_by_name("rocketMinipoolManager") - def lamb(n): return [ - (rp.assemble_contract("rocketMinipool", address=n["address"]).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), - (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), + mm = await rp.get_contract_by_name("rocketMinipoolManager") + async def lamb(n): return [ + ((await rp.assemble_contract("rocketMinipool", address=n["address"])).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), + (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), ] await self._batch_multicall_update( self.bot.db.minipools, @@ -315,12 +323,12 @@ async def add_static_minipool_deposit_data(self): ).sort("status_time", pymongo.ASCENDING).to_list() if not minipools: return - nd = rp.get_contract_by_name("rocketNodeDeposit") - mm = rp.get_contract_by_name("rocketMinipoolManager") + nd = await rp.get_contract_by_name("rocketNodeDeposit") + mm = await rp.get_contract_by_name("rocketMinipoolManager") for minipool_batch in as_chunks(minipools, self.batch_size): - block_start = ts_to_block(minipool_batch[0]["status_time"]) - 1 - block_end = ts_to_block(minipool_batch[-1]["status_time"]) + 1 + block_start = await 
ts_to_block(minipool_batch[0]["status_time"]) - 1 + block_end = await ts_to_block(minipool_batch[-1]["status_time"]) + 1 log.debug(f"Processing deposit data for blocks {block_start}..{block_end}") addresses = {m["address"] for m in minipool_batch} @@ -360,9 +368,9 @@ async def add_static_minipool_deposit_data(self): @timerun_async async def update_dynamic_minipool_data(self): - mc = rp.get_contract_by_name("multicall3") - def get_calls(n): - minipool_contract = rp.assemble_contract("rocketMinipool", address=n["address"]) + mc = await rp.get_contract_by_name("multicall3") + async def get_calls(n): + minipool_contract = await rp.assemble_contract("rocketMinipool", address=n["address"]) return [ (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), (minipool_contract.functions.getStatusTime(), True, None, "status_time"), @@ -438,7 +446,7 @@ async def add_untracked_megapool_validators(self): new_ids = list(range(db_count, on_chain_count)) log.debug(f"Adding {len(new_ids)} new validators for megapool {megapool_addr}") - megapool_contract = rp.assemble_contract("rocketMegapoolDelegate", address=megapool_addr) + megapool_contract = await rp.assemble_contract("rocketMegapoolDelegate", address=megapool_addr) for id_batch in as_chunks(new_ids, self.batch_size // 2): fns = [ fn @@ -482,7 +490,7 @@ async def update_dynamic_megapool_validator_data(self): end = min((i + 1) * self.batch_size, total) log.debug(f"Processing megapool validators [{start}, {end}]/{total}") fns = [ - rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"]).functions.getValidatorInfo(v["validator_id"]) + (await rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"])).functions.getValidatorInfo(v["validator_id"]) for v in batch ] results = await rp.multicall(fns) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index a5fb5434..566ba097 100644 --- a/rocketwatch/plugins/debug/debug.py +++ 
b/rocketwatch/plugins/debug/debug.py @@ -40,7 +40,7 @@ async def on_ready(self): for contract in rp.addresses.copy(): try: - for function in rp.get_contract_by_name(contract).functions: + for function in (await rp.get_contract_by_name(contract)).functions: self.function_names.append(f"{contract}.{function}") self.contract_names.append(contract) except Exception: @@ -132,11 +132,11 @@ async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_nam Decode transaction calldata """ await interaction.response.defer(ephemeral=True) - tnx = w3.eth.get_transaction(tnx_hash) + tnx = await w3.eth.get_transaction(tnx_hash) if contract_name: - contract = rp.get_contract_by_name(contract_name) + contract = await rp.get_contract_by_name(contract_name) else: - contract = rp.get_contract_by_address(tnx.to) + contract = await rp.get_contract_by_address(tnx.to) data = contract.decode_function_input(tnx.input) await interaction.followup.send(content=f"```Input:\n{data}```") @@ -148,8 +148,8 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): Try to return the revert reason of a transaction. 
""" await interaction.response.defer(ephemeral=True) - transaction_receipt = w3.eth.get_transaction(tnx_hash) - if revert_reason := rp.get_revert_reason(transaction_receipt): + transaction_receipt = await w3.eth.get_transaction(tnx_hash) + if revert_reason := await rp.get_revert_reason(transaction_receipt): await interaction.followup.send(content=f"```Revert reason: {revert_reason}```") else: await interaction.followup.send(content="```No revert reason Available```") @@ -262,7 +262,7 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): events_plugin: Events = self.bot.cogs["Events"] filtered_events = [] - for event_log in w3.eth.get_transaction_receipt(tx_hash).logs: + for event_log in (await w3.eth.get_transaction_receipt(tx_hash)).logs: if ("topics" in event_log) and (event_log["topics"][0].hex() in events_plugin.topic_map): filtered_events.append(event_log) @@ -318,8 +318,8 @@ async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int) """ await interaction.response.defer(ephemeral=is_hidden(interaction)) - block = ts_to_block(timestamp) - found_ts = block_to_ts(block) + block = await ts_to_block(timestamp) + found_ts = await block_to_ts(block) if found_ts == timestamp: text = ( @@ -340,7 +340,7 @@ async def get_abi_of_contract(self, interaction: Interaction, contract: str): """Retrieve the latest ABI for a contract""" await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) try: - abi = prettify_json_string(rp.uncached_get_abi_by_name(contract)) + abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) file = File(io.StringIO(abi), f"{contract}.{cfg['rocketpool.chain'].lower()}.abi.json") await interaction.followup.send(file=file) except Exception as err: @@ -353,8 +353,8 @@ async def get_address_of_contract(self, interaction: Interaction, contract: str) try: address = cfg["rocketpool.manual_addresses"].get(contract) if not address: - address = 
rp.uncached_get_address_by_name(contract) - await interaction.followup.send(content=el_explorer_url(address)) + address = await rp.uncached_get_address_by_name(contract) + await interaction.followup.send(content=await el_explorer_url(address)) except Exception as err: await interaction.followup.send(content=f"Exception: ```{repr(err)}```") if "No address found for" in repr(err): @@ -387,12 +387,12 @@ async def call( args = json.loads(json_args) if not isinstance(args, list): args = [args] - v = rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) + v = await rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) except Exception as err: await interaction.followup.send(content=f"Exception: ```{repr(err)}```") return try: - g = rp.estimate_gas_for_call(function, *args, block=block) + g = await rp.estimate_gas_for_call(function, *args, block=block) except Exception as err: g = "N/A" if isinstance(err, ValueError) and err.args and "code" in err.args and err.args[0]["code"] == -32000: diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index a12bcac9..9e803bd9 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -49,14 +49,14 @@ async def _delegate_stats( desc = "**Effective Delegate Distribution:**\n" c_sum = sum(d["count"] for d in distribution_stats) # refresh cached address - rp.uncached_get_address_by_name(latest_contract) - latest_addr = rp.get_address_by_name(latest_contract) + await rp.uncached_get_address_by_name(latest_contract) + latest_addr = await rp.get_address_by_name(latest_contract) for d in distribution_stats: a = w3.to_checksum_address(d["_id"]) name = s_hex(a) if a == latest_addr: name += " (Latest)" - desc += f"{s}{el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 
100:.2f}%)\n" + desc += f"{s}{await el_explorer_url(a, name)}: {d['count']:,} ({d['count'] / c_sum * 100:.2f}%)\n" desc += "\n" desc += "**Use Latest Delegate:**\n" c_sum = sum(use_latest_counts.values()) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 27a25111..32235952 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -23,9 +23,9 @@ def __init__(self, bot: RocketWatch): @staticmethod async def get_deposit_pool_stats() -> Embed: balance_raw, max_size_raw, max_amount_raw = await rp.multicall([ - rp.get_contract_by_name("rocketDepositPool").functions.getBalance(), - rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit").functions.getMaximumDepositPoolSize(), - rp.get_contract_by_name("rocketDepositPool").functions.getMaximumDepositAmount(), + (await rp.get_contract_by_name("rocketDepositPool")).functions.getBalance(), + (await rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit")).functions.getMaximumDepositPoolSize(), + (await rp.get_contract_by_name("rocketDepositPool")).functions.getMaximumDepositAmount(), ]) dp_balance = solidity.to_float(balance_raw) @@ -80,10 +80,10 @@ async def get_deposit_pool_stats() -> Embed: @staticmethod async def get_contract_collateral_stats() -> Embed: exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = await rp.multicall([ - rp.get_contract_by_name("rocketTokenRETH").functions.getExchangeRate(), - rp.get_contract_by_name("rocketTokenRETH").functions.totalSupply(), - rp.get_contract_by_name("rocketTokenRETH").functions.getCollateralRate(), - rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork").functions.getTargetRethCollateralRate(), + (await rp.get_contract_by_name("rocketTokenRETH")).functions.getExchangeRate(), + (await rp.get_contract_by_name("rocketTokenRETH")).functions.totalSupply(), + (await 
rp.get_contract_by_name("rocketTokenRETH")).functions.getCollateralRate(), + (await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork")).functions.getTargetRethCollateralRate(), ]) total_eth_in_reth: float = total_supply * exchange_rate / 10**36 @@ -142,8 +142,8 @@ async def get_status(self) -> Embed: if cfg["rocketpool.chain"] != "mainnet": return embed - reth_price = rp.get_reth_eth_price() - protocol_rate = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate")) + reth_price = await rp.get_reth_eth_price() + protocol_rate = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) relative_rate_diff = (reth_price / protocol_rate) - 1 expected_rate_diff = 0.0005 diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 2be63696..f47acf97 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -20,7 +20,7 @@ from utils.cfg import cfg from utils.embeds import assemble, Embed from utils.event import EventPlugin -from utils.shared_w3 import w3_async +from utils.shared_w3 import w3 log = logging.getLogger("event_core") log.setLevel(cfg["log_level"]) @@ -85,7 +85,7 @@ async def gather_new_events(self) -> None: log.info("Gathering messages from submodules") log.debug(f"{self.head_block = }") - latest_block = await w3_async.eth.get_block_number() + latest_block = await w3.eth.get_block_number() submodules = [cog for cog in self.bot.cogs.values() if isinstance(cog, EventPlugin)] log.debug(f"Running {len(submodules)} submodules") @@ -260,7 +260,7 @@ async def _update_status_message(self, channel_name: str, config: dict) -> None: await self._replace_or_add_status(channel_name, embed, state_message) async def show_service_interrupt(self) -> None: - embed = assemble(MutableAttributeDict({"event_name": "service_interrupted"})) + embed = await assemble(MutableAttributeDict({"event_name": "service_interrupted"})) for channel_name in 
cfg.get("events.status_message", {}).keys(): state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) if (not state_message) or (state_message["state"] != str(self.state.ERROR)): diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 9393f7d4..b54d1d74 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -20,7 +20,7 @@ from utils.embeds import assemble, prepare_args, el_explorer_url, Embed from utils.event import EventPlugin, Event from utils.rocketpool import rp, NoAddressFound -from utils.shared_w3 import w3_async, bacon +from utils.shared_w3 import w3, bacon from utils.solidity import SUBMISSION_KEYS from utils.block_time import block_to_ts @@ -34,12 +34,17 @@ class Events(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - partial_filters, event_map, topic_map = self._parse_event_config() + self._partial_filters = [] + self.event_map = {} + self.topic_map = {} + + async def async_init(self): + partial_filters, event_map, topic_map = await self._parse_event_config() self._partial_filters = partial_filters self.event_map = event_map self.topic_map = topic_map - def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: + async def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: with open("./plugins/events/events.json") as f: config = json.load(f) @@ -53,7 +58,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: for group in config["direct"]: contract_name = group["contract_name"] try: - contract = rp.get_contract_by_name(contract_name) + contract = await rp.get_contract_by_name(contract_name) addresses.add(contract.address) except NoAddressFound: log.warning(f"Failed to get contract {contract_name}") @@ -65,7 +70,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: log.info(f"Adding filter for {contract_name}.{event_name}") event_abi = 
contract.events[event_name].abi input_types = ','.join(i['type'] for i in event_abi['inputs']) - topic = w3_async.keccak(text=f"{event_name}({input_types})").hex() + topic = w3.keccak(text=f"{event_name}({input_types})").hex() except Exception as e: log.exception(e) log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") @@ -77,7 +82,7 @@ def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: if addresses: async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[LogReceipt]: - return await w3_async.eth.get_logs({ + return await w3.eth.get_logs({ "address" : list(addresses), "topics" : [list(aggregated_topics)], "fromBlock": _from, @@ -88,7 +93,7 @@ async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["la # generate filters for global events for group in config["global"]: try: - contract = rp.get_contract_by_name(name=group["contract_name"]) + contract = await rp.get_contract_by_name(name=group["contract_name"]) except Exception as e: log.warning(f"Failed to get contract {group['contract_name']}: {e}") continue @@ -101,8 +106,8 @@ async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["lat event_cls = _contract.events[_event["event_name"]] event_abi = event_cls.abi input_types = ','.join(i['type'] for i in event_abi['inputs']) - topic0 = w3_async.keccak(text=f"{_event['event_name']}({input_types})").hex() - raw_logs = await w3_async.eth.get_logs({ + topic0 = w3.keccak(text=f"{_event['event_name']}({input_types})").hex() + raw_logs = await w3.eth.get_logs({ "topics" : [topic0], "fromBlock": _from, "toBlock" : _to, @@ -152,7 +157,7 @@ async def trigger_event( @is_owner() async def replay_events(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() - receipt = await w3_async.eth.get_transaction_receipt(tx_hash) + receipt = await w3.eth.get_transaction_receipt(tx_hash) logs: list[LogReceipt] = receipt.logs 
filtered_events: list[LogReceipt | EventData] = [] @@ -168,7 +173,7 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): global_events = json.load(f)["global"] for group in global_events: - contract = rp.assemble_contract(name=group["contract_name"]) + contract = await rp.assemble_contract(name=group["contract_name"]) for event in group["events"]: event = contract.events[event["event_name"]]() rich_logs = event.process_receipt(receipt, errors=DISCARD) @@ -200,8 +205,9 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) old_config = self._partial_filters, self.event_map, self.topic_map try: - rp.flush() + await rp.flush() self.__init__(self.bot) + await self.async_init() return messages + await self.get_past_events(contract_upgrade_block + 1, to_block) except Exception as err: # rollback to pre upgrade config if this goes wrong @@ -214,7 +220,7 @@ async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[li upgrade_block = None log.debug(f"Aggregating {len(events)} events") - events: list[aDict] = self.aggregate_events(events) + events: list[aDict] = await self.aggregate_events(events) log.debug(f"Processing {len(events)} events") for event in events: @@ -233,9 +239,9 @@ def hash_args(_args: aDict) -> None: if (n := rp.get_name_by_address(event.address)) and "topics" in event: log.debug(f"Found event {event} for {n}") # default event path - contract = rp.get_contract_by_address(event.address) + contract = await rp.get_contract_by_address(event.address) contract_event = self.topic_map[event.topics[0].hex()] - topics = [w3_async.to_hex(t) for t in event.topics] + topics = [w3.to_hex(t) for t in event.topics] _event = aDict(contract.events[contract_event]().process_log(event)) _event.topics = topics _event.args = aDict(_event.args) @@ -289,7 +295,7 @@ def hash_args(_args: aDict) -> None: return messages, upgrade_block - def aggregate_events(self, events: list[LogReceipt | EventData]) -> 
list[aDict]: + async def aggregate_events(self, events: list[LogReceipt | EventData]) -> list[aDict]: # aggregate and deduplicate events within the same transaction events_by_tx = {} for event in reversed(events): @@ -304,7 +310,7 @@ def aggregate_events(self, events: list[LogReceipt | EventData]) -> list[aDict]: "unstETH.WithdrawalRequested": "amountOfStETH" } - def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: + async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: if "topics" in _event: contract_name = rp.get_name_by_address(_event["address"]) name = self.topic_map[_event["topics"][0].hex()] @@ -322,14 +328,14 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: events_by_name: dict[str, list[LogReceipt | EventData]] = {} for event in tx_events: - event_name, full_event_name = get_event_name(event) + event_name, full_event_name = await get_event_name(event) log.debug(f"Processing event {full_event_name}") if full_event_name not in events_by_name: events_by_name[full_event_name] = [] if full_event_name == "unstETH.WithdrawalRequested": - contract = rp.get_contract_by_address(event["address"]) + contract = await rp.get_contract_by_address(event["address"]) _event = aDict(contract.events[event_name]().process_log(event)) # sum up the amount of stETH withdrawn in this transaction if amount := tx_aggregates.get(full_event_name, 0): @@ -342,7 +348,7 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: continue if prev_event := tx_aggregates.get(full_event_name, None): # only keep largest rETH transfer - contract = rp.get_contract_by_address(event["address"]) + contract = await rp.get_contract_by_address(event["address"]) _event = aDict(contract.events[event_name]().process_log(event)) _prev_event = aDict(contract.events[event_name]().process_log(event)) if _prev_event["args"]["value"] > _event["args"]["value"]: @@ -363,7 +369,7 @@ def get_event_name(_event: LogReceipt | EventData) -> 
tuple[str, str]: events.remove(vote_event) elif full_event_name == "MinipoolPrestaked": for assign_event in events_by_name.get("rocketDepositPool.DepositAssigned", []).copy(): - assigned_minipool = w3_async.to_checksum_address(assign_event["topics"][1][-20:]) + assigned_minipool = w3.to_checksum_address(assign_event["topics"][1][-20:]) if event["address"] == assigned_minipool: events_by_name["rocketDepositPool.DepositAssigned"].remove(assign_event) events.remove(assign_event) @@ -382,7 +388,7 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: events = [aDict(event) for event in events] for event in events: - _, full_event_name = get_event_name(event) + _, full_event_name = await get_event_name(event) if full_event_name not in aggregation_attributes: continue @@ -395,10 +401,10 @@ def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: return events async def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: - receipt = await w3_async.eth.get_transaction_receipt(event.transactionHash) + receipt = await w3.eth.get_transaction_receipt(event.transactionHash) - is_minipool_event = rp.is_minipool(event.address) or rp.is_minipool(receipt.to) - is_megapool_event = rp.is_megapool(event.address) or rp.is_megapool(receipt.to) + is_minipool_event = await rp.is_minipool(event.address) or await rp.is_minipool(receipt.to) + is_megapool_event = await rp.is_megapool(event.address) or await rp.is_megapool(receipt.to) if not any([ is_minipool_event, @@ -418,13 +424,13 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Optional[E # maybe the contract has it stored? if not pubkey: - pubkey = rp.call("rocketMinipoolManager.getMinipoolPubkey", event.address).hex() + pubkey = (await rp.call("rocketMinipoolManager.getMinipoolPubkey", event.address)).hex() # maybe it's in the transaction? 
if not pubkey: with warnings.catch_warnings(): warnings.simplefilter("ignore") - deposit_contract = rp.get_contract_by_name("casperDeposit") + deposit_contract = await rp.get_contract_by_name("casperDeposit") processed_logs = deposit_contract.events.DepositEvent().process_receipt(receipt) # attempt to retrieve the pubkey @@ -446,7 +452,7 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Optional[E event.args.minipool = event.address if is_megapool_event: event.args.megapool = event.address - event.args.node = rp.call("rocketMegapoolDelegate.getNodeAddress", address=event.address) + event.args.node = await rp.call("rocketMegapoolDelegate.getNodeAddress", address=event.address) return await self.handle_event(event_name, event) @@ -455,17 +461,17 @@ async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: if "negative_rETH_ratio_update_event" in event_name: args.currRETHRate = solidity.to_float(args.totalEth) / solidity.to_float(args.rethSupply) if args.rethSupply > 0 else 1 - args.prevRETHRate = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate", block=event.blockNumber - 1)) + args.prevRETHRate = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate", block=event.blockNumber - 1)) d = args.currRETHRate - args.prevRETHRate if d > 0 or abs(d) < 0.00001: return None elif "price_update_event" in event_name: args.value = args.rplPrice - next_period = rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) + rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) + next_period = await rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) + await rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) args.rewardPeriodEnd = next_period - update_rate = rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) # in seconds + update_rate = await 
rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) # in seconds # get timestamp of event block - ts = block_to_ts(event.blockNumber) + ts = await block_to_ts(event.blockNumber) # check if the next update is after the next period ts earliest_next_update = ts + update_rate # if it will update before the next period, skip @@ -478,13 +484,13 @@ async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: match args.types[i]: case 0: # SettingType.UINT256 - value = w3_async.to_int(value_raw) + value = w3.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3_async.to_checksum_address(value_raw) + value = w3.to_checksum_address(value_raw) case _: value = "???" description_parts.append( @@ -510,19 +516,19 @@ def share_repr(percentage: float) -> str: f"{share_repr(odao_share)} {odao_share:.1f}%", ]) elif event_name == "bootstrap_sdao_member_kick_event": - args.memberAddress = el_explorer_url(args.memberAddress, block=(event.blockNumber - 1)) + args.memberAddress = await el_explorer_url(args.memberAddress, block=(event.blockNumber - 1)) elif event_name in [ "odao_member_leave_event", "odao_member_kick_event", "sdao_member_leave_event", "sdao_member_request_leave_event" ]: - args.nodeAddress = el_explorer_url(args.nodeAddress, block=(event.blockNumber - 1)) + args.nodeAddress = await el_explorer_url(args.nodeAddress, block=(event.blockNumber - 1)) elif event_name.startswith("cs_deposit") or event_name.startswith("cs_withdraw") or event_name.startswith("rocksolid_deposit"): args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) elif event_name.startswith("rocksolid_withdraw"): - assets = rp.call("RockSolidVault.convertToAssets", args.shares, block=event.blockNumber) + assets = await rp.call("RockSolidVault.convertToAssets", args.shares, block=event.blockNumber) args.assets = solidity.to_float(assets) args.shares = 
solidity.to_float(args.shares) elif event_name == "cs_max_validator_change_event": @@ -532,7 +538,7 @@ def share_repr(percentage: float) -> str: elif args.newLimit < args.oldLimit: event_name = event_name.replace("change", "decrease") elif event_name == "cs_operator_added_event": - args.address = await w3_async.eth.get_transaction_receipt(event.transactionHash)["from"] + args.address = await w3.eth.get_transaction_receipt(event.transactionHash)["from"] elif event_name == "cs_rpl_treasury_fee_change_event": args.oldFee = 100 * solidity.to_float(args.oldFee) args.newFee = 100 * solidity.to_float(args.newFee) @@ -544,7 +550,7 @@ def share_repr(percentage: float) -> str: args.oldFee = 100 * solidity.to_float(args.oldValue) args.newFee = 100 * solidity.to_float(args.newValue) elif event_name.startswith("cs_operators"): - args.operatorList = "\n".join([el_explorer_url(address) for address in args.operators]) + args.operatorList = "\n".join([await el_explorer_url(address) for address in args.operators]) elif event_name in ["cs_rpl_min_ratio_change_event", "cs_rpl_target_ratio_change_event"]: args.oldRatio = 100 * solidity.to_float(args.oldRatio) args.newRatio = 100 * solidity.to_float(args.newRatio) @@ -556,18 +562,18 @@ def share_repr(percentage: float) -> str: # signer = seller # sender = buyer # either the selling or buying token has to be the RPL token - rpl = rp.get_address_by_name("rocketTokenRPL") + rpl = await rp.get_address_by_name("rocketTokenRPL") if args.signerToken != rpl and args.senderToken != rpl: return None - args.seller = w3_async.to_checksum_address(f"0x{event.topics[2][-40:]}") - args.buyer = w3_async.to_checksum_address(f"0x{event.topics[3][-40:]}") + args.seller = w3.to_checksum_address(f"0x{event.topics[2][-40:]}") + args.buyer = w3.to_checksum_address(f"0x{event.topics[3][-40:]}") # token names - s = rp.assemble_contract(name="ERC20", address=args.signerToken) - args.sellToken = s.functions.symbol().call() - sell_decimals = 
s.functions.decimals().call() - b = rp.assemble_contract(name="ERC20", address=args.senderToken) - args.buyToken = b.functions.symbol().call() - buy_decimals = b.functions.decimals().call() + s = await rp.assemble_contract(name="ERC20", address=args.signerToken) + args.sellToken = await s.functions.symbol().call() + sell_decimals = await s.functions.decimals().call() + b = await rp.assemble_contract(name="ERC20", address=args.senderToken) + args.buyToken = await b.functions.symbol().call() + buy_decimals = await b.functions.decimals().call() # token amounts args.sellAmount = solidity.to_float(args.signerAmount, sell_decimals) args.buyAmount = solidity.to_float(args.senderAmount, buy_decimals) @@ -580,15 +586,15 @@ def share_repr(percentage: float) -> str: args.otherToken = args.sellToken if args.otherToken.lower() == "wETH": # get exchange rate from rp - args.marketExchangeRate = rp.call("rocketNetworkPrices.getRPLPrice") + args.marketExchangeRate = await rp.call("rocketNetworkPrices.getRPLPrice") # calculate the discount received compared to the market price args.discountAmount = (1 - args.exchangeRate / solidity.to_float(args.marketExchangeRate)) * 100 receipt = None if cfg["rocketpool.chain"] == "mainnet": - receipt = await w3_async.eth.get_transaction_receipt(event.transactionHash) + receipt = await w3.eth.get_transaction_receipt(event.transactionHash) args.tnx_fee = receipt["gasUsed"] * receipt["effectiveGasPrice"] - args.tnx_fee_usd = round(rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) + args.tnx_fee_usd = round(await rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) args.caller = receipt["from"] # add transaction hash and block number to args @@ -602,16 +608,16 @@ def share_repr(percentage: float) -> str: if "root" in event_name: # not interesting if the root wasn't submitted in response to a challenge # ChallengeState.Challenged = 1 - challenge_state = rp.call("rocketDAOProtocolVerifier.getChallengeState", proposal_id, args.index, 
block=event.blockNumber) + challenge_state = await rp.call("rocketDAOProtocolVerifier.getChallengeState", proposal_id, args.index, block=event.blockNumber) if challenge_state != 1: return None if "add" in event_name or "destroy" in event_name: - args.proposalBond = solidity.to_int(rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) + args.proposalBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) elif "root" in event_name or "challenge" in event_name: - args.proposalBond = solidity.to_int(rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) - args.challengeBond = solidity.to_int(rp.call("rocketDAOProtocolVerifier.getChallengeBond", proposal_id)) - args.challengePeriod = rp.call("rocketDAOProtocolVerifier.getChallengePeriod", proposal_id) + args.proposalBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) + args.challengeBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getChallengeBond", proposal_id)) + args.challengePeriod = await rp.call("rocketDAOProtocolVerifier.getChallengePeriod", proposal_id) # create human-readable decision for votes if "direction" in args: @@ -623,8 +629,8 @@ def share_repr(percentage: float) -> str: # not interesting return None elif "vote_override" in event_name: - proposal_block = rp.call("rocketDAOProtocolProposal.getProposalBlock", proposal_id) - args.votingPower = solidity.to_float(rp.call("rocketNetworkVoting.getVotingPower", args.voter, proposal_block)) + proposal_block = await rp.call("rocketDAOProtocolProposal.getProposalBlock", proposal_id) + args.votingPower = solidity.to_float(await rp.call("rocketNetworkVoting.getVotingPower", args.voter, proposal_block)) if args.votingPower < 100: # not interesting return None @@ -645,7 +651,7 @@ def share_repr(percentage: float) -> str: args.decision = "for" if args.supported else "against" # change prefix for DAO-specific event - dao_name = 
rp.call("rocketDAOProposal.getDAO", proposal_id) + dao_name = await rp.call("rocketDAOProposal.getDAO", proposal_id) event_name = event_name.replace("dao", { "rocketDAONodeTrustedProposals": "odao", "rocketDAOSecurityProposals": "sdao" @@ -661,16 +667,16 @@ def share_repr(percentage: float) -> str: ) # add inflation and new supply if inflation occurred elif "rpl_inflation" in event_name: - args.total_supply = int(solidity.to_float(rp.call("rocketTokenRPL.totalSupply"))) - args.inflation = round(rp.get_annual_rpl_inflation() * 100, 4) + args.total_supply = int(solidity.to_float(await rp.call("rocketTokenRPL.totalSupply"))) + args.inflation = round(await rp.get_annual_rpl_inflation() * 100, 4) elif "auction_bid_event" in event_name: eth = solidity.to_float(args.bidAmount) price = solidity.to_float( - rp.call("rocketAuctionManager.getLotPriceAtBlock", args.lotIndex, args.blockNumber)) + await rp.call("rocketAuctionManager.getLotPriceAtBlock", args.lotIndex, args.blockNumber)) args.rplAmount = eth / price if event_name in ["rpl_stake_event", "rpl_withdraw_event"]: # get eth price by multiplying the amount by the current RPL ratio - rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) args.amount = solidity.to_float(args.amount) args.ethAmount = args.amount * rpl_ratio elif event_name in ["node_merkle_rewards_claimed"]: @@ -680,8 +686,8 @@ def share_repr(percentage: float) -> str: args.amount = args.value / 10**18 if args["from"] in cfg["rocketpool.dao_multsigs"]: event_name = "pdao_erc20_transfer_event" - token_contract = rp.assemble_contract(name="ERC20", address=event["address"]) - args.symbol = token_contract.functions.symbol().call() + token_contract = await rp.assemble_contract(name="ERC20", address=event["address"]) + args.symbol = await token_contract.functions.symbol().call() elif token_prefix != "reth": return None elif event_name == "reth_burn_event": @@ 
-715,19 +721,19 @@ def share_repr(percentage: float) -> str: # loop over all possible contracts if we get a match return empty response for contract in possible_contracts: - if rp.get_address_by_name(contract) == args.claimingContract: + if await rp.get_address_by_name(contract) == args.claimingContract: return None if "node_register_event" in event_name: - args.timezone = rp.call("rocketNodeManager.getNodeTimezoneLocation", args.node) + args.timezone = await rp.call("rocketNodeManager.getNodeTimezoneLocation", args.node) if "odao_member_challenge_event" in event_name: - args.challengeDeadline = args.time + rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") + args.challengeDeadline = args.time + await rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") if "odao_member_challenge_decision_event" in event_name: if args.success: event_name = "odao_member_challenge_accepted_event" # get their RPL bond that was burned by querying the previous block args.rplBondAmount = solidity.to_float( - rp.call( + await rp.call( "rocketDAONodeTrusted.getMemberRPLBondAmount", args.nodeChallengedAddress, block=args.blockNumber - 1 @@ -737,10 +743,10 @@ def share_repr(percentage: float) -> str: else: event_name = "odao_member_challenge_rejected_event" if "node_smoothing_pool_state_changed" in event_name: - validator_count = rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) - megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", args.node) + validator_count = await rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) + megapool_address = await rp.call("rocketNodeManager.getMegapoolAddress", args.node) if megapool_address != "0x0000000000000000000000000000000000000000": - validator_count += rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) + validator_count += await rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) args.validatorCount = validator_count if 
args.state: event_name = "node_smoothing_pool_joined" @@ -753,14 +759,14 @@ def share_repr(percentage: float) -> str: event_name = "node_merkle_rewards_claimed_rpl" if "minipool_deposit_received_event" in event_name: - contract = rp.assemble_contract("rocketMinipoolDelegate", args.minipool) - args.commission = solidity.to_float(contract.functions.getNodeFee().call()) + contract = await rp.assemble_contract("rocketMinipoolDelegate", args.minipool) + args.commission = solidity.to_float(await contract.functions.getNodeFee().call()) # get the transaction receipt - args.depositAmount = rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber) + args.depositAmount = await rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber) user_deposit = args.depositAmount - receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] - ee = rp.get_contract_by_name("rocketNodeDeposit").events.DepositReceived() + ee = (await rp.get_contract_by_name("rocketNodeDeposit")).events.DepositReceived() with warnings.catch_warnings(): warnings.simplefilter("ignore") processed_logs = ee.process_receipt(receipt) @@ -771,12 +777,12 @@ def share_repr(percentage: float) -> str: if user_deposit < args.depositAmount: args.creditAmount = args.depositAmount - user_deposit args.balanceAmount = 0 - e = rp.get_contract_by_name("rocketVault").events.EtherWithdrawn() + e = (await rp.get_contract_by_name("rocketVault")).events.EtherWithdrawn() with warnings.catch_warnings(): warnings.simplefilter("ignore") processed_logs = e.process_receipt(receipt) - deposit_contract = bytes(w3_async.solidity_keccak(["string"], ["rocketNodeDeposit"])) + deposit_contract = bytes(w3.solidity_keccak(["string"], ["rocketNodeDeposit"])) for withdraw_event in processed_logs: # event.logindex 44, withdraw_event.logindex 50, rough distance like 
that # reminder order is different than the previous example @@ -799,18 +805,18 @@ def share_repr(percentage: float) -> str: case _: return None - args.operator = rp.call("rocketMinipoolDelegate.getNodeAddress", address=args.minipool) + args.operator = await rp.call("rocketMinipoolDelegate.getNodeAddress", address=args.minipool) if event_name in ["minipool_bond_reduce_event", "minipool_vacancy_prepared_event", "minipool_withdrawal_processed_event", "minipool_bond_reduction_started_event", "pool_deposit_assigned_event"]: # get the node operator address from minipool contract - contract = rp.assemble_contract("rocketMinipool", args.minipool) - args.node = contract.functions.getNodeAddress().call() + contract = await rp.assemble_contract("rocketMinipool", args.minipool) + args.node = await contract.functions.getNodeAddress().call() if "minipool_bond_reduction_started_event" in event_name: # get the previousBondAmount from the minipool contract args.previousBondAmount = solidity.to_float( - rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber - 1)) + await rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber - 1)) elif event_name == "minipool_withdrawal_processed_event": args.totalAmount = args.nodeAmount + args.userAmount elif event_name == "pool_deposit_assigned_event": @@ -820,7 +826,7 @@ def share_repr(percentage: float) -> str: args.assignmentCount = event["assignment_count"] else: return None - elif "minipool_scrub" in event_name and rp.call("rocketMinipoolDelegate.getVacant", address=args.minipool): + elif "minipool_scrub" in event_name and await rp.call("rocketMinipoolDelegate.getVacant", address=args.minipool): event_name = f"vacant_{event_name}" if event_name == "vacant_minipool_scrub_event": # let's try to determine the reason. there are 4 reasons a vacant minipool can get scrubbed: @@ -829,7 +835,7 @@ def share_repr(percentage: float) -> str: # 3. 
the validator does not have the active_ongoing validator status # 4. the migration could have timed out, the oDAO will scrub minipools after they have passed half of the migration window # get pubkey from minipool contract - pubkey = rp.call("rocketMinipoolManager.getMinipoolPubkey", args.minipool).hex() + pubkey = (await rp.call("rocketMinipoolManager.getMinipoolPubkey", args.minipool)).hex() vali_info = (await bacon.get_validator(f"0x{pubkey}"))["data"] reason = "joe fucking up (unknown reason)" if vali_info: @@ -839,7 +845,7 @@ def share_repr(percentage: float) -> str: reason = "having invalid withdrawal credentials set on the beacon chain" # check for #2 configured_balance = solidity.to_float( - rp.call("rocketMinipoolDelegate.getPreMigrationBalance", address=args.minipool, + await rp.call("rocketMinipoolDelegate.getPreMigrationBalance", address=args.minipool, block=args.blockNumber - 1)) if (solidity.to_float(vali_info["balance"], 9) - configured_balance) < -0.01: reason = "having a balance lower than configured in the minipool contract on the beacon chain" @@ -847,11 +853,11 @@ def share_repr(percentage: float) -> str: if vali_info["status"] != "active_ongoing": reason = "not being active on the beacon chain" # check for #4 - scrub_period = rp.call("rocketDAONodeTrustedSettingsMinipool.getPromotionScrubPeriod", + scrub_period = await rp.call("rocketDAONodeTrustedSettingsMinipool.getPromotionScrubPeriod", block=args.blockNumber - 1) - minipool_creation = rp.call("rocketMinipoolDelegate.getStatusTime", address=args.minipool, + minipool_creation = await rp.call("rocketMinipoolDelegate.getStatusTime", address=args.minipool, block=args.blockNumber - 1) - block_time = block_to_ts(args.blockNumber - 1) + block_time = await block_to_ts(args.blockNumber - 1) if block_time - minipool_creation > scrub_period // 2: reason = "taking too long to migrate their withdrawal credentials on the beacon chain" args.scrub_reason = reason @@ -860,12 +866,14 @@ def 
share_repr(percentage: float) -> str: if solidity.to_float(args.amountOfStETH) < 10_000: return None if receipt: - args.timestamp = block_to_ts(receipt["blockNumber"]) + args.timestamp = await block_to_ts(receipt["blockNumber"]) args.event_name = event_name args = await prepare_args(args) event.args = args - return assemble(args) + return await assemble(args) async def setup(bot): - await bot.add_cog(Events(bot)) + cog = Events(bot) + await cog.async_init() + await bot.add_cog(cog) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 0c85343a..5c0d2064 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -37,9 +37,9 @@ async def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal return [await dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] @staticmethod - def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: - from_block = ts_to_block(proposal.created) - 1 - to_block = ts_to_block(proposal.created) + 1 + async def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: + from_block = (await ts_to_block(proposal.created)) - 1 + to_block = (await ts_to_block(proposal.created)) + 1 log.info(f"Looking for proposal {proposal} in [{from_block},{to_block}]") for receipt in dao.proposal_contract.events.ProposalAdded().get_logs(from_block=from_block, to_block=to_block): @@ -88,11 +88,11 @@ def sanitize(text: str, max_length: int = 50) -> str: text = text[:(max_length - 1)] + "…" return text - def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: + async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: text = "" for _i, _proposal in enumerate(_proposals, start=1): _title = sanitize(_proposal.message, 40) - _tx_hash = self._get_tx_hash_for_proposal(_dao, _proposal) + _tx_hash = await self._get_tx_hash_for_proposal(_dao, _proposal) 
_url = f"{cfg['execution_layer.explorer']}/tx/{_tx_hash}" text += f" {_i}. [{_title}]({_url}) (#{_proposal.id})\n" return text @@ -103,7 +103,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: if proposals := await self._get_active_dao_proposals(dao): embed.description += "### Security Council\n" embed.description += "- **Active on-chain proposals**\n" - embed.description += print_proposals(dao, proposals) + embed.description += await print_proposals(dao, proposals) # --------- ORACLE DAO --------- # @@ -111,7 +111,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: if proposals := await self._get_active_dao_proposals(dao): embed.description += "### Oracle DAO\n" embed.description += "- **Active on-chain proposals**\n" - embed.description += print_proposals(dao, proposals) + embed.description += await print_proposals(dao, proposals) # --------- PROTOCOL DAO --------- # @@ -120,7 +120,7 @@ def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: if proposals := await self._get_active_pdao_proposals(dao): section_content += "- **Active on-chain proposals**\n" - section_content += print_proposals(dao, proposals) + section_content += await print_proposals(dao, proposals) if snapshot_proposals := await self._get_active_snapshot_proposals(): section_content += "- **Active Snapshot proposals**\n" diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index b490f1c0..cf024e85 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -125,7 +125,7 @@ async def generate_sync_committee_description(self, period): # sort by count node_operators = sorted(node_operators.items(), key=lambda x: x[1], reverse=True) description += "_Node Operators:_ " - description += ", ".join([f"{count}x {el_explorer_url(node_operator)}" for node_operator, count in + description += ", ".join([f"{count}x {await el_explorer_url(node_operator)}" for 
node_operator, count in node_operators]) return description diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 4dee313c..97919568 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -43,7 +43,7 @@ async def check_for_new_events(self): state = await self.collection.find_one({"_id": milestone["id"]}) - value = getattr(rp, milestone.function)(*milestone.args) + value = await getattr(rp, milestone.function)(*milestone.args) if milestone.formatter: value = getattr(solidity, milestone.formatter)(value) log.debug(f"{milestone.id}:{value}") @@ -62,7 +62,7 @@ async def check_for_new_events(self): previous_milestone = milestone.min if previous_milestone < latest_goal: log.info(f"Goal for milestone {milestone.id} has increased. Triggering Milestone!") - embed = assemble(aDict({ + embed = await assemble(aDict({ "event_name" : milestone.id, "result_value": value })) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 09d370dc..e853017b 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -1,23 +1,18 @@ -import math import logging from typing import Literal, NamedTuple -from functools import cache -from cachetools.func import ttl_cache from discord import Interaction from discord.app_commands import command, describe from discord.ext.commands import Cog from eth_typing import ChecksumAddress, BlockIdentifier from rocketwatch import RocketWatch -from utils import solidity from utils.cfg import cfg -from utils.embeds import Embed from utils.embeds import el_explorer_url from utils.rocketpool import rp from utils.visibility import is_hidden_weak -from utils.shared_w3 import w3_async +from utils.shared_w3 import w3 from utils.views import PageView log = logging.getLogger("queue") @@ -57,20 +52,23 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: ) return 
queue_length, queue_content + _el_url_cache: dict[tuple[str, str], str] = {} + @staticmethod - @ttl_cache(ttl=600) - def _cached_el_url(address, prefix="") -> str: - return el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) + async def _cached_el_url(address, prefix="") -> str: + key = (address, prefix) + if key not in Queue._el_url_cache: + Queue._el_url_cache[key] = await el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) + return Queue._el_url_cache[key] @staticmethod - @cache - def _megapool_to_node(megapool_address) -> ChecksumAddress: - return rp.call("rocketMegapoolDelegate.getNodeAddress", address=megapool_address) + async def _megapool_to_node(megapool_address) -> ChecksumAddress: + return await rp.call("rocketMegapoolDelegate.getNodeAddress", address=megapool_address) @staticmethod - def __format_queue_entry(entry: 'Queue.Entry') -> str: - node_address = Queue._megapool_to_node(entry.megapool) - node_label = Queue._cached_el_url(node_address) + async def __format_queue_entry(entry: 'Queue.Entry') -> str: + node_address = await Queue._megapool_to_node(entry.megapool) + node_label = await Queue._cached_el_url(node_address) return f"{node_label} #`{entry.validator_id}`" @staticmethod @@ -84,9 +82,9 @@ async def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: return await Queue._get_queue("deposit.queue.express", limit, start) @staticmethod - def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: - list_contract = rp.get_contract_by_name("linkedListStorage") - raw_entries, _ = list_contract.functions.scan(namespace, 0, start + limit).call(block_identifier=block_identifier) + async def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: + list_contract = await rp.get_contract_by_name("linkedListStorage") + raw_entries, _ = await list_contract.functions.scan(namespace, 0, start + 
limit).call(block_identifier=block_identifier) return [Queue.Entry(*entry) for entry in raw_entries][start:] @staticmethod @@ -94,22 +92,22 @@ async def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, s if limit <= 0: return 0, "" - list_contract = rp.get_contract_by_name("linkedListStorage") - queue_namespace = bytes(w3_async.solidity_keccak(["string"], [namespace])) + list_contract = await rp.get_contract_by_name("linkedListStorage") + queue_namespace = bytes(w3.solidity_keccak(["string"], [namespace])) start = max(start, 0) - latest_block = await w3_async.eth.get_block_number() - q_len = list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) - + latest_block = await w3.eth.get_block_number() + q_len = await list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) + if start >= q_len: return q_len, "" - - queue_entries = Queue._scan_list(queue_namespace, start, limit, latest_block) + + queue_entries = await Queue._scan_list(queue_namespace, start, limit, latest_block) content = "" for i, entry in enumerate(queue_entries): - entry_str = Queue.__format_queue_entry(entry) - content += f"{start+i+1}. {entry_str}\n" + entry_str = await Queue.__format_queue_entry(entry) + content += f"{start+i+1}. 
{entry_str}\n" return q_len, content @@ -137,16 +135,16 @@ def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_st async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the combined queue (express + standard)""" - latest_block = await w3_async.eth.get_block_number() - express_queue_rate = rp.call("rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block) - queue_index = rp.call("rocketDepositPool.getQueueIndex", block=latest_block) + latest_block = await w3.eth.get_block_number() + express_queue_rate = await rp.call("rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block) + queue_index = await rp.call("rocketDepositPool.getQueueIndex", block=latest_block) - list_contract = rp.get_contract_by_name("linkedListStorage") - exp_namespace = bytes(w3_async.solidity_keccak(["string"], ["deposit.queue.express"])) - std_namespace = bytes(w3_async.solidity_keccak(["string"], ["deposit.queue.standard"])) - - express_queue_length = list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) - standard_queue_length = list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) + list_contract = await rp.get_contract_by_name("linkedListStorage") + exp_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.express"])) + std_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.standard"])) + + express_queue_length = await list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) + standard_queue_length = await list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) q_len = express_queue_length + standard_queue_length if start >= q_len: @@ -170,25 +168,22 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: log.debug(f"{limit_express_queue = }") log.debug(f"{limit_standard_queue = }") - express_entries_rev = 
Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block)[::-1] - standard_entries_rev = Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block)[::-1] + express_entries_rev = (await Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block))[::-1] + standard_entries_rev = (await Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block))[::-1] - index_digits = len(str(max(standard_queue_length, express_queue_length))) content = "" for i in range(len(express_entries_rev) + len(standard_entries_rev)): effective_queue_index = queue_index + start + i is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate if (is_express and express_entries_rev) or (not standard_entries_rev): entry = express_entries_rev.pop() - # express_pos = start_express_queue + limit_express_queue - len(express_entries_rev) lane_pos = "🐇" else: entry = standard_entries_rev.pop() - # standard_pos = start_standard_queue + limit_standard_queue - len(standard_entries_rev) lane_pos = "🐢" overall_pos = start + i + 1 - entry_str = Queue.__format_queue_entry(entry) + entry_str = await Queue.__format_queue_entry(entry) content += f"{overall_pos}. 
{lane_pos} {entry_str}\n" return q_len, content diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 8ee496cf..b5fcf70d 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -128,7 +128,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): if address is not None: try: if ".eth" in address: - address = ens.resolve_name(address) + address = await ens.resolve_name(address) address = w3.to_checksum_address(address) except (ValueError, TypeError): e.description = "Invalid address" @@ -140,7 +140,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): else: # get the required holding from the dictionary required_holding = [h for h, c in sea_creatures.items() if c == creature[0]][0] - e.add_field(name="Visualization", value=el_explorer_url(address, prefix=creature), inline=False) + e.add_field(name="Visualization", value=await el_explorer_url(address, prefix=creature), inline=False) e.add_field(name="Required holding for emoji", value=f"{required_holding * len(creature)} ETH", inline=False) holding = await get_holding_for_address(address) e.add_field(name="Actual Holding", value=f"{holding:.0f} ETH", inline=False) @@ -157,7 +157,7 @@ async def sea_creatures(self, ctx: Context, address: str = None): async def smoothie(self, ctx: Context): """Show smoothing pool information""" try: - rp.get_address_by_name("rocketSmoothingPool") + await rp.get_address_by_name("rocketSmoothingPool") except Exception as err: log.exception(err) await ctx.send("redstone not deployed yet", ephemeral=True) @@ -165,7 +165,7 @@ async def smoothie(self, ctx: Context): await ctx.defer(ephemeral=is_hidden_weak(ctx)) e = Embed(title="Smoothing Pool") - smoothie_eth = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) + smoothie_eth = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) data = await (await 
self.bot.db.minipools.aggregate([ { '$match': { @@ -254,7 +254,7 @@ async def smoothie(self, ctx: Context): # minipool counts total_minipool_count = data[True]["count"] + data[False]["count"] smoothie_minipool_count = data[True]["count"] - d = datetime.now().timestamp() - rp.call("rocketRewardsPool.getClaimIntervalTimeStart") + d = datetime.now().timestamp() - await rp.call("rocketRewardsPool.getClaimIntervalTimeStart") e.description = f"`{smoothie_node_count}/{total_node_count}` nodes (`{smoothie_node_count / total_node_count:.2%}`)" \ f" have joined the smoothing pool.\n" \ f" That is `{smoothie_minipool_count}/{total_minipool_count}` minipools " \ @@ -262,21 +262,22 @@ async def smoothie(self, ctx: Context): f"The current (not overall) balance is **`{smoothie_eth:,.2f}` ETH.**\n" \ f"This is over a span of `{uptime(d)}`.\n\n" \ f"{min(smoothie_node_count, 5)} largest nodes:\n" - e.description += "\n".join(f"- `{d['count']:>4}` minipools - {el_explorer_url(d['address'])}" for d in - data[True]["counts"][:min(smoothie_node_count, 5)]) + lines = [f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" for d in + data[True]["counts"][:min(smoothie_node_count, 5)]] + e.description += "\n".join(lines) await ctx.send(embed=e) @hybrid_command() async def odao_challenges(self, ctx: Context): """Shows the current oDAO challenges""" await ctx.defer(ephemeral=is_hidden_weak(ctx)) - c = rp.get_contract_by_name("rocketDAONodeTrustedActions") + c = await rp.get_contract_by_name("rocketDAONodeTrustedActions") # get challenges made events = list(c.events["ActionChallengeMade"].get_logs( - from_block=w3.eth.get_block("latest").number - 7 * 24 * 60 * 60 // 12)) + from_block=(await w3.eth.get_block("latest")).number - 7 * 24 * 60 * 60 // 12)) # remove all events of nodes that aren't challenged anymore for event in events: - if not rp.call("rocketDAONodeTrusted.getMemberIsChallenged", event.args.nodeChallengedAddress): + if not await 
rp.call("rocketDAONodeTrusted.getMemberIsChallenged", event.args.nodeChallengedAddress): events.remove(event) # sort by block number events.sort(key=lambda x: x.blockNumber) @@ -286,11 +287,14 @@ async def odao_challenges(self, ctx: Context): e = Embed(title="Active oDAO Challenges") e.description = "" # get duration of challenge period - challenge_period = rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") + challenge_period = await rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") for event in events: - time_left = challenge_period - (w3.eth.get_block("latest").timestamp - event.args.time) + latest_block = await w3.eth.get_block("latest") + time_left = challenge_period - (latest_block.timestamp - event.args.time) time_left = uptime(time_left, True) - e.description += f"**{el_explorer_url(event.args.nodeChallengedAddress)}** was challenged by **{el_explorer_url(event.args.nodeChallengerAddress)}**\n" + challenged = await el_explorer_url(event.args.nodeChallengedAddress) + challenger = await el_explorer_url(event.args.nodeChallengerAddress) + e.description += f"**{challenged}** was challenged by **{challenger}**\n" e.description += f"Time Left: **{time_left}**\n\n" await ctx.send(embed=e) diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 37eace0f..8204e6a6 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -48,7 +48,7 @@ async def _make_request(self, address) -> dict: return await response.json() async def get_estimated_rewards(self, ctx: Context, address: str) -> Optional[RewardEstimate]: - if not rp.call("rocketNodeManager.getNodeExists", address): + if not await rp.call("rocketNodeManager.getNodeExists", address): await ctx.send(f"{address} is not a registered node.") return None @@ -59,10 +59,10 @@ async def get_estimated_rewards(self, ctx: Context, address: str) -> Optional[Re await ctx.send("Error fetching node data from Sprocket 
Pool API. Blame Patches.") return None - data_block = ts_to_block(patches_res["time"]) + data_block = await ts_to_block(patches_res["time"]) rpl_rewards: int = patches_res[address].get("collateralRpl", 0) eth_rewards: int = patches_res[address].get("smoothingPoolEth", 0) - interval_time = rp.call("rocketDAOProtocolSettingsRewards.getRewardsClaimIntervalTime", block=data_block) + interval_time = await rp.call("rocketDAOProtocolSettingsRewards.getRewardsClaimIntervalTime", block=data_block) return Rewards.RewardEstimate( address=address, @@ -103,7 +103,7 @@ async def upcoming_rewards(self, ctx: Context, node_address: str, extrapolate: b return if extrapolate: - registration_time = rp.call("rocketNodeManager.getNodeRegistrationTime", address) + registration_time = await rp.call("rocketNodeManager.getNodeRegistrationTime", address) reward_start_time = max(registration_time, rewards.start_time) proj_factor = (rewards.end_time - reward_start_time) / (rewards.data_time - reward_start_time) rewards.rpl_rewards *= proj_factor @@ -149,16 +149,16 @@ async def simulate_rewards( borrowed_eth = (24 * num_leb8) + (16 * num_eb16) data_block: int = rewards.data_block - reward_start_block = ts_to_block(rewards.start_time) + reward_start_block = await ts_to_block(rewards.start_time) - rpl_ratio = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) - actual_borrowed_eth = solidity.to_float(rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block)) - actual_rpl_stake = solidity.to_float(rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) + rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) + actual_borrowed_eth = solidity.to_float(await rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block)) + actual_rpl_stake = solidity.to_float(await rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) - inflation_rate: int = 
rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) - inflation_interval: int = rp.call("rocketTokenRPL.getInflationIntervalTime", block=data_block) + inflation_rate: int = await rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) + inflation_interval: int = await rp.call("rocketTokenRPL.getInflationIntervalTime", block=data_block) num_inflation_intervals: int = (rewards.end_time - rewards.start_time) // inflation_interval - total_supply: int = rp.call("rocketTokenRPL.totalSupply", block=reward_start_block) + total_supply: int = await rp.call("rocketTokenRPL.totalSupply", block=reward_start_block) period_inflation: int = total_supply for i in range(num_inflation_intervals): diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 0b0d1de7..50f0e40d 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -33,7 +33,7 @@ def __init__(self, bot: RocketWatch): self.deployment_block = 23237366 async def _fetch_asset_updates(self) -> list[tuple[int, float]]: - vault_contract = rp.get_contract_by_name("RockSolidVault") + vault_contract = await rp.get_contract_by_name("RockSolidVault") if db_entry := (await self.bot.db.last_checked_block.find_one({"_id": cog_id})): last_checked_block = db_entry["block"] @@ -41,7 +41,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: last_checked_block = self.deployment_block b_from = last_checked_block + 1 - b_to = w3.eth.get_block_number() + b_to = await w3.eth.get_block_number() updates = [] @@ -50,7 +50,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: db_operations = [] for event_log in get_logs(vault_contract.events.TotalAssetsUpdated, b_from, b_to): - ts = block_to_ts(event_log.blockNumber) + ts = await block_to_ts(event_log.blockNumber) assets = solidity.to_float(event_log.args.totalAssets) updates.append((ts, assets)) db_operations.append(InsertOne({"time": ts, 
"assets": assets})) @@ -74,28 +74,28 @@ async def rocksolid(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - current_block = w3.eth.get_block_number() - now = block_to_ts(current_block) + current_block = await w3.eth.get_block_number() + now = await block_to_ts(current_block) - def get_eth_rate(block_number: int) -> int: + async def get_eth_rate(block_number: int) -> int: block_number = max(block_number, self.deployment_block) - reth_value = rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) - return rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) - - current_eth_rate = get_eth_rate(current_block) - - def get_apy(days: int) -> Optional[float]: - reference_block = ts_to_block(now - timedelta(days=days).total_seconds()) + reth_value = await rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) + return await rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) + + current_eth_rate = await get_eth_rate(current_block) + + async def get_apy(days: int) -> Optional[float]: + reference_block = await ts_to_block(now - timedelta(days=days).total_seconds()) if reference_block < self.deployment_block: return None - return (current_eth_rate / get_eth_rate(reference_block) - 1) * (365 / days) * 100 + return (current_eth_rate / await get_eth_rate(reference_block) - 1) * (365 / days) * 100 - apy_7d = get_apy(days=7) - apy_30d = get_apy(days=30) - apy_90d = get_apy(days=90) - - tvl_reth = solidity.to_float(rp.call("RockSolidVault.totalAssets")) - tvl_rock_reth = solidity.to_float(rp.call("RockSolidVault.totalSupply")) + apy_7d = await get_apy(days=7) + apy_30d = await get_apy(days=30) + apy_90d = await get_apy(days=90) + + tvl_reth = solidity.to_float(await rp.call("RockSolidVault.totalAssets")) + tvl_rock_reth = solidity.to_float(await rp.call("RockSolidVault.totalSupply")) asset_updates: list[tuple[int, float]] = await 
self._fetch_asset_updates() current_date = datetime.fromtimestamp(asset_updates[0][0]).date() - timedelta(days=1) @@ -130,15 +130,15 @@ def get_apy(days: int) -> Optional[float]: img.seek(0) plt.clf() - ca_reth = rp.get_address_by_name("rocketTokenRETH") - ca_rock_reth = rp.get_address_by_name("RockSolidVault") + ca_reth = await rp.get_address_by_name("rocketTokenRETH") + ca_rock_reth = await rp.get_address_by_name("RockSolidVault") embed = Embed(title="<:rocksolid:1425091714267480158> RockSolid rETH Vault") embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if apy_7d else "-") embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if apy_30d else "-") embed.add_field(name="90d APY", value=f"{apy_90d:.2f}%" if apy_90d else "-") - embed.add_field(name="TVL", value=f"`{tvl_reth:,.2f}` {el_explorer_url(ca_reth, name=' rETH')}") - embed.add_field(name="Supply", value=f"`{tvl_rock_reth:,.2f}` {el_explorer_url(ca_rock_reth, name=' rock.rETH')}") + embed.add_field(name="TVL", value=f"`{tvl_reth:,.2f}` {await el_explorer_url(ca_reth, name=' rETH')}") + embed.add_field(name="Supply", value=f"`{tvl_rock_reth:,.2f}` {await el_explorer_url(ca_rock_reth, name=' rock.rETH')}") embed.set_image(url="attachment://rocksolid-tvl.png") await interaction.followup.send(embed=embed, file=File(img, "rocksolid-tvl.png")) diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 601be2de..b48fb18a 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -28,9 +28,9 @@ async def staked_rpl(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - rpl_supply = solidity.to_float(rp.call("rocketTokenRPL.totalSupply")) - legacy_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) - megapool_staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) + rpl_supply = solidity.to_float(await rp.call("rocketTokenRPL.totalSupply")) + 
legacy_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) + megapool_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) staked_rpl = legacy_staked_rpl + megapool_staked_rpl unstaking_rpl = (await (await self.bot.db.node_operators.aggregate([ { @@ -112,13 +112,13 @@ async def withdrawable_rpl(self, interaction: Interaction): } } ])).to_list() - rpl_eth_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_eth_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) # calculate withdrawable RPL at various RPL ETH prices # i/10 is the ratio of the price checked to the actual RPL ETH price free_rpl_liquidity = {} - max_collateral = solidity.to_float(rp.call("rocketDAOProtocolSettingsNode.getMinimumLegacyRPLStake")) + max_collateral = solidity.to_float(await rp.call("rocketDAOProtocolSettingsNode.getMinimumLegacyRPLStake")) current_withdrawable_rpl = 0 for i in range(1, 31): diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 735c3b57..86a3932f 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -240,13 +240,13 @@ def create_image(self, *, include_title: bool) -> Image: self.render_to(canvas, width, pad_left, pad_top, include_title=include_title) return canvas.image - def create_start_event(self) -> Event: + async def create_start_event(self) -> Event: embed = self.get_embed_template() embed.title = ":bulb: New Snapshot Proposal" return Event( embed=embed, topic="snapshot", - block_number=ts_to_block(self.start), + block_number=await ts_to_block(self.start), event_name="pdao_snapshot_vote_start", unique_id=f"snapshot_vote_start:{self.id}", image=self.create_image(include_title=True) @@ -264,7 +264,7 @@ def create_reached_quorum_event(self, block_number: BlockNumber) -> Event: image=self.create_image(include_title=True) ) - def 
create_end_event(self) -> Event: + async def create_end_event(self) -> Event: max_for, max_against = 0, 0 for choice, score in zip(self.choices, self.scores): if "against" in choice.lower(): @@ -281,7 +281,7 @@ def create_end_event(self) -> Event: return Event( embed=embed, topic="snapshot", - block_number=ts_to_block(self.end), + block_number=await ts_to_block(self.end), event_name="pdao_snapshot_vote_end", unique_id=f"snapshot_vote_end:{self.id}", image=self.create_image(include_title=True) @@ -350,10 +350,10 @@ def _format_weighted_choice(self, choice: WeightedChoice) -> str: ) return "```" + graph.get_string().replace("]", "%]") + "```" - def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: - node = rp.call("rocketSignerRegistry.signerToNode", self.voter) - signer = el_explorer_url(self.voter) - voter = signer if (node == ADDRESS_ZERO) else el_explorer_url(node) + async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: + node = await rp.call("rocketSignerRegistry.signerToNode", self.voter) + signer = await el_explorer_url(self.voter) + voter = signer if (node == ADDRESS_ZERO) else await el_explorer_url(node) vote_fmt = self.pretty_print() if vote_fmt is None: @@ -408,7 +408,7 @@ def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: return Event( embed=embed, topic="snapshot", - block_number=ts_to_block(self.created), + block_number=await ts_to_block(self.created), unique_id=f"snapshot_vote:{self.proposal.id}:{self.voter}:{self.created}", **conditional_args ) @@ -496,7 +496,7 @@ async def _get_new_events(self) -> list[Event]: log.info(f"Found expired proposal: {stored_proposal}") # recover full proposal if proposal := await self.fetch_proposal(stored_proposal["_id"]): - event = proposal.create_end_event() + event = await proposal.create_end_event() proposal_db_changes.append(DeleteOne(stored_proposal)) events.append(event) @@ -506,7 +506,7 @@ async def _get_new_events(self) -> 
list[Event]: if proposal.id not in known_active_proposals: # not aware of this proposal yet, emit event and insert into DB log.info(f"Found new proposal: {proposal}") - event = proposal.create_start_event() + event = await proposal.create_start_event() proposal_dict = { "_id" : proposal.id, "start" : proposal.start, @@ -553,7 +553,7 @@ async def _get_new_events(self) -> list[Event]: except IndexError: prev_vote = None - event = vote.create_event(prev_vote) + event = await vote.create_event(prev_vote) if event is None: continue diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 1c847ab7..e3d7cce1 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -16,7 +16,7 @@ from utils.embeds import assemble, prepare_args, el_explorer_url, Embed from utils.event import EventPlugin, Event from utils.rocketpool import rp -from utils.shared_w3 import w3_async +from utils.shared_w3 import w3 log = logging.getLogger("transactions") log.setLevel(cfg["log_level"]) @@ -25,12 +25,15 @@ class Transactions(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - contract_addresses, function_map = self._parse_transaction_config() - self.addresses = contract_addresses - self.function_map = function_map + self.addresses = None + self.function_map = None + + async def _ensure_config(self): + if self.addresses is None: + self.addresses, self.function_map = await self._parse_transaction_config() @staticmethod - def _parse_transaction_config() -> tuple[list[ChecksumAddress], dict]: + async def _parse_transaction_config() -> tuple[list[ChecksumAddress], dict]: addresses: list[ChecksumAddress] = [] function_map = {} @@ -39,7 +42,7 @@ def _parse_transaction_config() -> tuple[list[ChecksumAddress], dict]: for contract_name, mapping in tx_config.items(): try: - address = rp.get_address_by_name(contract_name) + address = await 
rp.get_address_by_name(contract_name) addresses.append(address) function_map[contract_name] = mapping except Exception: @@ -80,8 +83,9 @@ async def trigger_tx( @is_owner() async def replay_tx(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() - tnx = await w3_async.eth.get_transaction(tx_hash) - block = await w3_async.eth.get_block(tnx.blockHash) + await self._ensure_config() + tnx = await w3.eth.get_transaction(tx_hash) + block = await w3.eth.get_block(tnx.blockHash) responses: list[Event] = await self.process_transaction(block, tnx, tnx.to, tnx.input) if responses: @@ -90,6 +94,7 @@ async def replay_tx(self, interaction: Interaction, tx_hash: str): await interaction.followup.send(content="No events found.") async def _get_new_events(self) -> list[Event]: + await self._ensure_config() old_addresses = self.addresses try: from_block = self.last_served_block + 1 - self.lookback_distance @@ -100,6 +105,7 @@ async def _get_new_events(self) -> list[Event]: raise err async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + await self._ensure_config() events = [] for block in range(from_block, to_block): events.extend(await self.get_events_for_block(block)) @@ -108,7 +114,7 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) async def get_events_for_block(self, block_number: BlockIdentifier) -> list[Event]: log.debug(f"Checking block {block_number}") try: - block = await w3_async.eth.get_block(block_number, full_transactions=True) + block = await w3.eth.get_block(block_number, full_transactions=True) except web3.exceptions.BlockNotFound: log.error(f"Skipping block {block_number} as it can't be found") return [] @@ -141,20 +147,20 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: if "odao_disable" in event_name and not args.confirmDisableBootstrapMode: return [] elif event_name == "pdao_set_delegate": - receipt = await 
w3_async.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.delegator = receipt["from"] args.delegate = args.get("delegate") or args.get("newDelegate") - args.votingPower = solidity.to_float(rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber)) + args.votingPower = solidity.to_float(await rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber)) if (args.votingPower < 50) or (args.delegate == args.delegator): return [] elif "failed_deposit" in event_name: - receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] args.burnedValue = solidity.to_float(event.gasPrice * receipt.gasUsed) elif "deposit_pool_queue" in event_name: - receipt = await w3_async.eth.get_transaction_receipt(args.transactionHash) + receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] - event = rp.get_contract_by_name("rocketMinipoolQueue").events.MinipoolDequeued() + event = (await rp.get_contract_by_name("rocketMinipoolQueue")).events.MinipoolDequeued() # get the amount of dequeues that happened in this transaction using the event logs with warnings.catch_warnings(): warnings.simplefilter("ignore") @@ -189,13 +195,13 @@ def share_repr(percentage: float) -> str: match args.types[i]: case 0: # SettingType.UINT256 - value = w3_async.to_int(value_raw) + value = w3.to_int(value_raw) case 1: # SettingType.BOOL value = bool(value_raw) case 2: # SettingType.ADDRESS - value = w3_async.to_checksum_address(value_raw) + value = w3.to_checksum_address(value_raw) case _: value = "???" 
description_parts.append( @@ -203,12 +209,12 @@ def share_repr(percentage: float) -> str: ) args.description = "\n".join(description_parts) elif event_name == "sdao_member_kick": - args.memberAddress = el_explorer_url(args.memberAddress, block=(args.blockNumber - 1)) + args.memberAddress = await el_explorer_url(args.memberAddress, block=(args.blockNumber - 1)) elif event_name == "sdao_member_replace": - args.existingMemberAddress = el_explorer_url(args.existingMemberAddress, block=(args.blockNumber - 1)) + args.existingMemberAddress = await el_explorer_url(args.existingMemberAddress, block=(args.blockNumber - 1)) elif event_name == "sdao_member_kick_multi": args.member_list = ", ".join([ - el_explorer_url(member_address, block=(args.blockNumber - 1)) + await el_explorer_url(member_address, block=(args.blockNumber - 1)) for member_address in args.memberAddresses ]) elif event_name == "bootstrap_odao_network_upgrade": @@ -226,9 +232,9 @@ def share_repr(percentage: float) -> str: embeds = [] for contract_name in args.contractNames: # (recipient, amount, period_length, start, periods_total, periods_paid) - get_contract = rp.get_function("rocketClaimDAO.getContract", contract_name) - contract_pre = get_contract.call(block_identifier=(args.blockNumber - 1)) - contract_post = get_contract.call(block_identifier=args.blockNumber) + get_contract = await rp.get_function("rocketClaimDAO.getContract", contract_name) + contract_pre = await get_contract.call(block_identifier=(args.blockNumber - 1)) + contract_post = await get_contract.call(block_identifier=args.blockNumber) args.contract_name = contract_name args.periodLength = contract_post[2] @@ -245,13 +251,13 @@ def share_repr(percentage: float) -> str: else: args.contract_validity = f"The contract is valid for {periods_left} more periods." 
- embed = assemble(await prepare_args(args)) + embed = await assemble(await prepare_args(args)) embeds.append(embed) return embeds args = await prepare_args(args) - return [assemble(args)] + return [await assemble(args)] async def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Event]: if contract_address not in self.addresses: @@ -259,7 +265,7 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l contract_name = rp.get_name_by_address(contract_address) # get receipt and check if the transaction reverted using status attribute - receipt = await w3_async.eth.get_transaction_receipt(tnx.hash) + receipt = await w3.eth.get_transaction_receipt(tnx.hash) if contract_name == "rocketNodeDeposit" and receipt.status: log.info(f"Skipping successful node deposit {tnx.hash.hex()}") return [] @@ -269,7 +275,7 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l return [] try: - contract = rp.get_contract_by_address(contract_address) + contract = await rp.get_contract_by_address(contract_address) decoded = contract.decode_function_input(fn_input) except ValueError: log.error(f"Skipping transaction {tnx.hash.hex()} as it has invalid input") @@ -285,14 +291,14 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l event.args["timestamp"] = block.timestamp event.args["function_name"] = function if not receipt.status: - event.args["reason"] = rp.get_revert_reason(tnx) + event.args["reason"] = await rp.get_revert_reason(tnx) # if revert reason includes the phrase "insufficient for pre deposit" filter out if "insufficient for pre deposit" in event.args["reason"]: log.info(f"Skipping Insufficient Pre Deposit {tnx.hash.hex()}") return [] if event_name == "dao_proposal_execute": - dao_name = rp.call("rocketDAOProposal.getDAO", event.args["proposalID"]) + dao_name = await rp.call("rocketDAOProposal.getDAO", event.args["proposalID"]) # change prefix for DAO-specific event 
event_name = event_name.replace("dao", { "rocketDAONodeTrustedProposals": "odao", @@ -307,10 +313,10 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l proposal_id = event.args["proposalID"] if "pdao" in event_name: dao = ProtocolDAO() - payload = rp.call("rocketDAOProtocolProposal.getPayload", proposal_id) + payload = await rp.call("rocketDAOProtocolProposal.getPayload", proposal_id) else: - dao = DefaultDAO(rp.call("rocketDAOProposal.getDAO", proposal_id)) - payload = rp.call("rocketDAOProposal.getPayload", proposal_id) + dao = DefaultDAO(await rp.call("rocketDAOProposal.getDAO", proposal_id)) + payload = await rp.call("rocketDAOProposal.getPayload", proposal_id) event.args["executor"] = event["from"] proposal = await dao.fetch_proposal(proposal_id) @@ -336,7 +342,7 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l if "upgrade_triggered" in event_name: log.info(f"Detected contract upgrade at block {response.block_number}, reinitializing") - rp.flush() + await rp.flush() self.__init__(self.bot) return new_responses + responses diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index c00a5e6d..0958f6c7 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -114,9 +114,9 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } # note: _value in each dict will store the final string that gets rendered in the render - eth_price = rp.get_eth_usdc_price() - rpl_price = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - rpl_address = rp.get_address_by_name("rocketTokenRPL") + eth_price = await rp.get_eth_usdc_price() + rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_address = await rp.get_address_by_name("rocketTokenRPL") # Queued Minipools: initialisedCount of minipool_count_per_status * 1 ETH. 
# Minipools that are flagged as initialised have the following applied to them: @@ -259,15 +259,15 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - ETH from withdrawn Minipools, which gets stored in the rETH contract, surpasses the configured targetCollateralRate, # which is 10% at the time of writing. Once this occurs the ETH gets moved from the rETH contract to the Deposit Pool. data["Total ETH Locked"]["rETH Collateral"]["Deposit Pool"]["_val"] = solidity.to_float( - rp.call("rocketDepositPool.getBalance")) + await rp.call("rocketDepositPool.getBalance")) # Extra Collateral: This is ETH stored in the rETH contract from Minipools that have been withdrawn from. # This value has a cap - read the above comment for more information about that. data["Total ETH Locked"]["rETH Collateral"]["Extra Collateral"]["_val"] = solidity.to_float( - w3.eth.get_balance(rp.get_address_by_name("rocketTokenRETH"))) + await w3.eth.get_balance(await rp.get_address_by_name("rocketTokenRETH"))) # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. - smoothie_balance = solidity.to_float(w3.eth.get_balance(rp.get_address_by_name("rocketSmoothingPool"))) + smoothie_balance = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) tmp = await (await self.bot.db.node_operators.aggregate([ { '$match': { @@ -330,35 +330,35 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Unclaimed Smoothing Pool Rewards: This is ETH from the previous Reward Periods that have not been claimed yet. data["Total ETH Locked"]["Unclaimed Rewards"]["Smoothing Pool"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet")) + await rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet")) # Staked RPL: This is all ETH that has been staked by Node Operators. 
data["Total RPL Locked"]["Staked RPL"]["Node Operators"]["_val"] = solidity.to_float( - rp.call("rocketNodeStaking.getTotalStakedRPL")) + await rp.call("rocketNodeStaking.getTotalStakedRPL")) # oDAO bonded RPL: RPL oDAO Members have to lock up to join it. This RPL can be slashed if they misbehave. data["Total RPL Locked"]["Staked RPL"]["oDAO Bond"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOfToken", "rocketDAONodeTrustedActions", rpl_address)) + await rp.call("rocketVault.balanceOfToken", "rocketDAONodeTrustedActions", rpl_address)) # Unclaimed RPL Rewards: RPL rewards that have been earned by Node Operators but have not been claimed yet. data["Total RPL Locked"]["Unclaimed Rewards"]["Node Operators & oDAO"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOfToken", "rocketMerkleDistributorMainnet", rpl_address)) + await rp.call("rocketVault.balanceOfToken", "rocketMerkleDistributorMainnet", rpl_address)) # Undistributed pDAO Rewards: RPL rewards that have been earned by the pDAO but have not been distributed yet. data["Total RPL Locked"]["Unclaimed Rewards"]["pDAO"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOfToken", "rocketClaimDAO", rpl_address)) + await rp.call("rocketVault.balanceOfToken", "rocketClaimDAO", rpl_address)) # Unused Inflation: RPL that has been minted but not yet been used for rewards. # This is (or was) an issue as the snapshots didn't account for the last day of inflation. # Joe is already looking into this. data["Total RPL Locked"]["Unused Inflation"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOfToken", "rocketRewardsPool", rpl_address)) + await rp.call("rocketVault.balanceOfToken", "rocketRewardsPool", rpl_address)) # Slashed RPL: RPL that is slashed gets moved to the Auction Manager Contract. # This RPL will be sold using a Dutch Auction for ETH, which the gets moved to the rETH contract to be used as # extra rETH collateral. 
data["Total RPL Locked"]["Slashed RPL"]["_val"] = solidity.to_float( - rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) + await rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) # create _value string for each branch. the _value is the sum of all _val or _val values in the children tmp = await (await self.bot.db.node_operators.aggregate([ diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 1a1bbd7e..1eb2b1e6 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -27,16 +27,16 @@ def __init__(self, eligible: list[dict], distributable: list[dict], instruction_ @ui.button(label="Instructions", style=ButtonStyle.blurple) async def instructions(self, interaction: Interaction, _) -> None: - mp_contract = rp.assemble_contract("rocketMinipoolDelegate") + mp_contract = await rp.assemble_contract("rocketMinipoolDelegate") bud_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="beginUserDistribute")[2:]) dist_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:]) calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] calls += [(mp["address"], True, bud_calldata) for mp in self.eligible] - multicall_contract = rp.get_contract_by_name("multicall3") - gas_used = multicall_contract.functions.aggregate3(calls).estimate_gas() - gas_price = w3.eth.gas_price + multicall_contract = await rp.get_contract_by_name("multicall3") + gas_used = await multicall_contract.functions.aggregate3(calls).estimate_gas() + gas_price = await w3.eth.gas_price cost_eth = gas_used * gas_price / 1e18 tuple_strs = [] @@ -126,12 +126,12 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: distributable = [] current_time = int(time.time()) - ud_window_start = 
rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") - ud_window_end = ud_window_start + rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") + ud_window_start = await rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") + ud_window_end = ud_window_start + await rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") for mp in minipools: mp["address"] = w3.to_checksum_address(mp["address"]) - storage = w3.eth.get_storage_at(mp["address"], 0x17) + storage = await w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time: int = int.from_bytes(storage, "big") elapsed_time = current_time - user_distribute_time @@ -140,7 +140,7 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: elif elapsed_time < ud_window_start: mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) - elif not rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): # double check, DB may lag behind + elif not await rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): # double check, DB may lag behind mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 8ee6cb73..dcf78f4f 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -184,14 +184,14 @@ async def validator_states(self, interaction: Interaction): if num_exiting > 0: description += "\n**Exiting Node Operators**\n" - description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) + description += ", ".join([f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) if remaining_no := 
exiting_node_operators[num_exiting:]: num_remaining_valis = sum([c for _, c in remaining_no]) description += f", and {len(remaining_no)} more ({num_remaining_valis})" description += "\n" if num_withdrawn > 0: description += "\n**Withdrawn Node Operators**\n" - description += ", ".join([f"{el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) + description += ", ".join([f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) if remaining_no := withdrawn_node_operators[num_withdrawn:]: num_remaining_valis = sum([c for _, c in remaining_no]) description += f", and {len(remaining_no)} more ({num_remaining_valis})" diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index a5b153b8..1e14515a 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -58,15 +58,20 @@ def __init__(self, bot: RocketWatch): Bitrue("RPL", ["USDT"]), CoinTR("RPL", ["USDT"]), } - self.dex: set[DEX] = { - BalancerV2([ - BalancerV2.WeightedPool(HexStr("0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462")) - ]), - UniswapV3([ - cast(ChecksumAddress, "0xe42318eA3b998e8355a3Da364EB9D48eC725Eb45"), - cast(ChecksumAddress, "0xcf15aD9bE9d33384B74b94D63D06B4A9Bd82f640") - ]) - } + self.dex: Optional[set[DEX]] = None + + async def _get_dex(self) -> set[DEX]: + if self.dex is None: + self.dex = { + BalancerV2([ + await BalancerV2.WeightedPool.create(HexStr("0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462")) + ]), + await UniswapV3.create([ + cast(ChecksumAddress, "0xe42318eA3b998e8355a3Da364EB9D48eC725Eb45"), + cast(ChecksumAddress, "0xcf15aD9bE9d33384B74b94D63D06B4A9Bd82f640") + ]) + } + return self.dex @staticmethod def _get_market_depth_and_liquidity( @@ -104,7 +109,7 @@ async def _get_cex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[CEX, async def _get_dex_data(self, x: np.ndarray, 
rpl_usd: float) -> OrderedDict[DEX, np.ndarray]: depth: dict[DEX, np.ndarray] = {} liquidity: dict[DEX, float] = {} - for dex in self.dex: + for dex in await self._get_dex(): if pools := await dex.get_liquidity(): depth[dex], liquidity[dex] = self._get_market_depth_and_liquidity(pools, x, rpl_usd) @@ -252,7 +257,7 @@ async def on_fail() -> None: async with aiohttp.ClientSession() as session: # use Binance as USD price oracle rpl_usd = list((await Binance("RPL", ["USDT"]).get_liquidity(session)).values())[0].price - eth_usd = rp.get_eth_usdc_price() + eth_usd = await rp.get_eth_usdc_price() rpl_eth = rpl_usd / eth_usd except Exception as e: await self.bot.report_error(e, ctx) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index c8352988..56cd3e54 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -22,6 +22,7 @@ from utils.cfg import cfg from utils.retry import retry_async +from utils.rocketpool import rp log = logging.getLogger("rocketwatch") log.setLevel(cfg["log_level"]) @@ -77,6 +78,7 @@ def should_load_plugin(_plugin: str) -> bool: log.info('Finished loading plugins') async def setup_hook(self) -> None: + await rp.async_init() await self._load_plugins() async def sync_commands(self) -> None: diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 21d9aaeb..d39cf8b3 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -1,6 +1,5 @@ import math import logging -from functools import cache from utils.cfg import cfg from utils.shared_w3 import w3 @@ -8,25 +7,28 @@ log = logging.getLogger("block_time") log.setLevel(cfg["log_level"]) +_block_ts_cache: dict[int, int] = {} -@cache -def block_to_ts(block_number: int) -> int: - return w3.eth.get_block(block_number).timestamp +async def block_to_ts(block_number: int) -> int: + if block_number in _block_ts_cache: + return _block_ts_cache[block_number] + ts = (await w3.eth.get_block(block_number)).timestamp + 
_block_ts_cache[block_number] = ts + return ts -@cache -def ts_to_block(target_ts: int) -> int: +async def ts_to_block(target_ts: int) -> int: log.debug(f"Looking for block at timestamp {target_ts}") - if target_ts < block_to_ts(1): + if target_ts < await block_to_ts(1): # genesis block doesn't have a timestamp return 0 lo = 1 - hi = w3.eth.block_number - 1 - + hi = await w3.eth.get_block_number() - 1 + # simple binary search over block numbers while lo < hi: mid = math.ceil((lo + hi) / 2) - ts = block_to_ts(mid) + ts = await block_to_ts(mid) if ts < target_ts: lo = mid @@ -38,8 +40,8 @@ def ts_to_block(target_ts: int) -> int: # l == r, highest block number below target block = hi - if abs(block_to_ts(block + 1) - target_ts) < abs(block_to_ts(block) - target_ts): + if abs(await block_to_ts(block + 1) - target_ts) < abs(await block_to_ts(block) - target_ts): block += 1 - - log.debug(f"Closest match: block {block} @ {block_to_ts(block)}") + + log.debug(f"Closest match: block {block} @ {await block_to_ts(block)}") return block diff --git a/rocketwatch/utils/cached_ens.py b/rocketwatch/utils/cached_ens.py index 48b26aa1..233a99bc 100644 --- a/rocketwatch/utils/cached_ens.py +++ b/rocketwatch/utils/cached_ens.py @@ -1,27 +1,35 @@ import logging from typing import Optional -from cachetools.func import ttl_cache -from ens import ENS +from ens import AsyncENS from eth_typing import ChecksumAddress from utils.cfg import cfg -from utils.shared_w3 import mainnet_w3 +from utils.shared_w3 import w3_mainnet log = logging.getLogger("cached_ens") log.setLevel(cfg["log_level"]) +_name_cache: dict[ChecksumAddress, Optional[str]] = {} +_address_cache: dict[str, Optional[ChecksumAddress]] = {} + class CachedEns: def __init__(self): - self.ens = ENS.from_web3(mainnet_w3) + self.ens = AsyncENS.from_web3(w3_mainnet) - @ttl_cache(ttl=300) - def get_name(self, address: ChecksumAddress) -> Optional[str]: + async def get_name(self, address: ChecksumAddress) -> Optional[str]: + if address in 
_name_cache: + return _name_cache[address] log.debug(f"Retrieving ENS name for {address}") - return self.ens.name(address) + name = await self.ens.name(address) + _name_cache[address] = name + return name - @ttl_cache(ttl=300) - def resolve_name(self, name: str) -> Optional[ChecksumAddress]: + async def resolve_name(self, name: str) -> Optional[ChecksumAddress]: + if name in _address_cache: + return _address_cache[name] log.debug(f"Resolving ENS name {name}") - return self.ens.address(name) + address = await self.ens.address(name) + _address_cache[name] = address + return address diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 66fd820a..198e31cc 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -20,8 +20,19 @@ class DAO(ABC): def __init__(self, contract_name: str, proposal_contract_name: str): self.contract_name = contract_name - self.contract = rp.get_contract_by_name(contract_name) - self.proposal_contract = rp.get_contract_by_name(proposal_contract_name) + self._proposal_contract_name = proposal_contract_name + self._contract = None + self._proposal_contract = None + + async def _get_contract(self): + if self._contract is None: + self._contract = await rp.get_contract_by_name(self.contract_name) + return self._contract + + async def _get_proposal_contract(self): + if self._proposal_contract is None: + self._proposal_contract = await rp.get_contract_by_name(self._proposal_contract_name) + return self._proposal_contract @dataclass(frozen=True, slots=True) class Proposal(ABC): @@ -31,7 +42,6 @@ class Proposal(ABC): payload: bytes created: int - @staticmethod @abstractmethod async def fetch_proposal(self, proposal_id: int) -> Proposal: pass @@ -47,7 +57,7 @@ def sanitize(message: str) -> str: message = message[:(max_length - 1)] + "…" return message - def build_proposal_body( + async def build_proposal_body( self, proposal: Proposal, *, @@ -62,7 +72,8 @@ def build_proposal_body( if include_payload: try: - decoded = 
self.contract.decode_function_input(proposal.payload) + contract = await self._get_contract() + decoded = contract.decode_function_input(proposal.payload) function_name = decoded[0].function_identifier args = [f" {arg} = {value}" for arg, value in decoded[1].items()] payload_str = f"{function_name}(\n" + "\n".join(args) + "\n)" @@ -107,14 +118,15 @@ class Proposal(DAO.Proposal): votes_required: int async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: - num_proposals = self.proposal_contract.functions.getTotal().call() + proposal_contract = await self._get_proposal_contract() + num_proposals = await proposal_contract.functions.getTotal().call() proposal_dao_names = await rp.multicall([ - self.proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) + proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) relevant_proposals = [(i+1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] proposal_states = await rp.multicall([ - self.proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals + proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals ]) proposals = {state: [] for state in DefaultDAO.ProposalState} @@ -124,18 +136,19 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: return proposals async def fetch_proposal(self, proposal_id: int) -> Proposal: + proposal_contract = await self._get_proposal_contract() (proposer, message, payload, created, start, end, expires, votes_for_raw, votes_against_raw, votes_required_raw) = await rp.multicall([ - self.proposal_contract.functions.getProposer(proposal_id), - self.proposal_contract.functions.getMessage(proposal_id), - self.proposal_contract.functions.getPayload(proposal_id), - self.proposal_contract.functions.getCreated(proposal_id), - self.proposal_contract.functions.getStart(proposal_id), - 
self.proposal_contract.functions.getEnd(proposal_id), - self.proposal_contract.functions.getExpires(proposal_id), - self.proposal_contract.functions.getVotesFor(proposal_id), - self.proposal_contract.functions.getVotesAgainst(proposal_id), - self.proposal_contract.functions.getVotesRequired(proposal_id) + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getEnd(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotesFor(proposal_id), + proposal_contract.functions.getVotesAgainst(proposal_id), + proposal_contract.functions.getVotesRequired(proposal_id) ]) return DefaultDAO.Proposal( id=proposal_id, @@ -212,9 +225,10 @@ def votes_total(self): return self.votes_for + self.votes_against + self.votes_abstain async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: - num_proposals = self.proposal_contract.functions.getTotal().call() + proposal_contract = await self._get_proposal_contract() + num_proposals = await proposal_contract.functions.getTotal().call() proposal_states = await rp.multicall([ - self.proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) + proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) proposals = {state: [] for state in ProtocolDAO.ProposalState} @@ -225,23 +239,24 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: return proposals async def fetch_proposal(self, proposal_id: int) -> Proposal: + proposal_contract = await self._get_proposal_contract() (proposer, message, payload, created, start, phase1_end, phase2_end, expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, vp_required_raw, 
veto_quorum_raw) = await rp.multicall([ - self.proposal_contract.functions.getProposer(proposal_id), - self.proposal_contract.functions.getMessage(proposal_id), - self.proposal_contract.functions.getPayload(proposal_id), - self.proposal_contract.functions.getCreated(proposal_id), - self.proposal_contract.functions.getStart(proposal_id), - self.proposal_contract.functions.getPhase1End(proposal_id), - self.proposal_contract.functions.getPhase2End(proposal_id), - self.proposal_contract.functions.getExpires(proposal_id), - self.proposal_contract.functions.getVotingPowerFor(proposal_id), - self.proposal_contract.functions.getVotingPowerAgainst(proposal_id), - self.proposal_contract.functions.getVotingPowerVeto(proposal_id), - self.proposal_contract.functions.getVotingPowerAbstained(proposal_id), - self.proposal_contract.functions.getVotingPowerRequired(proposal_id), - self.proposal_contract.functions.getVetoQuorum(proposal_id) + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getPhase1End(proposal_id), + proposal_contract.functions.getPhase2End(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotingPowerFor(proposal_id), + proposal_contract.functions.getVotingPowerAgainst(proposal_id), + proposal_contract.functions.getVotingPowerVeto(proposal_id), + proposal_contract.functions.getVotingPowerAbstained(proposal_id), + proposal_contract.functions.getVotingPowerRequired(proposal_id), + proposal_contract.functions.getVetoQuorum(proposal_id) ]) return ProtocolDAO.Proposal( id=proposal_id, @@ -277,7 +292,7 @@ def _build_vote_graph(self, proposal: Proposal) -> str: lines = graph.get_string().split("\n")[:-1] lines.append(f"Quorum: {main_quorum_perc:.2%}{' ✔' if 
(main_quorum_perc >= 1) else ''}") - + if proposal.votes_veto > 0: graph = tpl.figure() graph.barh( @@ -288,11 +303,11 @@ def _build_vote_graph(self, proposal: Proposal) -> str: [f"{'Veto' : <{len('Against')}}", ""], max_width=12 ) - veto_graph_bars = graph.get_string().split("\n") + veto_graph_bars = graph.get_string().split("\n") veto_quorum_perc = proposal.votes_veto / proposal.veto_quorum - + lines.append("") lines.append(f"{veto_graph_bars[0] : <{len(veto_graph_bars[1])}}▏") lines.append(f"Quorum: {veto_quorum_perc:.2%}{' ✔' if (veto_quorum_perc >= 1) else ''}") - + return "\n".join(lines) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index d90a1da3..e06b2532 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -51,7 +51,7 @@ async def resolve_ens(ctx, node_address): # if it looks like an ens, attempt to resolve it if "." in node_address: try: - address = ens.resolve_name(node_address) + address = await ens.resolve_name(node_address) if not address: await ctx.send("ENS name not found") return None, None @@ -69,7 +69,7 @@ async def resolve_ens(ctx, node_address): return None, None try: - display_name = ens.get_name(node_address) or address + display_name = await ens.get_name(node_address) or address return display_name, address except InvalidName: await ctx.send("Invalid address") @@ -95,7 +95,7 @@ def _get_delegates() -> dict[str, str]: return _pdao_delegates -def el_explorer_url( +async def el_explorer_url( target: str, name: str = "", prefix: str | Literal[-1] = "", @@ -106,34 +106,34 @@ def el_explorer_url( # sanitize address target = w3.to_checksum_address(target) url = f"{cfg['execution_layer.explorer']}/address/{target}" - + chain = cfg["rocketpool.chain"] dashboard_network = "" if (chain == "mainnet") else f"?network={chain}" - - if rp.is_node(target): - megapool_address = rp.call("rocketNodeManager.getMegapoolAddress", target) + + if await rp.is_node(target): + megapool_address = await 
rp.call("rocketNodeManager.getMegapoolAddress", target) if megapool_address != "0x0000000000000000000000000000000000000000": url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" - - if rp.is_megapool(target): + + if await rp.is_megapool(target): url = f"https://saturn-1.net/megapool/{target}{dashboard_network}" - - if rp.is_minipool(target): + + if await rp.is_minipool(target): pass # TODO add explorer url once supported - + n_key = f"addresses.{target}" if not name and (n := _(n_key)) != n_key: name = n - if prefix != -1 and rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): + if prefix != -1 and await rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): prefix += ":cup_with_straw:" - if not name and (member_id := rp.call("rocketDAONodeTrusted.getMemberID", target, block=block)): + if not name and (member_id := await rp.call("rocketDAONodeTrusted.getMemberID", target, block=block)): if prefix != -1: prefix += "🔮" name = member_id - if not name and (member_id := rp.call("rocketDAOSecurity.getMemberID", target, block=block)): + if not name and (member_id := await rp.call("rocketDAOSecurity.getMemberID", target, block=block)): if prefix != -1: prefix += "🔒" name = member_id @@ -153,9 +153,9 @@ def el_explorer_url( name = a.name if not name: # not an odao member, try to get their ens - name = ens.get_name(target) + name = await ens.get_name(target) - if code := w3.eth.get_code(target): + if code := await w3.eth.get_code(target): if prefix != -1: prefix += "📄" if ( @@ -167,13 +167,13 @@ def el_explorer_url( if not name: with contextlib.suppress(Exception): c = w3.eth.contract(address=target, abi=[{"inputs" : [], - "name" : "name", - "outputs" : [{"internalType": "string", - "name" : "", - "type" : "string"}], - "stateMutability": "view", - "type" : "function"}]) - n = c.functions.name().call() + "name" : "name", + "outputs" : [{"internalType": "string", + "name" : "", + "type" : 
"string"}], + "stateMutability": "view", + "type" : "function"}]) + n = await c.functions.name().call() # make sure nobody is trying to inject a custom link, as there was a guy that made the name of his contract # 'RocketSwapRouter](https://etherscan.io/search?q=0x16d5a408e807db8ef7c578279beeee6b228f1c1c)[', # in an attempt to get people to click on his contract @@ -231,10 +231,10 @@ async def prepare_args(args): elif arg_key == "cow_uid": args[arg_key] = f"[ORDER](https://explorer.cow.fi/orders/{arg_value})" else: - args[arg_key] = el_explorer_url(arg_value, prefix=prefix) - args[f"{arg_key}_clean"] = el_explorer_url(arg_value) + args[arg_key] = await el_explorer_url(arg_value, prefix=prefix) + args[f"{arg_key}_clean"] = await el_explorer_url(arg_value) if len(arg_value) == 66: - args[f'{arg_key}_small'] = el_explorer_url(arg_value, name="[tnx]") + args[f'{arg_key}_small'] = await el_explorer_url(arg_value, name="[tnx]") if "from" in args: args["fancy_from"] = args["from"] if "caller" in args and args["from"] != args["caller"]: @@ -242,7 +242,7 @@ async def prepare_args(args): return args -def assemble(args) -> Embed: +async def assemble(args) -> Embed: e = Embed() if args.event_name in ["service_interrupted", "finality_delay_event"]: e.colour = Color.from_rgb(235, 86, 86) @@ -287,13 +287,13 @@ def assemble(args) -> Embed: case "eth_deposit_event": use_large = (amount >= 32) case "rpl_stake_event": - use_large = (amount >= ((3 * 2.4) / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")))) + use_large = (amount >= ((3 * 2.4) / solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")))) case "rpl_migration_event": use_large = (amount >= 1000) case "cs_deposit_eth_event" | "cs_withdraw_eth_event": use_large = (args["assets"] >= 100) case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": - use_large = (args["assets"] >= 16 / solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice"))) + use_large = (args["assets"] >= 16 / solidity.to_float(await 
rp.call("rocketNetworkPrices.getRPLPrice"))) case "rocksolid_deposit_event": use_large = args["assets"] >= 50 case "rocksolid_withdrawal_event": @@ -498,7 +498,7 @@ def assemble(args) -> Embed: times = [value for key, value in args.items() if "time" in key.lower()] if block := args.get("blockNumber"): - times += [block_to_ts(block)] + times += [await block_to_ts(block)] time = times[0] if times else int(datetime.datetime.now().timestamp()) e.add_field(name="Timestamp", diff --git a/rocketwatch/utils/etherscan.py b/rocketwatch/utils/etherscan.py index 0152030d..f47565e7 100644 --- a/rocketwatch/utils/etherscan.py +++ b/rocketwatch/utils/etherscan.py @@ -12,7 +12,7 @@ async def get_recent_account_transactions(address, block_count=44800): ETHERSCAN_URL = "https://api.etherscan.io/api" - highest_block = w3.eth.get_block("latest")["number"] + highest_block = (await w3.eth.get_block("latest"))["number"] page = 1 lowest_block = highest_block - block_count diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 2d2b8fa1..afdfa1c0 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -33,20 +33,27 @@ def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot self.rate_limit = rate_limit self.lookback_distance: int = cfg["events.lookback_distance"] - self.last_served_block = w3.eth.get_block(cfg["events.genesis"]).number - 1 - self._pending_block = self.last_served_block + self.last_served_block: Optional[int] = None + self._pending_block: Optional[int] = None self._last_run = datetime.now() - rate_limit + async def _ensure_genesis_block(self): + if self.last_served_block is None: + block = await w3.eth.get_block(cfg["events.genesis"]) + self.last_served_block = block.number - 1 + self._pending_block = self.last_served_block + def start_tracking(self, block: BlockNumber) -> None: self.last_served_block = block - 1 async def get_new_events(self) -> list[Event]: + await self._ensure_genesis_block() now = 
datetime.now() if (now - self._last_run) < self.rate_limit: return [] self._last_run = now - self._pending_block = w3.eth.get_block_number() + self._pending_block = await w3.eth.get_block_number() events = await self._get_new_events() self.last_served_block = self._pending_block return events diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 2ab474e8..249d18f7 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -13,6 +13,7 @@ from utils.cfg import cfg from utils.retry import retry_async from utils.rocketpool import rp +from utils.shared_w3 import w3 log = logging.getLogger("liquidity") log.setLevel(cfg["log_level"]) @@ -583,11 +584,20 @@ def _get_asks(self, api_response: dict) -> dict[float, float]: class ERC20Token: - def __init__(self, address: ChecksumAddress): + def __init__(self, address: ChecksumAddress, symbol: str, decimals: int): self.address = address - contract = rp.assemble_contract("ERC20", address, mainnet=True) - self.symbol: str = contract.functions.symbol().call() - self.decimals: int = contract.functions.decimals().call() + self.symbol = symbol + self.decimals = decimals + + @classmethod + async def create(cls, address: ChecksumAddress) -> 'ERC20Token': + address = w3.to_checksum_address(address) + contract = await rp.assemble_contract("ERC20", address, mainnet=True) + symbol, decimals = await rp.multicall([ + contract.functions.symbol(), + contract.functions.decimals() + ]) + return cls(address, symbol, decimals) def __str__(self) -> str: return self.symbol @@ -599,11 +609,11 @@ def __repr__(self) -> str: class DEX(Exchange, ABC): class LiquidityPool(ABC): @abstractmethod - def get_price(self) -> float: + async def get_price(self) -> float: pass @abstractmethod - def get_normalized_price(self) -> float: + async def get_normalized_price(self) -> float: pass @abstractmethod @@ -623,22 +633,29 @@ async def get_liquidity(self) -> dict[LiquidityPool, Liquidity]: class BalancerV2(DEX): 
class WeightedPool(DEX.LiquidityPool): - def __init__(self, pool_id: HexStr): + def __init__(self, pool_id: HexStr, vault, token_0: ERC20Token, token_1: ERC20Token): self.id = pool_id - self.vault = rp.get_contract_by_name("BalancerVault", mainnet=True) - tokens = self.vault.functions.getPoolTokens(self.id).call()[0] - self.token_0 = ERC20Token(tokens[0]) - self.token_1 = ERC20Token(tokens[1]) - - def get_price(self) -> float: - balances = self.vault.functions.getPoolTokens(self.id).call()[1] + self.vault = vault + self.token_0 = token_0 + self.token_1 = token_1 + + @classmethod + async def create(cls, pool_id: HexStr) -> 'BalancerV2.WeightedPool': + vault = await rp.get_contract_by_name("BalancerVault", mainnet=True) + tokens = (await vault.functions.getPoolTokens(pool_id).call())[0] + token_0 = await ERC20Token.create(tokens[0]) + token_1 = await ERC20Token.create(tokens[1]) + return cls(pool_id, vault, token_0, token_1) + + async def get_price(self) -> float: + balances = (await self.vault.functions.getPoolTokens(self.id).call())[1] return balances[1] / balances[0] if (balances[0] > 0) else 0 - def get_normalized_price(self) -> float: - return self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) + async def get_normalized_price(self) -> float: + return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) async def get_liquidity(self) -> Optional[Liquidity]: - balance_0, balance_1 = self.vault.functions.getPoolTokens(self.id).call()[1] + balance_0, balance_1 = (await self.vault.functions.getPoolTokens(self.id).call())[1] if (balance_0 == 0) or (balance_1 == 0): log.warning("Empty token balances") return None @@ -680,11 +697,24 @@ def price_to_tick(price: float) -> float: return math.log(price, 1.0001) class Pool(DEX.LiquidityPool): - def __init__(self, pool_address: ChecksumAddress): - self.contract = rp.assemble_contract("UniswapV3Pool", pool_address, mainnet=True) - self.tick_spacing: int = 
self.contract.functions.tickSpacing().call() - self.token_0 = ERC20Token(self.contract.functions.token0().call()) - self.token_1 = ERC20Token(self.contract.functions.token1().call()) + def __init__(self, pool_address: ChecksumAddress, contract, tick_spacing: int, token_0: ERC20Token, token_1: ERC20Token): + self.pool_address = pool_address + self.contract = contract + self.tick_spacing = tick_spacing + self.token_0 = token_0 + self.token_1 = token_1 + + @classmethod + async def create(cls, pool_address: ChecksumAddress) -> 'UniswapV3.Pool': + contract = await rp.assemble_contract("UniswapV3Pool", pool_address, mainnet=True) + tick_spacing, token_0_addr, token_1_addr = await rp.multicall([ + contract.functions.tickSpacing(), + contract.functions.token0(), + contract.functions.token1() + ]) + token_0 = await ERC20Token.create(token_0_addr) + token_1 = await ERC20Token.create(token_1_addr) + return cls(pool_address, contract, tick_spacing, token_0, token_1) def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: compressed = int(tick // self.tick_spacing) @@ -731,16 +761,16 @@ def liquidity_to_tokens(self, liquidity: int, tick_lower: int, tick_upper: int) return balance_0, balance_1 - def get_price(self) -> float: - sqrt96x = self.contract.functions.slot0().call()[0] + async def get_price(self) -> float: + sqrt96x = (await self.contract.functions.slot0().call())[0] return (sqrt96x ** 2) / (2 ** 192) - def get_normalized_price(self) -> float: - return self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) + async def get_normalized_price(self) -> float: + return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) async def get_liquidity(self) -> Optional[Liquidity]: - price = self.get_price() - initial_liquidity = self.contract.functions.liquidity().call() + price = await self.get_price() + initial_liquidity = await self.contract.functions.liquidity().call() calculated_tick = UniswapV3.price_to_tick(price) current_tick 
= int(calculated_tick) @@ -810,8 +840,13 @@ def depth_at(_price: float) -> float: return Liquidity(balance_norm / price, depth_at) - def __init__(self, pools: list[ChecksumAddress]): - super().__init__([UniswapV3.Pool(pool) for pool in pools]) + def __init__(self, pools: list[Pool]): + super().__init__(pools) + + @classmethod + async def create(cls, pool_addresses: list[ChecksumAddress]) -> 'UniswapV3': + pools = [await UniswapV3.Pool.create(addr) for addr in pool_addresses] + return cls(pools) def __str__(self) -> str: return "Uniswap" diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index c917466c..d190c19e 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -6,14 +6,13 @@ from bidict import bidict from eth_typing import BlockIdentifier, ChecksumAddress -from cachetools import cached, FIFOCache -from cachetools.func import ttl_cache +from cachetools import FIFOCache from web3.exceptions import ContractLogicError from utils import solidity from utils.cfg import cfg from utils.readable import decode_abi -from utils.shared_w3 import w3, w3_async, mainnet_w3, historical_w3 +from utils.shared_w3 import w3, w3_mainnet, w3_archive log = logging.getLogger("rocketpool") log.setLevel(cfg["log_level"]) @@ -30,36 +29,32 @@ class RocketPool: def __init__(self): self.addresses = bidict() - self.flush() + self._multicall = None - def flush(self): + async def async_init(self): + await self._init_contract_addresses() + + async def flush(self): log.warning("FLUSHING RP CACHE") self.CONTRACT_CACHE.clear() self.ABI_CACHE.clear() self.ADDRESS_CACHE.clear() self.addresses.clear() - self._init_contract_addresses() - + await self._init_contract_addresses() - def _init_contract_addresses(self) -> None: + async def _init_contract_addresses(self) -> None: manual_addresses = cfg["rocketpool.manual_addresses"] for name, address in manual_addresses.items(): self.addresses[name] = address - self._multicall = 
self.get_contract_by_name("multicall3") - self._multicall_async = w3_async.eth.contract( - address=self._multicall.address, - abi=self._multicall.abi - ) + self._multicall = await self.get_contract_by_name("multicall3") log.info("Indexing Rocket Pool contracts...") - # generate list of all file names with the .sol extension from the rocketpool submodule for path in Path("contracts/rocketpool/contracts/contract").rglob('*.sol'): - # append to list but ensure that the first character is lowercase file_name = path.stem contract = file_name[0].lower() + file_name[1:] try: - self.get_address_by_name(contract) + await self.get_address_by_name(contract) except Exception: log.warning(f"Skipping {contract} in function list generation") continue @@ -67,12 +62,12 @@ def _init_contract_addresses(self) -> None: try: cs_dir, cs_prefix = "ConstellationDirectory", "Constellation" self.addresses |= { - f"{cs_prefix}.SuperNodeAccount": self.call(f"{cs_dir}.getSuperNodeAddress"), - f"{cs_prefix}.OperatorDistributor": self.call(f"{cs_dir}.getOperatorDistributorAddress"), - f"{cs_prefix}.Whitelist": self.call(f"{cs_dir}.getWhitelistAddress"), - f"{cs_prefix}.ETHVault": self.call(f"{cs_dir}.getWETHVaultAddress"), - f"{cs_prefix}.RPLVault": self.call(f"{cs_dir}.getRPLVaultAddress"), - "WETH": self.call(f"{cs_dir}.getWETHAddress") + f"{cs_prefix}.SuperNodeAccount": await self.call(f"{cs_dir}.getSuperNodeAddress"), + f"{cs_prefix}.OperatorDistributor": await self.call(f"{cs_dir}.getOperatorDistributorAddress"), + f"{cs_prefix}.Whitelist": await self.call(f"{cs_dir}.getWhitelistAddress"), + f"{cs_prefix}.ETHVault": await self.call(f"{cs_dir}.getWETHVaultAddress"), + f"{cs_prefix}.RPLVault": await self.call(f"{cs_dir}.getRPLVaultAddress"), + "WETH": await self.call(f"{cs_dir}.getWETHAddress") } except NoAddressFound: log.warning("Failed to find address for Constellation contracts") @@ -115,23 +110,27 @@ async def multicall(self, calls, require_success=True) -> list: """Multicall accepting 
ContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] - results = await self._multicall_async.functions.aggregate3(encoded).call() + results = await self._multicall.functions.aggregate3(encoded).call() return [ RocketPool._decode_fn_output(fns[i], data) if success else None for i, (success, data) in enumerate(results) ] - @cached(cache=ADDRESS_CACHE) - def get_address_by_name(self, name): - # manual overwrite at init + async def get_address_by_name(self, name): + if name in self.ADDRESS_CACHE: + return self.ADDRESS_CACHE[name] if name in self.addresses: + self.ADDRESS_CACHE[name] = self.addresses[name] return self.addresses[name] - return self.uncached_get_address_by_name(name) + address = await self.uncached_get_address_by_name(name) + self.ADDRESS_CACHE[name] = address + return address - def uncached_get_address_by_name(self, name, block="latest"): + async def uncached_get_address_by_name(self, name, block="latest"): log.debug(f"Retrieving address for {name} Contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.address", name]) - address = self.get_contract_by_name("rocketStorage", historical=block != "latest").functions.getAddress(sha3).call(block_identifier=block) + storage = await self.get_contract_by_name("rocketStorage", historical=block != "latest") + address = await storage.functions.getAddress(sha3).call(block_identifier=block) if not w3.to_int(hexstr=address): raise NoAddressFound(f"No address found for {name} Contract") self.addresses[name] = address @@ -139,9 +138,9 @@ def uncached_get_address_by_name(self, name, block="latest"): return address @staticmethod - def get_revert_reason(tnx): + async def get_revert_reason(tnx): try: - w3.eth.call( + await w3.eth.call( { "from" : tnx["from"], "to" : tnx["to"], @@ -165,32 +164,41 @@ def get_revert_reason(tnx): else: return None - def 
get_string(self, key: str) -> str: + async def get_string(self, key: str) -> str: sha3 = w3.solidity_keccak(["string"], [key]) - return self.get_contract_by_name("rocketStorage").functions.getString(sha3).call() + storage = await self.get_contract_by_name("rocketStorage") + return await storage.functions.getString(sha3).call() - def get_uint(self, key: str) -> int: + async def get_uint(self, key: str) -> int: sha3 = w3.solidity_keccak(["string"], [key]) - return self.get_contract_by_name("rocketStorage").functions.getUint(sha3).call() + storage = await self.get_contract_by_name("rocketStorage") + return await storage.functions.getUint(sha3).call() - def get_protocol_version(self) -> tuple: - version_string = self.get_string("protocol.version") + async def get_protocol_version(self) -> tuple: + version_string = await self.get_string("protocol.version") return tuple(map(int, version_string.split("."))) - @cached(cache=ABI_CACHE) - def get_abi_by_name(self, name): - return self.uncached_get_abi_by_name(name) + async def get_abi_by_name(self, name): + if name in self.ABI_CACHE: + return self.ABI_CACHE[name] + abi = await self.uncached_get_abi_by_name(name) + self.ABI_CACHE[name] = abi + return abi - def uncached_get_abi_by_name(self, name): + async def uncached_get_abi_by_name(self, name): log.debug(f"Retrieving abi for {name} Contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.abi", name]) - compressed_string = self.get_contract_by_name("rocketStorage").functions.getString(sha3).call() + storage = await self.get_contract_by_name("rocketStorage") + compressed_string = await storage.functions.getString(sha3).call() if not compressed_string: raise Exception(f"No abi found for {name} Contract") return decode_abi(compressed_string) - @cached(cache=CONTRACT_CACHE) - def assemble_contract(self, name, address=None, historical=False, mainnet=False): + async def assemble_contract(self, name, address=None, historical=False, mainnet=False): + cache_key = (name, 
address, historical, mainnet) + if cache_key in self.CONTRACT_CACHE: + return self.CONTRACT_CACHE[cache_key] + if name.startswith("Constellation."): short_name = name.removeprefix("Constellation.") abi_path = f"./contracts/constellation/{short_name}.abi.json" @@ -201,81 +209,87 @@ def assemble_contract(self, name, address=None, historical=False, mainnet=False) with open(abi_path, "r") as f: abi = f.read() else: - abi = self.get_abi_by_name(name) + abi = await self.get_abi_by_name(name) if mainnet: - return mainnet_w3.eth.contract(address=address, abi=abi) - if historical: - return historical_w3.eth.contract(address=address, abi=abi) - return w3.eth.contract(address=address, abi=abi) + contract = w3_mainnet.eth.contract(address=address, abi=abi) + elif historical: + contract = w3_archive.eth.contract(address=address, abi=abi) + else: + contract = w3.eth.contract(address=address, abi=abi) + + self.CONTRACT_CACHE[cache_key] = contract + return contract def get_name_by_address(self, address): return self.addresses.inverse.get(address, None) - def get_contract_by_name(self, name, historical=False, mainnet=False): - address = self.get_address_by_name(name) - return self.assemble_contract(name, address, historical=historical, mainnet=mainnet) + async def get_contract_by_name(self, name, historical=False, mainnet=False): + address = await self.get_address_by_name(name) + return await self.assemble_contract(name, address, historical=historical, mainnet=mainnet) - def get_contract_by_address(self, address): + async def get_contract_by_address(self, address): """ **WARNING**: only call after contract has been previously retrieved using its name """ name = self.get_name_by_address(address) - return self.assemble_contract(name, address) + return await self.assemble_contract(name, address) - def estimate_gas_for_call(self, path, *args, block="latest"): + async def estimate_gas_for_call(self, path, *args, block="latest"): log.debug(f"Estimating gas for {path} (block={block})") 
name, function = path.rsplit(".", 1) - contract = self.get_contract_by_name(name) - return contract.functions[function](*args).estimateGas({"gas": 2 ** 32}, - block_identifier=block) + contract = await self.get_contract_by_name(name) + return await contract.functions[function](*args).estimate_gas({"gas": 2 ** 32}, + block_identifier=block) - def get_function(self, path, *args, historical=False, address=None, mainnet=False): + async def get_function(self, path, *args, historical=False, address=None, mainnet=False): name, function = path.rsplit(".", 1) if not address: - address = self.get_address_by_name(name) - contract = self.assemble_contract(name, address, historical, mainnet) + address = await self.get_address_by_name(name) + contract = await self.assemble_contract(name, address, historical, mainnet) args = tuple(w3.to_checksum_address(a) if isinstance(a, str) and w3.is_address(a) else a for a in args) return contract.functions[function](*args) - def call(self, path, *args, block: BlockIdentifier = "latest", address=None, mainnet=False): + async def call(self, path, *args, block: BlockIdentifier = "latest", address=None, mainnet=False): log.debug(f"Calling {path} (block={block})") - return self.get_function(path, *args, historical=block != "latest", address=address, mainnet=mainnet).call(block_identifier=block) + fn = await self.get_function(path, *args, historical=block != "latest", address=address, mainnet=mainnet) + return await fn.call(block_identifier=block) - def get_annual_rpl_inflation(self): - inflation_per_interval = solidity.to_float(self.call("rocketTokenRPL.getInflationIntervalRate")) + async def get_annual_rpl_inflation(self): + inflation_per_interval = solidity.to_float(await self.call("rocketTokenRPL.getInflationIntervalRate")) if not inflation_per_interval: return 0 - seconds_per_interval = self.call("rocketTokenRPL.getInflationIntervalTime") + seconds_per_interval = await self.call("rocketTokenRPL.getInflationIntervalTime") intervals_per_year = 
solidity.years / seconds_per_interval return (inflation_per_interval ** intervals_per_year) - 1 - def get_percentage_rpl_swapped(self): - value = solidity.to_float(self.call("rocketTokenRPL.totalSwappedRPL")) + async def get_percentage_rpl_swapped(self): + value = solidity.to_float(await self.call("rocketTokenRPL.totalSwappedRPL")) percentage = (value / 18_000_000) * 100 return round(percentage, 2) - def is_node(self, address: ChecksumAddress) -> bool: - return self.call("rocketNodeManager.getNodeExists", address) + async def is_node(self, address: ChecksumAddress) -> bool: + return await self.call("rocketNodeManager.getNodeExists", address) - def is_minipool(self, address: ChecksumAddress) -> bool: - return self.call("rocketMinipoolManager.getMinipoolExists", address) + async def is_minipool(self, address: ChecksumAddress) -> bool: + return await self.call("rocketMinipoolManager.getMinipoolExists", address) - def is_megapool(self, address: ChecksumAddress) -> bool: + async def is_megapool(self, address: ChecksumAddress) -> bool: sha3 = w3.solidity_keccak(["string", "address"], ["megapool.exists", address]) - return self.get_contract_by_name("rocketStorage").functions.getBool(sha3).call() + storage = await self.get_contract_by_name("rocketStorage") + return await storage.functions.getBool(sha3).call() - @ttl_cache(ttl=60) - def get_eth_usdc_price(self) -> float: + async def get_eth_usdc_price(self) -> float: from utils.liquidity import UniswapV3 - pool_address = self.get_address_by_name("UniV3_USDC_ETH") - return 1 / UniswapV3.Pool(pool_address).get_normalized_price() + pool_address = await self.get_address_by_name("UniV3_USDC_ETH") + pool = await UniswapV3.Pool.create(pool_address) + return 1 / await pool.get_normalized_price() - @ttl_cache(ttl=60) - def get_reth_eth_price(self) -> float: + async def get_reth_eth_price(self) -> float: from utils.liquidity import UniswapV3 - pool_address = self.get_address_by_name("UniV3_rETH_ETH") - return 
UniswapV3.Pool(pool_address).get_normalized_price() + pool_address = await self.get_address_by_name("UniV3_rETH_ETH") + pool = await UniswapV3.Pool.create(pool_address) + return await pool.get_normalized_price() rp = RocketPool() diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index 10e51a55..d96ca75a 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -2,7 +2,7 @@ from utils import solidity from utils.cfg import cfg from utils.rocketpool import rp -from utils.shared_w3 import w3_async +from utils.shared_w3 import w3 price_cache = { "block" : 0, @@ -48,27 +48,30 @@ def get_sea_creature_for_holdings(holdings): async def get_holding_for_address(address): - if price_cache["block"] != (b := await w3_async.eth.get_block_number()): - price_cache["rpl_price"] = solidity.to_float(rp.call("rocketNetworkPrices.getRPLPrice")) - price_cache["reth_price"] = solidity.to_float(rp.call("rocketTokenRETH.getExchangeRate")) + if price_cache["block"] != (b := await w3.eth.get_block_number()): + price_cache["rpl_price"] = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + price_cache["reth_price"] = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) price_cache["block"] = b # get their eth balance - eth_balance = solidity.to_float(await w3_async.eth.get_balance(address)) + eth_balance = solidity.to_float(await w3.eth.get_balance(address)) # get ERC-20 token balance for this address with contextlib.suppress(Exception): + rpl_contract = await rp.get_contract_by_name("rocketTokenRPL") + rplfs_contract = await rp.get_contract_by_name("rocketTokenRPLFixedSupply") + reth_contract = await rp.get_contract_by_name("rocketTokenRETH") rpl_balance, rplfs_balance, reth_balance = await rp.multicall([ - rp.get_contract_by_name("rocketTokenRPL").functions.balanceOf(address), - rp.get_contract_by_name("rocketTokenRPLFixedSupply").functions.balanceOf(address), - 
rp.get_contract_by_name("rocketTokenRETH").functions.balanceOf(address), + rpl_contract.functions.balanceOf(address), + rplfs_contract.functions.balanceOf(address), + reth_contract.functions.balanceOf(address), ]) eth_balance += solidity.to_float(rpl_balance) * price_cache["rpl_price"] eth_balance += solidity.to_float(rplfs_balance) * price_cache["rpl_price"] eth_balance += solidity.to_float(reth_balance) * price_cache["reth_price"] # add eth they provided for minipools - eth_balance += solidity.to_float(rp.call("rocketNodeStaking.getNodeETHBonded", address)) + eth_balance += solidity.to_float(await rp.call("rocketNodeStaking.getNodeETHBonded", address)) # add their staked RPL - staked_rpl = solidity.to_float(rp.call("rocketNodeStaking.getNodeStakedRPL", address)) + staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getNodeStakedRPL", address)) eth_balance += staked_rpl * price_cache["rpl_price"] return eth_balance diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 4aced91a..1a504c7f 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -2,7 +2,7 @@ from typing import Dict, Any from web3.beacon import AsyncBeacon -from web3 import Web3, AsyncWeb3, HTTPProvider +from web3 import AsyncWeb3 from web3.providers import AsyncHTTPProvider from utils.cfg import cfg @@ -10,16 +10,15 @@ log = logging.getLogger("shared_w3") log.setLevel(cfg["log_level"]) -w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) -w3_async = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) -mainnet_w3 = w3 +w3 = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) +w3_mainnet = w3 if cfg['rocketpool.chain'] != "mainnet": - mainnet_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.mainnet'])) + w3_mainnet = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.mainnet'])) 
-historical_w3 = None +w3_archive = None if "archive" in cfg['execution_layer.endpoint'].keys(): - historical_w3 = Web3(HTTPProvider(cfg['execution_layer.endpoint.archive'])) + w3_archive = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.archive'])) class Bacon(AsyncBeacon): From 18a764e574a6a276d1ac10bafc5501b6ed3b4314 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 22:07:54 +0000 Subject: [PATCH 164/279] fully migrate to app_commands --- rocketwatch/plugins/8ball/8ball.py | 16 ++-- rocketwatch/plugins/about/about.py | 12 +-- rocketwatch/plugins/apr/apr.py | 24 +++--- .../plugins/chat_summary/chat_summary.py | 29 +++---- rocketwatch/plugins/collateral/collateral.py | 30 +++---- .../plugins/commissions/commissions.py | 12 +-- rocketwatch/plugins/governance/governance.py | 11 +-- rocketwatch/plugins/lottery/lottery.py | 11 +-- rocketwatch/plugins/metrics/metrics.py | 82 +++---------------- .../minipool_distribution.py | 28 +++---- .../pinned_messages/pinned_messages.py | 31 +++---- rocketwatch/plugins/random/random.py | 60 +++++++------- rocketwatch/plugins/releases/releases.py | 13 +-- rocketwatch/plugins/rewards/rewards.py | 40 ++++----- rocketwatch/plugins/wall/wall.py | 19 ++--- rocketwatch/rocketwatch.py | 47 +++++------ rocketwatch/utils/command_tree.py | 75 +++++++++++++++++ rocketwatch/utils/embeds.py | 10 +-- rocketwatch/utils/visibility.py | 5 +- 19 files changed, 284 insertions(+), 271 deletions(-) create mode 100644 rocketwatch/utils/command_tree.py diff --git a/rocketwatch/plugins/8ball/8ball.py b/rocketwatch/plugins/8ball/8ball.py index 2973d275..847e267c 100644 --- a/rocketwatch/plugins/8ball/8ball.py +++ b/rocketwatch/plugins/8ball/8ball.py @@ -3,8 +3,8 @@ import random as pyrandom from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from rocketwatch 
import RocketWatch from utils.embeds import Embed @@ -15,14 +15,14 @@ class EightBall(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command(name="8ball") - async def eight_ball(self, ctx: Context, question: str): + @command(name="8ball") + async def eight_ball(self, interaction: Interaction, question: str): e = Embed(title="🎱 Magic 8 Ball") if not question.endswith("?"): e.description = "You must ask a yes or no question to the magic 8 ball (hint: add a `?` at the end of your question)" - await ctx.send(embed=e, ephemeral=True) + await interaction.response.send_message(embed=e, ephemeral=True) return - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) await asyncio.sleep(random.randint(2,5)) res = pyrandom.choice([ "As I see it, yes", @@ -46,8 +46,8 @@ async def eight_ball(self, ctx: Context, question: str): "No", "Absolutely not" ]) - e.description = f"> \"{question}\"\n - `{ctx.author.display_name}`\n\nThe Magic 8 Ball says: `{res}`" - await ctx.send(embed=e) + e.description = f"> \"{question}\"\n - `{interaction.user.display_name}`\n\nThe Magic 8 Ball says: `{res}`" + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 83b0ba72..96b23bd5 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -8,8 +8,8 @@ import requests import uptime from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from rocketwatch import RocketWatch from utils import readable @@ -29,10 +29,10 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.process = psutil.Process(os.getpid()) - @hybrid_command() - async def about(self, ctx: Context): + @command() + async def about(self, interaction: Interaction): 
"""Bot and Server Information""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed() g = self.bot.guilds code_time = None @@ -95,7 +95,7 @@ async def about(self, ctx: Context): except Exception as err: await self.bot.report_error(err) - await ctx.send(embed=e) + await interaction.followup.send(embed=e) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index fab378e8..c1193d37 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -6,8 +6,8 @@ import matplotlib.pyplot as plt from discord import File from discord.ext import commands, tasks -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from matplotlib.dates import DateFormatter from rocketwatch import RocketWatch @@ -82,10 +82,10 @@ async def before_loop(self): async def on_error(self, err: Exception): await self.bot.report_error(err) - @hybrid_command() - async def reth_apr(self, ctx: Context): + @command() + async def reth_apr(self, interaction: Interaction): """Show the current rETH APR""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed() e.title = "Current rETH APR" e.description = "For some comparisons against other LST: [dune dashboard](https://dune.com/rp_community/lst-comparison)" @@ -94,7 +94,7 @@ async def reth_apr(self, ctx: Context): datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) if len(datapoints) == 0: e.description = "No data available yet." 
- return await ctx.send(embed=e) + return await interaction.followup.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare tmp = await (await self.bot.db.minipools.aggregate([ @@ -250,12 +250,12 @@ async def reth_apr(self, ctx: Context): e.add_field(name="Effectiveness", value=f"{y_effectiveness[-1]:.2%}", inline=False) - await ctx.send(embed=e, file=File(img, "reth_apr.png")) + await interaction.followup.send(embed=e, file=File(img, "reth_apr.png")) - @hybrid_command() - async def node_apr(self, ctx: Context): + @command() + async def node_apr(self, interaction: Interaction): """Show the current node operator APR""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed() e.title = "Current NO APR" e.description = "Dashed red lines above and bellow the solid red one are leb8 and leb16 respectively. " \ @@ -265,7 +265,7 @@ async def node_apr(self, ctx: Context): datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) if len(datapoints) == 0: e.description = "No data available yet." 
- return await ctx.send(embed=e) + return await interaction.followup.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare tmp = await (await self.bot.db.minipools.aggregate([ @@ -422,7 +422,7 @@ async def node_apr(self, ctx: Context): e.set_image(url="attachment://no_apr.png") - await ctx.send(embed=e, file=File(img, "no_apr.png")) + await interaction.followup.send(embed=e, file=File(img, "no_apr.png")) async def setup(bot): await bot.add_cog(APR(bot)) diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index b041bcd8..332eee75 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -9,8 +9,9 @@ from discord import File, DeletedReferencedMessage from discord.channel import TextChannel from discord.ext import commands -from discord.ext.commands import Context, is_owner -from discord.ext.commands import hybrid_command +from discord.ext.commands import is_owner +from discord.app_commands import command +from discord import Interaction from rocketwatch import RocketWatch from utils.cfg import cfg @@ -53,19 +54,19 @@ def message_to_text(cls, message, index): text = re.sub(r":[0-9]+>", ":>", text) return text - @hybrid_command() + @command() @is_owner() - async def summarize_chat(self, ctx: Context): - await ctx.defer(ephemeral=True) - last_ts = await self.bot.db["last_summary"].find_one({"channel_id": ctx.channel.id}) + async def summarize_chat(self, interaction: Interaction): + await interaction.response.defer(ephemeral=True) + last_ts = await self.bot.db["last_summary"].find_one({"channel_id": interaction.channel.id}) # ratelimit if last_ts and (datetime.now(timezone.utc) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): - await ctx.send("You can only summarize once every 6 hours.", ephemeral=True) + await interaction.followup.send("You can only summarize once every 6 hours.", ephemeral=True) 
return - if ctx.channel.id not in [405163713063288832]: - await ctx.send("You can't summarize here.", ephemeral=True) + if interaction.channel.id not in [405163713063288832]: + await interaction.followup.send("You can't summarize here.", ephemeral=True) return - msg = await ctx.channel.send("Summarizing chat…") + msg = await interaction.channel.send("Summarizing chat…") last_ts = last_ts["timestamp"].replace(tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(timezone.utc) - timedelta(days=365) prompt = ( "Task Description:\n" @@ -87,10 +88,10 @@ async def summarize_chat(self, ctx: Context): "----------------\n\n" "Please begin the task now." ) - response, prompt, msgs = await self.prompt_model(ctx.channel, prompt, last_ts) + response, prompt, msgs = await self.prompt_model(interaction.channel, prompt, last_ts) if not response: await msg.delete() - await ctx.send(content="Not enough messages to summarize.") + await interaction.followup.send(content="Not enough messages to summarize.") return es = [Embed()] es[0].title = f"Chat Summarization of {msgs} messages since {last_ts.strftime('%Y-%m-%d %H:%M')}" @@ -120,10 +121,10 @@ async def summarize_chat(self, ctx: Context): f.name = "prompt._log" f = File(f, filename=f"prompt_log_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}._log") # send message in the channel - await ctx.send("done", ephemeral=True) + await interaction.followup.send("done", ephemeral=True) await msg.edit(embeds=es, attachments=[f]) # save the timestamp of the last summary - await self.bot.db["last_summary"].update_one({"channel_id": ctx.channel.id}, {"$set": {"timestamp": datetime.now(timezone.utc)}}, upsert=True) + await self.bot.db["last_summary"].update_one({"channel_id": interaction.channel.id}, {"$set": {"timestamp": datetime.now(timezone.utc)}}, upsert=True) # a function that generates the prompt for the model by taking an array of messages, a prefix and a suffix def generate_prompt(self, messages, prefix, suffix): diff 
--git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 2835b3cd..983cfefe 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -7,9 +7,9 @@ import matplotlib.pyplot as plt import numpy as np from discord import File -from discord.app_commands import describe +from discord import Interaction +from discord.app_commands import command, describe from discord.ext import commands -from discord.ext.commands import Context, hybrid_command from discord.utils import as_chunks from matplotlib.ticker import FuncFormatter from eth_typing import ChecksumAddress @@ -32,7 +32,7 @@ def get_percentiles(percentiles, counts): yield p, np.percentile(counts, p, method='nearest') -async def collateral_distribution_raw(ctx: Context, distribution): +async def collateral_distribution_raw(interaction: Interaction, distribution): e = Embed() e.title = "Collateral Distribution" description = "```\n" @@ -41,7 +41,7 @@ async def collateral_distribution_raw(ctx: Context, distribution): f"{nodes:>4} {p.plural('node', nodes)}\n" description += "```" e.description = description - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, int]]: @@ -110,22 +110,22 @@ class Collateral(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command() + @command() @describe(node_address="Node Address or ENS to highlight", bonded="Calculate collateral as a percent of bonded eth instead of borrowed") async def node_tvl_vs_collateral(self, - ctx: Context, + interaction: Interaction, node_address: str = None, bonded: bool = False): """ Show a scatter plot of collateral ratios for given node TVLs """ - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) display_name = None address = None if node_address is not None: - 
display_name, address = await resolve_ens(ctx, node_address) + display_name, address = await resolve_ens(interaction, node_address) if display_name is None: return @@ -202,7 +202,7 @@ def node_minipools(node): ax2.plot(node_tvl(target_node), node_collateral(target_node), 'ro') e.description = f"Showing location of {display_name}" except KeyError: - await ctx.send(f"{display_name} not found in data set - it must have at least one minipool") + await interaction.followup.send(f"{display_name} not found in data set - it must have at least one minipool") return # Add horizontal lines showing the 10-15% range made optimal by RPIP-30 @@ -220,21 +220,21 @@ def node_minipools(node): e.title = "Node TVL vs Collateral Scatter Plot" e.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") - await ctx.send(embed=e, files=[f]) + await interaction.followup.send(embed=e, files=[f]) img.close() - @hybrid_command() + @command() @describe(raw="Show Raw Distribution Data", bonded="Calculate collateral as percent of bonded eth instead of borrowed") async def collateral_distribution(self, - ctx: Context, + interaction: Interaction, raw: bool = False, collateral_cap: int = 15, bonded: bool = False): """ Show the distribution of collateral across nodes. 
""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) data = await get_average_collateral_percentage_per_node(collateral_cap, bonded) distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] @@ -242,7 +242,7 @@ async def collateral_distribution(self, # If the raw data were requested, print them and exit early if raw: - await collateral_distribution_raw(ctx, distribution[::-1]) + await collateral_distribution_raw(interaction, distribution[::-1]) return e = Embed() @@ -287,7 +287,7 @@ async def collateral_distribution(self, get_percentiles([50, 75, 90, 99], counts)] e.description = f"Total Effective Staked RPL: {sum(bars.values()):,}" e.set_footer(text="\n".join(percentile_strings)) - await ctx.send(embed=e, files=[f]) + await interaction.followup.send(embed=e, files=[f]) img.close() diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index cec67ed2..44572db4 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -5,8 +5,8 @@ import seaborn as sns from discord import File from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from matplotlib import pyplot as plt from rocketwatch import RocketWatch @@ -22,12 +22,12 @@ class Commissions(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command() - async def commission_history(self, ctx: Context): + @command() + async def commission_history(self, interaction: Interaction): """ Show the history of commissions. 
""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed(title='Commission History') @@ -72,7 +72,7 @@ async def commission_history(self, ctx: Context): e.add_field(name="Bar Width", value=f"{step_size} minipools") # send data - await ctx.send(content="", embed=e, files=[File(img, filename="chart.png")]) + await interaction.followup.send(content="", embed=e, files=[File(img, filename="chart.png")]) img.close() diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 5c0d2064..b2acdf42 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -1,7 +1,8 @@ import logging from datetime import datetime, timedelta -from discord.ext.commands import Context, hybrid_command +from discord import Interaction +from discord.app_commands import command from discord.utils import escape_markdown from eth_typing import HexStr from web3.constants import HASH_ZERO @@ -153,12 +154,12 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: return embed - @hybrid_command() - async def governance_digest(self, ctx: Context) -> None: + @command() + async def governance_digest(self, interaction: Interaction) -> None: """Get a summary of recent activity in protocol governance""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) embed = await self.get_digest() - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) async def get_status(self) -> Embed: embed = await self.get_digest() diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index cf024e85..5a6577c1 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -1,7 +1,8 @@ import logging from discord.ext import commands -from discord.ext.commands import 
hybrid_command, Context +from discord import Interaction +from discord.app_commands import command from pymongo import InsertOne from rocketwatch import RocketWatch @@ -129,17 +130,17 @@ async def generate_sync_committee_description(self, period): node_operators]) return description - @hybrid_command() - async def lottery(self, ctx: Context): + @command() + async def lottery(self, interaction: Interaction): """ Get the status of the current and next sync committee. """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embeds = [ Embed(title="Current sync committee:", description=await self.generate_sync_committee_description("latest")), Embed(title="Next sync committee:", description=await self.generate_sync_committee_description("next")) ] - await ctx.send(embeds=embeds) + await interaction.followup.send(embeds=embeds) async def setup(bot): diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 73486ac7..a468e351 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -1,14 +1,12 @@ import logging -import math from datetime import datetime, timedelta from io import BytesIO from bson import SON -from cachetools import TTLCache from discord import File from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from matplotlib import pyplot as plt from rocketwatch import RocketWatch @@ -25,12 +23,12 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.collection = self.bot.db.command_metrics - @hybrid_command() - async def metrics(self, ctx: Context): + @command() + async def metrics(self, interaction: Interaction): """ Show various metrics about the bot. 
""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) try: e = Embed(title="Metrics from the last 7 days") desc = "```\n" @@ -82,14 +80,14 @@ async def metrics(self, ctx: Context): for channel in top_channels: desc += f" - {channel['_id']['name']}: {channel['count']}\n" e.description = desc + "```" - await ctx.send(embed=e) + await interaction.followup.send(embed=e) except Exception as e: log.error(f"Failed to get command metrics: {e}") await self.bot.report_error(e) - @hybrid_command() - async def metrics_chart(self, ctx): - await ctx.defer(ephemeral=is_hidden(ctx)) + @command() + async def metrics_chart(self, interaction: Interaction): + await interaction.response.defer(ephemeral=is_hidden(interaction)) # generate mathplotlib chart that shows monthly command usage and monthly event emission, in separate subplots command_usage = await (await self.collection.aggregate([ @@ -148,68 +146,8 @@ async def metrics_chart(self, ctx): e = Embed(title="Command Usage and Event ") e.set_image(url="attachment://metrics.png") - await ctx.send(embed=e, file=File(file, filename="metrics.png")) + await interaction.followup.send(embed=e, file=File(file, filename="metrics.png")) - @commands.Cog.listener() - async def on_command(self, ctx): - log.info(f"/{ctx.command.name} triggered by {ctx.author} in #{ctx.channel.name} ({ctx.guild})") - try: - await self.collection.insert_one({ - '_id' : ctx.interaction.id, - 'command' : ctx.command.name, - 'options' : ctx.interaction.data.get("options", []), - 'user' : { - 'id' : ctx.author.id, - 'name': ctx.author.name, - }, - 'guild' : { - 'id' : ctx.guild.id, - 'name': ctx.guild.name, - }, - 'channel' : { - 'id' : ctx.channel.id, - 'name': ctx.channel.name, - }, - "timestamp": datetime.utcnow(), - 'status' : 'pending' - }) - except Exception as e: - log.error(f"Failed to insert command into database: {e}") - await self.bot.report_error(e) - - @commands.Cog.listener() - async def 
on_command_completion(self, ctx): - log.info(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) completed successfully") - try: - # get the timestamp of when the command was called from the db - data = await self.collection.find_one({'_id': ctx.interaction.id}) - await self.collection.update_one({'_id': ctx.interaction.id}, - { - '$set': { - 'status': 'completed', - 'took' : (datetime.utcnow() - data['timestamp']).total_seconds() - } - }) - except Exception as e: - log.error(f"Failed to update command status to completed: {e}") - await self.bot.report_error(e) - - @commands.Cog.listener() - async def on_command_error(self, ctx: Context, exception: Exception): - try: - # get the timestamp of when the command was called from the db - data = await self.collection.find_one({'_id': ctx.interaction.id}) - await self.collection.update_one( - {'_id': ctx.interaction.id}, - {'$set': { - 'status': 'error', - 'took': (datetime.now() - data['timestamp']).total_seconds(), - 'error': str(exception) - }} - ) - except Exception as e: - log.exception("Failed to update command status to error") - await self.bot.report_error(e) async def setup(bot): diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 9bafc733..39901ff2 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -6,9 +6,9 @@ import matplotlib.pyplot as plt import numpy as np from discord import File -from discord.app_commands import describe +from discord import Interaction +from discord.app_commands import command, describe from discord.ext import commands -from discord.ext.commands import Context, hybrid_command from rocketwatch import RocketWatch from utils.cfg import cfg @@ -25,7 +25,7 @@ def get_percentiles(percentiles, counts): yield p, np.percentile(counts, p, method='nearest') -async 
def minipool_distribution_raw(ctx: Context, distribution): +async def minipool_distribution_raw(interaction: Interaction, distribution): e = Embed() e.title = "Minipool Distribution" description = "```\n" @@ -34,7 +34,7 @@ async def minipool_distribution_raw(ctx: Context, distribution): f"{nodes:>4} {p.plural('node', nodes)}\n" description += "```" e.description = description - await ctx.send(embed=e) + await interaction.followup.send(embed=e) class MinipoolDistribution(commands.Cog): @@ -71,13 +71,13 @@ async def get_minipool_counts_per_node(self): ] return [x["count"] async for x in self.bot.db.minipools.aggregate(pipeline)] - @hybrid_command() + @command() @describe(raw="Show the raw Distribution Data") async def minipool_distribution(self, - ctx: Context, + interaction: Interaction, raw: bool = False): """Show the distribution of minipools per node.""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() # Get the minipool distribution @@ -90,7 +90,7 @@ async def minipool_distribution(self, # If the raw data were requested, print them and exit early if raw: - await minipool_distribution_raw(ctx, distribution[::-1]) + await minipool_distribution_raw(interaction, distribution[::-1]) return img = BytesIO() @@ -123,16 +123,16 @@ async def minipool_distribution(self, percentile_strings.append(f"Max: {distribution[-1][0]} minipools per node") percentile_strings.append(f"Total: {p.no('minipool', sum(counts))}") e.set_footer(text="\n".join(percentile_strings)) - await ctx.send(embed=e, files=[f]) + await interaction.followup.send(embed=e, files=[f]) img.close() - @hybrid_command() + @command() @describe(raw="Show the raw distribution data") - async def node_gini(self, ctx: Context, raw: bool = False): + async def node_gini(self, interaction: Interaction, raw: bool = False): """ Show the cumulative validator share of the largest nodes. 
""" - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() e.title = "Validator Share of Largest Nodes" @@ -163,7 +163,7 @@ async def node_gini(self, ctx: Context, raw: bool = False): description += f"\nTotal: {x[-1]} nodes" e.description = description - await ctx.send(embed=e) + await interaction.followup.send(embed=e) return fig, ax = plt.subplots(1, 1) @@ -211,7 +211,7 @@ def draw_threshold(threshold: float, color: str) -> None: e.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") - await ctx.send(embed=e, files=[f]) + await interaction.followup.send(embed=e, files=[f]) img.close() diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 376b88ed..a2577b5a 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -1,9 +1,10 @@ import logging from datetime import datetime, timedelta -from discord.app_commands import guilds +from discord import Interaction +from discord.app_commands import command, guilds from discord.ext import commands, tasks -from discord.ext.commands import hybrid_command, is_owner +from discord.ext.commands import is_owner from rocketwatch import RocketWatch from utils.cfg import cfg @@ -66,15 +67,15 @@ async def run_loop(self): except Exception as err: await self.bot.report_error(err) - @hybrid_command() + @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() - async def pin(self, ctx, channel_id, title, description): - await ctx.defer() + async def pin(self, interaction, channel_id, title, description): + await interaction.response.defer() # check if channel exists channel = self.bot.get_channel(int(channel_id)) if not channel: - await ctx.send("Channel not found") + await interaction.followup.send("Channel not found") return # check if we already have a pinned message message = await 
self.bot.db.pinned_messages.find_one({"channel_id": channel.id}) @@ -84,38 +85,38 @@ async def pin(self, ctx, channel_id, title, description): "$set": {"title" : title, "content": description, "disabled": False, "cleaned_up": False, "message_id": None, "created_at": datetime.utcnow()}}) # rest is done by the run_loop - await ctx.send("Updated pinned message") + await interaction.followup.send("Updated pinned message") return # create new message await self.bot.db.pinned_messages.insert_one( {"channel_id": channel.id, "message_id": None, "title": title, "content": description, "disabled": False, "cleaned_up": False, "created_at": datetime.utcnow()}) # rest is done by the run_loop - await ctx.send("Created pinned message") + await interaction.followup.send("Created pinned message") - @hybrid_command() + @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() - async def unpin(self, ctx, channel_id): - await ctx.defer() + async def unpin(self, interaction, channel_id): + await interaction.response.defer() # check if channel exists channel = self.bot.get_channel(int(channel_id)) if not channel: - await ctx.send("Channel not found") + await interaction.followup.send("Channel not found") return # check if we already have a pinned message message = await self.bot.db.pinned_messages.find_one({"channel_id": channel.id}) if not message: - await ctx.send("No pinned message found") + await interaction.followup.send("No pinned message found") return # check if its already marked as disabled if message["disabled"]: - await ctx.send("Pinned message already disabled") + await interaction.followup.send("Pinned message already disabled") return # soft delete await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) # rest is done by the run_loop - await ctx.send("Disabled pinned message") + await interaction.followup.send("Disabled pinned message") async def cog_unload(self): self.run_loop.cancel() diff --git 
a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index b5fcf70d..efd8fd71 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -8,8 +8,8 @@ import pytz from discord import File from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction +from discord.app_commands import command from rocketwatch import RocketWatch from utils import solidity @@ -29,24 +29,24 @@ class Random(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @hybrid_command() - async def dice(self, ctx: Context, dice_string: str = "1d6"): - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + @command() + async def dice(self, interaction: Interaction, dice_string: str = "1d6"): + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) result = dice.roll(dice_string) e = Embed() e.title = f"🎲 {dice_string}" if len(str(result)) >= 2000: e.description = "Result too long to display, attaching as file." 
file = File(io.StringIO(str(result)), filename="dice_result.txt") - await ctx.send(embed=e, file=file) + await interaction.followup.send(embed=e, file=file) else: e.description = f"Result: `{result}`" - await ctx.send(embed=e) + await interaction.followup.send(embed=e) - @hybrid_command() - async def burn_reason(self, ctx: Context): + @command() + async def burn_reason(self, interaction: Interaction): """Show the largest sources of burned ETH""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) url = "https://ultrasound.money/api/fees/grouped-analysis-1" # get data from url using aiohttp async with aiohttp.ClientSession() as session: @@ -87,10 +87,10 @@ async def burn_reason(self, ctx: Context): value=f"`{solidity.to_float(data['latestBlockFees'][0]['baseFeePerGas'], 9):,.2f} GWEI`" ) e.description = description - await ctx.send(embed=e) + await interaction.followup.send(embed=e) - @hybrid_command() - async def dev_time(self, ctx: Context): + @command() + async def dev_time(self, interaction: Interaction): """Timezones too confusing to you? 
Well worry no more, this command is here to help!"""
         e = Embed()
         time_format = "%A %H:%M:%S %Z"
@@ -118,12 +118,12 @@ async def dev_time(self, ctx: Context):
         fornax_time = datetime.now(tz=pytz.timezone("America/Sao_Paulo"))
         e.add_field(name="Fornax's Time", value=fornax_time.strftime(time_format), inline=False)
 
-        await ctx.send(embed=e)
+        await interaction.followup.send(embed=e)
 
-    @hybrid_command()
-    async def sea_creatures(self, ctx: Context, address: str = None):
+    @command()
+    async def sea_creatures(self, interaction: Interaction, address: str = None):
         """List all sea creatures with their required minimum holding."""
-        await ctx.defer(ephemeral=is_hidden(ctx))
+        await interaction.response.defer(ephemeral=is_hidden(interaction))
         e = Embed()
         if address is not None:
             try:
@@ -132,7 +132,7 @@ async def sea_creatures(self, ctx: Context, address: str = None):
                 address = w3.to_checksum_address(address)
             except (ValueError, TypeError):
                 e.description = "Invalid address"
-                await ctx.send(embed=e)
+                await interaction.followup.send(embed=e)
                 return
             creature = await get_sea_creature_for_address(address)
             if not creature:
@@ -150,19 +150,19 @@ async def sea_creatures(self, ctx: Context, address: str = None):
         for holding_value, sea_creature in sea_creatures.items():
             e.add_field(name=f"{sea_creature}:", value=f"holds over {holding_value} ETH worth of assets", inline=False)
 
-        await ctx.send(embed=e)
+        await interaction.followup.send(embed=e)
         return
 
-    @hybrid_command()
-    async def smoothie(self, ctx: Context):
+    @command()
+    async def smoothie(self, interaction: Interaction):
         """Show smoothing pool information"""
         try:
             await rp.get_address_by_name("rocketSmoothingPool")
         except Exception as err:
             log.exception(err)
-            await ctx.send("redstone not deployed yet", ephemeral=True)
+            await interaction.response.send_message("redstone not deployed yet", ephemeral=True)
             return
-        await ctx.defer(ephemeral=is_hidden_weak(ctx))
+        await interaction.response.defer(ephemeral=is_hidden_weak(interaction))
        e = 
Embed(title="Smoothing Pool") smoothie_eth = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) @@ -245,7 +245,7 @@ async def smoothie(self, ctx: Context): } ])).to_list() if not data: - await ctx.send("no minipools found", ephemeral=True) + await interaction.followup.send("no minipools found", ephemeral=True) return data = {d["_id"]: d for d in data} # node counts @@ -265,12 +265,12 @@ async def smoothie(self, ctx: Context): lines = [f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" for d in data[True]["counts"][:min(smoothie_node_count, 5)]] e.description += "\n".join(lines) - await ctx.send(embed=e) + await interaction.followup.send(embed=e) - @hybrid_command() - async def odao_challenges(self, ctx: Context): + @command() + async def odao_challenges(self, interaction: Interaction): """Shows the current oDAO challenges""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) c = await rp.get_contract_by_name("rocketDAONodeTrustedActions") # get challenges made events = list(c.events["ActionChallengeMade"].get_logs( @@ -282,7 +282,7 @@ async def odao_challenges(self, ctx: Context): # sort by block number events.sort(key=lambda x: x.blockNumber) if not events: - await ctx.send("No active challenges found") + await interaction.followup.send("No active challenges found") return e = Embed(title="Active oDAO Challenges") e.description = "" @@ -296,7 +296,7 @@ async def odao_challenges(self, ctx: Context): challenger = await el_explorer_url(event.args.nodeChallengerAddress) e.description += f"**{challenged}** was challenged by **{challenger}**\n" e.description += f"Time Left: **{time_left}**\n\n" - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(self): diff --git a/rocketwatch/plugins/releases/releases.py b/rocketwatch/plugins/releases/releases.py index d46a0fac..9bea7220 100644 --- 
a/rocketwatch/plugins/releases/releases.py +++ b/rocketwatch/plugins/releases/releases.py @@ -2,8 +2,9 @@ import aiohttp from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command + +from discord.app_commands import command +from discord import Interaction from rocketwatch import RocketWatch from utils.cfg import cfg @@ -19,12 +20,12 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.tag_url = "https://github.com/rocket-pool/smartnode-install/releases/tag/" - @hybrid_command() - async def latest_release(self, ctx: Context): + @command() + async def latest_release(self, interaction: Interaction): """ Get the latest release of Smart Node. """ - await ctx.defer(ephemeral=is_hidden(ctx)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) async with aiohttp.ClientSession() as session: res = await session.get("https://api.github.com/repos/rocket-pool/smartnode-install/tags") @@ -37,7 +38,7 @@ async def latest_release(self, ctx: Context): e = Embed() e.add_field(name="Latest Smart Node Release", value=latest_release, inline=False) - await ctx.send(embed=e) + await interaction.followup.send(embed=e) async def setup(bot): diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 8204e6a6..9339804d 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -5,10 +5,10 @@ from io import BytesIO from discord import File -from discord.app_commands import describe +from discord.app_commands import command, describe from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command +from discord import Interaction + from typing import Optional from dataclasses import dataclass @@ -47,16 +47,16 @@ async def _make_request(self, address) -> dict: response = await session.get(f"https://sprocketpool.net/api/node/{address}") return await response.json() - 
async def get_estimated_rewards(self, ctx: Context, address: str) -> Optional[RewardEstimate]: + async def get_estimated_rewards(self, interaction: Interaction, address: str) -> Optional[RewardEstimate]: if not await rp.call("rocketNodeManager.getNodeExists", address): - await ctx.send(f"{address} is not a registered node.") + await interaction.followup.send(f"{address} is not a registered node.") return None try: patches_res = await self._make_request(address) except Exception as e: - await self.bot.report_error(e, ctx) - await ctx.send("Error fetching node data from Sprocket Pool API. Blame Patches.") + await self.bot.report_error(e, interaction) + await interaction.followup.send("Error fetching node data from Sprocket Pool API. Blame Patches.") return None data_block = await ts_to_block(patches_res["time"]) @@ -86,19 +86,19 @@ def create_embed(title: str, rewards: RewardEstimate) -> Embed: ) return embed - @hybrid_command() + @command() @describe(node_address="address of node to show rewards for") @describe(extrapolate="whether to extrapolate partial rewards for the entire period") - async def upcoming_rewards(self, ctx: Context, node_address: str, extrapolate: bool = True): + async def upcoming_rewards(self, interaction: Interaction, node_address: str, extrapolate: bool = True): """ Show estimated RPL and smoothing pool rewards for this period. 
""" - await ctx.defer(ephemeral=True) - display_name, address = await resolve_ens(ctx, node_address) + await interaction.response.defer(ephemeral=True) + display_name, address = await resolve_ens(interaction, node_address) if display_name is None: return - rewards = await self.get_estimated_rewards(ctx, address) + rewards = await self.get_estimated_rewards(interaction, address) if rewards is None: return @@ -114,9 +114,9 @@ async def upcoming_rewards(self, ctx: Context, node_address: str, extrapolate: b embed = self.create_embed(title, rewards) embed.add_field(name="RPL Staking:", value=f"{rewards.rpl_rewards:,.3f} RPL") embed.add_field(name="Smoothing Pool:", value=f"{rewards.eth_rewards:,.3f} ETH") - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) - @hybrid_command() + @command() @describe( node_address="address of node to simulate rewards for", rpl_stake="amount of staked RPL to simulate", @@ -125,7 +125,7 @@ async def upcoming_rewards(self, ctx: Context, node_address: str, extrapolate: b ) async def simulate_rewards( self, - ctx: Context, + interaction: Interaction, node_address: str, rpl_stake: int = 0, num_leb8: int = 0, @@ -134,12 +134,12 @@ async def simulate_rewards( """ Simulate RPL rewards for this period """ - await ctx.defer(ephemeral=True) - display_name, address = await resolve_ens(ctx, node_address) + await interaction.response.defer(ephemeral=True) + display_name, address = await resolve_ens(interaction, node_address) if display_name is None: return - rewards = await self.get_estimated_rewards(ctx, address) + rewards = await self.get_estimated_rewards(interaction, address) if rewards is None: return @@ -221,7 +221,7 @@ def plot_point(_pt_color: str, _pt_label: str, _x: int) -> None: elif borrowed_eth > 0: draw_reward_curve(sim_color, None, sim_ls, borrowed_eth) else: - await ctx.send("Empty node. Choose another one or specify the minipool count.") + await interaction.followup.send("Empty node. 
Choose another one or specify the minipool count.") return def formatter(_x, _pos) -> str: @@ -267,7 +267,7 @@ def formatter(_x, _pos) -> str: embed.set_image(url="attachment://rewards.png") f = File(img, filename="rewards.png") - await ctx.send(embed=embed, files=[f]) + await interaction.followup.send(embed=embed, files=[f]) img.close() diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 1e14515a..7ee8877e 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -7,10 +7,9 @@ import aiohttp import numpy as np from discord import File +from discord import Interaction +from discord.app_commands import command, describe from discord.ext import commands -from discord.ext.commands import Context -from discord.ext.commands import hybrid_command -from discord.app_commands import describe from matplotlib import ( pyplot as plt, font_manager as fm, @@ -233,24 +232,24 @@ def formatter(_x, _pos) -> str: return fig - @hybrid_command() + @command() @describe(min_price="lower end of price range in USD") @describe(max_price="upper end of price range in USD") @describe(sources="choose places to pull liquidity data from") async def wall( self, - ctx: Context, + interaction: Interaction, min_price: float = 0.0, max_price: float = None, sources: Literal["All", "CEX", "DEX"] = "All" ) -> None: """Show the current RPL market depth across exchanges""" - await ctx.defer(ephemeral=is_hidden_weak(ctx)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) embed = Embed(title="RPL Market Depth") async def on_fail() -> None: embed.set_image(url="https://media1.giphy.com/media/hEc4k5pN17GZq/giphy.gif") - await ctx.send(embed=embed) + await interaction.followup.send(embed=embed) return None try: @@ -260,7 +259,7 @@ async def on_fail() -> None: eth_usd = await rp.get_eth_usdc_price() rpl_eth = rpl_usd / eth_usd except Exception as e: - await self.bot.report_error(e, ctx) + await self.bot.report_error(e, 
interaction) return await on_fail() if min_price < 0: @@ -287,7 +286,7 @@ async def on_fail() -> None: cex_data = await self._get_cex_data(x, rpl_usd) source_desc.append(f"{len(cex_data)} CEX") except Exception as e: - await self.bot.report_error(e, ctx) + await self.bot.report_error(e, interaction) return await on_fail() if (not cex_data) and (not dex_data): @@ -310,7 +309,7 @@ async def on_fail() -> None: file_name = "wall.png" embed.set_image(url=f"attachment://{file_name}") - await ctx.send(embed=embed, files=[File(buffer, file_name)]) + await interaction.followup.send(embed=embed, files=[File(buffer, file_name)]) return None diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 56cd3e54..42ca5677 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -5,22 +5,22 @@ from typing import Optional from discord import ( - app_commands, + app_commands, Interaction, Intents, - Thread, - File, - Object, + Thread, + File, + Guild, User, ) from discord.abc import GuildChannel, PrivateChannel from discord.ext import commands -from discord.ext.commands import Bot, Context -from discord.app_commands import CommandTree, AppCommandError +from discord.ext.commands import Bot from pymongo import AsyncMongoClient from utils.cfg import cfg +from utils.command_tree import RWCommandTree from utils.retry import retry_async from utils.rocketpool import rp @@ -29,13 +29,8 @@ class RocketWatch(Bot): - class RWCommandTree(CommandTree): - async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: - ctx = await Context.from_interaction(interaction) - await self.client.on_command_error(ctx, error) - def __init__(self, intents: Intents) -> None: - super().__init__(command_prefix=(), tree_cls=self.RWCommandTree, intents=intents) + super().__init__(command_prefix=(), tree_cls=RWCommandTree, intents=intents) self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch async def _load_plugins(self): @@ -104,8 +99,9 @@ async def 
on_ready(self): await self.sync_commands() - async def on_command_error(self, ctx: Context, error: Exception) -> None: - log.error(f"/{ctx.command.name} called by {ctx.author} in #{ctx.channel.name} ({ctx.guild}) failed") + async def on_app_command_error(self, interaction: Interaction, error: Exception) -> None: + cmd_name = interaction.command.name if interaction.command else "unknown" + log.error(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed") if isinstance(error, commands.errors.MaxConcurrencyReached): msg = "Someone else is already using this command. Please try again later." elif isinstance(error, app_commands.errors.CommandOnCooldown): @@ -114,12 +110,12 @@ async def on_command_error(self, ctx: Context, error: Exception) -> None: msg = "An unexpected error occurred and has been reported to the developer. Please try again later." try: - await self.report_error(error, ctx) - await ctx.send(content=msg, ephemeral=True) + await self.report_error(error, interaction) + await interaction.followup.send(content=msg, ephemeral=True) except Exception: log.exception("Failed to alert user") - async def get_or_fetch_guild(self, guild_id: int) -> Object: + async def get_or_fetch_guild(self, guild_id: int) -> Guild: return self.get_guild(guild_id) or await self.fetch_guild(guild_id) async def get_or_fetch_channel(self, channel_id: int) -> GuildChannel | PrivateChannel | Thread: @@ -128,20 +124,21 @@ async def get_or_fetch_channel(self, channel_id: int) -> GuildChannel | PrivateC async def get_or_fetch_user(self, user_id: int) -> User: return self.get_user(user_id) or await self.fetch_user(user_id) - async def report_error(self, exception: Exception, ctx: Optional[Context] = None, *args) -> None: + async def report_error(self, exception: Exception, interaction: Optional[Interaction] = None, *args) -> None: err_description = f"`{repr(exception)[:150]}`" - + if args: args_fmt = "\n".join(f"args[{i}] = {arg}" for i, arg in 
enumerate(args)) err_description += f"\n```{args_fmt}```" - - if ctx: + + if interaction: + cmd_name = interaction.command.name if interaction.command else "unknown" err_description += ( f"\n```" - f"{ctx.command.name = }\n" - f"ctx.command.params = {getattr(ctx.command, 'params', '')}\n" - f"{ctx.channel = }\n" - f"{ctx.author = }" + f"command = {cmd_name}\n" + f"command.params = {getattr(interaction.command, 'parameters', '')}\n" + f"channel = {interaction.channel}\n" + f"user = {interaction.user}" f"```" ) diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py new file mode 100644 index 00000000..5d1ea1ac --- /dev/null +++ b/rocketwatch/utils/command_tree.py @@ -0,0 +1,75 @@ +import logging +from datetime import datetime + +from discord import Interaction +from discord.app_commands import CommandTree, AppCommandError + +from utils.cfg import cfg + +log = logging.getLogger("command_tree") +log.setLevel(cfg["log_level"]) + + +class RWCommandTree(CommandTree): + async def _call(self, interaction: Interaction) -> None: + cmd_name = interaction.command.name if interaction.command else "unknown" + timestamp = datetime.utcnow() + + log.info(f"/{cmd_name} triggered by {interaction.user} in #{interaction.channel.name} ({interaction.guild})") + try: + await self.client.db.command_metrics.insert_one({ + '_id': interaction.id, + 'command': cmd_name, + 'options': interaction.data.get("options", []) if interaction.data else [], + 'user': { + 'id': interaction.user.id, + 'name': interaction.user.name, + }, + 'guild': { + 'id': interaction.guild.id, + 'name': interaction.guild.name, + } if interaction.guild else None, + 'channel': { + 'id': interaction.channel.id, + 'name': interaction.channel.name, + }, + 'timestamp': timestamp, + 'status': 'pending' + }) + except Exception as e: + log.error(f"Failed to insert command into database: {e}") + await self.client.report_error(e) + + try: + await super()._call(interaction) + except Exception as error: + 
log.info(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed") + try: + await self.client.db.command_metrics.update_one( + {'_id': interaction.id}, + {'$set': { + 'status': 'error', + 'took': (datetime.utcnow() - timestamp).total_seconds(), + 'error': str(error) + }} + ) + except Exception as e: + log.exception("Failed to update command status to error") + await self.client.report_error(e) + raise + + log.info(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) completed successfully") + try: + await self.client.db.command_metrics.update_one( + {'_id': interaction.id}, + {'$set': { + 'status': 'completed', + 'took': (datetime.utcnow() - timestamp).total_seconds() + }} + ) + except Exception as e: + log.error(f"Failed to update command status to completed: {e}") + await self.client.report_error(e) + + async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: + await self.client.on_app_command_error(interaction, error) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index e06b2532..af348375 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -47,32 +47,32 @@ def set_footer_parts(self, parts): # If an ens name is provided, it will be used as the display name. # If an address is provided, the display name will either be the reverse record or the address. # If the user input isn't sanitary, send an error message back to the user and return None, None. -async def resolve_ens(ctx, node_address): +async def resolve_ens(interaction, node_address): # if it looks like an ens, attempt to resolve it if "." 
in node_address: try: address = await ens.resolve_name(node_address) if not address: - await ctx.send("ENS name not found") + await interaction.followup.send("ENS name not found") return None, None return node_address, address except InvalidName: - await ctx.send("Invalid ENS name") + await interaction.followup.send("Invalid ENS name") return None, None # if it's just an address, look for a reverse record try: address = w3.to_checksum_address(node_address) except Exception: - await ctx.send("Invalid address") + await interaction.followup.send("Invalid address") return None, None try: display_name = await ens.get_name(node_address) or address return display_name, address except InvalidName: - await ctx.send("Invalid address") + await interaction.followup.send("Invalid address") return None, None diff --git a/rocketwatch/utils/visibility.py b/rocketwatch/utils/visibility.py index 661b5a43..1056f7b6 100644 --- a/rocketwatch/utils/visibility.py +++ b/rocketwatch/utils/visibility.py @@ -1,14 +1,13 @@ from discord import Interaction -from discord.ext.commands import Context from plugins.support_utils.support_utils import has_perms -def is_hidden(interaction: Context | Interaction): +def is_hidden(interaction: Interaction): return all(w not in interaction.channel.name for w in ["random", "rocket-watch"]) -def is_hidden_weak(interaction: Context | Interaction): +def is_hidden_weak(interaction: Interaction): return all(w not in interaction.channel.name for w in ["random", "rocket-watch", "trading"]) From 1805dd688ea9dc47c7a855044f4a62f925cd7e1d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 22:45:28 +0000 Subject: [PATCH 165/279] migrated requests to aiohttp --- rocketwatch/plugins/about/about.py | 31 ++++++++++++-------- rocketwatch/plugins/cow_orders/cow_orders.py | 26 ++++++++-------- rocketwatch/plugins/debug/debug.py | 6 ++-- rocketwatch/plugins/queue/queue.py | 9 ++---- rocketwatch/requirements.txt | 1 - 
rocketwatch/utils/block_time.py | 11 +++---- rocketwatch/utils/cached_ens.py | 18 ++++-------- rocketwatch/utils/embeds.py | 24 +++++++-------- 8 files changed, 58 insertions(+), 68 deletions(-) diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 96b23bd5..4456879f 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -5,7 +5,7 @@ import humanize import psutil -import requests +import aiohttp import uptime from discord.ext import commands from discord import Interaction @@ -39,13 +39,15 @@ async def about(self, interaction: Interaction): if api_key := cfg.get("other.secrets.wakatime"): try: - code_time = requests.get( - "https://wakatime.com/api/v1/users/current/all_time_since_today", - params={ - "project": "rocketwatch", - "api_key": api_key - } - ).json()["data"]["text"] + async with aiohttp.ClientSession() as session: + async with session.get( + "https://wakatime.com/api/v1/users/current/all_time_since_today", + params={ + "project": "rocketwatch", + "api_key": api_key + } + ) as resp: + code_time = (await resp.json())["data"]["text"] except Exception as err: await self.bot.report_error(err) @@ -55,8 +57,8 @@ async def about(self, interaction: Interaction): inline=False) e.add_field(name="Bot Statistics", - value=f"{len(g)} Guilds joined and " - f"{humanize.intcomma(sum(guild.member_count for guild in g))} Members reached!", + value=f"{len(g)} guilds joined and " + f"{humanize.intcomma(sum(guild.member_count for guild in g))} members reached!", inline=False) address = await el_explorer_url(cfg["rocketpool.manual_addresses.rocketStorage"]) @@ -70,8 +72,8 @@ async def about(self, interaction: Interaction): e.add_field(name="Host Memory", value=f"{psutil.virtual_memory().percent}% used") e.add_field(name="Bot Memory", value=f"{humanize.naturalsize(self.process.memory_info().rss)} used") - load = psutil.getloadavg() - e.add_field(name="Host Load", value='/'.join(str(l) for l in load)) + 
load = [x / psutil.cpu_count() for x in psutil.getloadavg()] + e.add_field(name="Host Load", value=' / '.join(f"{l:.0%}" for l in load)) system_uptime = uptime.uptime() e.add_field(name="Host Uptime", value=f"{readable.uptime(system_uptime)}") @@ -83,9 +85,12 @@ async def about(self, interaction: Interaction): # show credits try: + async with aiohttp.ClientSession() as session: + async with session.get(f"https://api.github.com/repos/{repo_name}/contributors") as resp: + contributors_data = await resp.json() contributors = [ f"[{c['login']}]({c['html_url']}) ({c['contributions']})" - for c in requests.get(f"https://api.github.com/repos/{repo_name}/contributors").json() + for c in contributors_data if "bot" not in c["login"].lower() ] contributors_str = ", ".join(contributors[:10]) diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 5da75d1d..5cd887be 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -1,7 +1,7 @@ import logging from datetime import datetime, timedelta -import requests +import aiohttp from datetime import timezone from web3.datastructures import MutableAttributeDict as aDict @@ -79,12 +79,13 @@ async def check_for_new_events(self): # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - response = requests.get("https://api.cow.fi/mainnet/api/v1/auction") - if response.status_code != 200: - log.error("Cow API returned non-200 status code: %s", response.text) - raise Exception("Cow API returned non-200 status code") - - cow_orders = response.json()["orders"] + async with aiohttp.ClientSession() as session: + async with session.get("https://api.cow.fi/mainnet/api/v1/auction") as response: + if response.status != 200: + text = await response.text() + log.error("Cow API returned non-200 status code: %s", text) + raise Exception("Cow API returned non-200 status code") + cow_orders = (await 
response.json())["orders"] """ entity example: @@ -198,11 +199,12 @@ async def check_for_new_events(self): # request more data from the api try: - t = requests.get(f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}") - if t.status_code != 200: - log.error(f"Failed to get more data from the cow api for order {order['uid']}: {t.text}") - continue - extra = t.json() + async with aiohttp.ClientSession() as session: + async with session.get(f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}") as t: + if t.status != 200: + log.error(f"Failed to get more data from the cow api for order {order['uid']}: {await t.text()}") + continue + extra = await t.json() except Exception as e: log.error(f"Failed to get more data from the cow api for order {order['uid']}: {e}") continue diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 566ba097..c2939d57 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -5,7 +5,7 @@ import time import humanize -import requests +import aiohttp from colorama import Fore, Style from discord import File, Interaction from discord.app_commands import Choice, command, guilds, describe @@ -308,7 +308,9 @@ async def asian_restaurant_name(self, interaction: Interaction): Randomly generated Asian restaurant names """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - a = requests.get("https://www.dotomator.com/api/random_name.json?type=asian").json()["name"] + async with aiohttp.ClientSession() as session: + async with session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: + a = (await resp.json())["name"] await interaction.followup.send(a) @command() diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index e853017b..1437eadf 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -2,6 +2,7 @@ from typing import 
Literal, NamedTuple +from aiocache import cached from discord import Interaction from discord.app_commands import command, describe from discord.ext.commands import Cog @@ -52,14 +53,10 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: ) return queue_length, queue_content - _el_url_cache: dict[tuple[str, str], str] = {} - @staticmethod + @cached(key_builder=lambda _, address, prefix="": (address, prefix)) async def _cached_el_url(address, prefix="") -> str: - key = (address, prefix) - if key not in Queue._el_url_cache: - Queue._el_url_cache[key] = await el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) - return Queue._el_url_cache[key] + return await el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) @staticmethod async def _megapool_to_node(megapool_address) -> ChecksumAddress: diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index 0573c580..ce5dd6ad 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -5,7 +5,6 @@ humanize==4.15.0 termplotlib==0.3.9 cachetools==7.0.3 bidict==0.23.1 -requests==2.32.5 uptime==3.0.1 discord.py==2.7.1 config==0.5.1 diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index d39cf8b3..908c4c62 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -1,20 +1,17 @@ import math import logging +from aiocache import cached + from utils.cfg import cfg from utils.shared_w3 import w3 log = logging.getLogger("block_time") log.setLevel(cfg["log_level"]) -_block_ts_cache: dict[int, int] = {} - +@cached() async def block_to_ts(block_number: int) -> int: - if block_number in _block_ts_cache: - return _block_ts_cache[block_number] - ts = (await w3.eth.get_block(block_number)).timestamp - _block_ts_cache[block_number] = ts - return ts + return (await w3.eth.get_block(block_number)).timestamp async def ts_to_block(target_ts: int) -> int: log.debug(f"Looking for block at timestamp 
{target_ts}") diff --git a/rocketwatch/utils/cached_ens.py b/rocketwatch/utils/cached_ens.py index 233a99bc..9cbc19f4 100644 --- a/rocketwatch/utils/cached_ens.py +++ b/rocketwatch/utils/cached_ens.py @@ -1,6 +1,7 @@ import logging from typing import Optional +from aiocache import cached from ens import AsyncENS from eth_typing import ChecksumAddress @@ -10,26 +11,17 @@ log = logging.getLogger("cached_ens") log.setLevel(cfg["log_level"]) -_name_cache: dict[ChecksumAddress, Optional[str]] = {} -_address_cache: dict[str, Optional[ChecksumAddress]] = {} - class CachedEns: def __init__(self): self.ens = AsyncENS.from_web3(w3_mainnet) + @cached(key_builder=lambda _, _self, address: address) async def get_name(self, address: ChecksumAddress) -> Optional[str]: - if address in _name_cache: - return _name_cache[address] log.debug(f"Retrieving ENS name for {address}") - name = await self.ens.name(address) - _name_cache[address] = name - return name + return await self.ens.name(address) + @cached(key_builder=lambda _, _self, name: name) async def resolve_name(self, name: str) -> Optional[ChecksumAddress]: - if name in _address_cache: - return _address_cache[name] log.debug(f"Resolving ENS name {name}") - address = await self.ens.address(name) - _address_cache[name] = address - return address + return await self.ens.address(name) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index af348375..8769dcbd 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -6,8 +6,8 @@ import discord import humanize -import requests -from cachetools.func import ttl_cache +import aiohttp +from aiocache import cached from discord import Color from ens import InvalidName from etherscan_labels import Addresses @@ -20,7 +20,7 @@ from utils.rocketpool import rp from utils.sea_creatures import get_sea_creature_for_address from utils.shared_w3 import w3 -from utils.retry import retry +from utils.retry import retry_async from utils.block_time import 
block_to_ts ens = CachedEns() @@ -78,20 +78,16 @@ async def resolve_ens(interaction, node_address): _pdao_delegates: dict[str, str] = {} -@ttl_cache(ttl=900) -def get_pdao_delegates() -> dict[str, str]: +@cached(ttl=900) +@retry_async(tries=3, delay=1) +async def get_pdao_delegates() -> dict[str, str]: global _pdao_delegates - - @retry(tries=3, delay=1) - def _get_delegates() -> dict[str, str]: - response = requests.get("https://delegates.rocketpool.net/api/delegates") - return {delegate["nodeAddress"]: delegate["name"] for delegate in response.json()} - try: - _pdao_delegates = _get_delegates() + async with aiohttp.ClientSession() as session: + async with session.get("https://delegates.rocketpool.net/api/delegates") as resp: + _pdao_delegates = {d["nodeAddress"]: d["name"] for d in await resp.json()} except Exception: log.warning("Failed to fetch pDAO delegates.") - return _pdao_delegates @@ -138,7 +134,7 @@ async def el_explorer_url( prefix += "🔒" name = member_id - if not name and (delegate_name := get_pdao_delegates().get(target)): + if not name and (delegate_name := (await get_pdao_delegates()).get(target)): if prefix != -1: prefix += "🏛️" name = delegate_name From c580399a9fe71835b5f5d598104e39b166555333 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 23:33:10 +0000 Subject: [PATCH 166/279] stricter linter rules --- .pep8speaks.yml | 13 +- rocketwatch/plugins/8ball/8ball.py | 7 +- rocketwatch/plugins/about/about.py | 5 +- rocketwatch/plugins/activity/activity.py | 2 +- rocketwatch/plugins/apr/apr.py | 16 +- .../plugins/beacon_events/beacon_events.py | 7 +- .../plugins/chat_summary/chat_summary.py | 40 +++-- .../plugins/chicken_soup/chicken_soup.py | 2 +- rocketwatch/plugins/collateral/collateral.py | 14 +- rocketwatch/plugins/cow_orders/cow_orders.py | 16 +- rocketwatch/plugins/dao/dao.py | 41 +++-- .../plugins/db_upkeep_task/db_upkeep_task.py | 155 ++++++++++-------- rocketwatch/plugins/debug/debug.py | 26 +-- 
.../plugins/deposit_pool/deposit_pool.py | 10 +- .../plugins/detect_scam/detect_scam.py | 103 ++++++------ rocketwatch/plugins/events/events.py | 61 ++++--- .../fee_distribution/fee_distribution.py | 33 ++-- rocketwatch/plugins/governance/governance.py | 4 +- rocketwatch/plugins/lottery/lottery.py | 10 +- rocketwatch/plugins/metrics/metrics.py | 1 - .../pinned_messages/pinned_messages.py | 12 +- rocketwatch/plugins/proposals/proposals.py | 43 +++-- rocketwatch/plugins/queue/queue.py | 80 +++++---- rocketwatch/plugins/random/random.py | 9 +- rocketwatch/plugins/reloader/reloader.py | 6 +- rocketwatch/plugins/rewards/rewards.py | 4 +- rocketwatch/plugins/rocksolid/rocksolid.py | 20 +-- rocketwatch/plugins/rpl/rpl.py | 7 +- rocketwatch/plugins/snapshot/snapshot.py | 6 +- .../plugins/support_utils/support_utils.py | 18 +- .../plugins/transactions/transactions.py | 10 +- rocketwatch/plugins/tvl/tvl.py | 12 +- .../user_distribute/user_distribute.py | 51 +++--- .../validator_states/validator_states.py | 14 +- rocketwatch/plugins/wall/wall.py | 6 +- rocketwatch/rocketwatch.py | 6 +- rocketwatch/utils/block_time.py | 2 + rocketwatch/utils/command_tree.py | 5 +- rocketwatch/utils/dao.py | 57 ++++--- rocketwatch/utils/embeds.py | 54 +++--- rocketwatch/utils/event.py | 1 + rocketwatch/utils/event_logs.py | 14 +- rocketwatch/utils/image.py | 1 + rocketwatch/utils/liquidity.py | 4 +- rocketwatch/utils/readable.py | 22 +-- rocketwatch/utils/retry.py | 1 + rocketwatch/utils/sea_creatures.py | 3 +- rocketwatch/utils/shared_w3.py | 1 + rocketwatch/utils/solidity.py | 2 + rocketwatch/utils/views.py | 19 ++- 50 files changed, 605 insertions(+), 451 deletions(-) diff --git a/.pep8speaks.yml b/.pep8speaks.yml index a99bdf94..ae30837e 100644 --- a/.pep8speaks.yml +++ b/.pep8speaks.yml @@ -3,15 +3,8 @@ scanner: linter: pycodestyle pycodestyle: - max-line-length: 128 + max-line-length: 120 ignore: - - E261 - - E501 - - W605 - - E111 - - E114 - - E231 - - E702 - E203 - -no_blank_comment: 
False \ No newline at end of file + - W503 + - W504 diff --git a/rocketwatch/plugins/8ball/8ball.py b/rocketwatch/plugins/8ball/8ball.py index 847e267c..822bb36a 100644 --- a/rocketwatch/plugins/8ball/8ball.py +++ b/rocketwatch/plugins/8ball/8ball.py @@ -19,11 +19,14 @@ def __init__(self, bot: RocketWatch): async def eight_ball(self, interaction: Interaction, question: str): e = Embed(title="🎱 Magic 8 Ball") if not question.endswith("?"): - e.description = "You must ask a yes or no question to the magic 8 ball (hint: add a `?` at the end of your question)" + e.description = ( + "You must ask a yes or no question to the magic 8 ball" + " (hint: add a `?` at the end of your question)" + ) await interaction.response.send_message(embed=e, ephemeral=True) return await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - await asyncio.sleep(random.randint(2,5)) + await asyncio.sleep(random.randint(2, 5)) res = pyrandom.choice([ "As I see it, yes", "It is certain", diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 4456879f..4a186aab 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -1,7 +1,6 @@ import os import time import logging -from urllib.parse import urlencode import humanize import psutil @@ -24,6 +23,7 @@ log = logging.getLogger("about") log.setLevel(cfg["log_level"]) + class About(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot @@ -73,7 +73,7 @@ async def about(self, interaction: Interaction): e.add_field(name="Bot Memory", value=f"{humanize.naturalsize(self.process.memory_info().rss)} used") load = [x / psutil.cpu_count() for x in psutil.getloadavg()] - e.add_field(name="Host Load", value=' / '.join(f"{l:.0%}" for l in load)) + e.add_field(name="Host Load", value=' / '.join(f"{pct:.0%}" for pct in load)) system_uptime = uptime.uptime() e.add_field(name="Host Uptime", value=f"{readable.uptime(system_uptime)}") @@ -103,6 +103,5 @@ async def 
about(self, interaction: Interaction): await interaction.followup.send(embed=e) - async def setup(bot): await bot.add_cog(About(bot)) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 601f524e..743773d2 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -42,7 +42,7 @@ async def task(self): @task.before_loop async def before_loop(self): await self.bot.wait_until_ready() - + @task.error async def on_error(self, err: Exception): await self.bot.report_error(err) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index c1193d37..0ab5f293 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -43,7 +43,7 @@ class APR(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.task.start() - + async def cog_unload(self): self.task.cancel() @@ -73,11 +73,11 @@ async def task(self): "effectiveness": effectiveness }) cursor_block = balance_block - 1 - + @task.before_loop async def before_loop(self): await self.bot.wait_until_ready() - + @task.error async def on_error(self, err: Exception): await self.bot.report_error(err) @@ -388,11 +388,14 @@ async def node_apr(self, interaction: Interaction): ax1 = plt.gca() # solo apr - ax1.plot(x, y_7d_node_operators_leb8_14, linestyle="-.", label=f"{y_7d_claim:.1f} Day Average (leb8 14%)", color="red", alpha=0.5) + ax1.plot(x, y_7d_node_operators_leb8_14, linestyle="-.", + label=f"{y_7d_claim:.1f} Day Average (leb8 14%)", color="red", alpha=0.5) # use area to show region between leb16 20% and leb16 5%. 
use a spare dotted fill to show the region between - ax1.fill_between(x, y_7d_node_operators_leb16_20, y_7d_node_operators_leb16_05, alpha=0.2, color="red", label=f"{y_7d_claim:.1f} Day Average (leb16 5-20%)") + ax1.fill_between(x, y_7d_node_operators_leb16_20, y_7d_node_operators_leb16_05, alpha=0.2, + color="red", label=f"{y_7d_claim:.1f} Day Average (leb16 5-20%)") # plot the leb16 14% line - ax1.plot(x, y_7d_node_operators_leb16_14, linestyle="--", label=f"{y_7d_claim:.1f} Day Average (leb16 14%)", color="red", alpha=0.5) + ax1.plot(x, y_7d_node_operators_leb16_14, linestyle="--", + label=f"{y_7d_claim:.1f} Day Average (leb16 14%)", color="red", alpha=0.5) ax1.plot(x, y_7d_solo, linestyle=":", label=f"{y_7d_claim:.1f} Day Average (solo)", color="black", alpha=0.5) plt.title("Observed NO APR values") @@ -424,5 +427,6 @@ async def node_apr(self, interaction: Interaction): await interaction.followup.send(embed=e, file=File(img, "no_apr.png")) + async def setup(bot): await bot.add_cog(APR(bot)) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 3372ba36..f90d1a01 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -37,7 +37,7 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) log.info(f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]") events: list[Event] = [] - for slot_number in range(from_slot, to_slot-1): + for slot_number in range(from_slot, to_slot - 1): events.extend(await self._get_events_for_slot(slot_number, check_finality=False)) # quite expensive and only really makes sense to check toward the head of the chain @@ -56,7 +56,6 @@ async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) return [] else: raise e - events = await self._get_slashings(beacon_block) if proposal_event := await self._get_proposal(beacon_block): @@ -96,7 
+95,7 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: if not (minipool or megapool): log.info(f"Skipping slashing of unknown validator {slash['validator']}") continue - + unique_id = ( f"slash-{slash['validator']}" f":slasher-{slash['slasher']}" @@ -125,7 +124,7 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block return None - + if not (api_key := cfg["consensus_layer.beaconcha_secret"]): log.warning("Missing beaconcha.in API key") return None diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 332eee75..95b153ff 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -30,7 +30,10 @@ def __init__(self, bot: RocketWatch): @classmethod def message_to_text(cls, message, index): - text = f"{message.author.global_name or message.author.name} on {message.created_at.strftime('%a at %H:%M')}:\n {message.content}" + text = ( + f"{message.author.global_name or message.author.name}" + f" on {message.created_at.strftime('%a at %H:%M')}:\n {message.content}" + ) # if there is an image attached, add it to the text as a note metadata = [] @@ -61,25 +64,29 @@ async def summarize_chat(self, interaction: Interaction): last_ts = await self.bot.db["last_summary"].find_one({"channel_id": interaction.channel.id}) # ratelimit if last_ts and (datetime.now(timezone.utc) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): - await interaction.followup.send("You can only summarize once every 6 hours.", ephemeral=True) - return + await interaction.followup.send("You can only summarize once every 6 hours.", ephemeral=True) + return if interaction.channel.id not in [405163713063288832]: await interaction.followup.send("You can't summarize here.", ephemeral=True) return msg = await interaction.channel.send("Summarizing 
chat…") - last_ts = last_ts["timestamp"].replace(tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(timezone.utc) - timedelta(days=365) + last_ts = last_ts["timestamp"].replace( + tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(timezone.utc) - timedelta(days=365) prompt = ( "Task Description:\n" "I need a summary of the entire chat log. This summary should be presented in the form of a bullet list.\n\n" "Format and Length Requirements:\n" - "- The bullet list must be kept short and concise, but the list has to cover the entire chat log. Make at most around 5 bullet points.\n" + "- The bullet list must be kept short and concise, but the list has to cover the entire chat log." + " Make at most around 5 bullet points.\n" "- Each bullet point should represent a distinct topic discussed in the chat log.\n\n" "Content Constraints:\n" "- Limit each topic to a single bullet point in the list.\n" "- Omit any topics that are uninteresting or not crucial to the overall understanding of the chat log.\n" - "- If any content in the chat log goes against guidelines, refer to it in a safe and compliant manner, without detailing the specific content.\n\n" + "- If any content in the chat log goes against guidelines, refer to it in a safe and compliant manner," + " without detailing the specific content.\n\n" "Response Instruction:\n" - "- Respond only with the bullet list summary as specified. Do not include any additional commentary or response outside of this list.\n\n" + "- Respond only with the bullet list summary as specified." 
+ " Do not include any additional commentary or response outside of this list.\n\n" "Truncated Example Output:\n" "----------------\n" "- Discussions between invis, langers, knoshua and more about the meaning of life.\n" @@ -113,9 +120,14 @@ async def summarize_chat(self, interaction: Interaction): else: es[-1].description = res res = "" - token_usage = response.usage.input_tokens + (response.usage.output_tokens * 5) # completion tokens are 3x more expensive + # completion tokens are 3x more expensive + token_usage = response.usage.input_tokens + (response.usage.output_tokens * 5) es[-1].set_footer( - text=f"Request cost: ${token_usage / 1000000 * 3:.2f} | Tokens: {response.usage.input_tokens + response.usage.output_tokens} | /donate if you like this command") + text=( + f"Request cost: ${token_usage / 1000000 * 3:.2f}" + f" | Tokens: {response.usage.input_tokens + response.usage.output_tokens}" + " | /donate if you like this command" + )) # attach the prompt as a file f = BytesIO(prompt.encode("utf-8")) f.name = "prompt._log" @@ -124,7 +136,11 @@ async def summarize_chat(self, interaction: Interaction): await interaction.followup.send("done", ephemeral=True) await msg.edit(embeds=es, attachments=[f]) # save the timestamp of the last summary - await self.bot.db["last_summary"].update_one({"channel_id": interaction.channel.id}, {"$set": {"timestamp": datetime.now(timezone.utc)}}, upsert=True) + await self.bot.db["last_summary"].update_one( + {"channel_id": interaction.channel.id}, + {"$set": {"timestamp": datetime.now(timezone.utc)}}, + upsert=True + ) # a function that generates the prompt for the model by taking an array of messages, a prefix and a suffix def generate_prompt(self, messages, prefix, suffix): @@ -132,7 +148,9 @@ def generate_prompt(self, messages, prefix, suffix): prompt = "\n".join([self.message_to_text(message, i) for i, message in enumerate(messages)]).replace("\n\n", "\n") return f"{prefix}\n\n{prompt}\n\n{suffix}" - async def prompt_model(self, 
channel: TextChannel, prompt: str, cut_off_ts: int) -> tuple[anthropic.types.Message, str, int]: + async def prompt_model( + self, channel: TextChannel, prompt: str, cut_off_ts: int + ) -> tuple[anthropic.types.Message, str, int]: messages = [message async for message in channel.history(limit=4096) if message.content != ""] messages = [message for message in messages if message.author.id != self.bot.user.id] messages = [message for message in messages if message.created_at > cut_off_ts] diff --git a/rocketwatch/plugins/chicken_soup/chicken_soup.py b/rocketwatch/plugins/chicken_soup/chicken_soup.py index 38875ea5..9c613630 100644 --- a/rocketwatch/plugins/chicken_soup/chicken_soup.py +++ b/rocketwatch/plugins/chicken_soup/chicken_soup.py @@ -11,7 +11,7 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.duration = timedelta(minutes=5) self.dispense_end = {} - + @command() async def chicken_soup(self, interaction: Interaction): self.dispense_end[interaction.channel_id] = datetime.now() + self.duration diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 983cfefe..63d04699 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -74,8 +74,8 @@ async def get_average_collateral_percentage_per_node(collateral_cap: Optional[in # get stakes for each node stakes = list((await get_node_minipools_and_collateral()).values()) # get the current rpl price - rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) - + rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + node_collaterals = [] for node in stakes: # get the minipool eth value @@ -91,14 +91,14 @@ async def get_average_collateral_percentage_per_node(collateral_cap: Optional[in collateral = min(collateral, collateral_cap) # calculate percentage node_collaterals.append((rpl_stake, collateral)) - + effective_bound = max(perc for rpl, perc in 
node_collaterals) possible_step_sizes = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100] step_size = possible_step_sizes[np.argmin([abs(effective_bound / 30 - s) for s in possible_step_sizes])] - + result = {} for rpl_stake, percentage in node_collaterals: - percentage = step_size * (percentage * 10 // (step_size * 10)) + percentage = step_size * (percentage * 10 // (step_size * 10)) if percentage not in result: result[percentage] = [] result[percentage].append(rpl_stake) @@ -176,12 +176,12 @@ def node_minipools(node): formatToInt = "{x:.0f}" cb = plt.colorbar(mappable=paths, ax=ax, format=formatToInt) cb.set_label('Minipools') - cb.set_ticks([1,10,100,max_minipools]) + cb.set_ticks([1, 10, 100, max_minipools]) # Add a legend for the color-coding on the hex distribution cb = plt.colorbar(mappable=polys, ax=ax2, format=formatToInt) cb.set_label('Nodes') - cb.set_ticks([1,10,100,max_nodes - 1]) + cb.set_ticks([1, 10, 100, max_nodes - 1]) # Add labels and units ylabel = f"Collateral (percent {'bonded' if bonded else 'borrowed'})" diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 5cd887be..d1d0447e 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -35,10 +35,10 @@ async def cow(self, interaction: Interaction, tnx: str): if "etherscan.io/tx/" not in tnx: await interaction.response.send_message("nop", ephemeral=True) return - + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) url = tnx.replace("etherscan.io", "explorer.cow.fi") - embed = Embed(description = f"[cow explorer]({url})") + embed = Embed(description=f"[cow explorer]({url})") await interaction.followup.send(embed=embed) async def _setup_collection(self): @@ -92,7 +92,7 @@ async def check_for_new_events(self): { "creationDate": "2023-01-25T04:48:02.751347Z", "owner": "0x40586600a136652f6d0a6cc6a62b6bd1bef7ae9a", - "uid": 
"0x2f3750251ab20018addd59c7a9e57845782cdf21b9c53516dcdb9e3627ebb7e840586600a136652f6d0a6cc6a62b6bd1bef7ae9a63d9eef8", + "uid": "0x...", "availableBalance": "108475037", "executedBuyAmount": "0", "executedSellAmount": "0", @@ -120,10 +120,9 @@ async def check_for_new_events(self): "sellTokenBalance": "erc20", "buyTokenBalance": "erc20", "signingScheme": "eip712", - "signature": "0x894e427c681f1b4d24604039966321ed59993ce2a1e17fffc742c8af954aa0b10cca77ce750ce60e3d7591b60c90417d333c1d83493abafb8a36d7778e6519a51c", + "signature": "0x...", "interactions": { "pre": [ - ] } }, @@ -170,7 +169,7 @@ async def check_for_new_events(self): s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) try: decimals = await s.functions.decimals().call() - except: + except Exception: pass data["otherAmount"] = solidity.to_float(int(order["buyAmount"]), decimals) else: @@ -181,14 +180,14 @@ async def check_for_new_events(self): s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) try: decimals = await s.functions.decimals().call() - except: + except Exception: pass data["otherAmount"] = solidity.to_float(int(order["sellAmount"]), decimals) # our/other ratio data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] try: data["otherToken"] = await s.functions.symbol().call() - except: + except Exception: data["otherToken"] = "UNKWN" if s.address == w3.to_checksum_address("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"): data["otherToken"] = "ETH" @@ -219,7 +218,6 @@ async def check_for_new_events(self): continue data["timestamp"] = int(created.timestamp()) - data = await prepare_args(data) embed = await assemble(data) payload.append(Event( diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index dab90ffe..45561b49 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -47,13 +47,16 @@ async def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: 
parts = [] for proposal in current_proposals[dao.ProposalState.Pending]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Pending\n```{body}```Voting starts , ends .") + parts.append( + f"**Proposal #{proposal.id}** - Pending\n```{body}```" + f"Voting starts , ends .") for proposal in current_proposals[dao.ProposalState.Active]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) parts.append(f"**Proposal #{proposal.id}** - Active\n```{body}```Voting ends .") for proposal in current_proposals[dao.ProposalState.Succeeded]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") + parts.append( + f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") return Embed( title=f"{dao.display_name} Proposals", @@ -76,16 +79,22 @@ async def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: parts = [] for proposal in current_proposals[dao.ProposalState.Pending]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Pending\n```{body}```Voting starts , ends .") + parts.append( + f"**Proposal #{proposal.id}** - Pending\n```{body}```" + f"Voting starts , ends .") for proposal in current_proposals[dao.ProposalState.ActivePhase1]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Active (Phase 1)\n```{body}```Next phase , voting ends .") + parts.append( + f"**Proposal #{proposal.id}** - Active (Phase 1)\n```{body}```" + f"Next phase , voting ends .") for proposal in 
current_proposals[dao.ProposalState.ActivePhase2]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Active (Phase 2)\n```{body}```Voting ends .") + parts.append( + f"**Proposal #{proposal.id}** - Active (Phase 2)\n```{body}```Voting ends .") for proposal in current_proposals[dao.ProposalState.Succeeded]: body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") + parts.append( + f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") return Embed( title="pDAO Proposals", @@ -118,13 +127,13 @@ async def dao_votes( raise ValueError(f"Invalid DAO name: {dao_name}") await interaction.followup.send(embed=embed) - + @dataclass(slots=True) class Vote: voter: ChecksumAddress direction: int voting_power: float - time: int + time: int class VoterPageView(PageView): def __init__(self, proposal: ProtocolDAO.Proposal): @@ -166,11 +175,11 @@ async def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['Onchain voters[override_log.args.delegate].voting_power -= voting_power return sorted(voters.values(), key=attrgetter("voting_power"), reverse=True) - + @property def _title(self) -> str: return f"pDAO Proposal #{self.proposal.id} - Voter List" - + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: await self._ensure_voter_list() headers = ["#", "Voter", "Choice", "Weight"] @@ -179,14 +188,14 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: name = (await el_explorer_url(voter.voter, prefix=-1)).split("[")[1].split("]")[0] vote = ["", "Abstain", "For", "Against", "Veto"][voter.direction] voting_power = f"{voter.voting_power:,.2f}" - data.append([i+1, name, vote, voting_power]) - + data.append([i + 1, name, vote, 
voting_power]) + if not data: return 0, "" - + table = tabulate(data, headers, colalign=("right", "left", "left", "right")) return len(self._voter_list), f"```{table}```" - + async def _get_recent_proposals(self, interaction: Interaction, current: str) -> list[Choice[int]]: dao = ProtocolDAO() proposal_contract = await dao._get_proposal_contract() @@ -205,7 +214,7 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions ]) return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] - + @command() @describe(proposal="proposal to show voters for") @autocomplete(proposal=_get_recent_proposals) @@ -214,7 +223,7 @@ async def voter_list(self, interaction: Interaction, proposal: int) -> None: await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) if not (proposal := await ProtocolDAO().fetch_proposal(proposal)): return await interaction.followup.send("Invalid proposal ID.") - + view = OnchainDAO.VoterPageView(proposal) embed = await view.load() await interaction.followup.send(embed=embed, view=view) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index f8ce81fd..10259bc2 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -31,31 +31,37 @@ def is_true(v) -> bool: return v is True + def safe_to_float(num): try: return solidity.to_float(num) except Exception: return None + def safe_to_hex(b): return f"0x{b.hex()}" if b else None + def safe_state_to_str(state): try: return solidity.mp_state_to_str(state) except Exception: return None + def safe_inv(num): try: return 1 / solidity.to_float(num) except Exception: return None + def _parse_epoch(value): epoch = int(value) return epoch if epoch < 2 ** 32 else None + def _derive_validator_status(info): if info[9]: # dissolved 
return "dissolved" @@ -73,6 +79,7 @@ def _derive_validator_status(info): return "staking" return "unknown" + def _unpack_validator_info(info): if info is None: return None @@ -85,6 +92,7 @@ def _unpack_validator_info(info): "exit_balance": solidity.to_float(info[12], 9), # gwei to ETH } + def _unpack_validator_info_dynamic(info): if info is None: return None @@ -209,16 +217,20 @@ async def add_untracked_node_operators(self): for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall([nm.functions.getNodeAt(i) for i in index_batch]) data |= dict(zip(index_batch, results)) - await self.bot.db.node_operators.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()]) + await self.bot.db.node_operators.insert_many( + [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()] + ) @timerun_async async def add_static_node_operator_data(self): df = await rp.get_contract_by_name("rocketNodeDistributorFactory") mf = await rp.get_contract_by_name("rocketMegapoolFactory") - def get_calls(n): return [ - (df.functions.getProxyAddress(n["address"]), True, w3.to_checksum_address, "fee_distributor.address"), - (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), - ] + + def get_calls(n): + return [ + (df.functions.getProxyAddress(n["address"]), True, w3.to_checksum_address, "fee_distributor.address"), + (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), + ] await self._batch_multicall_update( self.bot.db.node_operators, {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, @@ -233,27 +245,30 @@ async def update_dynamic_node_operator_data(self): mm = await rp.get_contract_by_name("rocketMinipoolManager") ns = await rp.get_contract_by_name("rocketNodeStaking") mc = await rp.get_contract_by_name("multicall3") - def get_calls(n): return [ - 
(nm.functions.getNodeWithdrawalAddress(n["address"]), True, w3.to_checksum_address, "withdrawal_address"), - (nm.functions.getNodeTimezoneLocation(n["address"]), True, None, "timezone_location"), - (nm.functions.getSmoothingPoolRegistrationState(n["address"]), True, None, "smoothing_pool_registration"), - (nm.functions.getAverageNodeFee(n["address"]), True, safe_to_float, "average_node_fee"), - (ns.functions.getNodeETHCollateralisationRatio(n["address"]), True, safe_inv, "effective_node_share"), - (mm.functions.getNodeStakingMinipoolCount(n["address"]), True, None, "staking_minipool_count"), - (nd.functions.getNodeDepositCredit(n["address"]), True, safe_to_float, "node_credit"), - (nd.functions.getNodeEthBalance(n["address"]), True, safe_to_float, "node_eth_balance"), - (nm.functions.getFeeDistributorInitialised(n["address"]), True, None, "fee_distributor.initialized"), - (mc.functions.getEthBalance(n["fee_distributor"]["address"]), True, safe_to_float, "fee_distributor.eth_balance"), - (mf.functions.getMegapoolDeployed(n["address"]), True, None, "megapool.deployed"), - (mc.functions.getEthBalance(n["megapool"]["address"]), True, safe_to_float, "megapool.eth_balance"), - (ns.functions.getNodeStakedRPL(n["address"]), True, safe_to_float, "rpl.total_stake"), - (ns.functions.getNodeLegacyStakedRPL(n["address"]), True, safe_to_float, "rpl.legacy_stake"), - (ns.functions.getNodeMegapoolStakedRPL(n["address"]), True, safe_to_float, "rpl.megapool_stake"), - (ns.functions.getNodeLockedRPL(n["address"]), True, safe_to_float, "rpl.locked"), - (ns.functions.getNodeUnstakingRPL(n["address"]), True, safe_to_float, "rpl.unstaking"), - (ns.functions.getNodeRPLStakedTime(n["address"]), True, None, "rpl.last_stake_time"), - (ns.functions.getNodeLastUnstakeTime(n["address"]), True, None, "rpl.last_unstake_time"), - ] + + def get_calls(n): + return [ + (nm.functions.getNodeWithdrawalAddress(n["address"]), True, w3.to_checksum_address, "withdrawal_address"), + 
(nm.functions.getNodeTimezoneLocation(n["address"]), True, None, "timezone_location"), + (nm.functions.getSmoothingPoolRegistrationState(n["address"]), True, None, "smoothing_pool_registration"), + (nm.functions.getAverageNodeFee(n["address"]), True, safe_to_float, "average_node_fee"), + (ns.functions.getNodeETHCollateralisationRatio(n["address"]), True, safe_inv, "effective_node_share"), + (mm.functions.getNodeStakingMinipoolCount(n["address"]), True, None, "staking_minipool_count"), + (nd.functions.getNodeDepositCredit(n["address"]), True, safe_to_float, "node_credit"), + (nd.functions.getNodeEthBalance(n["address"]), True, safe_to_float, "node_eth_balance"), + (nm.functions.getFeeDistributorInitialised(n["address"]), True, None, "fee_distributor.initialized"), + (mc.functions.getEthBalance(n["fee_distributor"]["address"]), + True, safe_to_float, "fee_distributor.eth_balance"), + (mf.functions.getMegapoolDeployed(n["address"]), True, None, "megapool.deployed"), + (mc.functions.getEthBalance(n["megapool"]["address"]), True, safe_to_float, "megapool.eth_balance"), + (ns.functions.getNodeStakedRPL(n["address"]), True, safe_to_float, "rpl.total_stake"), + (ns.functions.getNodeLegacyStakedRPL(n["address"]), True, safe_to_float, "rpl.legacy_stake"), + (ns.functions.getNodeMegapoolStakedRPL(n["address"]), True, safe_to_float, "rpl.megapool_stake"), + (ns.functions.getNodeLockedRPL(n["address"]), True, safe_to_float, "rpl.locked"), + (ns.functions.getNodeUnstakingRPL(n["address"]), True, safe_to_float, "rpl.unstaking"), + (ns.functions.getNodeRPLStakedTime(n["address"]), True, None, "rpl.last_stake_time"), + (ns.functions.getNodeLastUnstakeTime(n["address"]), True, None, "rpl.last_unstake_time"), + ] await self._batch_multicall_update( self.bot.db.node_operators, {}, get_calls, label="node operators", projection={"address": 1, "fee_distributor.address": 1, "megapool.address": 1} @@ -265,23 +280,23 @@ async def get_calls(n): mp = await 
rp.assemble_contract("rocketMegapoolDelegate", address=n["megapool"]["address"]) proxy = await rp.assemble_contract("rocketMegapoolProxy", address=n["megapool"]["address"]) return [ - (mp.functions.getValidatorCount(), True, None, "megapool.validator_count"), - (mp.functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), - (mp.functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), - (mp.functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), - (mp.functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), - (mp.functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), - (mp.functions.getDebt(), True, safe_to_float, "megapool.debt"), - (mp.functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), - (mp.functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), - (mp.functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), - (proxy.functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), - (proxy.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), - (proxy.functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), - ] + (mp.functions.getValidatorCount(), True, None, "megapool.validator_count"), + (mp.functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), + (mp.functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), + (mp.functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), + (mp.functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), + (mp.functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), + (mp.functions.getDebt(), True, safe_to_float, "megapool.debt"), + (mp.functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), + (mp.functions.getPendingRewards(), True, 
safe_to_float, "megapool.pending_rewards"), + (mp.functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), + (proxy.functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), + (proxy.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), + (proxy.functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), + ] await self._batch_multicall_update( - self.bot.db.node_operators, {"megapool.deployed": True}, - get_calls, {"address": 1, "megapool.address": 1}, + self.bot.db.node_operators, {"megapool.deployed": True}, + get_calls, {"address": 1, "megapool.address": 1}, label="megapools" ) @@ -300,15 +315,22 @@ async def add_untracked_minipools(self): log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall([mm.functions.getMinipoolAt(i) for i in index_batch]) - await self.bot.db.minipools.insert_many([{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)]) + await self.bot.db.minipools.insert_many( + [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)] + ) @timerun_async async def add_static_minipool_data(self): mm = await rp.get_contract_by_name("rocketMinipoolManager") - async def lamb(n): return [ - ((await rp.assemble_contract("rocketMinipool", address=n["address"])).functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator"), - (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), - ] + + async def lamb(n): + return [ + ( + (await rp.assemble_contract("rocketMinipool", address=n["address"])) + .functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator" + ), + (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), + ] await self._batch_multicall_update( self.bot.db.minipools, 
{"node_operator": {"$exists": False}}, @@ -333,7 +355,7 @@ async def add_static_minipool_deposit_data(self): addresses = {m["address"] for m in minipool_batch} events = get_logs(nd.events.DepositReceived, block_start, block_end) \ - + get_logs(mm.events.MinipoolCreated, block_start, block_end) + + get_logs(mm.events.MinipoolCreated, block_start, block_end) events.sort(key=lambda e: (e['blockNumber'], e['transactionIndex'], e['logIndex']), reverse=True) # pair DepositReceived + MinipoolCreated events from same transaction @@ -369,25 +391,28 @@ async def add_static_minipool_deposit_data(self): @timerun_async async def update_dynamic_minipool_data(self): mc = await rp.get_contract_by_name("multicall3") + async def get_calls(n): minipool_contract = await rp.assemble_contract("rocketMinipool", address=n["address"]) return [ - (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), - (minipool_contract.functions.getStatusTime(), True, None, "status_time"), - (minipool_contract.functions.getVacant(), False, is_true, "vacant"), - (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), - (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), - (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), - (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), - (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), - (minipool_contract.functions.getDelegate(), True, w3.to_checksum_address, "delegate"), - (minipool_contract.functions.getPreviousDelegate(), False, w3.to_checksum_address, "previous_delegate"), - (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), - (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), - (minipool_contract.functions.getUserDistributed(), False, is_true, 
"user_distributed"), - (mc.functions.getEthBalance(n["address"]), True, safe_to_float, "execution_balance"), + (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), + (minipool_contract.functions.getStatusTime(), True, None, "status_time"), + (minipool_contract.functions.getVacant(), False, is_true, "vacant"), + (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), + (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), + (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), + (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), + (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), + (minipool_contract.functions.getDelegate(), True, w3.to_checksum_address, "delegate"), + (minipool_contract.functions.getPreviousDelegate(), False, w3.to_checksum_address, "previous_delegate"), + (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), + (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), + (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), + (mc.functions.getEthBalance(n["address"]), True, safe_to_float, "execution_balance"), ] - await self._batch_multicall_update(self.bot.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools") + await self._batch_multicall_update( + self.bot.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools" + ) @timerun async def update_dynamic_minipool_beacon_data(self): @@ -423,7 +448,6 @@ async def update_dynamic_minipool_beacon_data(self): ordered=False ) - # -- Megapool validator tasks -- @timerun_async @@ -490,7 +514,8 @@ async def update_dynamic_megapool_validator_data(self): end = min((i + 1) * self.batch_size, total) log.debug(f"Processing 
megapool validators [{start}, {end}]/{total}") fns = [ - (await rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"])).functions.getValidatorInfo(v["validator_id"]) + (await rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"])) + .functions.getValidatorInfo(v["validator_id"]) for v in batch ] results = await rp.multicall(fns) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index c2939d57..4b5847ac 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -110,7 +110,7 @@ async def delete_msg(self, interaction: Interaction, message_url: str): msg = await channel.fetch_message(int(message_id)) await msg.delete() await interaction.followup.send(content="Done") - + @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() @@ -212,22 +212,22 @@ async def restore_support_template(self, interaction: Interaction, template_name await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) - - msg = await channel.fetch_message(int(message_id)) + + msg = await channel.fetch_message(int(message_id)) template_embed = msg.embeds[0] template_title = template_embed.title template_description = "\n".join(template_embed.description.splitlines()[:-2]) - + import re from datetime import datetime, timezone - + edit_line = template_embed.description.splitlines()[-1] match = re.search(r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line) user_id = int(match.group("user")) ts = int(match.group("ts")) - + user = await self.bot.get_or_fetch_user(user_id) - + await self.bot.db.support_bot_dumps.insert_one( { "ts" : datetime.fromtimestamp(ts, tz=timezone.utc), @@ -246,7 +246,7 @@ async def restore_support_template(self, interaction: Interaction, template_name await self.bot.db.support_bot.insert_one( {"_id": template_name, "title": template_title, "description": template_description} 
) - + await interaction.followup.send(content="Done") @command() @@ -361,9 +361,13 @@ async def get_address_of_contract(self, interaction: Interaction, contract: str) await interaction.followup.send(content=f"Exception: ```{repr(err)}```") if "No address found for" in repr(err): # private response as a tip - m = "It may be that you are requesting the address of a contract that does not get deployed (`rocketBase` for example), " \ - " is deployed multiple times (i.e node operator related contracts, like `rocketNodeDistributor`)," \ - " or is not yet deployed on the current chain.\n... Or you simply messed up the name :P" + m = ( + "It may be that you are requesting the address of a contract that does not" + " get deployed (e.g. `rocketBase`), is deployed multiple times" + " (e.g. `rocketNodeDistributor`)," + " or is not yet deployed on the current chain.\n" + "... or you messed up the name" + ) await interaction.followup.send(content=m) @command() diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 32235952..b4705567 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -58,7 +58,7 @@ async def get_deposit_pool_stats() -> Embed: embed.description += std_queue_content if std_queue_length > display_limit: embed.description += f"{display_limit + 1}. 
`...`\n" - + queue_capacity = max(free_capacity - deposit_cap, 0.0) possible_assignments = min(int(dp_balance // 32), total_queue_length) @@ -76,7 +76,7 @@ async def get_deposit_pool_stats() -> Embed: embed.add_field(name="Enough For", value="\n".join(lines), inline=False) return embed - + @staticmethod async def get_contract_collateral_stats() -> Embed: exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = await rp.multicall([ @@ -107,7 +107,7 @@ async def get_contract_collateral_stats() -> Embed: ) return Embed(title="rETH Extra Collateral", description=description) - + @command() async def deposit_pool(self, interaction: Interaction) -> None: """Show the current deposit pool status""" @@ -119,7 +119,7 @@ async def reth_extra_collateral(self, interaction: Interaction) -> None: """Show the amount of tokens held in the rETH contract for exit liquidity""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) await interaction.followup.send(embed=await self.get_contract_collateral_stats()) - + async def get_status(self) -> Embed: embed = Embed(title=":rocket: Live Protocol Status") @@ -138,7 +138,7 @@ async def get_status(self) -> Embed: collateral_embed = await self.get_contract_collateral_stats() embed.add_field(name="Withdrawals", value=collateral_embed.description, inline=False) - + if cfg["rocketpool.chain"] != "mainnet": return embed diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 3451b8d4..1cdd9c90 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -46,7 +46,7 @@ class Color: ALERT = Color.from_rgb(255, 0, 0) WARN = Color.from_rgb(255, 165, 0) OK = Color.from_rgb(0, 255, 0) - + @staticmethod def is_reputable(user: Member) -> bool: return any(( @@ -55,20 +55,20 @@ def is_reputable(user: Member) -> bool: {role.id for role in user.roles} & set(cfg["rocketpool.support.role_ids"]), 
user.guild_permissions.moderate_members )) - + class RemovalVoteView(ui.View): THRESHOLD = 5 - + def __init__(self, plugin: 'DetectScam', reportable: Message | Thread): super().__init__(timeout=None) self.plugin = plugin self.reportable = reportable self.safu_votes = set() - + @ui.button(label="Mark Safu", style=ButtonStyle.blurple) async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: log.info(f"User {interaction.user.id} marked message {interaction.message.id} as safe") - + reportable_repr = type(self.reportable).__name__.lower() if interaction.user.id in self.safu_votes: log.debug(f"User {interaction.user.id} already voted on {reportable_repr}") @@ -87,7 +87,7 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: else: log.warning(f"Unknown reportable type {type(self.reportable)}") return None - + if interaction.user == reported_user: log.debug(f"User {interaction.user.id} tried to mark their own {reportable_repr} as safe") return await interaction.response.send_message( @@ -96,14 +96,14 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: ) self.safu_votes.add(interaction.user.id) - + if DetectScam.is_reputable(interaction.user): user_repr = interaction.user.mention elif len(self.safu_votes) >= self.THRESHOLD: user_repr = "the community" else: button.label = f"Mark Safu ({len(self.safu_votes)}/{self.THRESHOLD})" - return await interaction.response.edit_message(view=self) + return await interaction.response.edit_message(view=self) await interaction.message.delete() async with self.plugin._update_lock: @@ -114,14 +114,15 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: def __init__(self, bot: RocketWatch): self.bot = bot - + self._report_lock = asyncio.Lock() self._update_lock = asyncio.Lock() - + self._message_react_cache = TTLCache(maxsize=1000, ttl=300) self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") 
self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") - self.invite_pattern = re.compile(r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + self.invite_pattern = re.compile( + r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") self.message_report_menu = ContextMenu( name="Report Message", @@ -153,13 +154,13 @@ def _get_message_content(message: Message, *, preserve_formatting: bool = False) if message.embeds: for embed in message.embeds: text += f"---\n Embed: {embed.title}\n{embed.description}\n---\n" - + if not preserve_formatting: text = parse.unquote(text) text = anyascii(text) text = text.lower() - return text + return text async def _generate_message_report(self, message: Message, reason: str) -> Optional[tuple[Embed, Embed, File]]: try: @@ -215,7 +216,7 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional thread = await thread.guild.fetch_channel(thread.id) except (errors.NotFound, errors.Forbidden): return None - + async with self._report_lock: if await self.bot.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): log.info(f"Found existing report for thread {thread.id} in database") @@ -224,7 +225,7 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional warning = Embed(title="🚨 Possible Scam Detected") warning.color = self.Color.ALERT warning.description = f"**Reason**: {reason}\n" - + report = warning.copy() warning.set_footer(text=( "There is no ticket system for support on this server.\n" @@ -256,7 +257,7 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional async def report_message(self, message: Message, reason: str) -> None: if not (components := await self._generate_message_report(message, reason)): return None - + warning, report, contents = components try: @@ -273,11 +274,11 @@ async def report_message(self, message: 
Message, reason: str) -> None: {"message_id": message.id}, {"$set": {"warning_id": warning_msg.id if warning_msg else None, "report_id": report_msg.id}} ) - return None + return None async def manual_message_report(self, interaction: Interaction, message: Message) -> None: await interaction.response.defer(ephemeral=True) - + if message.author.bot: return await interaction.followup.send(content="Bot messages can't be reported.") @@ -291,11 +292,11 @@ async def manual_message_report(self, interaction: Interaction, message: Message ) warning, report, contents = components - + report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await report_channel.send(embed=report, file=contents) await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"report_id": report_msg.id}}) - + moderator = await self.bot.get_or_fetch_user(cfg["rocketpool.support.moderator_id"]) view = self.RemovalVoteView(self, message) warning_msg = await message.reply( @@ -319,13 +320,13 @@ def _discord_invite(self, message: Message) -> Optional[str]: if match := self.invite_pattern.search(txt): link = match.group(0) trusted_domains = [ - "youtu.be", "youtube.com", "tenor.com", "giphy.com", + "youtu.be", "youtube.com", "tenor.com", "giphy.com", "imgur.com", "bluesky.app" ] if not any(domain in link for domain in trusted_domains): return "Invite to external server" return None - + def _tap_on_this(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) keywords = ( @@ -364,7 +365,7 @@ def _ticket_system(self, message: Message) -> Optional[str]: ) return "There is no ticket system in this server." 
if self.__txt_contains(txt, keywords) else None - + @staticmethod def __txt_contains(txt: str, kw: list | tuple | str) -> bool: match kw: @@ -381,14 +382,14 @@ def _paperhands(self, message: Message) -> Optional[str]: txt = self._get_message_content(message) if "http" not in txt: return None - + reason = "The linked website is most likely a wallet drainer" if any(x in txt for x in ["paperhand", "paper hand", "paperhold", "pages.dev", "web.app"]): return reason - + if any(x in txt for x in ["mint", "opensea"]) and any(x in txt for x in ["vercel.app"]): return reason - + return None # contains @here or @everyone but doesn't actually have the permission to do so @@ -398,7 +399,7 @@ def _mention_everyone(self, message: Message) -> Optional[str]: return "Mentioned @here or @everyone without permission" return None - async def _reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: + async def _reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: # user reacts to their own message multiple times in quick succession to draw attention # check if user is a bot if user.bot: @@ -431,22 +432,24 @@ async def _reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: log.debug(f"{reaction_count} reactions on message {reaction.message.id}") # if there are 8 reactions done by the author of the message, report it return "Reaction spam by message author" if (reaction_count >= 8) else None - + @Cog.listener() async def on_message(self, message: Message) -> None: - log.debug(f"Message(id={message.id}, author={message.author}, channel={message.channel}, content=\"{message.content}\", embeds={message.embeds})") - + log.debug( + f"Message(id={message.id}, author={message.author}, channel={message.channel}," + f" content=\"{message.content}\", embeds={message.embeds})") + if message.author.bot: log.warning("Ignoring message sent by bot") return - + if self.is_reputable(message.author): log.warning(f"Ignoring message sent by trusted user 
({message.author})") return - + if message.guild is None: return - + if message.guild.id != cfg["rocketpool.support.server_id"]: log.warning(f"Ignoring message in {message.guild.id})") return @@ -467,13 +470,13 @@ async def on_message(self, message: Message) -> None: @Cog.listener() async def on_message_edit(self, before: Message, after: Message) -> None: await self.on_message(after) - + @Cog.listener() async def on_reaction_add(self, reaction: Reaction, user: User) -> None: if reaction.message.guild.id != cfg["rocketpool.support.server_id"]: log.warning(f"Ignoring reaction in {reaction.message.guild.id}") return - + checks = [ self._reaction_spam(reaction, user) ] @@ -529,9 +532,9 @@ async def _update_report(self, report: dict, note: str) -> None: async def report_thread(self, thread: Thread, reason: str) -> None: if not (components := await self._generate_thread_report(thread, reason)): return None - + warning, report = components - + try: view = self.RemovalVoteView(self, thread) warning_msg = await thread.send(embed=warning, view=view) @@ -561,43 +564,43 @@ async def on_thread_create(self, thread: Thread) -> None: if thread.name.strip().lower() in names: await self.report_thread(thread, "Illegitimate support thread") return - + log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") - + @Cog.listener() async def on_raw_thread_update(self, event: RawThreadUpdateEvent) -> None: thread: Thread = await self.bot.get_or_fetch_channel(event.thread_id) await self.on_thread_create(thread) - + @Cog.listener() async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: async with self._update_lock: db_filter = {"type": "thread", "channel_id": event.thread_id, "removed": False} - if report := await self.bot.db.scam_reports.find_one(db_filter): + if report := await self.bot.db.scam_reports.find_one(db_filter): await self._update_report(report, "Thread has been deleted.") await self.bot.db.scam_reports.update_one(db_filter, {"$set": 
{"warning_id": None, "removed": True}}) - + @command() @guilds(cfg["rocketpool.support.server_id"]) async def report_user(self, interaction: Interaction, user: Member) -> None: """Generate a suspicious user report and send it to the report channel""" await self.manual_user_report(interaction, user) - + async def manual_user_report(self, interaction: Interaction, user: Member) -> None: await interaction.response.defer(ephemeral=True) - + if user.bot: return await interaction.followup.send(content="Bots can't be reported.") if user == interaction.user: return await interaction.followup.send(content="Did you just report yourself?") - reason = f"Manual report by {interaction.user.mention}" + reason = f"Manual report by {interaction.user.mention}" if not (report := await self._generate_user_report(user, reason)): return await interaction.followup.send( content="Failed to report user. They may have already been reported or banned." ) - + report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) report_msg = await report_channel.send(embed=report) @@ -606,11 +609,11 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No {"$set": {"report_id": report_msg.id}} ) await interaction.followup.send(content="Thanks for reporting!") - - async def _generate_user_report(self, user: Member, reason: str) -> Optional[Embed]: + + async def _generate_user_report(self, user: Member, reason: str) -> Optional[Embed]: if not isinstance(user, Member): return None - + async with self._report_lock: if await self.bot.db.scam_reports.find_one( {"type": "user", "guild_id": user.guild.id, "user_id": user.id} @@ -630,7 +633,7 @@ async def _generate_user_report(self, user: Member, reason: str) -> Optional[Emb "Please review and take appropriate action." 
) report.set_thumbnail(url=user.display_avatar.url) - + await self.bot.db.scam_reports.insert_one({ "type" : "user", "guild_id" : user.guild.id, diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index b54d1d74..e5501755 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -1,3 +1,4 @@ +from collections.abc import Coroutine import json import hashlib import logging @@ -28,9 +29,9 @@ log.setLevel(cfg["log_level"]) -from collections.abc import Coroutine PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Coroutine[None, None, list[LogReceipt | EventData]]] + class Events(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) @@ -97,9 +98,10 @@ async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["la except Exception as e: log.warning(f"Failed to get contract {group['contract_name']}: {e}") continue - + for event in group["events"]: event_map[event["event_name"]] = event["name"] + def super_builder(_contract, _event) -> PartialFilter: # this is needed to pin nonlocal variables async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[EventData]: @@ -192,7 +194,7 @@ async def _get_new_events(self) -> list[Event]: async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: log.debug(f"Fetching events in [{from_block}, {to_block}]") log.debug(f"Using {len(self._partial_filters)} filters") - + events = [] for pf in self._partial_filters: events.extend(await pf(from_block, to_block)) @@ -230,6 +232,7 @@ async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[li log.debug(f"Checking event {event}") args_hash = hashlib.md5() + def hash_args(_args: aDict) -> None: for k, v in sorted(_args.items()): if not ("time" in k.lower() or "block" in k.lower()): @@ -402,10 +405,10 @@ async def get_event_name(_event: LogReceipt | EventData) -> 
tuple[str, str]: async def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: receipt = await w3.eth.get_transaction_receipt(event.transactionHash) - + is_minipool_event = await rp.is_minipool(event.address) or await rp.is_minipool(receipt.to) is_megapool_event = await rp.is_megapool(event.address) or await rp.is_megapool(receipt.to) - + if not any([ is_minipool_event, is_megapool_event, @@ -446,7 +449,7 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Optional[E if (n := rp.get_name_by_address(receipt["to"])) is None or not n.startswith("rocket"): event.args["from"] = receipt["to"] event.args["caller"] = receipt["from"] - + if is_minipool_event: # and add the minipool address, which is the origin of the event event.args.minipool = event.address @@ -460,16 +463,19 @@ async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: args = aDict(event.args) if "negative_rETH_ratio_update_event" in event_name: - args.currRETHRate = solidity.to_float(args.totalEth) / solidity.to_float(args.rethSupply) if args.rethSupply > 0 else 1 + args.currRETHRate = solidity.to_float( + args.totalEth) / solidity.to_float(args.rethSupply) if args.rethSupply > 0 else 1 args.prevRETHRate = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate", block=event.blockNumber - 1)) d = args.currRETHRate - args.prevRETHRate if d > 0 or abs(d) < 0.00001: return None elif "price_update_event" in event_name: args.value = args.rplPrice - next_period = await rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) + await rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) - args.rewardPeriodEnd = next_period - update_rate = await rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) # in seconds + period_start = await rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) + period_length = await 
rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) + args.rewardPeriodEnd = period_start + period_length + # in seconds + update_rate = await rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) # get timestamp of event block ts = await block_to_ts(event.blockNumber) # check if the next update is after the next period ts @@ -524,7 +530,11 @@ def share_repr(percentage: float) -> str: "sdao_member_request_leave_event" ]: args.nodeAddress = await el_explorer_url(args.nodeAddress, block=(event.blockNumber - 1)) - elif event_name.startswith("cs_deposit") or event_name.startswith("cs_withdraw") or event_name.startswith("rocksolid_deposit"): + elif any([ + event_name.startswith("cs_deposit"), + event_name.startswith("cs_withdraw"), + event_name.startswith("rocksolid_deposit") + ]): args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) elif event_name.startswith("rocksolid_withdraw"): @@ -608,7 +618,9 @@ def share_repr(percentage: float) -> str: if "root" in event_name: # not interesting if the root wasn't submitted in response to a challenge # ChallengeState.Challenged = 1 - challenge_state = await rp.call("rocketDAOProtocolVerifier.getChallengeState", proposal_id, args.index, block=event.blockNumber) + challenge_state = await rp.call( + "rocketDAOProtocolVerifier.getChallengeState", proposal_id, args.index, block=event.blockNumber + ) if challenge_state != 1: return None @@ -630,7 +642,9 @@ def share_repr(percentage: float) -> str: return None elif "vote_override" in event_name: proposal_block = await rp.call("rocketDAOProtocolProposal.getProposalBlock", proposal_id) - args.votingPower = solidity.to_float(await rp.call("rocketNetworkVoting.getVotingPower", args.voter, proposal_block)) + args.votingPower = solidity.to_float( + await rp.call("rocketNetworkVoting.getVotingPower", args.voter, proposal_block) + ) if args.votingPower < 100: # not interesting return None @@ 
-680,7 +694,7 @@ def share_repr(percentage: float) -> str: args.amount = solidity.to_float(args.amount) args.ethAmount = args.amount * rpl_ratio elif event_name in ["node_merkle_rewards_claimed"]: - return None # TODO + return None # TODO elif "transfer_event" in event_name: token_prefix = event_name.split("_", 1)[0] args.amount = args.value / 10**18 @@ -762,7 +776,9 @@ def share_repr(percentage: float) -> str: contract = await rp.assemble_contract("rocketMinipoolDelegate", args.minipool) args.commission = solidity.to_float(await contract.functions.getNodeFee().call()) # get the transaction receipt - args.depositAmount = await rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber) + args.depositAmount = await rp.call( + "rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber + ) user_deposit = args.depositAmount receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] @@ -804,7 +820,7 @@ def share_repr(percentage: float) -> str: event_name = "minipool_dissolve_event" case _: return None - + args.operator = await rp.call("rocketMinipoolDelegate.getNodeAddress", address=args.minipool) if event_name in ["minipool_bond_reduce_event", "minipool_vacancy_prepared_event", @@ -830,10 +846,12 @@ def share_repr(percentage: float) -> str: event_name = f"vacant_{event_name}" if event_name == "vacant_minipool_scrub_event": # let's try to determine the reason. there are 4 reasons a vacant minipool can get scrubbed: - # 1. the validator does not have the withdrawal credentials set to the minipool address, but to some other address + # 1. the validator does not have the withdrawal credentials set to the minipool address, + # but to some other address # 2. the validator balance on the beacon chain is lower than configured in the minipool contract # 3. the validator does not have the active_ongoing validator status - # 4. 
the migration could have timed out, the oDAO will scrub minipools after they have passed half of the migration window + # 4. the migration could have timed out, the oDAO will scrub minipools + # after they have passed half of the migration window # get pubkey from minipool contract pubkey = (await rp.call("rocketMinipoolManager.getMinipoolPubkey", args.minipool)).hex() vali_info = (await bacon.get_validator(f"0x{pubkey}"))["data"] @@ -846,7 +864,7 @@ def share_repr(percentage: float) -> str: # check for #2 configured_balance = solidity.to_float( await rp.call("rocketMinipoolDelegate.getPreMigrationBalance", address=args.minipool, - block=args.blockNumber - 1)) + block=args.blockNumber - 1)) if (solidity.to_float(vali_info["balance"], 9) - configured_balance) < -0.01: reason = "having a balance lower than configured in the minipool contract on the beacon chain" # check for #3 @@ -854,9 +872,9 @@ def share_repr(percentage: float) -> str: reason = "not being active on the beacon chain" # check for #4 scrub_period = await rp.call("rocketDAONodeTrustedSettingsMinipool.getPromotionScrubPeriod", - block=args.blockNumber - 1) + block=args.blockNumber - 1) minipool_creation = await rp.call("rocketMinipoolDelegate.getStatusTime", address=args.minipool, - block=args.blockNumber - 1) + block=args.blockNumber - 1) block_time = await block_to_ts(args.blockNumber - 1) if block_time - minipool_creation > scrub_period // 2: reason = "taking too long to migrate their withdrawal credentials on the beacon chain" @@ -873,6 +891,7 @@ def share_repr(percentage: float) -> str: event.args = args return await assemble(args) + async def setup(bot): cog = Events(bot) await cog.async_init() diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index e1994145..5404529a 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -30,33 +30,33 @@ 
async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", e = Embed() e.title = "Minipool Fee Distribution" - + tree = {} fig, axs = plt.subplots(1, 2) - for i, bond in enumerate([8, 16]): + for i, bond in enumerate([8, 16]): result = await self.bot.db.minipools.aggregate([ - { - "$match": { + { + "$match": { "node_deposit_balance": bond, "beacon.status": "active_ongoing" } }, - { - "$group": { - "_id" : { "$round": ["$node_fee", 2] }, - "count": { "$sum": 1 } + { + "$group": { + "_id" : {"$round": ["$node_fee", 2]}, + "count": {"$sum": 1} } - }, - { - "$sort": { "_id": 1 } + }, + { + "$sort": {"_id": 1} } - ]) - + ]) + labels = [] sizes = [] subtree = {} - + for entry in await result.to_list(): fee_percentage = entry['_id'] * 100 labels.append(f"{fee_percentage:.0f}%") @@ -66,12 +66,12 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", ax = axs[i] total = sum(sizes) tree[f"{bond} ETH"] = subtree - + # avoid overlapping labels for small slices for i in range(len(sizes)): if sizes[i] < 0.05 * total: labels[i] = "" - + ax.set_title(f"{bond} ETH") ax.pie(sizes, labels=labels, autopct=lambda p: f"{p * total / 100:.0f}" if (p >= 5) else "") @@ -91,6 +91,5 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", await interaction.followup.send(embed=e, file=File(img, filename=file_name)) - async def setup(bot): await bot.add_cog(FeeDistribution(bot)) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index b2acdf42..5e57bf15 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -97,7 +97,7 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: _url = f"{cfg['execution_layer.explorer']}/tx/{_tx_hash}" text += f" {_i}. 
[{_title}]({_url}) (#{_proposal.id})\n" return text - + # --------- SECURITY COUNCIL --------- # dao = SecurityCouncil() @@ -105,7 +105,7 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: embed.description += "### Security Council\n" embed.description += "- **Active on-chain proposals**\n" embed.description += await print_proposals(dao, proposals) - + # --------- ORACLE DAO --------- # dao = OracleDAO() diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 5a6577c1..ec792a59 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -44,11 +44,11 @@ async def load_sync_committee(self, period): sync_period += 1 data = (await bacon.get_sync_committee(sync_period * 256))["data"] await self.bot.db.sync_committee_stats.replace_one({"period": period}, - {"period" : period, - "start_epoch": sync_period * 256, - "end_epoch" : (sync_period + 1) * 256, - "sync_period": sync_period * 256, - }, upsert=True) + {"period" : period, + "start_epoch": sync_period * 256, + "end_epoch" : (sync_period + 1) * 256, + "sync_period": sync_period * 256, + }, upsert=True) validators = data["validators"] col = self.bot.db[f"sync_committee_{period}"] # get unique validators from collection diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index a468e351..8d6b5938 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -149,6 +149,5 @@ async def metrics_chart(self, interaction: Interaction): await interaction.followup.send(embed=e, file=File(file, filename="metrics.png")) - async def setup(bot): await bot.add_cog(Metrics(bot)) diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index a2577b5a..93d9305b 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ 
b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -61,7 +61,11 @@ async def run_loop(self): e = Embed() e.title = message["title"] e.description = message["content"] - e.set_footer(text="This message has been pinned by Invis. Will be automatically removed if not updated within 6 hours.") + e.set_footer( + text=( + "This message has been pinned by Invis." + " Will be automatically removed if not updated within 6 hours." + )) m = await channel.send(embed=e) await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"message_id": m.id}}) except Exception as err: @@ -70,10 +74,10 @@ async def run_loop(self): @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() - async def pin(self, interaction, channel_id, title, description): + async def pin(self, interaction: Interaction, channel_id: int, title: str, description: str): await interaction.response.defer() # check if channel exists - channel = self.bot.get_channel(int(channel_id)) + channel = self.bot.get_channel(channel_id) if not channel: await interaction.followup.send("Channel not found") return @@ -97,7 +101,7 @@ async def pin(self, interaction, channel_id, title, description): @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() - async def unpin(self, interaction, channel_id): + async def unpin(self, interaction: Interaction, channel_id: str): await interaction.response.defer() # check if channel exists channel = self.bot.get_channel(int(channel_id)) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index fea15ee1..7d8a36f1 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -72,6 +72,7 @@ # noinspection RegExpUnnecessaryNonCapturingGroup SMARTNODE_REGEX = re.compile(r"^RP(?:(?:-)([A-Z])([A-Z])?)? 
(?:v)?(\d+\.\d+\.\d+(?:-\w+)?)(?:(?:(?: \()|(?: gw:))(.+)(?:\)))?") + def parse_proposal(beacon_block: dict) -> dict: graffiti = bytes.fromhex(beacon_block["body"]["graffiti"][2:]).decode("utf-8").rstrip('\x00') data = { @@ -118,12 +119,12 @@ def __init__(self, bot: RocketWatch): self.batch_size = 100 self.cooldown = timedelta(minutes=5) self.bot.loop.create_task(self.loop()) - + async def loop(self): await self.bot.wait_until_ready() await self.check_indexes() while not self.bot.is_closed(): - p_id = time.time() + p_id = time.time() self.monitor.ping(state="run", series=p_id) try: log.debug("starting proposal task") @@ -136,7 +137,7 @@ async def loop(self): self.monitor.ping(state="fail", series=p_id) finally: await asyncio.sleep(self.cooldown.total_seconds()) - + async def check_indexes(self): await self.bot.wait_until_ready() try: @@ -150,14 +151,14 @@ async def fetch_proposals(self): if db_entry := (await self.bot.db.last_checked_block.find_one({"_id": cog_id})): last_checked_slot = db_entry["slot"] else: - last_checked_slot = 4700012 # last slot before merge - + last_checked_slot = 4700012 # last slot before merge + latest_slot = int((await bacon.get_block_header("finalized"))["data"]["header"]["message"]["slot"]) for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) await self.bot.db.last_checked_block.replace_one({"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True) - + async def fetch_proposal(self, slot: int) -> None: try: beacon_header = (await bacon.get_block_header(str(slot)))["data"]["header"]["message"] @@ -166,16 +167,16 @@ async def fetch_proposal(self, slot: int) -> None: return None else: raise e - + validator_index = int(beacon_header["proposer_index"]) if not (minipool := (await self.bot.db.minipools.find_one({"validator_index": validator_index}))): return None - + 
beacon_block = (await bacon.get_block(str(slot)))["data"]["message"] proposal_data = parse_proposal(beacon_block) await self.bot.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) - - async def create_minipool_proposal_view(self): + + async def create_minipool_proposal_view(self): log.info("creating minipool proposal view") pipeline = [ { @@ -230,7 +231,7 @@ async def gather_attribute(self, attribute, remove_allnodes=False): match_stage = {} if remove_allnodes: match_stage['$match'] = {'latest_proposal.type': {'$ne': 'Allnodes'}} - + pipeline = [ { '$project': { @@ -247,13 +248,13 @@ async def gather_attribute(self, attribute, remove_allnodes=False): } } ] - + # Add match stage at the beginning if filtering Allnodes if remove_allnodes: pipeline.insert(0, match_stage) - + distribution = await (await self.bot.db.minipool_proposals.aggregate(pipeline)).to_list() - + if remove_allnodes: d = {'remove_from_total': {'count': 0, 'validator_count': 0}} for entry in distribution: @@ -395,7 +396,10 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = minipools = sorted(minipools, key=lambda x: x[1]) # get total minipool count from rocketpool - unobserved_minipools = len(await self.bot.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("_id")) - sum(d[1] for d in minipools) + distinct_ids = await self.bot.db.minipools.find( + {"beacon.status": "active_ongoing", "status": "staking"} + ).distinct("_id") + unobserved_minipools = len(distinct_ids) - sum(d[1] for d in minipools) if "remove_from_total" in data: unobserved_minipools -= data["remove_from_total"]["validator_count"] minipools.insert(0, ("No proposals yet", unobserved_minipools)) @@ -411,7 +415,10 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = node_operators = sorted(node_operators, key=lambda x: x[1]) # get total node operator count from rp - unobserved_node_operators = len(await 
self.bot.db.minipools.find({"beacon.status": "active_ongoing", "status": "staking"}).distinct("node_operator")) - sum(d[1] for d in node_operators) + distinct_nos = await self.bot.db.minipools.find( + {"beacon.status": "active_ongoing", "status": "staking"} + ).distinct("node_operator") + unobserved_node_operators = len(distinct_nos) - sum(d[1] for d in node_operators) if "remove_from_total" in data: unobserved_node_operators -= data["remove_from_total"]["count"] node_operators.insert(0, ("No proposals yet", unobserved_node_operators)) @@ -505,7 +512,9 @@ async def operator_type_distribution(self, interaction: Interaction): await interaction.followup.send(embed=embed, file=file) @command() - async def client_combo_ranking(self, interaction: Interaction, remove_allnodes: bool = False, group_by_node_operators: bool = False): + async def client_combo_ranking( + self, interaction: Interaction, remove_allnodes: bool = False, group_by_node_operators: bool = False + ): """ Generate a ranking of most used execution and consensus clients. 
""" diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 1437eadf..e7dc39f1 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -24,9 +24,9 @@ class Queue(Cog): class Entry(NamedTuple): megapool: ChecksumAddress validator_id: int - bond: int # always 4,000 for now - deposit_size: int # always 32,000 for now - + bond: int # always 4,000 for now + deposit_size: int # always 32,000 for now + def __init__(self, bot: RocketWatch): self.bot = bot @@ -42,11 +42,11 @@ def __init__(self, lane: Literal["combined", "standard", "express"]): else: self.queue_name = "Validator Queue" self.content_loader = Queue.get_combined_queue - + @property def _title(self) -> str: return self.queue_name - + async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: queue_length, queue_content = await self.content_loader( limit=(to_idx - from_idx + 1), start=from_idx @@ -57,17 +57,17 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: @cached(key_builder=lambda _, address, prefix="": (address, prefix)) async def _cached_el_url(address, prefix="") -> str: return await el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) - + @staticmethod async def _megapool_to_node(megapool_address) -> ChecksumAddress: return await rp.call("rocketMegapoolDelegate.getNodeAddress", address=megapool_address) - + @staticmethod async def __format_queue_entry(entry: 'Queue.Entry') -> str: node_address = await Queue._megapool_to_node(entry.megapool) node_label = await Queue._cached_el_url(node_address) return f"{node_label} #`{entry.validator_id}`" - + @staticmethod async def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the standard queue""" @@ -77,13 +77,13 @@ async def get_standard_queue(limit: int, start: int = 0) -> tuple[int, str]: async def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get 
the next {limit} validators in the express queue""" return await Queue._get_queue("deposit.queue.express", limit, start) - + @staticmethod async def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: list_contract = await rp.get_contract_by_name("linkedListStorage") raw_entries, _ = await list_contract.functions.scan(namespace, 0, start + limit).call(block_identifier=block_identifier) return [Queue.Entry(*entry) for entry in raw_entries][start:] - + @staticmethod async def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, str]: if limit <= 0: @@ -99,35 +99,39 @@ async def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, s if start >= q_len: return q_len, "" - queue_entries = await Queue._scan_list(queue_namespace, start, limit, latest_block) - + queue_entries = await Queue._scan_list(queue_namespace, start, limit, latest_block) + content = "" for i, entry in enumerate(queue_entries): entry_str = await Queue.__format_queue_entry(entry) content += f"{start+i+1}. 
{entry_str}\n" return q_len, content - + @staticmethod - def _get_entries_used_in_interval(start: int, end: int, len_express: int, len_standard: int, express_rate: int) -> tuple[int, int]: - log.debug(f"Calculating entries used in interval [{start}, {end}] with express_rate {express_rate} and queue lengths {len_express} (express) and {len_standard} (standard)") - - total_entries = end - start + 1 # end is inclusive + def _get_entries_used_in_interval( + start: int, end: int, len_express: int, len_standard: int, express_rate: int + ) -> tuple[int, int]: + log.debug( + f"Calculating entries used in interval [{start}, {end}] with express_rate {express_rate}" + f" and queue lengths {len_express} (express) and {len_standard} (standard)") + + total_entries = end - start + 1 # end is inclusive num_standard = total_entries // (express_rate + 1) # standard queue is used when index % (express_queue_rate + 1) == express_queue_rate # this checks whether we "cross" an extra express queue slot in the interval if ((end + 1) % (express_rate + 1)) < (start % (express_rate + 1)): num_standard += 1 - - num_standard = min(num_standard, len_standard) + + num_standard = min(num_standard, len_standard) # if standard queue runs out, remaining entries are taken from express queue num_express = min(total_entries - num_standard, len_express) # if express queue runs out, remaining entries are taken from standard queue if (num_express + num_standard) < total_entries: num_standard = min(total_entries - num_express, len_standard) - - return num_express, num_standard - + + return num_express, num_standard + @staticmethod async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the combined queue (express + standard)""" @@ -143,31 +147,35 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: express_queue_length = await list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) 
standard_queue_length = await list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) q_len = express_queue_length + standard_queue_length - + if start >= q_len: return q_len, "" - + start_express_queue, start_standard_queue = Queue._get_entries_used_in_interval( - queue_index, - queue_index + start - 1, - express_queue_length, + queue_index, + queue_index + start - 1, + express_queue_length, standard_queue_length, express_queue_rate ) log.debug(f"{start_express_queue = }") log.debug(f"{start_standard_queue = }") limit_express_queue, limit_standard_queue = Queue._get_entries_used_in_interval( - queue_index + start, - queue_index + start + limit - 1, - express_queue_length - start_express_queue, - standard_queue_length - start_standard_queue, + queue_index + start, + queue_index + start + limit - 1, + express_queue_length - start_express_queue, + standard_queue_length - start_standard_queue, express_queue_rate ) log.debug(f"{limit_express_queue = }") log.debug(f"{limit_standard_queue = }") - - express_entries_rev = (await Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block))[::-1] - standard_entries_rev = (await Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block))[::-1] - + + express_entries_rev = ( + await Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block) + )[::-1] + standard_entries_rev = ( + await Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block) + )[::-1] + content = "" for i in range(len(express_entries_rev) + len(standard_entries_rev)): effective_queue_index = queue_index + start + i @@ -178,7 +186,7 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: else: entry = standard_entries_rev.pop() lane_pos = "🐢" - + overall_pos = start + i + 1 entry_str = await Queue.__format_queue_entry(entry) content += f"{overall_pos}. 
{lane_pos} {entry_str}\n" diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index efd8fd71..62745468 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -63,7 +63,7 @@ async def burn_reason(self, interaction: Interaction): description += "```\n" description += "**Burn Ranking (last 5 minutes)**\n" ranking = data["leaderboards"]["leaderboard5m"][:5] - + for i, entry in enumerate(ranking): # use a number emoji as rank (:one:, :two:, ...) # first of convert the number to a word @@ -78,10 +78,10 @@ async def burn_reason(self, interaction: Interaction): description += f" {target}" if entry.get("category"): description += f" `[{entry['category'].upper()}]`" - + description += "\n\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0" description += f"`{solidity.to_float(entry['fees']):,.2f} ETH` :fire:\n" - + e.add_field( name="Current Base Fee", value=f"`{solidity.to_float(data['latestBlockFees'][0]['baseFeePerGas'], 9):,.2f} GWEI`" @@ -106,7 +106,8 @@ async def dev_time(self, interaction: Interaction): e.add_field(name="Coordinated Universal Time", value=f"{dev_time.strftime(time_format)}\n" f"`{binary_day} (0x{uint_day:04x})`") - b = solidity.slot_to_beacon_day_epoch_slot(int((await bacon.get_block_header("head"))["data"]["header"]["message"]["slot"])) + head_slot = int((await bacon.get_block_header("head"))["data"]["header"]["message"]["slot"]) + b = solidity.slot_to_beacon_day_epoch_slot(head_slot) e.add_field(name="Beacon Time", value=f"Day {b[0]}, {b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) diff --git a/rocketwatch/plugins/reloader/reloader.py b/rocketwatch/plugins/reloader/reloader.py index 719dd463..866eac0d 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -16,7 +16,7 @@ class Reloader(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - + async def _get_loaded_extensions(self, interaction: 
Interaction, current: str) -> list[Choice[str]]: loaded = {ext.split(".")[-1] for ext in self.bot.extensions.keys()} return [Choice(name=plugin, value=plugin) for plugin in loaded if current.lower() in plugin.lower()][:25] @@ -41,7 +41,7 @@ async def load(self, interaction: Interaction, module: str): await interaction.followup.send(content=f"Plugin `{module}` already loaded!") except ExtensionNotFound: await interaction.followup.send(content=f"Plugin `{module}` not found!") - + @command() @guilds(cfg["discord.owner.server_id"]) @is_owner() @@ -69,7 +69,7 @@ async def reload(self, interaction: Interaction, module: str): await self.bot.sync_commands() except ExtensionNotLoaded: await interaction.followup.send(content=f"Plugin {module} not loaded!") - + async def setup(bot): await bot.add_cog(Reloader(bot)) diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 9339804d..9476eeb2 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -152,7 +152,9 @@ async def simulate_rewards( reward_start_block = await ts_to_block(rewards.start_time) rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) - actual_borrowed_eth = solidity.to_float(await rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block)) + actual_borrowed_eth = solidity.to_float( + await rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block) + ) actual_rpl_stake = solidity.to_float(await rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) inflation_rate: int = await rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 50f0e40d..6beefd8d 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -42,19 +42,19 @@ async def _fetch_asset_updates(self) -> 
list[tuple[int, float]]: b_from = last_checked_block + 1 b_to = await w3.eth.get_block_number() - + updates = [] - + async for doc in self.bot.db.rocksolid.find({}): updates.append((doc["time"], doc["assets"])) - + db_operations = [] for event_log in get_logs(vault_contract.events.TotalAssetsUpdated, b_from, b_to): ts = await block_to_ts(event_log.blockNumber) assets = solidity.to_float(event_log.args.totalAssets) updates.append((ts, assets)) db_operations.append(InsertOne({"time": ts, "assets": assets})) - + async with self.bot.db.client.start_session() as session: async with await session.start_transaction(): if db_operations: @@ -73,10 +73,10 @@ async def rocksolid(self, interaction: Interaction): Summary of RockSolid rETH vault stats. """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - + current_block = await w3.eth.get_block_number() now = await block_to_ts(current_block) - + async def get_eth_rate(block_number: int) -> int: block_number = max(block_number, self.deployment_block) reth_value = await rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) @@ -96,7 +96,7 @@ async def get_apy(days: int) -> Optional[float]: tvl_reth = solidity.to_float(await rp.call("RockSolidVault.totalAssets")) tvl_rock_reth = solidity.to_float(await rp.call("RockSolidVault.totalSupply")) - + asset_updates: list[tuple[int, float]] = await self._fetch_asset_updates() current_date = datetime.fromtimestamp(asset_updates[0][0]).date() - timedelta(days=1) current_assets = 0.0 @@ -123,16 +123,16 @@ async def get_apy(days: int) -> Optional[float]: ax.set_ylabel("AUM (rETH)") ax.set_xlim((x[0], x[-1])) ax.set_ylim((y[0], y[-1] * 1.01)) - + img = BytesIO() fig.tight_layout() fig.savefig(img, format='png') img.seek(0) plt.clf() - + ca_reth = await rp.get_address_by_name("rocketTokenRETH") ca_rock_reth = await rp.get_address_by_name("RockSolidVault") - + embed = Embed(title="<:rocksolid:1425091714267480158> RockSolid rETH Vault") 
embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if apy_7d else "-") embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if apy_30d else "-") diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index b48fb18a..a63b87d0 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -27,7 +27,7 @@ async def staked_rpl(self, interaction: Interaction): Show the amount of RPL staked """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - + rpl_supply = solidity.to_float(await rp.call("rocketTokenRPL.totalSupply")) legacy_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) megapool_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) @@ -56,6 +56,7 @@ def fmt(v): colors = ["#CC4400", "#FF6B00", "#D2B48C", "#808080"] total = sum(sizes) + def autopct(pct): return f"{fmt(pct / 100 * total)} ({pct:.1f}%)" @@ -88,7 +89,7 @@ async def withdrawable_rpl(self, interaction: Interaction): """ Show the available liquidity at different RPL/ETH prices """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) data = await (await self.bot.db.node_operators.aggregate([ { @@ -152,7 +153,7 @@ async def withdrawable_rpl(self, interaction: Interaction): x, y = zip(*list(free_rpl_liquidity.values())) embed = Embed() - + # plot the data plt.plot(x, y, color=str(embed.color)) plt.plot(rpl_eth_price, current_withdrawable_rpl, 'bo') diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 86a3932f..50ebd96a 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -101,10 +101,10 @@ def safe_div(x, y): label_font_variant = FontVariant.BOLD def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) -> int: - color: Color = (128, 128, 128) # 
slate gray + color: Color = (128, 128, 128) # slate gray choice_colors = { "for": (4, 99, 7), # green - "against": (156, 0, 47), # red + "against": (156, 0, 47), # red "abstain": (114, 121, 138) } for k, v in choice_colors.items(): @@ -607,7 +607,7 @@ async def snapshot_votes(self, interaction: Interaction): total_height = v_spacing * (num_rows - 1) proposal_grid: list[list[Snapshot.Proposal]] = [] for row_idx in range(num_rows): - row = proposals[row_idx*num_cols:(row_idx+1)*num_cols] + row = proposals[row_idx * num_cols:(row_idx + 1) * num_cols] proposal_grid.append(row) # row height is equal to height of its tallest proposal total_height += max(p.predict_render_height() for p in row) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 5ee0ae9d..b4723b4d 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -17,7 +17,7 @@ async def generate_template_embed(db, template_name: str): template = await db.support_bot.find_one({"_id": template_name}) - if not template: + if not template: return None # get the last log entry from the db dumps_col = db.support_bot_dumps.with_options(codec_options=CodecOptions(tz_aware=True)) @@ -42,17 +42,19 @@ def __init__(self, db, template_name: str): async def edit(self, interaction: Interaction, button: ui.Button): template = await self.db.support_bot.find_one({'_id': self.template_name}) # Make sure to update the message with our update - await interaction.response.send_modal(AdminModal(template["title"], template["description"], self.db, self.template_name)) + await interaction.response.send_modal( + AdminModal(template["title"], template["description"], self.db, self.template_name) + ) class DeletableView(ui.View): def __init__(self, user: User): super().__init__(timeout=None) self.user = user - + @ui.button(emoji="<:delete:1364953621191721002>", style=ButtonStyle.gray) async def delete(self, 
interaction: Interaction, button: ui.Button): - if (interaction.user == self.user) or has_perms(interaction): + if (interaction.user == self.user) or has_perms(interaction): await interaction.message.delete() log.warning(f"Support template message deleted by {interaction.user} in {interaction.channel}") @@ -150,7 +152,7 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): ephemeral=True ) return - + # respond with the template embed if e := (await generate_template_embed(db, name)): await interaction.response.send_message( @@ -308,7 +310,10 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): templates.sort(key=lambda x: x[order_by]) # create the embed embed = Embed(title="Templates") - embed.description = "".join(f"\n`{template['_id']}` - " for template in templates) + "" + embed.description = "".join( + f"\n`{template['_id']}` - " + for template in templates + ) + "" # split the embed into multiple embeds if it is too long embeds = [embed] while len(embeds[-1]) > 6000: @@ -318,7 +323,6 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): embed.description = embed.description[:6000] await interaction.edit_original_response(embeds=embeds) - @subgroup.command() async def use(self, interaction: Interaction, name: str, mention: User | None): await _use(self.bot.db, interaction, name, mention) diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index e3d7cce1..b4d81c7d 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -89,7 +89,7 @@ async def replay_tx(self, interaction: Interaction, tx_hash: str): responses: list[Event] = await self.process_transaction(block, tnx, tnx.to, tnx.input) if responses: - await interaction.followup.send(embeds=[response.embed for response in responses]) + await interaction.followup.send(embeds=[response.embed for response in 
responses]) else: await interaction.followup.send(content="No events found.") @@ -142,7 +142,6 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: args.transactionHash = event.hash.hex() args.blockNumber = event.blockNumber - # oDAO bootstrap doesn't emit an event if "odao_disable" in event_name and not args.confirmDisableBootstrapMode: return [] @@ -150,7 +149,9 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.delegator = receipt["from"] args.delegate = args.get("delegate") or args.get("newDelegate") - args.votingPower = solidity.to_float(await rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber)) + args.votingPower = solidity.to_float( + await rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber) + ) if (args.votingPower < 50) or (args.delegate == args.delegator): return [] elif "failed_deposit" in event_name: @@ -238,7 +239,7 @@ def share_repr(percentage: float) -> str: args.contract_name = contract_name args.periodLength = contract_post[2] - + args.recipient_address = contract_post[0] periods_claimed = contract_post[5] - contract_pre[5] args.amount = periods_claimed * contract_post[1] @@ -347,5 +348,6 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l return new_responses + responses + async def setup(bot): await bot.add_cog(Transactions(bot)) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 0958f6c7..ede8a19e 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -168,7 +168,8 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "execution_balance"] # Dissolved Minipools: - # Minipools that are flagged as dissolved are Pending minipools that didn't trigger the second phase within the configured + # Minipools that are flagged as dissolved are Pending minipools 
that didn't + # trigger the second phase within the configured # LaunchTimeout (14 days at the time of writing). # They have the following applied to them: # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. @@ -256,8 +257,10 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # ETH in here has been swapped for rETH and is waiting to be matched with a minipool. # Fun Fact: This value can go above the configured Deposit Pool Cap in 2 scenarios: # - A Minipool gets dissolved, moving 16 ETH from its address back to the Deposit Pool. - # - ETH from withdrawn Minipools, which gets stored in the rETH contract, surpasses the configured targetCollateralRate, - # which is 10% at the time of writing. Once this occurs the ETH gets moved from the rETH contract to the Deposit Pool. + # - ETH from withdrawn Minipools, which gets stored in the rETH contract, + # surpasses the configured targetCollateralRate, + # which is 10% at the time of writing. Once this occurs the ETH gets moved + # from the rETH contract to the Deposit Pool. 
data["Total ETH Locked"]["rETH Collateral"]["Deposit Pool"]["_val"] = solidity.to_float( await rp.call("rocketDepositPool.getBalance")) @@ -446,7 +449,8 @@ def set_val_of_branch(branch, unit): test = render_tree(data, "Total Locked Value", max_depth=0 if show_all else 2) # send embed with tvl e = Embed() - closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust(max([len(line) for line in test.split("\n")])-1) + closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust( + max([len(line) for line in test.split("\n")]) - 1) e.description = f"```ansi\n{test}\n{closer}```" e.set_footer(text="\"that looks good to me\" - invis 2023") await interaction.followup.send(embed=e) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 1eb2b1e6..8e45d17c 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -19,6 +19,7 @@ log = logging.getLogger("user_distribute") log.setLevel(cfg["log_level"]) + class InstructionsView(ui.View): def __init__(self, eligible: list[dict], distributable: list[dict], instruction_timeout: int): super().__init__(timeout=instruction_timeout) @@ -33,7 +34,7 @@ async def instructions(self, interaction: Interaction, _) -> None: calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] calls += [(mp["address"], True, bud_calldata) for mp in self.eligible] - + multicall_contract = await rp.get_contract_by_name("multicall3") gas_used = await multicall_contract.functions.aggregate3(calls).estimate_gas() gas_price = await w3.eth.gas_price @@ -42,10 +43,10 @@ async def instructions(self, interaction: Interaction, _) -> None: tuple_strs = [] for address, allow_failure, calldata in calls: tuple_strs.append(f"[\"{address}\", {str(allow_failure).lower()}, 0x{calldata.hex()}]") - + 
input_data = "[" + ",".join(tuple_strs) + "]" etherscan_url = f"https://etherscan.io/address/{multicall_contract.address}#writeContract#F2" - + embed = Embed(title="Distribution Instructions") embed.description = ( f"1. Open the [Multicall `aggregate3` function]({etherscan_url}) on Etherscan\n" @@ -54,16 +55,16 @@ async def instructions(self, interaction: Interaction, _) -> None: f"4. Connect your wallet (`Connect to Web3`)\n" f"5. Click `Write` and sign with your wallet\n" ) - + actions = [] if (count := len(self.distributable)) > 0: - actions.append(f"distribute the balance of **{count}** minipool{'s' if count != 1 else ''}") + actions.append(f"distribute the balance of **{count}** minipool{'s' if count != 1 else ''}") if (count := len(self.eligible)) > 0: actions.append(f"begin the user distribution process for **{count}** minipool{'s' if count != 1 else ''}") - + embed.description += "\nThis will " + " and ".join(actions) + "." embed.description += f"\nEstimated cost: **{cost_eth:,.6f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" - + await interaction.response.send_message( embed=embed, file=discord.File(StringIO(input_data), filename="input_data.txt"), @@ -84,9 +85,9 @@ async def task(self): channel_id = cfg.get("discord.channels.user_distribute") if not channel_id: return - + channel = await self.bot.get_or_fetch_channel(channel_id) - + _, _, distributable = await self._fetch_minipools() if not distributable: return @@ -95,7 +96,8 @@ async def task(self): count = len(distributable) next_window_close = distributable[0]["ud_window_close"] embed.description = ( - f"There {'are' if count != 1 else 'is'} **{count}** minipool{'s' if count != 1 else ''} eligible for distribution.\n" + f"There {'are' if count != 1 else 'is'} **{count}**" + f" minipool{'s' if count != 1 else ''} eligible for distribution.\n" f"The next window closes !" 
) @@ -134,19 +136,20 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: storage = await w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time: int = int.from_bytes(storage, "big") elapsed_time = current_time - user_distribute_time - + if elapsed_time >= ud_window_end: eligible.append(mp) elif elapsed_time < ud_window_start: mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) - elif not await rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): # double check, DB may lag behind + # double check, DB may lag behind + elif not await rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) - + pending.sort(key=itemgetter("ud_window_open")) distributable.sort(key=itemgetter("ud_window_close")) - + return eligible, pending, distributable @command() @@ -155,9 +158,9 @@ async def user_distribute_status(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) eligible, pending, distributable = await self._fetch_minipools() - + embed = Embed(title="User Distribute Status") - + embed.add_field( name="Eligible", value=f"**{len(eligible)}** minipool{'s' if len(eligible) != 1 else ''}", @@ -168,7 +171,10 @@ async def user_distribute_status(self, interaction: Interaction): next_window_open = pending[0]["ud_window_open"] embed.add_field( name="Pending", - value=f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''} · next window opens ", + value=( + f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''}" + f" · next window opens " + ), inline=False ) else: @@ -178,15 +184,20 @@ async def user_distribute_status(self, interaction: Interaction): next_window_close = distributable[0]["ud_window_close"] embed.add_field( name="Distributable", - value=f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''} · 
next window closes ", + value=( + f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''}" + f" · next window closes " + ), inline=False ) else: embed.add_field(name="Distributable", value="**0** minipools", inline=False) - + if eligible or distributable: # limit the number of distributions to not run out of gas - await interaction.followup.send(embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300)) + await interaction.followup.send( + embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300) + ) else: await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index dcf78f4f..50462f1b 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -17,6 +17,7 @@ _BEACON_PENDING = {"in_queue": "unassigned", "prestaked": "prestaked", "staking": "staked"} + def _classify_beacon_validator(beacon, contract_status): """Classify a validator by beacon status. Returns (status, sub_status).""" match beacon["status"]: @@ -55,6 +56,7 @@ def _empty_state_tree(): "closed": {} } + def _classify_collection(docs, done_fn): """Classify docs into state tree. 
@@ -130,7 +132,7 @@ async def validator_states(self, interaction: Interaction): mg_data, mg_exiting, mg_withdrawn = _classify_collection( megapool_vals, lambda d: d.get("status") == "exited" ) - + tree = { "minipools": _collapse_tree(mp_data), "megapools": _collapse_tree(mg_data), @@ -184,14 +186,20 @@ async def validator_states(self, interaction: Interaction): if num_exiting > 0: description += "\n**Exiting Node Operators**\n" - description += ", ".join([f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in exiting_node_operators[:num_exiting]]) + description += ", ".join([ + f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" + for v, c in exiting_node_operators[:num_exiting] + ]) if remaining_no := exiting_node_operators[num_exiting:]: num_remaining_valis = sum([c for _, c in remaining_no]) description += f", and {len(remaining_no)} more ({num_remaining_valis})" description += "\n" if num_withdrawn > 0: description += "\n**Withdrawn Node Operators**\n" - description += ", ".join([f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" for v, c in withdrawn_node_operators[:num_withdrawn]]) + description += ", ".join([ + f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" + for v, c in withdrawn_node_operators[:num_withdrawn] + ]) if remaining_no := withdrawn_node_operators[num_withdrawn:]: num_remaining_valis = sum([c for _, c in remaining_no]) description += f", and {len(remaining_no)} more ({num_remaining_valis})" diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 7ee8877e..1899dd8a 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -25,7 +25,7 @@ from utils.rocketpool import rp from utils.liquidity import ( Exchange, CEX, DEX, Market, Liquidity, - Binance, Coinbase, GateIO, OKX, Bitget, MEXC, Bybit, CryptoDotCom, + Binance, Coinbase, GateIO, OKX, Bitget, MEXC, Bybit, CryptoDotCom, Kraken, Kucoin, Bithumb, BingX, Bitvavo, HTX, BitMart, Bitrue, CoinTR, 
BalancerV2, UniswapV3 ) @@ -63,7 +63,9 @@ async def _get_dex(self) -> set[DEX]: if self.dex is None: self.dex = { BalancerV2([ - await BalancerV2.WeightedPool.create(HexStr("0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462")) + await BalancerV2.WeightedPool.create( + HexStr("0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462") + ) ]), await UniswapV3.create([ cast(ChecksumAddress, "0xe42318eA3b998e8355a3Da364EB9D48eC725Eb45"), diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 42ca5677..d1821902 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -32,7 +32,7 @@ class RocketWatch(Bot): def __init__(self, intents: Intents) -> None: super().__init__(command_prefix=(), tree_cls=RWCommandTree, intents=intents) self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch - + async def _load_plugins(self): chain = cfg["rocketpool.chain"] storage = cfg["rocketpool.manual_addresses.rocketStorage"] @@ -75,13 +75,13 @@ def should_load_plugin(_plugin: str) -> bool: async def setup_hook(self) -> None: await rp.async_init() await self._load_plugins() - + async def sync_commands(self) -> None: log.info("Syncing command tree...") await self.tree.sync() for guild in self.guilds: await self.tree.sync(guild=guild) - + def clear_commands(self) -> None: self.tree.clear_commands(guild=None) for guild in self.guilds: diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 908c4c62..aea4d54e 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -9,10 +9,12 @@ log = logging.getLogger("block_time") log.setLevel(cfg["log_level"]) + @cached() async def block_to_ts(block_number: int) -> int: return (await w3.eth.get_block(block_number)).timestamp + async def ts_to_block(target_ts: int) -> int: log.debug(f"Looking for block at timestamp {target_ts}") if target_ts < await block_to_ts(1): diff --git a/rocketwatch/utils/command_tree.py 
b/rocketwatch/utils/command_tree.py index 5d1ea1ac..a63e3f40 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -58,7 +58,10 @@ async def _call(self, interaction: Interaction) -> None: await self.client.report_error(e) raise - log.info(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) completed successfully") + log.info( + f"/{cmd_name} called by {interaction.user} in" + f" #{interaction.channel.name} ({interaction.guild}) completed successfully" + ) try: await self.client.db.command_metrics.update_one( {'_id': interaction.id}, diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 198e31cc..d471ed38 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -124,7 +124,7 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) ]) - relevant_proposals = [(i+1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] + relevant_proposals = [(i + 1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] proposal_states = await rp.multicall([ proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals ]) @@ -139,17 +139,17 @@ async def fetch_proposal(self, proposal_id: int) -> Proposal: proposal_contract = await self._get_proposal_contract() (proposer, message, payload, created, start, end, expires, votes_for_raw, votes_against_raw, votes_required_raw) = await rp.multicall([ - proposal_contract.functions.getProposer(proposal_id), - proposal_contract.functions.getMessage(proposal_id), - proposal_contract.functions.getPayload(proposal_id), - proposal_contract.functions.getCreated(proposal_id), - proposal_contract.functions.getStart(proposal_id), - proposal_contract.functions.getEnd(proposal_id), - proposal_contract.functions.getExpires(proposal_id), - 
proposal_contract.functions.getVotesFor(proposal_id), - proposal_contract.functions.getVotesAgainst(proposal_id), - proposal_contract.functions.getVotesRequired(proposal_id) - ]) + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getEnd(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotesFor(proposal_id), + proposal_contract.functions.getVotesAgainst(proposal_id), + proposal_contract.functions.getVotesRequired(proposal_id) + ]) return DefaultDAO.Proposal( id=proposal_id, proposer=cast(ChecksumAddress, proposer), @@ -183,14 +183,17 @@ def _build_vote_graph(self, proposal: Proposal) -> str: f"Quorum: {quorum_perc:.0%}{' ✔' if (quorum_perc >= 1) else ''}" ) + class OracleDAO(DefaultDAO): def __init__(self): super().__init__("rocketDAONodeTrustedProposals") + class SecurityCouncil(DefaultDAO): def __init__(self): super().__init__("rocketDAOSecurityProposals") + class ProtocolDAO(DAO): def __init__(self): super().__init__("rocketDAOProtocolProposals", "rocketDAOProtocolProposal") @@ -243,21 +246,21 @@ async def fetch_proposal(self, proposal_id: int) -> Proposal: (proposer, message, payload, created, start, phase1_end, phase2_end, expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, vp_required_raw, veto_quorum_raw) = await rp.multicall([ - proposal_contract.functions.getProposer(proposal_id), - proposal_contract.functions.getMessage(proposal_id), - proposal_contract.functions.getPayload(proposal_id), - proposal_contract.functions.getCreated(proposal_id), - proposal_contract.functions.getStart(proposal_id), - proposal_contract.functions.getPhase1End(proposal_id), - proposal_contract.functions.getPhase2End(proposal_id), - 
proposal_contract.functions.getExpires(proposal_id), - proposal_contract.functions.getVotingPowerFor(proposal_id), - proposal_contract.functions.getVotingPowerAgainst(proposal_id), - proposal_contract.functions.getVotingPowerVeto(proposal_id), - proposal_contract.functions.getVotingPowerAbstained(proposal_id), - proposal_contract.functions.getVotingPowerRequired(proposal_id), - proposal_contract.functions.getVetoQuorum(proposal_id) - ]) + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getPhase1End(proposal_id), + proposal_contract.functions.getPhase2End(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotingPowerFor(proposal_id), + proposal_contract.functions.getVotingPowerAgainst(proposal_id), + proposal_contract.functions.getVotingPowerVeto(proposal_id), + proposal_contract.functions.getVotingPowerAbstained(proposal_id), + proposal_contract.functions.getVotingPowerRequired(proposal_id), + proposal_contract.functions.getVetoQuorum(proposal_id) + ]) return ProtocolDAO.Proposal( id=proposal_id, proposer=cast(ChecksumAddress, proposer), diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 8769dcbd..f52682a0 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -78,6 +78,7 @@ async def resolve_ens(interaction, node_address): _pdao_delegates: dict[str, str] = {} + @cached(ttl=900) @retry_async(tries=3, delay=1) async def get_pdao_delegates() -> dict[str, str]: @@ -115,7 +116,7 @@ async def el_explorer_url( url = f"https://saturn-1.net/megapool/{target}{dashboard_network}" if await rp.is_minipool(target): - pass # TODO add explorer url once supported + pass # TODO add explorer url once supported n_key = 
f"addresses.{target}" if not name and (n := _(n_key)) != n_key: @@ -145,7 +146,10 @@ async def el_explorer_url( if not name: a = Addresses.get(target) # don't apply name if it has label is one with the id "take-action", as these don't show up on the explorer - if (not a.labels or len(a.labels) != 1 or a.labels[0].id != "take-action") and a.name and "alert" not in a.name.lower(): + if all(( + (not a.labels or len(a.labels) != 1 or a.labels[0].id != "take-action"), + a.name and ("alert" not in a.name.lower()) + )): name = a.name if not name: # not an odao member, try to get their ens @@ -154,21 +158,17 @@ async def el_explorer_url( if code := await w3.eth.get_code(target): if prefix != -1: prefix += "📄" - if ( - not name - and w3.keccak(text=code.hex()).hex() - in cfg["other.mev_hashes"] - ): + if ((not name) and (w3.keccak(text=code.hex()).hex() in cfg["other.mev_hashes"])): name = "MEV Bot Contract" if not name: with contextlib.suppress(Exception): c = w3.eth.contract(address=target, abi=[{"inputs" : [], - "name" : "name", - "outputs" : [{"internalType": "string", - "name" : "", - "type" : "string"}], - "stateMutability": "view", - "type" : "function"}]) + "name" : "name", + "outputs" : [{"internalType": "string", + "name" : "", + "type" : "string"}], + "stateMutability": "view", + "type" : "function"}]) n = await c.functions.name().call() # make sure nobody is trying to inject a custom link, as there was a guy that made the name of his contract # 'RocketSwapRouter](https://etherscan.io/search?q=0x16d5a408e807db8ef7c578279beeee6b228f1c1c)[', @@ -193,13 +193,15 @@ async def el_explorer_url( prefix = "" return f"{prefix}[{name}]({url})" + async def prepare_args(args): for arg_key, arg_value in list(args.items()): # store raw value args[f"{arg_key}_raw"] = arg_value # handle numbers - if any(keyword in arg_key.lower() for keyword in ["amount", "value", "rate", "totaleth", "stakingeth", "rethsupply", "rplprice", "profit"]) and isinstance(arg_value, int): + 
numeric_keywords = ["amount", "value", "rate", "totaleth", "stakingeth", "rethsupply", "rplprice", "profit"] + if any(keyword in arg_key.lower() for keyword in numeric_keywords) and isinstance(arg_value, int): args[arg_key] = arg_value / 10 ** 18 # handle timestamps @@ -267,15 +269,20 @@ async def assemble(args) -> Embed: case "cs_max_validator_increase_event": e.set_image(url="https://media1.tenor.com/m/Yp6Yeiufb04AAAAd/piranhas-feeding.gif") case "redstone_upgrade_triggered": - e.set_image(url="https://cdn.dribbble.com/users/187497/screenshots/2284528/media/123903807d334c15aa105b44f2bd9252.gif") + url = "https://cdn.dribbble.com/users/187497/screenshots/2284528/media/123903807d334c15aa105b44f2bd9252.gif" + e.set_image(url=url) case "atlas_upgrade_triggered": - e.set_image(url="https://cdn.discordapp.com/attachments/912434217118498876/1097528472567558227/DALLE_2023-04-17_16.25.46_-_an_expresive_oil_painting_of_the_atlas_2_rocket_taking_off_moon_colorfull.png") + url = ( + "https://cdn.discordapp.com/attachments/912434217118498876/1097528472567558227/" + "DALLE_2023-04-17_16.25.46_-_an_expresive_oil_painting_of_the_atlas_2_rocket_taking_off_moon_colorfull.png" + ) + e.set_image(url=url) case "houston_upgrade_triggered": e.set_image(url="https://i.imgur.com/XT5qPWf.png") case "houston_hotfix_upgrade_triggered": e.set_image(url="https://i.imgur.com/JcQS3Sh.png") case "saturn_one_upgrade_triggered": - e.set_image(url="https://i.imgur.com/n3wMCOA.png") + e.set_image(url="https://i.imgur.com/n3wMCOA.png") match args.event_name: case "pdao_set_delegate": @@ -334,9 +341,10 @@ async def assemble(args) -> Embed: if "exchangeRate" in args: e.add_field(name="Exchange Rate", - value=f"`{args.exchangeRate} RPL/{args.otherToken}`" + - ( - f" (`{args.discountAmount}%` Discount, oDAO: `{args.marketExchangeRate} RPL/ETH`)" if "discountAmount" in args else ""), + value=f"`{args.exchangeRate} RPL/{args.otherToken}`" + ( + f" (`{args.discountAmount}%` Discount, oDAO: 
`{args.marketExchangeRate} RPL/ETH`)" + if "discountAmount" in args else "" + ), inline=False) """ @@ -511,8 +519,8 @@ async def assemble(args) -> Embed: tnx_fee_gwei = round(tnx_fee_wei / 10**9) value = f"{tnx_fee_gwei:,} Gwei ({args.tnx_fee_usd} USDC)" else: - value = f"{tnx_fee_wei:,} Wei ({args.tnx_fee_usd} USDC)" - + value = f"{tnx_fee_wei:,} Wei ({args.tnx_fee_usd} USDC)" + e.add_field(name="Transaction Fee", value=value, inline=False) - + return e diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index afdfa1c0..ac77824e 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -28,6 +28,7 @@ class Event: def get_score(self): return (10**9 * self.block_number) + (10**5 * self.transaction_index) + self.event_index + class EventPlugin(commands.Cog): def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 0172e44e..52c66419 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -13,29 +13,29 @@ def get_logs( event: ContractEvent, - from_block: BlockNumber, - to_block: BlockNumber, + from_block: BlockNumber, + to_block: BlockNumber, arg_filters: Optional[dict[str, Any]] = None ) -> list[LogReceipt]: start_block = from_block end_block = to_block - + log.debug(f"Fetching event logs in [{start_block}, {end_block}]") chunk_size = 50_000 from_block = start_block to_block = from_block + chunk_size - + logs = [] - + while from_block <= end_block: logs += event.get_logs( from_block=from_block, to_block=min(to_block, end_block), argument_filters=arg_filters ) - + from_block = to_block + 1 to_block = from_block + chunk_size - + return logs diff --git a/rocketwatch/utils/image.py b/rocketwatch/utils/image.py index c62b5ccd..4efd5b08 100644 --- a/rocketwatch/utils/image.py +++ b/rocketwatch/utils/image.py @@ -11,6 +11,7 @@ Color = tuple[int, int, int] + class Image: def __init__(self, image: 
PillowImage.Image): self.__img = image diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 249d18f7..773a021c 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -487,6 +487,7 @@ def _get_bids(self, api_response: dict) -> dict[float, float]: def _get_asks(self, api_response: dict) -> dict[float, float]: return {float(entry[0]): float(entry[1]) for entry in api_response["tick"]["asks"]} + class BitMart(CEX): @property def color(self) -> str: @@ -697,7 +698,8 @@ def price_to_tick(price: float) -> float: return math.log(price, 1.0001) class Pool(DEX.LiquidityPool): - def __init__(self, pool_address: ChecksumAddress, contract, tick_spacing: int, token_0: ERC20Token, token_1: ERC20Token): + def __init__(self, pool_address: ChecksumAddress, contract, tick_spacing: int, + token_0: ERC20Token, token_1: ERC20Token): self.pool_address = pool_address self.contract = contract self.tick_spacing = tick_spacing diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index c1a74aac..74516b1c 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -22,7 +22,7 @@ def decode_abi(compressed_string): return inflated.decode("ascii") -def uptime(time, highres= False): +def uptime(time, highres=False): parts = [] days, time = time // units.days, time % units.days @@ -70,40 +70,40 @@ def render_branch(_data: dict[str, dict | int]) -> tuple[list, list, int]: _strings = [] _values = [] count = 0 - + _data = {k: v for k, v in _data.items() if v} - for i, (state, sub_data) in enumerate(_data.items()): + for i, (state, sub_data) in enumerate(_data.items()): link = "├" if (i != len(_data) - 1) else "└" _strings.append(f" {link}{state.title()}: ") - + if isinstance(sub_data, dict): sub_strings, sub_values, sub_count = render_branch(sub_data) sub_link = " │" if (i != len(_data) - 1) else " " _strings.extend([sub_link + s for s in sub_strings]) _values.append(sub_count) 
_values.extend(sub_values) - count += sub_count + count += sub_count elif isinstance(sub_data, int): _values.append(sub_data) count += sub_data - + return _strings, _values, count strings, values, tree_sum = render_branch(data) strings.insert(0, f"{name}:") values.insert(0, tree_sum) - + fmt_values = [f"{v:,}" for v in values] - + # longest string offset max_left_len = max(len(s) for s in strings) max_right_len = max(len(v) for v in fmt_values) - + lines = [] for s, v in zip(strings, fmt_values): # right align all values lines.append(s.ljust(max_left_len) + v.rjust(max_right_len)) - + return "\n".join(lines) @@ -149,5 +149,5 @@ def render_tree(data: dict, name: str, max_depth: int = 0) -> str: _v = f"{COLORS[d]}{v}{Style.RESET_ALL}" lines[i] = f"{lines[i].ljust(max_left_len, ' ')}{' ' * (max_right_len - len(str(v)))}{_v}" # replace all spaces with non-breaking spaces - lines = [l.replace(" ", " ") for l in lines] + lines = [line.replace(" ", " ") for line in lines] return "\n".join(lines) diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 7056ab9f..9fb905d4 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -15,6 +15,7 @@ def retry( ) -> Callable[..., Any]: return __retry(exceptions, is_async=False, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) + def retry_async( exceptions: EXCEPTIONS = Exception, *, diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index d96ca75a..73a91d6b 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -40,7 +40,8 @@ def get_sea_creature_for_holdings(holdings): :param holdings: The holdings to get the sea creature for. :return: The sea creature for the given holdings. 
""" - # if the holdings are more than 2 times the highest sea creature, return the highest sea creature with a multiplier next to it + # if the holdings are more than 2 times the highest sea creature, + # return the highest sea creature with a multiplier next to it highest_possible_holdings = max(sea_creatures.keys()) if holdings >= 2 * highest_possible_holdings: return sea_creatures[highest_possible_holdings] * int(holdings / highest_possible_holdings) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 1a504c7f..fe4225be 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -33,4 +33,5 @@ async def get_sync_committee(self, epoch: int) -> Dict[str, Any]: f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" ) + bacon = Bacon(cfg["consensus_layer.endpoint"]) diff --git a/rocketwatch/utils/solidity.py b/rocketwatch/utils/solidity.py index 5459a6d2..a449c1d8 100644 --- a/rocketwatch/utils/solidity.py +++ b/rocketwatch/utils/solidity.py @@ -22,9 +22,11 @@ def to_int(n, decimals=18): def beacon_block_to_date(block_num: int) -> int: return BEACON_START_DATE + (block_num * 12) + def date_to_beacon_block(date: int) -> int: return (date - BEACON_START_DATE) // 12 + def slot_to_beacon_day_epoch_slot(slot: int) -> tuple[int, int, int]: return slot // 32 // 225, slot // 32 % 225, slot % 32 diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index ad6447de..d9302aeb 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -4,37 +4,38 @@ from discord import ui, ButtonStyle, Interaction from utils.embeds import Embed + class PageView(ui.View): def __init__(self, page_size: int): super().__init__(timeout=None) self.page_index = 0 self.page_size = page_size - + @property @abstractmethod def _title(self) -> str: pass - + @abstractmethod async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: pass - + def position_to_page_index(self, position: int) -> int: 
return (position - 1) // self.page_size async def load(self) -> Embed: if self.page_index < 0: self.page_index = 0 - + num_items, content = await self._load_content( (self.page_index * self.page_size), ((self.page_index + 1) * self.page_size - 1) ) - + embed = Embed(title=self._title) if num_items <= 0: embed.set_image(url="https://c.tenor.com/1rQLxWiCtiIAAAAd/tenor.gif") - self.clear_items() # remove buttons + self.clear_items() # remove buttons return embed max_page_index = self.position_to_page_index(num_items) @@ -45,7 +46,7 @@ async def load(self) -> Embed: embed.description = content self.prev_page.disabled = (self.page_index <= 0) - self.next_page.disabled = (self.page_index >= max_page_index) + self.next_page.disabled = (self.page_index >= max_page_index) return embed @ui.button(emoji="⬅", label="Prev", style=ButtonStyle.gray) @@ -59,7 +60,7 @@ async def next_page(self, interaction: Interaction, _) -> None: self.page_index += 1 embed = await self.load() await interaction.response.edit_message(embed=embed, view=self) - + class JumpToModal(ui.Modal, title="Jump To Position"): def __init__(self, view: 'PageView'): super().__init__() @@ -76,7 +77,7 @@ async def on_submit(self, interaction: Interaction) -> None: self.view.page_index = self.view.position_to_page_index(position) embed = await self.view.load() await interaction.response.edit_message(embed=embed, view=self.view) - + @ui.button(label="Jump", style=ButtonStyle.gray) async def jump_to_position(self, interaction: Interaction, _) -> None: modal = self.JumpToModal(self) From 00185f6fafe7c950e0ff3ab1d30b9afd8c3e7ef1 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 23:37:31 +0000 Subject: [PATCH 167/279] slightly more permissive line length --- .pep8speaks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pep8speaks.yml b/.pep8speaks.yml index ae30837e..0aeb45cb 100644 --- a/.pep8speaks.yml +++ b/.pep8speaks.yml @@ -3,7 +3,7 @@ scanner: 
linter: pycodestyle pycodestyle: - max-line-length: 120 + max-line-length: 128 ignore: - E203 - W503 From 9b063319903ca623cf2592b57d47d796993f8374 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 23:43:08 +0000 Subject: [PATCH 168/279] add ruff --- .github/workflows/lint.yml | 16 ++++++++++++++++ .pep8speaks.yml | 10 ---------- 2 files changed, 16 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/lint.yml delete mode 100644 .pep8speaks.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..4c4b426a --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,16 @@ +name: Lint + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: astral-sh/ruff-action@v3 + with: + args: "check --select E,W --ignore E501,E203,E231,W503,W504" diff --git a/.pep8speaks.yml b/.pep8speaks.yml deleted file mode 100644 index 0aeb45cb..00000000 --- a/.pep8speaks.yml +++ /dev/null @@ -1,10 +0,0 @@ -scanner: - diff_only: True - linter: pycodestyle - -pycodestyle: - max-line-length: 128 - ignore: - - E203 - - W503 - - W504 From e05e60115981f2fd4084d722f0388cdf54a2ec30 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 23:45:19 +0000 Subject: [PATCH 169/279] linter config fixes --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4c4b426a..4389febf 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,4 +13,4 @@ jobs: - uses: actions/checkout@v6 - uses: astral-sh/ruff-action@v3 with: - args: "check --select E,W --ignore E501,E203,E231,W503,W504" + args: "check --select E,W --ignore E501,E203,E231" From 3406d318a60d4ef83014e587c22d449180d129bc Mon Sep 17 00:00:00 2001 From: haloooloolo 
<03_sharks.guises@icloud.com> Date: Fri, 6 Mar 2026 23:49:39 +0000 Subject: [PATCH 170/279] capitalize CI --- .github/workflows/docker-ci.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 70a0adc1..587917e8 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -1,9 +1,8 @@ -name: ci +name: CI on: push: - branches: - - 'main' + branches: [ main ] jobs: docker: From c9e3e08ab78efa8d58639367172f91f161c182a9 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 00:33:57 +0000 Subject: [PATCH 171/279] move to pydantic config --- .gitignore | 1 + compose.yaml | 2 +- rocketwatch/__main__.py | 6 +- rocketwatch/config.toml.sample | 94 +++++++++++++++ rocketwatch/main.cfg.sample | 95 --------------- rocketwatch/plugins/about/about.py | 8 +- rocketwatch/plugins/activity/activity.py | 4 +- rocketwatch/plugins/apr/apr.py | 2 +- .../plugins/beacon_events/beacon_events.py | 4 +- .../plugins/chat_summary/chat_summary.py | 4 +- rocketwatch/plugins/collateral/collateral.py | 2 +- .../plugins/commissions/commissions.py | 2 +- rocketwatch/plugins/cow_orders/cow_orders.py | 2 +- rocketwatch/plugins/dao/dao.py | 2 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 4 +- rocketwatch/plugins/debug/debug.py | 34 +++--- .../delegate_contracts/delegate_contracts.py | 2 +- .../plugins/deposit_pool/deposit_pool.py | 4 +- .../plugins/detect_scam/detect_scam.py | 32 +++--- rocketwatch/plugins/event_core/event_core.py | 28 ++--- rocketwatch/plugins/events/events.py | 10 +- .../fee_distribution/fee_distribution.py | 2 +- rocketwatch/plugins/forum/forum.py | 2 +- rocketwatch/plugins/governance/governance.py | 4 +- rocketwatch/plugins/lottery/lottery.py | 2 +- rocketwatch/plugins/metrics/metrics.py | 2 +- rocketwatch/plugins/milestones/milestones.py | 2 +- .../minipool_distribution.py | 2 +- .../pinned_messages/pinned_messages.py | 6 
+- rocketwatch/plugins/proposals/proposals.py | 4 +- rocketwatch/plugins/queue/queue.py | 2 +- rocketwatch/plugins/random/random.py | 4 +- rocketwatch/plugins/releases/releases.py | 2 +- rocketwatch/plugins/reloader/reloader.py | 6 +- rocketwatch/plugins/rewards/rewards.py | 2 +- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- rocketwatch/plugins/rpips/rpips.py | 2 +- rocketwatch/plugins/rpl/rpl.py | 2 +- .../plugins/scam_warning/scam_warning.py | 10 +- rocketwatch/plugins/snapshot/snapshot.py | 2 +- .../plugins/support_utils/support_utils.py | 12 +- .../plugins/transactions/transactions.py | 6 +- rocketwatch/plugins/tvl/tvl.py | 2 +- .../user_distribute/user_distribute.py | 4 +- .../validator_states/validator_states.py | 2 +- rocketwatch/plugins/wall/wall.py | 2 +- rocketwatch/requirements.txt | 2 +- rocketwatch/rocketwatch.py | 16 +-- rocketwatch/utils/block_time.py | 2 +- rocketwatch/utils/cached_ens.py | 2 +- rocketwatch/utils/cfg.py | 108 +++++++++++++++++- rocketwatch/utils/command_tree.py | 2 +- rocketwatch/utils/dao.py | 4 +- rocketwatch/utils/embeds.py | 26 ++--- rocketwatch/utils/etherscan.py | 4 +- rocketwatch/utils/event.py | 4 +- rocketwatch/utils/event_logs.py | 2 +- rocketwatch/utils/liquidity.py | 2 +- rocketwatch/utils/readable.py | 2 +- rocketwatch/utils/rocketpool.py | 4 +- rocketwatch/utils/sea_creatures.py | 1 - rocketwatch/utils/shared_w3.py | 14 +-- rocketwatch/utils/time_debug.py | 2 +- 63 files changed, 365 insertions(+), 262 deletions(-) create mode 100644 rocketwatch/config.toml.sample delete mode 100644 rocketwatch/main.cfg.sample diff --git a/.gitignore b/.gitignore index db8e76ce..a153c178 100644 --- a/.gitignore +++ b/.gitignore @@ -119,6 +119,7 @@ dmypy.json # state state.db */main.cfg +*/config.toml mongodb/ # helper scripts diff --git a/compose.yaml b/compose.yaml index 09d80491..7f7631d8 100644 --- a/compose.yaml +++ b/compose.yaml @@ -4,7 +4,7 @@ services: build: ./rocketwatch volumes: - 
./rocketwatch/contracts/rocketpool:/app/contracts/rocketpool - - ./rocketwatch/main.cfg:/app/main.cfg + - ./rocketwatch/config.toml:/app/config.toml restart: unless-stopped depends_on: - mongodb diff --git a/rocketwatch/__main__.py b/rocketwatch/__main__.py index 56be7efa..f3234fe0 100644 --- a/rocketwatch/__main__.py +++ b/rocketwatch/__main__.py @@ -7,10 +7,10 @@ logging.basicConfig(format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s") logging.getLogger().setLevel("INFO") -logging.getLogger("discord.client").setLevel(cfg["log_level"]) +logging.getLogger("discord.client").setLevel(cfg.log_level) log = logging.getLogger("discord_bot") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) def main() -> None: @@ -24,7 +24,7 @@ def main() -> None: log.info("Starting bot...") bot = RocketWatch(intents=intents) - bot.run(cfg["discord.secret"]) + bot.run(cfg.discord.secret) if __name__ == "__main__": diff --git a/rocketwatch/config.toml.sample b/rocketwatch/config.toml.sample new file mode 100644 index 00000000..b26329b4 --- /dev/null +++ b/rocketwatch/config.toml.sample @@ -0,0 +1,94 @@ +log_level = "INFO" + +[discord] +secret = "" +guilds = [] + +[discord.owner] +user_id = -1 +server_id = -1 + +[discord.channels] +default = -1 +dao = -1 +errors = -1 + +[execution_layer] +explorer = "https://etherscan.io" +etherscan_secret = "" + +[execution_layer.endpoint] +current = "http://node:8545" +mainnet = "http://node:8545" +archive = "http://node:8545" + +[consensus_layer] +explorer = "https://beaconcha.in" +endpoint = "http://node:5052" +beaconcha_secret = "" + +[mongodb] +uri = "mongodb://mongodb:27017" + +[rocketpool] +chain = "mainnet" +dao_multisigs = [ + "0x778c08fC151D7AB10042334B6A0929D4fa2983cA", + "0x6efD08303F42EDb68F2D6464BCdCA0824e1C813a", + "0xb867EA3bBC909954d737019FEf5AB25dFDb38CB9", +] + +[rocketpool.support] +user_ids = [] +role_ids = [] +server_id = -1 +channel_id = -1 +moderator_id = -1 + 
+[rocketpool.dm_warning] +channels = [] + +[rocketpool.manual_addresses] +rocketStorage = "0x1d8f8f00cfa6758d7bE78336684788Fb0ee0Fa46" +rocketSignerRegistry = "0xc1062617d10Ae99E09D941b60746182A87eAB38F" +rocketExitArbitrage = "0x2631618408497d27D455aBA9c99A6f61eF305559" +multicall3 = "0xcA11bde05977b3631167028862bE2a173976CA11" +AirSwap = "0x4572f2554421Bd64Bef1c22c8a81840E8D496BeA" +yearnPool = "0x5c0A86A32c129538D62C106Eb8115a8b02358d57" +curvePool = "0x447Ddd4960d9fdBF6af9a790560d0AF76795CB08" +wstETHToken = "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0" +unstETH = "0x889edC2eDab5f40e902b864aD4d7AdE8E412F9B1" +ConstellationDirectory = "0x4343743dBc46F67D3340b45286D8cdC13c8575DE" +LUSD = "0x5f98805A4E8be255a32880FDeC7F6728C6568bA0" +BalancerVault = "0xBA12222222228d8Ba445958a75a0704d566BF2C8" +UniV3_USDC_ETH = "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640" +UniV3_rETH_ETH = "0x553e9C493678d8606d6a5ba284643dB2110Df823" +RockSolidVault = "0x936faCdf10c8c36294e7b9d28345255539d81bc7" + +[modules] +include = [] +exclude = [] +enable_commands = true + +[events] +lookback_distance = 8 +genesis = 13325233 +block_batch_size = 1000 + +[events.status_message.default] +plugin = "DepositPool" +cooldown = 60 +fields = [] + +[events.status_message.dao] +plugin = "Governance" +cooldown = 300 +fields = [] + +[other] +mev_hashes = [] + +[other.secrets] +wakatime = "" +cronitor = "" +anthropic = "" diff --git a/rocketwatch/main.cfg.sample b/rocketwatch/main.cfg.sample deleted file mode 100644 index b6c99b0f..00000000 --- a/rocketwatch/main.cfg.sample +++ /dev/null @@ -1,95 +0,0 @@ -log_level: `logging:INFO` -discord: { - secret: "" - owner: { - user_id: -1 - server_id: -1 - } - channels: { - default: -1 - dao: -1 - errors: -1 - } -} -execution_layer: { - explorer: "https://etherscan.io" - endpoint: { - current: "http://node:8545" - mainnet: "http://node:8545" - archive: "http://node:8545" - } - etherscan_secret: "" -} -consensus_layer: { - explorer: "https://beaconcha.in" - endpoint: 
"http://node:5052" - beaconcha_secret: "" -} -mongodb: { - uri: "mongodb://mongodb:27017" -} -rocketpool: { - chain: "mainnet" - support: { - user_ids: [] - role_ids: [] - server_id: -1 - channel_id: -1 - moderator_id: -1 - } - dm_warning: { - channels: [] - } - dao_multisigs: [ - "0x778c08fC151D7AB10042334B6A0929D4fa2983cA", - "0x6efD08303F42EDb68F2D6464BCdCA0824e1C813a", - "0xb867EA3bBC909954d737019FEf5AB25dFDb38CB9" - ] - manual_addresses: { - rocketStorage: "0x1d8f8f00cfa6758d7bE78336684788Fb0ee0Fa46" - rocketSignerRegistry: "0xc1062617d10Ae99E09D941b60746182A87eAB38F" - rocketExitArbitrage: "0x2631618408497d27D455aBA9c99A6f61eF305559" - multicall3: "0xcA11bde05977b3631167028862bE2a173976CA11" - AirSwap: "0x4572f2554421Bd64Bef1c22c8a81840E8D496BeA" - yearnPool: "0x5c0A86A32c129538D62C106Eb8115a8b02358d57" - curvePool: "0x447Ddd4960d9fdBF6af9a790560d0AF76795CB08" - wstETHToken: "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0" - unstETH: "0x889edC2eDab5f40e902b864aD4d7AdE8E412F9B1" - ConstellationDirectory: "0x4343743dBc46F67D3340b45286D8cdC13c8575DE" - LUSD: "0x5f98805A4E8be255a32880FDeC7F6728C6568bA0" - BalancerVault: "0xBA12222222228d8Ba445958a75a0704d566BF2C8" - UniV3_USDC_ETH: "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640" - UniV3_rETH_ETH: "0x553e9C493678d8606d6a5ba284643dB2110Df823" - RockSolidVault: "0x936faCdf10c8c36294e7b9d28345255539d81bc7" - } -} -modules: { - include: [] - exclude: [] - enable_commands: true -} -events: { - lookback_distance: 8 - genesis: 13325233 - block_batch_size: 1000 - status_message: { - default: { - plugin: "DepositPool" - cooldown: 60 - fields: [] - } - dao: { - plugin: "Governance" - cooldown: 300 - fields: [] - } - } -} -other: { - mev_hashes: [] - secrets: { - wakatime: "" - cronitor: "" - anthropic: "" - } -} diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 4a186aab..a970056a 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -21,7 +21,7 @@ 
BOOT_TIME = time.time() log = logging.getLogger("about") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class About(commands.Cog): @@ -37,7 +37,7 @@ async def about(self, interaction: Interaction): g = self.bot.guilds code_time = None - if api_key := cfg.get("other.secrets.wakatime"): + if api_key := cfg.other.secrets.wakatime: try: async with aiohttp.ClientSession() as session: async with session.get( @@ -61,10 +61,10 @@ async def about(self, interaction: Interaction): f"{humanize.intcomma(sum(guild.member_count for guild in g))} members reached!", inline=False) - address = await el_explorer_url(cfg["rocketpool.manual_addresses.rocketStorage"]) + address = await el_explorer_url(cfg.rocketpool.manual_addresses["rocketStorage"]) e.add_field(name="Storage Contract", value=address) - e.add_field(name="Chain", value=cfg["rocketpool.chain"].capitalize()) + e.add_field(name="Chain", value=cfg.rocketpool.chain.capitalize()) e.add_field(name="Plugins loaded", value=str(len(self.bot.cogs))) diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 743773d2..68eaf18c 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -8,13 +8,13 @@ from utils.cfg import cfg log = logging.getLogger("rich_activity") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RichActivity(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.monitor = Monitor("update-activity", api_key=cfg["other.secrets.cronitor"]) + self.monitor = Monitor("update-activity", api_key=cfg.other.secrets.cronitor) self.task.start() async def cog_unload(self): diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 0ab5f293..d6691722 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -19,7 +19,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("apr") -log.setLevel(cfg["log_level"]) 
+log.setLevel(cfg.log_level) def to_apr(d1, d2, effective=True): diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index f90d1a01..9c36fe52 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -19,7 +19,7 @@ from utils.retry import retry_async log = logging.getLogger("beacon_events") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class BeaconEvents(EventPlugin): @@ -125,7 +125,7 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: # no proposed block return None - if not (api_key := cfg["consensus_layer.beaconcha_secret"]): + if not (api_key := cfg.consensus_layer.beaconcha_secret): log.warning("Missing beaconcha.in API key") return None diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 95b153ff..716f2db0 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -18,13 +18,13 @@ from utils.embeds import Embed log = logging.getLogger("chat_summary") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class ChatSummary(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.client = anthropic.AsyncAnthropic(api_key=cfg["other.secrets.anthropic"]) + self.client = anthropic.AsyncAnthropic(api_key=cfg.other.secrets.anthropic) # log all possible engines self.tokenizer = tiktoken.encoding_for_model("gpt-4-turbo") diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 63d04699..29bd981f 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -22,7 +22,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("collateral") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) p = inflect.engine() diff --git 
a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index 44572db4..2ed69b7d 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -15,7 +15,7 @@ from utils.visibility import is_hidden log = logging.getLogger("commissions") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Commissions(commands.Cog): diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index d1d0447e..81357d47 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -18,7 +18,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("cow_orders") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class CowOrders(EventPlugin): diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 45561b49..adbd7efc 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -25,7 +25,7 @@ log = logging.getLogger("dao") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class OnchainDAO(Cog): diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 10259bc2..2f29616d 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -25,7 +25,7 @@ log = logging.getLogger("db_upkeep_task") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) def is_true(v) -> bool: @@ -108,7 +108,7 @@ def _unpack_validator_info_dynamic(info): class DBUpkeepTask(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.monitor = Monitor("db-task", api_key=cfg["other.secrets.cronitor"]) + self.monitor = Monitor("db-task", api_key=cfg.other.secrets.cronitor) self.batch_size = 250 self.cooldown = timedelta(minutes=10) self.bot.loop.create_task(self.loop()) diff --git 
a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 4b5847ac..192a3755 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -22,7 +22,7 @@ from utils.visibility import is_hidden, is_hidden_weak, is_hidden_role_controlled log = logging.getLogger("debug") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Debug(Cog): @@ -49,7 +49,7 @@ async def on_ready(self): # --------- PRIVATE OWNER COMMANDS --------- # @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def raise_exception(self, interaction: Interaction): """ @@ -59,7 +59,7 @@ async def raise_exception(self, interaction: Interaction): raise Exception("this should never happen wtf is your filesystem") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def get_members_of_role(self, interaction: Interaction, guild_id: str, role_id: str): """Get members of a role""" @@ -80,7 +80,7 @@ async def get_members_of_role(self, interaction: Interaction, guild_id: str, rol # list all roles of a guild with name and id @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def get_roles(self, interaction: Interaction, guild_id: str): """Get roles of a guild""" @@ -98,7 +98,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): await interaction.followup.send(content=f"```{repr(err)}```") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def delete_msg(self, interaction: Interaction, message_url: str): """ @@ -112,7 +112,7 @@ async def delete_msg(self, interaction: Interaction, message_url: str): await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def edit_embed(self, interaction: Interaction, 
message_url: str, new_description: str): await interaction.response.defer(ephemeral=True) @@ -125,7 +125,7 @@ async def edit_embed(self, interaction: Interaction, message_url: str, new_descr await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str = None): """ @@ -141,7 +141,7 @@ async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_nam await interaction.followup.send(content=f"```Input:\n{data}```") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def debug_transaction(self, interaction: Interaction, tnx_hash: str): """ @@ -155,7 +155,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): await interaction.followup.send(content="```No revert reason Available```") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def purge_minipools(self, interaction: Interaction, confirm: bool = False): """ @@ -169,7 +169,7 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def sync_commands(self, interaction: Interaction): """ @@ -180,7 +180,7 @@ async def sync_commands(self, interaction: Interaction): await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def talk(self, interaction: Interaction, channel: str, message: str): """ @@ -192,7 +192,7 @@ async def talk(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + 
@guilds(cfg.discord.owner.server_id) @is_owner() async def announce(self, interaction: Interaction, channel: str, message: str): """ @@ -206,7 +206,7 @@ async def announce(self, interaction: Interaction, channel: str, message: str): await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def restore_support_template(self, interaction: Interaction, template_name: str, message_url: str): await interaction.response.defer(ephemeral=True) @@ -250,7 +250,7 @@ async def restore_support_template(self, interaction: Interaction, template_name await interaction.followup.send(content="Done") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def restore_missed_events(self, interaction: Interaction, tx_hash: str): import pickle @@ -266,7 +266,7 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): if ("topics" in event_log) and (event_log["topics"][0].hex() in events_plugin.topic_map): filtered_events.append(event_log) - channels = cfg["discord.channels"] + channels = cfg.discord.channels events, _ = events_plugin.process_events(filtered_events) for event in events: channel_candidates = [value for key, value in channels.items() if event.event_name.startswith(key)] @@ -343,7 +343,7 @@ async def get_abi_of_contract(self, interaction: Interaction, contract: str): await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) try: abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) - file = File(io.StringIO(abi), f"{contract}.{cfg['rocketpool.chain'].lower()}.abi.json") + file = File(io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") await interaction.followup.send(file=file) except Exception as err: await interaction.followup.send(content=f"```Exception: {repr(err)}```") @@ -353,7 +353,7 @@ async def get_address_of_contract(self, 
interaction: Interaction, contract: str) """Retrieve the latest address for a contract""" await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) try: - address = cfg["rocketpool.manual_addresses"].get(contract) + address = cfg.rocketpool.manual_addresses.get(contract) if not address: address = await rp.uncached_get_address_by_name(contract) await interaction.followup.send(content=await el_explorer_url(address)) diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index 9e803bd9..4b3126ff 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -14,7 +14,7 @@ from utils.rocketpool import rp log = logging.getLogger("delegate_contracts") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class DelegateContracts(commands.Cog): diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index b4705567..6122b23d 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -13,7 +13,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("deposit_pool") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class DepositPool(StatusPlugin): @@ -139,7 +139,7 @@ async def get_status(self) -> Embed: collateral_embed = await self.get_contract_collateral_stats() embed.add_field(name="Withdrawals", value=collateral_embed.description, inline=False) - if cfg["rocketpool.chain"] != "mainnet": + if cfg.rocketpool.chain != "mainnet": return embed reth_price = await rp.get_reth_eth_price() diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 1cdd9c90..868f3705 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -38,7 +38,7 
@@ from utils.embeds import Embed log = logging.getLogger("detect_scam") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class DetectScam(Cog): @@ -50,9 +50,9 @@ class Color: @staticmethod def is_reputable(user: Member) -> bool: return any(( - user.id == cfg["discord.owner.user_id"], - user.id in cfg["rocketpool.support.user_ids"], - {role.id for role in user.roles} & set(cfg["rocketpool.support.role_ids"]), + user.id == cfg.discord.owner.user_id, + user.id in cfg.rocketpool.support.user_ids, + {role.id for role in user.roles} & set(cfg.rocketpool.support.role_ids), user.guild_permissions.moderate_members )) @@ -127,14 +127,14 @@ def __init__(self, bot: RocketWatch): self.message_report_menu = ContextMenu( name="Report Message", callback=self.manual_message_report, - guild_ids=[cfg["rocketpool.support.server_id"]], + guild_ids=[cfg.rocketpool.support.server_id], ) self.bot.tree.add_command(self.message_report_menu) self.user_report_menu = ContextMenu( name="Report User", callback=self.manual_user_report, type=AppCommandType.user, - guild_ids=[cfg["rocketpool.support.server_id"]] + guild_ids=[cfg.rocketpool.support.server_id] ) self.bot.tree.add_command(self.user_report_menu) @@ -267,7 +267,7 @@ async def report_message(self, message: Message, reason: str) -> None: warning_msg = None log.warning(f"Failed to send warning message in reply to {message.id}") - report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) report_msg = await report_channel.send(embed=report, file=contents) await self.bot.db.scam_reports.update_one( @@ -293,11 +293,11 @@ async def manual_message_report(self, interaction: Interaction, message: Message warning, report, contents = components - report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) + report_channel = await 
self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) report_msg = await report_channel.send(embed=report, file=contents) await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"report_id": report_msg.id}}) - moderator = await self.bot.get_or_fetch_user(cfg["rocketpool.support.moderator_id"]) + moderator = await self.bot.get_or_fetch_user(cfg.rocketpool.support.moderator_id) view = self.RemovalVoteView(self, message) warning_msg = await message.reply( content=f"{moderator.mention} {report_msg.jump_url}", @@ -450,7 +450,7 @@ async def on_message(self, message: Message) -> None: if message.guild is None: return - if message.guild.id != cfg["rocketpool.support.server_id"]: + if message.guild.id != cfg.rocketpool.support.server_id: log.warning(f"Ignoring message in {message.guild.id})") return @@ -473,7 +473,7 @@ async def on_message_edit(self, before: Message, after: Message) -> None: @Cog.listener() async def on_reaction_add(self, reaction: Reaction, user: User) -> None: - if reaction.message.guild.id != cfg["rocketpool.support.server_id"]: + if reaction.message.guild.id != cfg.rocketpool.support.server_id: log.warning(f"Ignoring reaction in {reaction.message.guild.id}") return @@ -519,7 +519,7 @@ async def on_member_ban(self, guild: Guild, user: User) -> None: await self.bot.db.scam_reports.update_one(report, {"$set": {"user_banned": True}}) async def _update_report(self, report: dict, note: str) -> None: - report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) try: message = await report_channel.fetch_message(report["report_id"]) embed = message.embeds[0] @@ -542,7 +542,7 @@ async def report_thread(self, thread: Thread, reason: str) -> None: log.warning(f"Failed to send warning message in thread {thread.id}") warning_msg = None - report_channel = await 
self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) report_msg = await report_channel.send(embed=report) await self.bot.db.scam_reports.update_one( @@ -552,7 +552,7 @@ async def report_thread(self, thread: Thread, reason: str) -> None: @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: - if thread.guild.id != cfg["rocketpool.support.server_id"]: + if thread.guild.id != cfg.rocketpool.support.server_id: log.warning(f"Ignoring thread creation in {thread.guild.id}") return @@ -581,7 +581,7 @@ async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) @command() - @guilds(cfg["rocketpool.support.server_id"]) + @guilds(cfg.rocketpool.support.server_id) async def report_user(self, interaction: Interaction, user: Member) -> None: """Generate a suspicious user report and send it to the report channel""" await self.manual_user_report(interaction, user) @@ -601,7 +601,7 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No content="Failed to report user. They may have already been reported or banned." 
) - report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) report_msg = await report_channel.send(embed=report) await self.bot.db.scam_reports.update_one( diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index f47acf97..23862082 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -23,7 +23,7 @@ from utils.shared_w3 import w3 log = logging.getLogger("event_core") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class EventCore(commands.Cog): @@ -37,10 +37,10 @@ def __str__(self) -> str: def __init__(self, bot: RocketWatch): self.bot = bot self.state = self.State.OK - self.channels = cfg["discord.channels"] - self.head_block: BlockIdentifier = cfg["events.genesis"] - self.block_batch_size = cfg["events.block_batch_size"] - self.monitor = Monitor("gather-new-events", api_key=cfg["other.secrets.cronitor"]) + self.channels = cfg.discord.channels + self.head_block: BlockIdentifier = cfg.events.genesis + self.block_batch_size = cfg.events.block_batch_size + self.monitor = Monitor("gather-new-events", api_key=cfg.other.secrets.cronitor) self.task.start() async def cog_unload(self) -> None: @@ -95,7 +95,7 @@ async def gather_new_events(self) -> None: to_block = latest_block coroutines = [sm.get_new_events() for sm in submodules] # prevent losing state if process is interrupted before updating db - self.head_block = cfg["events.genesis"] + self.head_block = cfg.events.genesis else: # behind chain head, let's see how far last_event_entry = await self.bot.db.event_queue.find().sort( @@ -134,7 +134,7 @@ async def gather_new_events(self) -> None: results = await asyncio.gather(*coroutines) - channels = cfg["discord.channels"] + channels = self.channels events: list[dict[str, Any]] = [] for result in results: @@ -225,7 +225,7 
@@ def try_load(_entry: dict, _key: str) -> Optional[Any]: log.info("Processed all events in queue") async def update_status_messages(self) -> None: - configs = cfg.get("events.status_message", {}) + configs = cfg.events.status_message for state_message in (await self.bot.db.state_messages.find().to_list()): if state_message["_id"] not in configs: log.debug(f"No config for state message ID {state_message['_id']}, removing message") @@ -235,33 +235,33 @@ async def update_status_messages(self) -> None: log.debug(f"Updating state message for channel {channel_name}") await self._update_status_message(channel_name, config) - async def _update_status_message(self, channel_name: str, config: dict) -> None: + async def _update_status_message(self, channel_name: str, config) -> None: state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) if state_message: age = datetime.now() - state_message["sent_at"] - cooldown = timedelta(seconds=config["cooldown"]) + cooldown = timedelta(seconds=config.cooldown) if (age < cooldown) and (state_message["state"] == str(self.State.OK)): log.debug(f"State message for {channel_name} not past cooldown: {age} < {cooldown}") return if not (embed := await generate_template_embed(self.bot.db, "announcement")): try: - plugin: StatusPlugin = self.bot.cogs.get(config["plugin"]) + plugin: StatusPlugin = self.bot.cogs.get(config.plugin) embed = await plugin.get_status() except Exception as err: await self.bot.report_error(err) return embed.timestamp = datetime.now() - embed.set_footer(text=f"Tracking {cfg['rocketpool.chain']} using {len(self.bot.cogs)} plugins") - for field in config["fields"]: + embed.set_footer(text=f"Tracking {cfg.rocketpool.chain} using {len(self.bot.cogs)} plugins") + for field in config.fields: embed.add_field(**field) await self._replace_or_add_status(channel_name, embed, state_message) async def show_service_interrupt(self) -> None: embed = await assemble(MutableAttributeDict({"event_name": 
"service_interrupted"})) - for channel_name in cfg.get("events.status_message", {}).keys(): + for channel_name in cfg.events.status_message: state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) if (not state_message) or (state_message["state"] != str(self.state.ERROR)): await self._replace_or_add_status(channel_name, embed, state_message) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index e5501755..696fc8dc 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -26,7 +26,7 @@ from utils.block_time import block_to_ts log = logging.getLogger("events") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Coroutine[None, None, list[LogReceipt | EventData]]] @@ -121,7 +121,7 @@ async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["lat return partial_filters, event_map, topic_map @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def trigger_event( self, @@ -155,7 +155,7 @@ async def trigger_event( await interaction.followup.send(content="No events triggered.") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def replay_events(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() @@ -601,7 +601,7 @@ def share_repr(percentage: float) -> str: args.discountAmount = (1 - args.exchangeRate / solidity.to_float(args.marketExchangeRate)) * 100 receipt = None - if cfg["rocketpool.chain"] == "mainnet": + if cfg.rocketpool.chain == "mainnet": receipt = await w3.eth.get_transaction_receipt(event.transactionHash) args.tnx_fee = receipt["gasUsed"] * receipt["effectiveGasPrice"] args.tnx_fee_usd = round(await rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) @@ -698,7 +698,7 @@ def share_repr(percentage: float) -> str: elif 
"transfer_event" in event_name: token_prefix = event_name.split("_", 1)[0] args.amount = args.value / 10**18 - if args["from"] in cfg["rocketpool.dao_multsigs"]: + if args["from"] in cfg.rocketpool.dao_multisigs: event_name = "pdao_erc20_transfer_event" token_contract = await rp.assemble_contract(name="ERC20", address=event["address"]) args.symbol = await token_contract.functions.symbol().call() diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 5404529a..215e8ff0 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -14,7 +14,7 @@ from utils.readable import render_tree_legacy log = logging.getLogger("fee_distribution") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class FeeDistribution(commands.Cog): diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 948275a7..6d0c02c1 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -15,7 +15,7 @@ from utils.retry import retry_async log = logging.getLogger("forum") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Forum(commands.Cog): diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 5e57bf15..0679b28b 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -19,7 +19,7 @@ from utils.block_time import ts_to_block log = logging.getLogger("governance") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Governance(StatusPlugin): @@ -94,7 +94,7 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: for _i, _proposal in enumerate(_proposals, start=1): _title = sanitize(_proposal.message, 40) _tx_hash = await self._get_tx_hash_for_proposal(_dao, _proposal) - _url = 
f"{cfg['execution_layer.explorer']}/tx/{_tx_hash}" + _url = f"{cfg.execution_layer.explorer}/tx/{_tx_hash}" text += f" {_i}. [{_title}]({_url}) (#{_proposal.id})\n" return text diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index ec792a59..a60edaeb 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -15,7 +15,7 @@ from utils.visibility import is_hidden log = logging.getLogger("lottery") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Lottery(commands.Cog): diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 8d6b5938..b7c6d2c6 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -15,7 +15,7 @@ from utils.visibility import is_hidden log = logging.getLogger("metrics") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Metrics(commands.Cog): diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 97919568..802c5b23 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -11,7 +11,7 @@ from utils.event import EventPlugin, Event log = logging.getLogger("milestones") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Milestones(EventPlugin): diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 39901ff2..36aafe46 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -16,7 +16,7 @@ from utils.visibility import is_hidden log = logging.getLogger("minipool_distribution") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) p = inflect.engine() diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py 
b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 93d9305b..b1c2fe7d 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -11,7 +11,7 @@ from utils.embeds import Embed log = logging.getLogger("rich_activity") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class PinnedMessages(commands.Cog): @@ -72,7 +72,7 @@ async def run_loop(self): await self.bot.report_error(err) @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def pin(self, interaction: Interaction, channel_id: int, title: str, description: str): await interaction.response.defer() @@ -99,7 +99,7 @@ async def pin(self, interaction: Interaction, channel_id: int, title: str, descr await interaction.followup.send("Created pinned message") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def unpin(self, interaction: Interaction, channel_id: str): await interaction.response.defer() diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 7d8a36f1..fbf85cbc 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -25,7 +25,7 @@ cog_id = "proposals" log = logging.getLogger(cog_id) -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) LOOKUP = { "consensus": { @@ -115,7 +115,7 @@ def parse_proposal(beacon_block: dict) -> dict: class Proposals(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.monitor = Monitor("proposals-task", api_key=cfg["other.secrets.cronitor"]) + self.monitor = Monitor("proposals-task", api_key=cfg.other.secrets.cronitor) self.batch_size = 100 self.cooldown = timedelta(minutes=5) self.bot.loop.create_task(self.loop()) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index e7dc39f1..2fb796c4 100644 --- 
a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -17,7 +17,7 @@ from utils.views import PageView log = logging.getLogger("queue") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Queue(Cog): diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 62745468..11aaa825 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -22,7 +22,7 @@ from utils.visibility import is_hidden, is_hidden_weak log = logging.getLogger("random") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Random(commands.Cog): @@ -71,7 +71,7 @@ async def burn_reason(self, interaction: Interaction): if "address" not in entry: description += f" {entry['name']}" else: - url = cfg["execution_layer.explorer"] + url = cfg.execution_layer.explorer if not entry["name"]: entry["name"] = s_hex(entry["address"]) target = f"[{entry['name']}]({url}/address/{entry['address']})" diff --git a/rocketwatch/plugins/releases/releases.py b/rocketwatch/plugins/releases/releases.py index 9bea7220..83d0d3eb 100644 --- a/rocketwatch/plugins/releases/releases.py +++ b/rocketwatch/plugins/releases/releases.py @@ -12,7 +12,7 @@ from utils.visibility import is_hidden log = logging.getLogger("releases") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Releases(commands.Cog): diff --git a/rocketwatch/plugins/reloader/reloader.py b/rocketwatch/plugins/reloader/reloader.py index 866eac0d..d9cb1d66 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -27,7 +27,7 @@ async def _get_unloaded_extensions(self, interaction: Interaction, current: str) return [Choice(name=plugin, value=plugin) for plugin in (all - loaded) if current.lower() in plugin.lower()][:25] @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() @autocomplete(module=_get_unloaded_extensions) async def 
load(self, interaction: Interaction, module: str): @@ -43,7 +43,7 @@ async def load(self, interaction: Interaction, module: str): await interaction.followup.send(content=f"Plugin `{module}` not found!") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() @autocomplete(module=_get_loaded_extensions) async def unload(self, interaction: Interaction, module: str): @@ -57,7 +57,7 @@ async def unload(self, interaction: Interaction, module: str): await interaction.followup.send(content=f"Plugin `{module}` not loaded!") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() @autocomplete(module=_get_loaded_extensions) async def reload(self, interaction: Interaction, module: str): diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 9476eeb2..e8eed5f1 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -22,7 +22,7 @@ from utils.block_time import ts_to_block log = logging.getLogger("rewards") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Rewards(commands.Cog): diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 6beefd8d..63efe4ab 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -24,7 +24,7 @@ cog_id = "rocksolid" log = logging.getLogger(cog_id) -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RockSolid(Cog): diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 78cca06d..e9373fe6 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -14,7 +14,7 @@ from utils.retry import retry_async log = logging.getLogger("rpips") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RPIPs(Cog): diff --git a/rocketwatch/plugins/rpl/rpl.py 
b/rocketwatch/plugins/rpl/rpl.py index a63b87d0..0ef7772d 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -14,7 +14,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("rpl") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RPL(commands.Cog): diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index 8650483f..e76e0c5b 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -10,20 +10,20 @@ log = logging.getLogger("scam_warning") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class ScamWarning(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.channel_ids = set(cfg["rocketpool.dm_warning.channels"]) + self.channel_ids = set(cfg.rocketpool.dm_warning.channels) self.inactivity_cooldown = timedelta(days=90) self.failure_cooldown = timedelta(days=1) async def send_warning(self, user) -> None: - support_channel = await self.bot.get_or_fetch_channel(cfg["rocketpool.support.channel_id"]) - report_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.report_scams"]) - resource_channel = await self.bot.get_or_fetch_channel(cfg["discord.channels.resources"]) + support_channel = await self.bot.get_or_fetch_channel(cfg.rocketpool.support.channel_id) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + resource_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["resources"]) embed = Embed() embed.title = "**Stay Safe on Rocket Pool Discord**" diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 50ebd96a..2a89ff04 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -26,7 +26,7 @@ from utils.retry import retry_async log = logging.getLogger("snapshot") 
-log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Snapshot(EventPlugin): diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index b4723b4d..07310ee9 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -12,7 +12,7 @@ from utils.embeds import Embed log = logging.getLogger("support_utils") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) async def generate_template_embed(db, template_name: str): @@ -133,10 +133,10 @@ async def on_submit(self, interaction: Interaction) -> None: def has_perms(interaction: Interaction): return any([ - interaction.user.id in cfg["rocketpool.support.user_ids"], - any(r.id in cfg["rocketpool.support.role_ids"] for r in interaction.user.roles), - cfg["discord.owner.user_id"] == interaction.user.id, - interaction.user.guild_permissions.moderate_members and interaction.guild.id == cfg["rocketpool.support.server_id"] + interaction.user.id in cfg.rocketpool.support.user_ids, + any(r.id in cfg.rocketpool.support.role_ids for r in interaction.user.roles), + cfg.discord.owner.user_id == interaction.user.id, + interaction.user.guild_permissions.moderate_members and interaction.guild.id == cfg.rocketpool.support.server_id ]) @@ -193,7 +193,7 @@ class SupportUtils(GroupCog, name="support"): subgroup = Group( name='template', description='various templates used by active support members', - guild_ids=[cfg["rocketpool.support.server_id"]] + guild_ids=[cfg.rocketpool.support.server_id] ) def __init__(self, bot: RocketWatch): diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index b4d81c7d..a8822220 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -19,7 +19,7 @@ from utils.shared_w3 import w3 log = logging.getLogger("transactions") 
-log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Transactions(EventPlugin): @@ -51,7 +51,7 @@ async def _parse_transaction_config() -> tuple[list[ChecksumAddress], dict]: return addresses, function_map @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def trigger_tx( self, @@ -79,7 +79,7 @@ async def trigger_tx( await interaction.followup.send(content="No events triggered.") @command() - @guilds(cfg["discord.owner.server_id"]) + @guilds(cfg.discord.owner.server_id) @is_owner() async def replay_tx(self, interaction: Interaction, tx_hash: str): await interaction.response.defer() diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index ede8a19e..e8c681e1 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -16,7 +16,7 @@ from utils.visibility import is_hidden log = logging.getLogger("tvl") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) def split_rewards_logic(balance, node_share, commission, force_base=False): diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 8e45d17c..87869fb4 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -17,7 +17,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("user_distribute") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class InstructionsView(ui.View): @@ -82,7 +82,7 @@ async def cog_unload(self): @tasks.loop(hours=8) async def task(self): - channel_id = cfg.get("discord.channels.user_distribute") + channel_id = cfg.discord.channels.get("user_distribute") if not channel_id: return diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 50462f1b..976a3d92 100644 --- 
a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -12,7 +12,7 @@ from utils.visibility import is_hidden_weak log = logging.getLogger("validator_states") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) _BEACON_PENDING = {"in_queue": "unassigned", "prestaked": "prestaked", "staking": "staked"} diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 1899dd8a..c2a3d90c 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -32,7 +32,7 @@ from utils.cfg import cfg log = logging.getLogger("wall") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Wall(commands.Cog): diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt index ce5dd6ad..307435bf 100644 --- a/rocketwatch/requirements.txt +++ b/rocketwatch/requirements.txt @@ -7,7 +7,7 @@ cachetools==7.0.3 bidict==0.23.1 uptime==3.0.1 discord.py==2.7.1 -config==0.5.1 +pydantic>=2.0.0,<3.0.0 pytz==2026.1.post1 matplotlib==3.10.8 inflect==7.5.0 diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index d1821902..6ade2aab 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -25,22 +25,22 @@ from utils.rocketpool import rp log = logging.getLogger("rocketwatch") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RocketWatch(Bot): def __init__(self, intents: Intents) -> None: super().__init__(command_prefix=(), tree_cls=RWCommandTree, intents=intents) - self.db = AsyncMongoClient(cfg["mongodb.uri"]).rocketwatch + self.db = AsyncMongoClient(cfg.mongodb.uri).rocketwatch async def _load_plugins(self): - chain = cfg["rocketpool.chain"] - storage = cfg["rocketpool.manual_addresses.rocketStorage"] + chain = cfg.rocketpool.chain + storage = cfg.rocketpool.manual_addresses["rocketStorage"] log.info(f"Running using storage contract {storage} (Chain: {chain})") log.info("Loading plugins...") - 
included_modules = set(cfg["modules.include"] or []) - excluded_modules = set(cfg["modules.exclude"] or []) + included_modules = set(cfg.modules.include or []) + excluded_modules = set(cfg.modules.exclude or []) def should_load_plugin(_plugin: str) -> bool: # inclusion takes precedence in case of collision @@ -89,7 +89,7 @@ def clear_commands(self) -> None: async def on_ready(self): log.info(f"Logged in as {self.user.name} ({self.user.id})") - commands_enabled = cfg["modules.enable_commands"] + commands_enabled = cfg.modules.enable_commands if not commands_enabled: log.info("Commands disabled, clearing tree...") self.clear_commands() @@ -147,7 +147,7 @@ async def report_error(self, exception: Exception, interaction: Optional[Interac log.error(err_trace) try: - channel = await self.get_or_fetch_channel(cfg["discord.channels.errors"]) + channel = await self.get_or_fetch_channel(cfg.discord.channels["errors"]) file = File(io.StringIO(err_trace), "exception.txt") await retry_async(tries=5, delay=5)(channel.send)(err_description, file=file) except Exception: diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index aea4d54e..10050c5a 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -7,7 +7,7 @@ from utils.shared_w3 import w3 log = logging.getLogger("block_time") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) @cached() diff --git a/rocketwatch/utils/cached_ens.py b/rocketwatch/utils/cached_ens.py index 9cbc19f4..8d024ddb 100644 --- a/rocketwatch/utils/cached_ens.py +++ b/rocketwatch/utils/cached_ens.py @@ -9,7 +9,7 @@ from utils.shared_w3 import w3_mainnet log = logging.getLogger("cached_ens") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class CachedEns: diff --git a/rocketwatch/utils/cfg.py b/rocketwatch/utils/cfg.py index e94d73fa..ebac9638 100644 --- a/rocketwatch/utils/cfg.py +++ b/rocketwatch/utils/cfg.py @@ -1,3 +1,107 @@ -import config +import tomllib +from pydantic 
import BaseModel -cfg = config.Config("main.cfg") + +class DiscordOwner(BaseModel): + user_id: int + server_id: int + + +class DiscordConfig(BaseModel): + secret: str + owner: DiscordOwner + guilds: list[int] + channels: dict[str, int] + + +class ExecutionLayerEndpoint(BaseModel): + current: str + mainnet: str + archive: str | None = None + + +class ExecutionLayerConfig(BaseModel): + explorer: str + endpoint: ExecutionLayerEndpoint + etherscan_secret: str + + +class ConsensusLayerConfig(BaseModel): + explorer: str + endpoint: str + beaconcha_secret: str + + +class MongoDBConfig(BaseModel): + uri: str + + +class RocketPoolSupport(BaseModel): + user_ids: list[int] + role_ids: list[int] + server_id: int + channel_id: int + moderator_id: int + + +class DmWarningConfig(BaseModel): + channels: list[int] + + +class RocketPoolConfig(BaseModel): + chain: str = "mainnet" + manual_addresses: dict[str, str] + dao_multisigs: list[str] + support: RocketPoolSupport + dm_warning: DmWarningConfig + + +class ModulesConfig(BaseModel): + include: list[str] = [] + exclude: list[str] = [] + enable_commands: bool = True + + +class StatusMessageConfig(BaseModel): + plugin: str + cooldown: int + fields: list[dict[str, str]] = [] + + +class EventsConfig(BaseModel): + lookback_distance: int + genesis: int + block_batch_size: int + status_message: dict[str, StatusMessageConfig] = {} + + +class SecretsConfig(BaseModel): + wakatime: str = "" + cronitor: str = "" + anthropic: str = "" + + +class OtherConfig(BaseModel): + mev_hashes: list[str] = [] + secrets: SecretsConfig = SecretsConfig() + + +class Config(BaseModel): + log_level: str = "DEBUG" + discord: DiscordConfig + execution_layer: ExecutionLayerConfig + consensus_layer: ConsensusLayerConfig + mongodb: MongoDBConfig + rocketpool: RocketPoolConfig + modules: ModulesConfig = ModulesConfig() + events: EventsConfig + other: OtherConfig = OtherConfig() + + +def load_config(path: str = "config.toml") -> Config: + with open(path, "rb") as f: + 
data = tomllib.load(f) + return Config(**data) + + +cfg = load_config() diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index a63e3f40..56e41807 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -7,7 +7,7 @@ from utils.cfg import cfg log = logging.getLogger("command_tree") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class RWCommandTree(CommandTree): diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index d471ed38..455f4eff 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -4,7 +4,7 @@ from enum import IntEnum from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Optional, Literal, cast +from typing import Literal, cast import termplotlib as tpl from eth_typing import ChecksumAddress @@ -14,7 +14,7 @@ from utils.rocketpool import rp log = logging.getLogger("dao") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class DAO(ABC): diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index f52682a0..393ed80c 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -26,7 +26,7 @@ ens = CachedEns() log = logging.getLogger("embeds") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Embed(discord.Embed): @@ -37,8 +37,8 @@ def __init__(self, *args, **kwargs): def set_footer_parts(self, parts): footer_parts = ["Created by 0xinvis.eth, Developed by haloooloolo.eth"] - if cfg["rocketpool.chain"] != "mainnet": - footer_parts.insert(-1, f"Chain: {cfg['rocketpool.chain'].capitalize()}") + if cfg.rocketpool.chain != "mainnet": + footer_parts.insert(-1, f"Chain: {cfg.rocketpool.chain.capitalize()}") footer_parts.extend(parts) self.set_footer(text=" · ".join(footer_parts)) @@ -102,9 +102,9 @@ async def el_explorer_url( if w3.is_address(target): # sanitize address target = w3.to_checksum_address(target) - url = 
f"{cfg['execution_layer.explorer']}/address/{target}" + url = f"{cfg.execution_layer.explorer}/address/{target}" - chain = cfg["rocketpool.chain"] + chain = cfg.rocketpool.chain dashboard_network = "" if (chain == "mainnet") else f"?network={chain}" if await rp.is_node(target): @@ -140,7 +140,7 @@ async def el_explorer_url( prefix += "🏛️" name = delegate_name - if not name and cfg["rocketpool.chain"] != "mainnet": + if not name and cfg.rocketpool.chain != "mainnet": name = s_hex(target) if not name: @@ -158,7 +158,7 @@ async def el_explorer_url( if code := await w3.eth.get_code(target): if prefix != -1: prefix += "📄" - if ((not name) and (w3.keccak(text=code.hex()).hex() in cfg["other.mev_hashes"])): + if ((not name) and (w3.keccak(text=code.hex()).hex() in cfg.other.mev_hashes)): name = "MEV Bot Contract" if not name: with contextlib.suppress(Exception): @@ -182,7 +182,7 @@ async def el_explorer_url( name = f"{discord.utils.remove_markdown(n, ignore_links=False)}*" else: # transaction hash - url = f"{cfg['execution_layer.explorer']}/tx/{target}" + url = f"{cfg.execution_layer.explorer}/tx/{target}" if not name: # fall back to shortened address @@ -326,8 +326,8 @@ async def assemble(args) -> Embed: if has_small and not (has_large and use_large): e.description = _(f"embeds.{args.event_name}.description_small", **args) e.description += f" {args.transactionHash_small}" - if cfg["rocketpool.chain"] != "mainnet": - e.description += f" ({cfg['rocketpool.chain'].capitalize()})" + if cfg.rocketpool.chain != "mainnet": + e.description += f" ({cfg.rocketpool.chain.capitalize()})" e.set_footer(text="") return e @@ -357,7 +357,7 @@ async def assemble(args) -> Embed: if "epoch" in args: e.add_field(name="Epoch", - value=f"[{args.epoch}](https://{cfg['consensus_layer.explorer']}/epoch/{args.epoch})") + value=f"[{args.epoch}](https://{cfg.consensus_layer.explorer}/epoch/{args.epoch})") if "timezone" in args: e.add_field(name="Timezone", @@ -476,12 +476,12 @@ async def 
assemble(args) -> Embed: value=v) # show block number - el_explorer = cfg["execution_layer.explorer"] + el_explorer = cfg.execution_layer.explorer if "block_number" in args: e.add_field(name="Block Number", value=f"[{args.blockNumber}]({el_explorer}/block/{args.blockNumber})") - cl_explorer = cfg["consensus_layer.explorer"] + cl_explorer = cfg.consensus_layer.explorer if "slot" in args: e.add_field(name="Slot", value=f"[{args.slot}]({cl_explorer}/slot/{args.slot})") diff --git a/rocketwatch/utils/etherscan.py b/rocketwatch/utils/etherscan.py index f47565e7..fc0645da 100644 --- a/rocketwatch/utils/etherscan.py +++ b/rocketwatch/utils/etherscan.py @@ -6,7 +6,7 @@ from utils.shared_w3 import w3 log = logging.getLogger("etherscan") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) async def get_recent_account_transactions(address, block_count=44800): @@ -19,7 +19,7 @@ async def get_recent_account_transactions(address, block_count=44800): async with aiohttp.ClientSession() as session: resp = await session.get(ETHERSCAN_URL, params={"address" : address, "page" : page, - "apikey" : cfg["execution_layer.etherscan_secret"], + "apikey" : cfg.execution_layer.etherscan_secret, "module" : "account", "action" : "txlist", "sort" : "desc", diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index ac77824e..3ae9c716 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -33,14 +33,14 @@ class EventPlugin(commands.Cog): def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot self.rate_limit = rate_limit - self.lookback_distance: int = cfg["events.lookback_distance"] + self.lookback_distance: int = cfg.events.lookback_distance self.last_served_block: Optional[int] = None self._pending_block: Optional[int] = None self._last_run = datetime.now() - rate_limit async def _ensure_genesis_block(self): if self.last_served_block is None: - block = await w3.eth.get_block(cfg["events.genesis"]) + block = await 
w3.eth.get_block(cfg.events.genesis) self.last_served_block = block.number - 1 self._pending_block = self.last_served_block diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 52c66419..5fd3c308 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -8,7 +8,7 @@ from utils.cfg import cfg log = logging.getLogger("event_logs") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) def get_logs( diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 773a021c..53b07b23 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -16,7 +16,7 @@ from utils.shared_w3 import w3 log = logging.getLogger("liquidity") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class Liquidity: diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 74516b1c..8620c84e 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -57,7 +57,7 @@ async def cl_explorer_url(target, name=None): name = s_hex(target) if not name: name = target - url = cfg["consensus_layer.explorer"] + url = cfg.consensus_layer.explorer return f"[{name}]({url}/validator/{target})" diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index d190c19e..0f0c3a15 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -15,7 +15,7 @@ from utils.shared_w3 import w3, w3_mainnet, w3_archive log = logging.getLogger("rocketpool") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) class NoAddressFound(Exception): @@ -43,7 +43,7 @@ async def flush(self): await self._init_contract_addresses() async def _init_contract_addresses(self) -> None: - manual_addresses = cfg["rocketpool.manual_addresses"] + manual_addresses = cfg.rocketpool.manual_addresses for name, address in manual_addresses.items(): self.addresses[name] = address diff --git a/rocketwatch/utils/sea_creatures.py 
b/rocketwatch/utils/sea_creatures.py index 73a91d6b..25963ed9 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -1,6 +1,5 @@ import contextlib from utils import solidity -from utils.cfg import cfg from utils.rocketpool import rp from utils.shared_w3 import w3 diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index fe4225be..246bc0f6 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -8,17 +8,17 @@ from utils.cfg import cfg log = logging.getLogger("shared_w3") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) -w3 = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.current'], request_kwargs={'timeout': 60})) +w3 = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.current, request_kwargs={'timeout': 60})) w3_mainnet = w3 -if cfg['rocketpool.chain'] != "mainnet": - w3_mainnet = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.mainnet'])) +if cfg.rocketpool.chain != "mainnet": + w3_mainnet = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.mainnet)) w3_archive = None -if "archive" in cfg['execution_layer.endpoint'].keys(): - w3_archive = AsyncWeb3(AsyncHTTPProvider(cfg['execution_layer.endpoint.archive'])) +if cfg.execution_layer.endpoint.archive is not None: + w3_archive = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.archive)) class Bacon(AsyncBeacon): @@ -34,4 +34,4 @@ async def get_sync_committee(self, epoch: int) -> Dict[str, Any]: ) -bacon = Bacon(cfg["consensus_layer.endpoint"]) +bacon = Bacon(cfg.consensus_layer.endpoint) diff --git a/rocketwatch/utils/time_debug.py b/rocketwatch/utils/time_debug.py index 44044f20..c6666679 100644 --- a/rocketwatch/utils/time_debug.py +++ b/rocketwatch/utils/time_debug.py @@ -5,7 +5,7 @@ from utils.cfg import cfg log = logging.getLogger("time_debug") -log.setLevel(cfg["log_level"]) +log.setLevel(cfg.log_level) def timerun(func): From 241d1fd90999675d50dfbd58d9e4cb2369f90bd2 
Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 05:03:56 +0000 Subject: [PATCH 172/279] move to pyproject.toml --- .github/workflows/docker-ci.yml | 3 +- .github/workflows/lint.yml | 3 +- compose.yaml | 4 +- pyproject.toml | 47 +++++++++++++++++++ rocketwatch/Dockerfile | 9 ++-- .../plugins/deposit_pool/deposit_pool.py | 2 +- rocketwatch/plugins/events/events.py | 2 +- rocketwatch/plugins/proposals/proposals.py | 5 +- rocketwatch/requirements.txt | 34 -------------- rocketwatch/utils/views.py | 1 - 10 files changed, 65 insertions(+), 45 deletions(-) create mode 100644 pyproject.toml delete mode 100644 rocketwatch/requirements.txt diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 587917e8..0e6b6236 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -26,7 +26,8 @@ jobs: - name: Build and push uses: docker/build-push-action@v7 with: - context: ./rocketwatch + context: . + file: rocketwatch/Dockerfile push: true tags: haloooloolo/rocketwatch:latest no-cache: true diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4389febf..33bb8a15 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,4 +13,5 @@ jobs: - uses: actions/checkout@v6 - uses: astral-sh/ruff-action@v3 with: - args: "check --select E,W --ignore E501,E203,E231" + args: "check" + src: "rocketwatch" diff --git a/compose.yaml b/compose.yaml index 7f7631d8..d1474c89 100644 --- a/compose.yaml +++ b/compose.yaml @@ -1,7 +1,9 @@ services: rocketwatch: image: haloooloolo/rocketwatch - build: ./rocketwatch + build: + context: . 
+ dockerfile: rocketwatch/Dockerfile volumes: - ./rocketwatch/contracts/rocketpool:/app/contracts/rocketpool - ./rocketwatch/config.toml:/app/config.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..c208d25f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,47 @@ +[project] +name = "rocketwatch" +version = "0.1.0" +requires-python = ">=3.12" +dependencies = [ + "aiohttp==3.13.3", + "aiocache==0.12.3", + "anthropic==0.84.0", + "anyascii==0.3.3", + "beautifulsoup4==4.14.3", + "bidict==0.23.1", + "cachetools==7.0.3", + "colorama==0.4.6", + "cronitor==4.9.0", + "dice==4.0.0", + "discord.py==2.7.1", + "eth-typing==5.2.1", + "eth-utils==5.3.1", + "etherscan_labels @ git+https://github.com/haloooloolo/etherscan-labels", + "graphql_query==1.4.0", + "hexbytes==1.3.1", + "humanize==4.15.0", + "inflect==7.5.0", + "matplotlib==3.10.8", + "numpy==2.4.2", + "pillow==12.1.1", + "psutil==7.2.2", + "pydantic>=2.0.0,<3.0.0", + "pymongo==4.16.0", + "python_i18n==0.3.9", + "pytz==2026.1.post1", + "regex==2026.2.28", + "retry-async==0.1.4", + "seaborn==0.13.2", + "tabulate==0.10.0", + "termplotlib==0.3.9", + "tiktoken==0.12.0", + "uptime==3.0.1", + "web3>=7.0.0,<8.0.0", +] + +[tool.ruff] +target-version = "py312" + +[tool.ruff.lint] +select = ["E", "F", "W"] +ignore = ["E501", "E203", "E231"] diff --git a/rocketwatch/Dockerfile b/rocketwatch/Dockerfile index f2490cf1..f1fc3bb2 100644 --- a/rocketwatch/Dockerfile +++ b/rocketwatch/Dockerfile @@ -1,11 +1,12 @@ # syntax=docker/dockerfile:1 FROM python:3.14.3 -COPY requirements.txt requirements.txt -RUN pip install --upgrade pip -RUN pip install -r requirements.txt +COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv -COPY . /app +COPY pyproject.toml pyproject.toml +RUN uv pip install --system --no-cache -r pyproject.toml + +COPY rocketwatch/ /app ENV PYTHONUNBUFFERED=1 WORKDIR /app CMD [ "python", "." 
] diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 6122b23d..0025c1e4 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -64,7 +64,7 @@ async def get_deposit_pool_stats() -> Embed: embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." if possible_assignments > 0: - embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignments**!" + embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignment{'s' if possible_assignments != 1 else ''}**!" else: lines = [] if (num_eb4 := int(dp_balance // 28)) > 0: diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 696fc8dc..90f83695 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -481,7 +481,7 @@ async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: # check if the next update is after the next period ts earliest_next_update = ts + update_rate # if it will update before the next period, skip - if not (ts < next_period < earliest_next_update): + if not (ts < args.rewardPeriodEnd < earliest_next_update): return None elif event_name == "bootstrap_pdao_setting_multi_event": description_parts = [] diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index fbf85cbc..f4a53ccd 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -169,7 +169,10 @@ async def fetch_proposal(self, slot: int) -> None: raise e validator_index = int(beacon_header["proposer_index"]) - if not (minipool := (await self.bot.db.minipools.find_one({"validator_index": validator_index}))): + query = {"validator_index": validator_index} + is_megapool = await self.bot.db.minipools.count_documents(query, limit=1) + is_minipool = 
await self.bot.db.megapool_validators.count_documents(query, limit=1) + if not (is_minipool or is_megapool): return None beacon_block = (await bacon.get_block(str(slot)))["data"]["message"] diff --git a/rocketwatch/requirements.txt b/rocketwatch/requirements.txt deleted file mode 100644 index 307435bf..00000000 --- a/rocketwatch/requirements.txt +++ /dev/null @@ -1,34 +0,0 @@ -psutil==7.2.2 -python_i18n==0.3.9 -web3>=7.0.0,<8.0.0 -humanize==4.15.0 -termplotlib==0.3.9 -cachetools==7.0.3 -bidict==0.23.1 -uptime==3.0.1 -discord.py==2.7.1 -pydantic>=2.0.0,<3.0.0 -pytz==2026.1.post1 -matplotlib==3.10.8 -inflect==7.5.0 -colorama==0.4.6 -seaborn==0.13.2 -etherscan_labels @ git+https://github.com/haloooloolo/etherscan-labels -cronitor==4.9.0 -retry-async==0.1.4 -dice==4.0.0 -regex==2026.2.28 -tiktoken==0.12.0 -anthropic==0.84.0 -pymongo==4.16.0 -graphql_query==1.4.0 -pillow==12.1.1 -aiohttp==3.13.3 -aiocache==0.12.3 -numpy==2.4.2 -beautifulsoup4==4.14.3 -eth-typing==5.2.1 -hexbytes==1.3.1 -eth-utils==5.3.1 -tabulate==0.10.0 -anyascii==0.3.3 diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index d9302aeb..97bf6b4d 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -1,4 +1,3 @@ -import math from abc import abstractmethod from discord import ui, ButtonStyle, Interaction From e79fad819566a2c39553e2d1d70cc7280ec53bf2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 05:05:57 +0000 Subject: [PATCH 173/279] enforce isort --- pyproject.toml | 5 +- rocketwatch/__main__.py | 2 +- rocketwatch/plugins/8ball/8ball.py | 2 +- rocketwatch/plugins/about/about.py | 9 ++-- rocketwatch/plugins/apr/apr.py | 5 +- .../plugins/beacon_events/beacon_events.py | 8 +-- .../plugins/chat_summary/chat_summary.py | 5 +- .../plugins/chicken_soup/chicken_soup.py | 3 +- rocketwatch/plugins/collateral/collateral.py | 5 +- .../plugins/commissions/commissions.py | 5 +- rocketwatch/plugins/cow_orders/cow_orders.py | 10 
++-- rocketwatch/plugins/dao/dao.py | 22 ++++---- .../plugins/db_upkeep_task/db_upkeep_task.py | 14 +++-- rocketwatch/plugins/debug/debug.py | 9 ++-- .../delegate_contracts/delegate_contracts.py | 9 ++-- .../plugins/deposit_pool/deposit_pool.py | 4 +- .../plugins/detect_scam/detect_scam.py | 37 +++++++------ rocketwatch/plugins/event_core/event_core.py | 15 +++--- rocketwatch/plugins/events/events.py | 22 ++++---- .../fee_distribution/fee_distribution.py | 6 +-- rocketwatch/plugins/forum/forum.py | 8 +-- rocketwatch/plugins/governance/governance.py | 9 ++-- rocketwatch/plugins/lottery/lottery.py | 7 ++- rocketwatch/plugins/metrics/metrics.py | 5 +- rocketwatch/plugins/milestones/milestones.py | 2 +- .../minipool_distribution.py | 3 +- rocketwatch/plugins/proposals/proposals.py | 10 ++-- rocketwatch/plugins/queue/queue.py | 5 +- rocketwatch/plugins/random/random.py | 15 +++--- rocketwatch/plugins/releases/releases.py | 5 +- rocketwatch/plugins/reloader/reloader.py | 13 ++--- rocketwatch/plugins/rewards/rewards.py | 19 +++---- rocketwatch/plugins/rocksolid/rocksolid.py | 14 +++-- rocketwatch/plugins/rpips/rpips.py | 3 +- rocketwatch/plugins/rpl/rpl.py | 2 +- .../plugins/scam_warning/scam_warning.py | 3 +- rocketwatch/plugins/snapshot/snapshot.py | 20 +++---- .../plugins/support_utils/support_utils.py | 4 +- .../plugins/transactions/transactions.py | 6 +-- rocketwatch/plugins/tvl/tvl.py | 2 +- .../user_distribute/user_distribute.py | 10 ++-- .../validator_states/validator_states.py | 2 +- rocketwatch/plugins/wall/wall.py | 52 ++++++++++++------- rocketwatch/rocketwatch.py | 9 ++-- rocketwatch/utils/block_time.py | 2 +- rocketwatch/utils/cfg.py | 1 + rocketwatch/utils/command_tree.py | 2 +- rocketwatch/utils/dao.py | 5 +- rocketwatch/utils/embeds.py | 10 ++-- rocketwatch/utils/event.py | 4 +- rocketwatch/utils/event_logs.py | 2 +- rocketwatch/utils/image.py | 6 +-- rocketwatch/utils/liquidity.py | 7 ++- rocketwatch/utils/readable.py | 2 +- rocketwatch/utils/retry.py | 
9 ++-- rocketwatch/utils/rocketpool.py | 6 +-- rocketwatch/utils/sea_creatures.py | 1 + rocketwatch/utils/shared_w3.py | 4 +- rocketwatch/utils/views.py | 3 +- 59 files changed, 242 insertions(+), 247 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c208d25f..e7d330a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,5 +43,8 @@ dependencies = [ target-version = "py312" [tool.ruff.lint] -select = ["E", "F", "W"] +select = ["E", "F", "I", "W"] ignore = ["E501", "E203", "E231"] + +[tool.ruff.lint.isort] +known-first-party = ["utils", "plugins", "strings", "rocketwatch"] diff --git a/rocketwatch/__main__.py b/rocketwatch/__main__.py index f3234fe0..529093a0 100644 --- a/rocketwatch/__main__.py +++ b/rocketwatch/__main__.py @@ -2,8 +2,8 @@ from discord import Intents -from utils.cfg import cfg from rocketwatch import RocketWatch +from utils.cfg import cfg logging.basicConfig(format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s") logging.getLogger().setLevel("INFO") diff --git a/rocketwatch/plugins/8ball/8ball.py b/rocketwatch/plugins/8ball/8ball.py index 822bb36a..b0031acc 100644 --- a/rocketwatch/plugins/8ball/8ball.py +++ b/rocketwatch/plugins/8ball/8ball.py @@ -2,9 +2,9 @@ import random import random as pyrandom -from discord.ext import commands from discord import Interaction from discord.app_commands import command +from discord.ext import commands from rocketwatch import RocketWatch from utils.embeds import Embed diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index a970056a..63eaf1b1 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -1,20 +1,19 @@ +import logging import os import time -import logging +import aiohttp import humanize import psutil -import aiohttp import uptime -from discord.ext import commands from discord import Interaction from discord.app_commands import command +from discord.ext import commands from 
rocketwatch import RocketWatch from utils import readable from utils.cfg import cfg -from utils.embeds import Embed -from utils.embeds import el_explorer_url +from utils.embeds import Embed, el_explorer_url from utils.visibility import is_hidden_weak psutil.getloadavg() diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index d6691722..807b7112 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -4,10 +4,9 @@ from io import BytesIO import matplotlib.pyplot as plt -from discord import File -from discord.ext import commands, tasks -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command +from discord.ext import commands, tasks from matplotlib.dates import DateFormatter from rocketwatch import RocketWatch diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 9c36fe52..6a13e799 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -8,15 +8,15 @@ from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import ts_to_block from utils.cfg import cfg from utils.embeds import assemble, prepare_args +from utils.event import Event, EventPlugin from utils.readable import cl_explorer_url +from utils.retry import retry_async from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 -from utils.solidity import date_to_beacon_block, beacon_block_to_date -from utils.event import EventPlugin, Event -from utils.block_time import ts_to_block -from utils.retry import retry_async +from utils.solidity import beacon_block_to_date, date_to_beacon_block log = logging.getLogger("beacon_events") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 716f2db0..0c208533 100644 --- 
a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -6,12 +6,11 @@ import anthropic import pytz import tiktoken -from discord import File, DeletedReferencedMessage +from discord import DeletedReferencedMessage, File, Interaction +from discord.app_commands import command from discord.channel import TextChannel from discord.ext import commands from discord.ext.commands import is_owner -from discord.app_commands import command -from discord import Interaction from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/chicken_soup/chicken_soup.py b/rocketwatch/plugins/chicken_soup/chicken_soup.py index 9c613630..e4f801fd 100644 --- a/rocketwatch/plugins/chicken_soup/chicken_soup.py +++ b/rocketwatch/plugins/chicken_soup/chicken_soup.py @@ -1,8 +1,9 @@ from datetime import datetime, timedelta from discord import Interaction -from discord.ext import commands from discord.app_commands import command +from discord.ext import commands + from rocketwatch import RocketWatch diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 29bd981f..1b450a97 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -6,13 +6,12 @@ import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -from discord import File -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command, describe from discord.ext import commands from discord.utils import as_chunks -from matplotlib.ticker import FuncFormatter from eth_typing import ChecksumAddress +from matplotlib.ticker import FuncFormatter from rocketwatch import RocketWatch from utils import solidity diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index 2ed69b7d..f5f5c053 100644 --- 
a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -3,10 +3,9 @@ import numpy as np import seaborn as sns -from discord import File -from discord.ext import commands -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command +from discord.ext import commands from matplotlib import pyplot as plt from rocketwatch import RocketWatch diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 81357d47..a0908ff4 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -1,20 +1,18 @@ import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import aiohttp -from datetime import timezone -from web3.datastructures import MutableAttributeDict as aDict - from discord import Interaction from discord.app_commands import command +from web3.datastructures import MutableAttributeDict as aDict from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg -from utils.embeds import assemble, prepare_args, Embed +from utils.embeds import Embed, assemble, prepare_args +from utils.event import Event, EventPlugin from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.event import EventPlugin, Event from utils.visibility import is_hidden_weak log = logging.getLogger("cow_orders") diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index adbd7efc..eea912f3 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -1,28 +1,24 @@ import logging - from dataclasses import dataclass -from typing import Literal from operator import attrgetter - -from eth_typing import ChecksumAddress -from tabulate import tabulate +from typing import Literal from discord import Interaction -from discord.app_commands import Choice, command, describe, 
autocomplete +from discord.app_commands import Choice, autocomplete, command, describe from discord.ext.commands import Cog +from eth_typing import ChecksumAddress +from tabulate import tabulate from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import ts_to_block from utils.cfg import cfg -from utils.embeds import Embed -from utils.visibility import is_hidden_weak -from utils.dao import DefaultDAO, OracleDAO, SecurityCouncil, ProtocolDAO -from utils.views import PageView -from utils.embeds import el_explorer_url +from utils.dao import DefaultDAO, OracleDAO, ProtocolDAO, SecurityCouncil +from utils.embeds import Embed, el_explorer_url from utils.event_logs import get_logs -from utils.block_time import ts_to_block from utils.rocketpool import rp - +from utils.views import PageView +from utils.visibility import is_hidden_weak log = logging.getLogger("dao") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 2f29616d..059c1a60 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -1,6 +1,6 @@ +import asyncio import logging import time -import asyncio from collections import defaultdict from collections.abc import Callable from datetime import timedelta @@ -8,21 +8,19 @@ import pymongo from cronitor import Monitor -from pymongo import UpdateOne, UpdateMany -from pymongo.asynchronous.collection import AsyncCollection - from discord.ext import commands from discord.utils import as_chunks +from pymongo import UpdateMany, UpdateOne +from pymongo.asynchronous.collection import AsyncCollection from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.block_time import ts_to_block +from utils.cfg import cfg +from utils.event_logs import get_logs from utils.rocketpool import rp -from utils.shared_w3 import w3, bacon +from 
utils.shared_w3 import bacon, w3 from utils.time_debug import timerun, timerun_async -from utils.event_logs import get_logs - log = logging.getLogger("db_upkeep_task") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 192a3755..f4a320b1 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -4,22 +4,22 @@ import random import time -import humanize import aiohttp +import humanize from colorama import Fore, Style from discord import File, Interaction -from discord.app_commands import Choice, command, guilds, describe +from discord.app_commands import Choice, command, describe, guilds from discord.ext.commands import Cog, is_owner from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import block_to_ts, ts_to_block from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url -from utils.block_time import ts_to_block, block_to_ts from utils.readable import prettify_json_string from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.visibility import is_hidden, is_hidden_weak, is_hidden_role_controlled +from utils.visibility import is_hidden, is_hidden_role_controlled, is_hidden_weak log = logging.getLogger("debug") log.setLevel(cfg.log_level) @@ -255,6 +255,7 @@ async def restore_support_template(self, interaction: Interaction, template_name async def restore_missed_events(self, interaction: Interaction, tx_hash: str): import pickle from datetime import datetime + from plugins.events.events import Events await interaction.response.defer(ephemeral=True) diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index 4b3126ff..8866e492 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -1,17 +1,16 @@ import logging -from 
pymongo.asynchronous.collection import AsyncCollection - from discord import Interaction -from discord.ext import commands from discord.app_commands import command +from discord.ext import commands +from pymongo.asynchronous.collection import AsyncCollection from rocketwatch import RocketWatch +from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.readable import s_hex -from utils.shared_w3 import w3 -from utils.cfg import cfg from utils.rocketpool import rp +from utils.shared_w3 import w3 log = logging.getLogger("delegate_contracts") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 0025c1e4..97fcfb11 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -3,13 +3,13 @@ from discord import Interaction from discord.app_commands import command -from rocketwatch import RocketWatch from plugins.queue.queue import Queue -from utils.status import StatusPlugin +from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg from utils.embeds import Embed from utils.rocketpool import rp +from utils.status import StatusPlugin from utils.visibility import is_hidden_weak log = logging.getLogger("deposit_pool") diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 868f3705..c6db8322 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -1,37 +1,36 @@ -import io import asyncio -import logging import contextlib -import regex as re - -from urllib import parse -from anyascii import anyascii +import io +import logging +from datetime import datetime, timedelta, timezone from typing import Optional -from datetime import datetime, timezone, timedelta +from urllib import parse +import regex as re +from anyascii import anyascii from cachetools import 
TTLCache from discord import ( - ui, AppCommandType, ButtonStyle, - errors, - File, Color, - User, - Member, - Message, - Reaction, - Guild, - Thread, DeletedReferencedMessage, + File, + Guild, Interaction, - RawMessageDeleteEvent, + Member, + Message, RawBulkMessageDeleteEvent, + RawMessageDeleteEvent, + RawThreadDeleteEvent, RawThreadUpdateEvent, - RawThreadDeleteEvent + Reaction, + Thread, + User, + errors, + ui, ) +from discord.app_commands import ContextMenu, command, guilds from discord.ext.commands import Cog -from discord.app_commands import command, guilds, ContextMenu from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 23862082..1fad9507 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -1,26 +1,25 @@ -import time -import pickle import asyncio import logging - +import pickle +import time from datetime import datetime, timedelta from enum import Enum -from typing import Optional, Any +from typing import Any, Optional +import discord import pymongo from cronitor import Monitor -import discord from discord.ext import commands, tasks from eth_typing import BlockIdentifier, BlockNumber from web3.datastructures import MutableAttributeDict -from rocketwatch import RocketWatch from plugins.support_utils.support_utils import generate_template_embed -from utils.status import StatusPlugin +from rocketwatch import RocketWatch from utils.cfg import cfg -from utils.embeds import assemble, Embed +from utils.embeds import Embed, assemble from utils.event import EventPlugin from utils.shared_w3 import w3 +from utils.status import StatusPlugin log = logging.getLogger("event_core") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 90f83695..a0e19ada 100644 --- a/rocketwatch/plugins/events/events.py +++ 
b/rocketwatch/plugins/events/events.py @@ -1,29 +1,29 @@ -from collections.abc import Coroutine -import json import hashlib +import json import logging import warnings -from typing import Optional, Callable, Literal +from collections.abc import Coroutine +from typing import Callable, Literal, Optional from discord import Interaction -from discord.ext.commands import is_owner from discord.app_commands import command, guilds -from eth_typing.evm import ChecksumAddress, BlockNumber +from discord.ext.commands import is_owner +from eth_typing.evm import BlockNumber, ChecksumAddress from hexbytes import HexBytes from web3.datastructures import MutableAttributeDict as aDict from web3.logs import DISCARD -from web3.types import LogReceipt, EventData +from web3.types import EventData, LogReceipt from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import block_to_ts from utils.cfg import cfg from utils.dao import DefaultDAO, ProtocolDAO -from utils.embeds import assemble, prepare_args, el_explorer_url, Embed -from utils.event import EventPlugin, Event -from utils.rocketpool import rp, NoAddressFound -from utils.shared_w3 import w3, bacon +from utils.embeds import Embed, assemble, el_explorer_url, prepare_args +from utils.event import Event, EventPlugin +from utils.rocketpool import NoAddressFound, rp +from utils.shared_w3 import bacon, w3 from utils.solidity import SUBMISSION_KEYS -from utils.block_time import block_to_ts log = logging.getLogger("events") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 215e8ff0..903d7e62 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -2,16 +2,16 @@ from io import BytesIO from typing import Literal -from discord import Interaction, File -from discord.ext import commands +from discord import File, Interaction 
from discord.app_commands import command +from discord.ext import commands from matplotlib import pyplot as plt from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed -from utils.visibility import is_hidden_weak from utils.readable import render_tree_legacy +from utils.visibility import is_hidden_weak log = logging.getLogger("fee_distribution") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 6d0c02c1..470b584b 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -1,18 +1,18 @@ import logging -from datetime import datetime from dataclasses import dataclass -from typing import Optional, Literal, cast +from datetime import datetime +from typing import Literal, Optional, cast import aiohttp from discord import Interaction +from discord.app_commands import Choice, command from discord.ext import commands -from discord.app_commands import command, Choice from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed -from utils.visibility import is_hidden_weak from utils.retry import retry_async +from utils.visibility import is_hidden_weak log = logging.getLogger("forum") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 0679b28b..7a046f97 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -7,16 +7,15 @@ from eth_typing import HexStr from web3.constants import HASH_ZERO -from plugins.snapshot.snapshot import Snapshot from plugins.forum.forum import Forum from plugins.rpips.rpips import RPIPs - -from utils.status import StatusPlugin +from plugins.snapshot.snapshot import Snapshot +from utils.block_time import ts_to_block from utils.cfg import cfg -from utils.dao import DAO, DefaultDAO, OracleDAO, SecurityCouncil, ProtocolDAO +from utils.dao import 
DAO, DefaultDAO, OracleDAO, ProtocolDAO, SecurityCouncil from utils.embeds import Embed +from utils.status import StatusPlugin from utils.visibility import is_hidden_weak -from utils.block_time import ts_to_block log = logging.getLogger("governance") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index a60edaeb..468aad05 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -1,16 +1,15 @@ import logging -from discord.ext import commands from discord import Interaction from discord.app_commands import command +from discord.ext import commands from pymongo import InsertOne from rocketwatch import RocketWatch from utils.cfg import cfg -from utils.embeds import Embed -from utils.embeds import el_explorer_url +from utils.embeds import Embed, el_explorer_url from utils.shared_w3 import bacon -from utils.solidity import BEACON_START_DATE, BEACON_EPOCH_LENGTH +from utils.solidity import BEACON_EPOCH_LENGTH, BEACON_START_DATE from utils.time_debug import timerun_async from utils.visibility import is_hidden diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index b7c6d2c6..f445fdc2 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -3,10 +3,9 @@ from io import BytesIO from bson import SON -from discord import File -from discord.ext import commands -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command +from discord.ext import commands from matplotlib import pyplot as plt from rocketwatch import RocketWatch diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 802c5b23..021fc74e 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -7,8 +7,8 @@ from utils import solidity from utils.cfg import cfg 
from utils.embeds import assemble +from utils.event import Event, EventPlugin from utils.rocketpool import rp -from utils.event import EventPlugin, Event log = logging.getLogger("milestones") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 36aafe46..42da3b14 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -5,8 +5,7 @@ import inflect import matplotlib.pyplot as plt import numpy as np -from discord import File -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command, describe from discord.ext import commands diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index f4a53ccd..4f6e3c34 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -1,27 +1,27 @@ +import asyncio import logging import re import time from datetime import datetime, timedelta from io import BytesIO -import asyncio -from aiohttp.client_exceptions import ClientResponseError import matplotlib as mpl +from aiohttp.client_exceptions import ClientResponseError +from cronitor import Monitor from discord import File, Interaction -from discord.ext import commands from discord.app_commands import command, describe +from discord.ext import commands from discord.utils import as_chunks from matplotlib import pyplot as plt from pymongo import ASCENDING, DESCENDING -from cronitor import Monitor from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed +from utils.shared_w3 import bacon from utils.solidity import beacon_block_to_date, date_to_beacon_block from utils.time_debug import timerun_async from utils.visibility import is_hidden_weak -from utils.shared_w3 import bacon cog_id = 
"proposals" log = logging.getLogger(cog_id) diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 2fb796c4..1e1b9fb3 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -1,20 +1,19 @@ import logging - from typing import Literal, NamedTuple from aiocache import cached from discord import Interaction from discord.app_commands import command, describe from discord.ext.commands import Cog -from eth_typing import ChecksumAddress, BlockIdentifier +from eth_typing import BlockIdentifier, ChecksumAddress from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import el_explorer_url from utils.rocketpool import rp -from utils.visibility import is_hidden_weak from utils.shared_w3 import w3 from utils.views import PageView +from utils.visibility import is_hidden_weak log = logging.getLogger("queue") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 11aaa825..ddeaeb0d 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -6,19 +6,22 @@ import dice import humanize import pytz -from discord import File -from discord.ext import commands -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command +from discord.ext import commands from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg -from utils.embeds import Embed, ens, el_explorer_url +from utils.embeds import Embed, el_explorer_url, ens from utils.readable import s_hex, uptime from utils.rocketpool import rp -from utils.sea_creatures import sea_creatures, get_sea_creature_for_address, get_holding_for_address -from utils.shared_w3 import w3, bacon +from utils.sea_creatures import ( + get_holding_for_address, + get_sea_creature_for_address, + sea_creatures, +) +from utils.shared_w3 import bacon, w3 from utils.visibility import 
is_hidden, is_hidden_weak log = logging.getLogger("random") diff --git a/rocketwatch/plugins/releases/releases.py b/rocketwatch/plugins/releases/releases.py index 83d0d3eb..22b99ad4 100644 --- a/rocketwatch/plugins/releases/releases.py +++ b/rocketwatch/plugins/releases/releases.py @@ -1,10 +1,9 @@ import logging import aiohttp -from discord.ext import commands - -from discord.app_commands import command from discord import Interaction +from discord.app_commands import command +from discord.ext import commands from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/reloader/reloader.py b/rocketwatch/plugins/reloader/reloader.py index d9cb1d66..43af48de 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -1,13 +1,14 @@ +from pathlib import Path + from discord import Interaction -from discord.app_commands import command, guilds, autocomplete, Choice -from discord.ext.commands import Cog +from discord.app_commands import Choice, autocomplete, command, guilds from discord.ext.commands import ( - is_owner, - ExtensionNotLoaded, + Cog, ExtensionAlreadyLoaded, - ExtensionNotFound + ExtensionNotFound, + ExtensionNotLoaded, + is_owner, ) -from pathlib import Path from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index e8eed5f1..c73472f4 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -1,25 +1,22 @@ import logging +from dataclasses import dataclass +from io import BytesIO +from typing import Optional + import aiohttp -import numpy as np import matplotlib.pyplot as plt - -from io import BytesIO -from discord import File +import numpy as np +from discord import File, Interaction from discord.app_commands import command, describe from discord.ext import commands -from discord import Interaction - - -from typing import Optional -from 
dataclasses import dataclass from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import ts_to_block from utils.cfg import cfg from utils.embeds import Embed, resolve_ens -from utils.rocketpool import rp from utils.retry import retry_async -from utils.block_time import ts_to_block +from utils.rocketpool import rp log = logging.getLogger("rewards") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 63efe4ab..c0adbba9 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -4,23 +4,21 @@ from typing import Optional import matplotlib.pyplot as plt -from matplotlib.dates import DateFormatter -from discord import File -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command from discord.ext.commands import Cog +from matplotlib.dates import DateFormatter from pymongo import InsertOne from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg -from utils.shared_w3 import w3 -from utils.rocketpool import rp -from utils.visibility import is_hidden_weak from utils.block_time import block_to_ts, ts_to_block +from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.event_logs import get_logs - +from utils.rocketpool import rp +from utils.shared_w3 import w3 +from utils.visibility import is_hidden_weak cog_id = "rocksolid" log = logging.getLogger(cog_id) diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index e9373fe6..0f824d35 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -3,10 +3,9 @@ import aiohttp from aiocache import cached from bs4 import BeautifulSoup - from discord import Interaction -from discord.ext.commands import Cog from discord.app_commands import Choice, command, describe +from discord.ext.commands import 
Cog from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 0ef7772d..9daf79ea 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -3,8 +3,8 @@ import matplotlib.pyplot as plt from discord import File, Interaction -from discord.ext import commands from discord.app_commands import command +from discord.ext import commands from rocketwatch import RocketWatch from utils import solidity diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index e76e0c5b..38f0d65d 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -1,5 +1,5 @@ import logging -from datetime import timedelta, datetime +from datetime import datetime, timedelta from discord import errors from discord.ext import commands @@ -8,7 +8,6 @@ from utils.cfg import cfg from utils.embeds import Embed - log = logging.getLogger("scam_warning") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 2a89ff04..275277db 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -1,29 +1,29 @@ -import math import logging +import math from dataclasses import dataclass -from typing import Optional, Literal from datetime import datetime, timedelta +from typing import Literal, Optional -import regex import aiohttp +import regex import termplotlib as tpl from discord import Interaction from discord.app_commands import command +from eth_typing import BlockNumber, ChecksumAddress +from graphql_query import Argument, Operation, Query +from pymongo import DESCENDING, DeleteOne, InsertOne, UpdateOne from web3.constants import ADDRESS_ZERO -from eth_typing import ChecksumAddress, BlockNumber -from graphql_query import Operation, Query, Argument -from pymongo import 
InsertOne, UpdateOne, DeleteOne, DESCENDING from rocketwatch import RocketWatch +from utils.block_time import ts_to_block from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url -from utils.image import Image, ImageCanvas, Color, FontVariant +from utils.event import Event, EventPlugin +from utils.image import Color, FontVariant, Image, ImageCanvas from utils.readable import uptime +from utils.retry import retry_async from utils.rocketpool import rp -from utils.event import EventPlugin, Event from utils.visibility import is_hidden_weak -from utils.block_time import ts_to_block -from utils.retry import retry_async log = logging.getLogger("snapshot") log.setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 07310ee9..e3665898 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -3,8 +3,8 @@ from datetime import datetime, timezone from bson import CodecOptions -from discord import app_commands, ui, Interaction, TextStyle, ButtonStyle, File, User -from discord.app_commands import Group, Choice, choices +from discord import ButtonStyle, File, Interaction, TextStyle, User, app_commands, ui +from discord.app_commands import Choice, Group, choices from discord.ext.commands import Cog, GroupCog from rocketwatch import RocketWatch diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index a8822220..51eccf48 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -6,15 +6,15 @@ from discord import Interaction from discord.app_commands import command, guilds from discord.ext.commands import is_owner -from eth_typing import ChecksumAddress, BlockNumber, BlockIdentifier +from eth_typing import BlockIdentifier, BlockNumber, ChecksumAddress from web3.datastructures import 
MutableAttributeDict as aDict from rocketwatch import RocketWatch from utils import solidity from utils.cfg import cfg from utils.dao import DefaultDAO, ProtocolDAO -from utils.embeds import assemble, prepare_args, el_explorer_url, Embed -from utils.event import EventPlugin, Event +from utils.embeds import Embed, assemble, el_explorer_url, prepare_args +from utils.event import Event, EventPlugin from utils.rocketpool import rp from utils.shared_w3 import w3 diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index e8c681e1..be647f63 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -3,8 +3,8 @@ import humanize from colorama import Style from discord import Interaction -from discord.ext.commands import Cog from discord.app_commands import command, describe +from discord.ext.commands import Cog from rocketwatch import RocketWatch from utils import solidity diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 87869fb4..aa03f45a 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,19 +1,19 @@ -import time import logging +import time from io import StringIO from operator import itemgetter import discord -from discord import ui, ButtonStyle, Interaction -from discord.ext import commands, tasks +from discord import ButtonStyle, Interaction, ui from discord.app_commands import command +from discord.ext import commands, tasks from pymongo import ASCENDING from rocketwatch import RocketWatch -from utils.rocketpool import rp from utils.cfg import cfg from utils.embeds import Embed -from utils.shared_w3 import w3, bacon +from utils.rocketpool import rp +from utils.shared_w3 import bacon, w3 from utils.visibility import is_hidden_weak log = logging.getLogger("user_distribute") diff --git a/rocketwatch/plugins/validator_states/validator_states.py 
b/rocketwatch/plugins/validator_states/validator_states.py index 976a3d92..05425115 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -1,8 +1,8 @@ import logging from discord import Interaction -from discord.ext import commands from discord.app_commands import command +from discord.ext import commands from rocketwatch import RocketWatch from utils.cfg import cfg diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index c2a3d90c..6912d923 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -1,35 +1,51 @@ import asyncio import logging -from io import BytesIO -from typing import cast, Literal, Optional from collections import OrderedDict +from io import BytesIO +from typing import Literal, Optional, cast import aiohttp import numpy as np -from discord import File -from discord import Interaction +from discord import File, Interaction from discord.app_commands import command, describe from discord.ext import commands -from matplotlib import ( - pyplot as plt, - font_manager as fm, - ticker, - figure -) from eth_typing import ChecksumAddress, HexStr +from matplotlib import figure, ticker +from matplotlib import font_manager as fm +from matplotlib import pyplot as plt from rocketwatch import RocketWatch -from utils.time_debug import timerun, timerun_async +from utils.cfg import cfg from utils.embeds import Embed -from utils.visibility import is_hidden_weak -from utils.rocketpool import rp from utils.liquidity import ( - Exchange, CEX, DEX, Market, Liquidity, - Binance, Coinbase, GateIO, OKX, Bitget, MEXC, Bybit, CryptoDotCom, - Kraken, Kucoin, Bithumb, BingX, Bitvavo, HTX, BitMart, Bitrue, CoinTR, - BalancerV2, UniswapV3 + CEX, + DEX, + HTX, + MEXC, + OKX, + BalancerV2, + Binance, + BingX, + Bitget, + Bithumb, + BitMart, + Bitrue, + Bitvavo, + Bybit, + Coinbase, + CoinTR, + CryptoDotCom, + Exchange, + GateIO, + Kraken, + 
Kucoin, + Liquidity, + Market, + UniswapV3, ) -from utils.cfg import cfg +from utils.rocketpool import rp +from utils.time_debug import timerun, timerun_async +from utils.visibility import is_hidden_weak log = logging.getLogger("wall") log.setLevel(cfg.log_level) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 6ade2aab..4fd4d74f 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -5,18 +5,17 @@ from typing import Optional from discord import ( - app_commands, - Interaction, - Intents, - Thread, File, Guild, + Intents, + Interaction, + Thread, User, + app_commands, ) from discord.abc import GuildChannel, PrivateChannel from discord.ext import commands from discord.ext.commands import Bot - from pymongo import AsyncMongoClient from utils.cfg import cfg diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 10050c5a..bda7c260 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -1,5 +1,5 @@ -import math import logging +import math from aiocache import cached diff --git a/rocketwatch/utils/cfg.py b/rocketwatch/utils/cfg.py index ebac9638..bd4f4d52 100644 --- a/rocketwatch/utils/cfg.py +++ b/rocketwatch/utils/cfg.py @@ -1,4 +1,5 @@ import tomllib + from pydantic import BaseModel diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index 56e41807..7406bad2 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -2,7 +2,7 @@ from datetime import datetime from discord import Interaction -from discord.app_commands import CommandTree, AppCommandError +from discord.app_commands import AppCommandError, CommandTree from utils.cfg import cfg diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 455f4eff..ef32107f 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -1,9 +1,8 @@ -import math import logging - -from enum import IntEnum +import math from abc import ABC, 
abstractmethod from dataclasses import dataclass +from enum import IntEnum from typing import Literal, cast import termplotlib as tpl diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 393ed80c..f7f51ded 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -2,11 +2,11 @@ import datetime import logging import math -from typing import Optional, Callable, Literal +from typing import Callable, Literal, Optional +import aiohttp import discord import humanize -import aiohttp from aiocache import cached from discord import Color from ens import InvalidName @@ -14,14 +14,14 @@ from strings import _ from utils import solidity +from utils.block_time import block_to_ts from utils.cached_ens import CachedEns from utils.cfg import cfg -from utils.readable import cl_explorer_url, advanced_tnx_url, s_hex +from utils.readable import advanced_tnx_url, cl_explorer_url, s_hex +from utils.retry import retry_async from utils.rocketpool import rp from utils.sea_creatures import get_sea_creature_for_address from utils.shared_w3 import w3 -from utils.retry import retry_async -from utils.block_time import block_to_ts ens = CachedEns() diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 3ae9c716..d142c9a8 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -6,11 +6,11 @@ from discord.ext import commands from eth_typing import BlockNumber -from utils.shared_w3 import w3 +from rocketwatch import RocketWatch from utils.cfg import cfg from utils.embeds import Embed from utils.image import Image -from rocketwatch import RocketWatch +from utils.shared_w3 import w3 @dataclass(frozen=True, slots=True) diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 5fd3c308..05ad4a76 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, Any +from typing import Any, Optional from eth_typing 
import BlockNumber from web3.contract.contract import ContractEvent diff --git a/rocketwatch/utils/image.py b/rocketwatch/utils/image.py index 4efd5b08..83d36b7d 100644 --- a/rocketwatch/utils/image.py +++ b/rocketwatch/utils/image.py @@ -1,14 +1,14 @@ import math from enum import Enum -from io import BytesIO from functools import cache +from io import BytesIO from typing import Optional from discord import File -from PIL import ImageFont, Image as PillowImage +from PIL import Image as PillowImage +from PIL import ImageFont from PIL.ImageDraw import ImageDraw - Color = tuple[int, int, int] diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 53b07b23..93d363b9 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -1,13 +1,12 @@ -import math import logging -from collections import OrderedDict +import math from abc import ABC, abstractmethod +from collections import OrderedDict from dataclasses import dataclass -from typing import Optional, Callable +from typing import Callable, Optional import aiohttp import numpy as np - from eth_typing import ChecksumAddress, HexStr from utils.cfg import cfg diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 8620c84e..3851e37d 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -3,7 +3,7 @@ import json import zlib -from colorama import Style, Fore +from colorama import Fore, Style import utils.solidity as units from utils.cfg import cfg diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 9fb905d4..441a4f66 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -1,8 +1,7 @@ -from retry_async.api import ( - retry as __retry, - EXCEPTIONS -) -from typing import Callable, Any +from typing import Any, Callable + +from retry_async.api import EXCEPTIONS +from retry_async.api import retry as __retry def retry( diff --git a/rocketwatch/utils/rocketpool.py 
b/rocketwatch/utils/rocketpool.py index 0f0c3a15..eec028a8 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -1,18 +1,18 @@ -import eth_abi import logging import os from pathlib import Path from typing import Any +import eth_abi from bidict import bidict -from eth_typing import BlockIdentifier, ChecksumAddress from cachetools import FIFOCache +from eth_typing import BlockIdentifier, ChecksumAddress from web3.exceptions import ContractLogicError from utils import solidity from utils.cfg import cfg from utils.readable import decode_abi -from utils.shared_w3 import w3, w3_mainnet, w3_archive +from utils.shared_w3 import w3, w3_archive, w3_mainnet log = logging.getLogger("rocketpool") log.setLevel(cfg.log_level) diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index 25963ed9..ec170d82 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -1,4 +1,5 @@ import contextlib + from utils import solidity from utils.rocketpool import rp from utils.shared_w3 import w3 diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 246bc0f6..7db2d9bd 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -1,8 +1,8 @@ import logging -from typing import Dict, Any +from typing import Any, Dict -from web3.beacon import AsyncBeacon from web3 import AsyncWeb3 +from web3.beacon import AsyncBeacon from web3.providers import AsyncHTTPProvider from utils.cfg import cfg diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index 97bf6b4d..b86ded91 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -1,6 +1,7 @@ from abc import abstractmethod -from discord import ui, ButtonStyle, Interaction +from discord import ButtonStyle, Interaction, ui + from utils.embeds import Embed From b4253d3f7c2f077ef9e73f6829f06b53afaad29f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: 
Sat, 7 Mar 2026 05:36:49 +0000 Subject: [PATCH 174/279] stricter linting --- pyproject.toml | 2 +- rocketwatch/config.toml.sample | 1 - rocketwatch/plugins/about/about.py | 15 ++++---- rocketwatch/plugins/apr/apr.py | 8 ++-- .../plugins/beacon_events/beacon_events.py | 11 +++--- .../plugins/chat_summary/chat_summary.py | 8 ++-- rocketwatch/plugins/collateral/collateral.py | 9 +++-- rocketwatch/plugins/cow_orders/cow_orders.py | 37 ++++++++----------- rocketwatch/plugins/dao/dao.py | 2 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 12 +++--- rocketwatch/plugins/debug/debug.py | 26 ++++++------- .../plugins/detect_scam/detect_scam.py | 27 +++++++------- rocketwatch/plugins/event_core/event_core.py | 10 ++--- rocketwatch/plugins/events/events.py | 23 ++++++------ .../fee_distribution/fee_distribution.py | 2 +- rocketwatch/plugins/forum/forum.py | 4 +- rocketwatch/plugins/lottery/lottery.py | 2 +- rocketwatch/plugins/metrics/metrics.py | 2 +- .../minipool_distribution.py | 2 +- .../pinned_messages/pinned_messages.py | 2 +- rocketwatch/plugins/proposals/proposals.py | 14 +++---- rocketwatch/plugins/random/random.py | 9 ++--- rocketwatch/plugins/reloader/reloader.py | 4 +- rocketwatch/plugins/rewards/rewards.py | 9 ++--- rocketwatch/plugins/rocksolid/rocksolid.py | 5 +-- rocketwatch/plugins/rpips/rpips.py | 14 +++---- rocketwatch/plugins/rpl/rpl.py | 6 +-- rocketwatch/plugins/snapshot/snapshot.py | 23 ++++++------ .../plugins/support_utils/support_utils.py | 4 +- .../plugins/transactions/transactions.py | 4 +- rocketwatch/plugins/tvl/tvl.py | 3 +- .../validator_states/validator_states.py | 4 +- rocketwatch/plugins/wall/wall.py | 12 +++--- rocketwatch/rocketwatch.py | 3 +- rocketwatch/utils/cached_ens.py | 5 +-- rocketwatch/utils/dao.py | 2 +- rocketwatch/utils/embeds.py | 14 +++---- rocketwatch/utils/etherscan.py | 16 ++++---- rocketwatch/utils/event.py | 9 ++--- rocketwatch/utils/event_logs.py | 4 +- rocketwatch/utils/image.py | 12 +++--- 
rocketwatch/utils/liquidity.py | 18 ++++----- rocketwatch/utils/readable.py | 20 +++++----- rocketwatch/utils/retry.py | 7 ++-- rocketwatch/utils/rocketpool.py | 4 +- rocketwatch/utils/shared_w3.py | 6 +-- 46 files changed, 209 insertions(+), 227 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e7d330a1..5550b2e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ dependencies = [ target-version = "py312" [tool.ruff.lint] -select = ["E", "F", "I", "W"] +select = ["B", "E", "F", "I", "RUF", "SIM", "UP", "W"] ignore = ["E501", "E203", "E231"] [tool.ruff.lint.isort] diff --git a/rocketwatch/config.toml.sample b/rocketwatch/config.toml.sample index b26329b4..5980e780 100644 --- a/rocketwatch/config.toml.sample +++ b/rocketwatch/config.toml.sample @@ -2,7 +2,6 @@ log_level = "INFO" [discord] secret = "" -guilds = [] [discord.owner] user_id = -1 diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 63eaf1b1..0e932f2c 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -38,15 +38,17 @@ async def about(self, interaction: Interaction): if api_key := cfg.other.secrets.wakatime: try: - async with aiohttp.ClientSession() as session: - async with session.get( + async with ( + aiohttp.ClientSession() as session, + session.get( "https://wakatime.com/api/v1/users/current/all_time_since_today", params={ "project": "rocketwatch", "api_key": api_key } - ) as resp: - code_time = (await resp.json())["data"]["text"] + ) as resp, + ): + code_time = (await resp.json())["data"]["text"] except Exception as err: await self.bot.report_error(err) @@ -84,9 +86,8 @@ async def about(self, interaction: Interaction): # show credits try: - async with aiohttp.ClientSession() as session: - async with session.get(f"https://api.github.com/repos/{repo_name}/contributors") as resp: - contributors_data = await resp.json() + async with aiohttp.ClientSession() as session, 
session.get(f"https://api.github.com/repos/{repo_name}/contributors") as resp: + contributors_data = await resp.json() contributors = [ f"[{c['login']}]({c['html_url']}) ({c['contributions']})" for c in contributors_data diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 807b7112..db68255d 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -222,8 +222,8 @@ async def reth_apr(self, interaction: Interaction): old_formatter = plt.gca().xaxis.get_major_formatter() plt.gca().xaxis.set_major_formatter(DateFormatter("%b %d")) - ax2.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: "{:.1%}".format(x))) - ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: "{:.1%}".format(x))) + ax2.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) + ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.set_ylabel("Effectiveness") ax2.set_ylabel("APR") ax1.set_ylim(top=1) @@ -361,7 +361,7 @@ async def node_apr(self, interaction: Interaction): if i > 8: y_7d.append(to_apr(datapoints[i - 9], datapoints[i])) y_7d_virtual.append(to_apr(datapoints[i - 9], datapoints[i], effective=False)) - bare_apr = y_7d_virtual[-1] / Decimal((1 - node_fee)) + bare_apr = y_7d_virtual[-1] / Decimal(1 - node_fee) y_7d_solo.append(bare_apr) peth_share_leb8 = 0.75 y_7d_node_operators_leb8_14.append(bare_apr * Decimal(1 + (0.14 * peth_share_leb8 / (1 - peth_share_leb8)))) @@ -405,7 +405,7 @@ async def node_apr(self, interaction: Interaction): old_formatter = plt.gca().xaxis.get_major_formatter() plt.gca().xaxis.set_major_formatter(DateFormatter("%m.%d")) - ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: "{:.1%}".format(x))) + ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.legend(loc="lower left") img = BytesIO() diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 
6a13e799..054d1ad3 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, cast +from typing import cast import aiohttp import eth_utils @@ -120,7 +120,7 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: return events @retry_async(tries=5, delay=10, backoff=2, max_delay=30) - async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: + async def _get_proposal(self, beacon_block: dict) -> Event | None: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block return None @@ -143,9 +143,8 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: # fetch from beaconcha.in because beacon node is unaware of MEV bribes endpoint = f"https://beaconcha.in/api/v1/execution/block/{block_number}" - async with aiohttp.ClientSession() as session: - async with session.get(endpoint, headers={"apikey": api_key}) as resp: - response_body = await resp.json() + async with aiohttp.ClientSession() as session, session.get(endpoint, headers={"apikey": api_key}) as resp: + response_body = await resp.json() log.debug(f"{response_body = }") proposal_data = response_body["data"][0] @@ -191,7 +190,7 @@ async def _get_proposal(self, beacon_block: dict) -> Optional[Event]: block_number=block_number ) - async def _check_finality(self, beacon_block: dict) -> Optional[Event]: + async def _check_finality(self, beacon_block: dict) -> Event | None: slot_number = int(beacon_block["slot"]) epoch_number = slot_number // 32 timestamp = beacon_block_to_date(slot_number) diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 0c208533..70ffb752 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -1,6 +1,6 @@ import logging import re -from datetime import datetime, timedelta, 
timezone +from datetime import UTC, datetime, timedelta from io import BytesIO import anthropic @@ -62,7 +62,7 @@ async def summarize_chat(self, interaction: Interaction): await interaction.response.defer(ephemeral=True) last_ts = await self.bot.db["last_summary"].find_one({"channel_id": interaction.channel.id}) # ratelimit - if last_ts and (datetime.now(timezone.utc) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): + if last_ts and (datetime.now(UTC) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): await interaction.followup.send("You can only summarize once every 6 hours.", ephemeral=True) return if interaction.channel.id not in [405163713063288832]: @@ -70,7 +70,7 @@ async def summarize_chat(self, interaction: Interaction): return msg = await interaction.channel.send("Summarizing chat…") last_ts = last_ts["timestamp"].replace( - tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(timezone.utc) - timedelta(days=365) + tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(UTC) - timedelta(days=365) prompt = ( "Task Description:\n" "I need a summary of the entire chat log. 
This summary should be presented in the form of a bullet list.\n\n" @@ -137,7 +137,7 @@ async def summarize_chat(self, interaction: Interaction): # save the timestamp of the last summary await self.bot.db["last_summary"].update_one( {"channel_id": interaction.channel.id}, - {"$set": {"timestamp": datetime.now(timezone.utc)}}, + {"$set": {"timestamp": datetime.now(UTC)}}, upsert=True ) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 1b450a97..98e9a21a 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -1,6 +1,7 @@ +import functools import logging +import operator from io import BytesIO -from typing import Optional import inflect import matplotlib as mpl @@ -69,7 +70,7 @@ async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, } -async def get_average_collateral_percentage_per_node(collateral_cap: Optional[int], bonded: bool): +async def get_average_collateral_percentage_per_node(collateral_cap: int | None, bonded: bool): # get stakes for each node stakes = list((await get_node_minipools_and_collateral()).values()) # get the current rpl price @@ -114,7 +115,7 @@ def __init__(self, bot: RocketWatch): bonded="Calculate collateral as a percent of bonded eth instead of borrowed") async def node_tvl_vs_collateral(self, interaction: Interaction, - node_address: str = None, + node_address: str | None = None, bonded: bool = False): """ Show a scatter plot of collateral ratios for given node TVLs @@ -237,7 +238,7 @@ async def collateral_distribution(self, data = await get_average_collateral_percentage_per_node(collateral_cap, bonded) distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] - counts = sum(([collateral] * num_nodes for collateral, num_nodes in distribution), []) + counts = functools.reduce(operator.iadd, ([collateral] * num_nodes for collateral, num_nodes in 
distribution), []) # If the raw data were requested, print them and exit early if raw: diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index a0908ff4..b5e3efa8 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -1,5 +1,6 @@ +import contextlib import logging -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta import aiohttp from discord import Interaction @@ -77,13 +78,12 @@ async def check_for_new_events(self): # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - async with aiohttp.ClientSession() as session: - async with session.get("https://api.cow.fi/mainnet/api/v1/auction") as response: - if response.status != 200: - text = await response.text() - log.error("Cow API returned non-200 status code: %s", text) - raise Exception("Cow API returned non-200 status code") - cow_orders = (await response.json())["orders"] + async with aiohttp.ClientSession() as session, session.get("https://api.cow.fi/mainnet/api/v1/auction") as response: + if response.status != 200: + text = await response.text() + log.error("Cow API returned non-200 status code: %s", text) + raise Exception("Cow API returned non-200 status code") + cow_orders = (await response.json())["orders"] """ entity example: @@ -165,10 +165,8 @@ async def check_for_new_events(self): # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["sellAmount"])) s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) - try: + with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - except Exception: - pass data["otherAmount"] = solidity.to_float(int(order["buyAmount"]), decimals) else: token = "reth" if order["buyToken"] == self.tokens[1] else "rpl" @@ -176,10 +174,8 @@ async def check_for_new_events(self): # store rpl and other 
token amount data["ourAmount"] = solidity.to_float(int(order["buyAmount"])) s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) - try: + with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - except Exception: - pass data["otherAmount"] = solidity.to_float(int(order["sellAmount"]), decimals) # our/other ratio data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] @@ -196,12 +192,11 @@ async def check_for_new_events(self): # request more data from the api try: - async with aiohttp.ClientSession() as session: - async with session.get(f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}") as t: - if t.status != 200: - log.error(f"Failed to get more data from the cow api for order {order['uid']}: {await t.text()}") - continue - extra = await t.json() + async with aiohttp.ClientSession() as session, session.get(f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}") as t: + if t.status != 200: + log.error(f"Failed to get more data from the cow api for order {order['uid']}: {await t.text()}") + continue + extra = await t.json() except Exception as e: log.error(f"Failed to get more data from the cow api for order {order['uid']}: {e}") continue @@ -211,7 +206,7 @@ async def check_for_new_events(self): log.info(f"Order {order['uid']} is invalidated, skipping") continue created = datetime.fromisoformat(extra["creationDate"].replace("Z", "+00:00")) - if datetime.now(timezone.utc) - created > timedelta(minutes=15): + if datetime.now(UTC) - created > timedelta(minutes=15): log.info(f"Order {order['uid']} is older than 15 minutes, skipping") continue data["timestamp"] = int(created.timestamp()) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index eea912f3..30121be2 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -209,7 +209,7 @@ async def _get_recent_proposals(self, interaction: 
Interaction, current: str) -> titles: list[str] = await rp.multicall([ proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions ]) - return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles)] + return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles, strict=False)] @command() @describe(proposal="proposal to show voters for") diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 059c1a60..3b3457da 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -4,7 +4,7 @@ from collections import defaultdict from collections.abc import Callable from datetime import timedelta -from typing import Any, Optional +from typing import Any import pymongo from cronitor import Monitor @@ -164,7 +164,7 @@ async def _batch_multicall_update( query: dict[str, Any], call_fn: Callable[[dict[str, Any]], list[tuple]], projection: dict[str, Any], - label: Optional[str], + label: str | None, ) -> None: items = await collection.find(query, projection).to_list() if not items: @@ -189,7 +189,7 @@ async def _batch_multicall_update( calls = [(e[1], e[2]) for e in expanded] results = await rp.multicall(calls) updates = defaultdict(dict) - for e, value in zip(expanded, results): + for e, value in zip(expanded, results, strict=False): addr, transform, field = e[0], e[3], e[4] if transform is not None and value is not None: value = transform(value) @@ -214,7 +214,7 @@ async def add_untracked_node_operators(self): data = {} for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall([nm.functions.getNodeAt(i) for i in index_batch]) - data |= dict(zip(index_batch, results)) + data |= dict(zip(index_batch, results, strict=False)) await self.bot.db.node_operators.insert_many( [{"_id": i, "address": 
w3.to_checksum_address(a)} for i, a in data.items()] ) @@ -314,7 +314,7 @@ async def add_untracked_minipools(self): for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): results = await rp.multicall([mm.functions.getMinipoolAt(i) for i in index_batch]) await self.bot.db.minipools.insert_many( - [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results)] + [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results, strict=False)] ) @timerun_async @@ -518,7 +518,7 @@ async def update_dynamic_megapool_validator_data(self): ] results = await rp.multicall(fns) ops = [] - for v, info_raw in zip(batch, results): + for v, info_raw in zip(batch, results, strict=False): info = _unpack_validator_info_dynamic(info_raw) if info_raw is not None else None if info is not None: ops.append(UpdateOne({"_id": v["_id"]}, {"$set": info})) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index f4a320b1..31e6c59d 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -3,6 +3,7 @@ import logging import random import time +from datetime import UTC import aiohttp import humanize @@ -76,7 +77,7 @@ async def get_members_of_role(self, interaction: Interaction, guild_id: str, rol file = File(io.StringIO(content), "members.txt") await interaction.followup.send(file=file) except Exception as err: - await interaction.followup.send(content=f"```{repr(err)}```") + await interaction.followup.send(content=f"```{err!r}```") # list all roles of a guild with name and id @command() @@ -95,7 +96,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): file = File(io.StringIO(content), filename="roles.txt") await interaction.followup.send(file=file) except Exception as err: - await interaction.followup.send(content=f"```{repr(err)}```") + await interaction.followup.send(content=f"```{err!r}```") @command() 
@guilds(cfg.discord.owner.server_id) @@ -127,7 +128,7 @@ async def edit_embed(self, interaction: Interaction, message_url: str, new_descr @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str = None): + async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str | None = None): """ Decode transaction calldata """ @@ -219,7 +220,7 @@ async def restore_support_template(self, interaction: Interaction, template_name template_description = "\n".join(template_embed.description.splitlines()[:-2]) import re - from datetime import datetime, timezone + from datetime import datetime edit_line = template_embed.description.splitlines()[-1] match = re.search(r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line) @@ -230,7 +231,7 @@ async def restore_support_template(self, interaction: Interaction, template_name await self.bot.db.support_bot_dumps.insert_one( { - "ts" : datetime.fromtimestamp(ts, tz=timezone.utc), + "ts" : datetime.fromtimestamp(ts, tz=UTC), "template": template_name, "prev" : None, "new" : { @@ -309,9 +310,8 @@ async def asian_restaurant_name(self, interaction: Interaction): Randomly generated Asian restaurant names """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) - async with aiohttp.ClientSession() as session: - async with session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: - a = (await resp.json())["name"] + async with aiohttp.ClientSession() as session, session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: + a = (await resp.json())["name"] await interaction.followup.send(a) @command() @@ -347,7 +347,7 @@ async def get_abi_of_contract(self, interaction: Interaction, contract: str): file = File(io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") await interaction.followup.send(file=file) except Exception as err: - await 
interaction.followup.send(content=f"```Exception: {repr(err)}```") + await interaction.followup.send(content=f"```Exception: {err!r}```") @command() async def get_address_of_contract(self, interaction: Interaction, contract: str): @@ -359,7 +359,7 @@ async def get_address_of_contract(self, interaction: Interaction, contract: str) address = await rp.uncached_get_address_by_name(contract) await interaction.followup.send(content=await el_explorer_url(address)) except Exception as err: - await interaction.followup.send(content=f"Exception: ```{repr(err)}```") + await interaction.followup.send(content=f"Exception: ```{err!r}```") if "No address found for" in repr(err): # private response as a tip m = ( @@ -382,7 +382,7 @@ async def call( function: str, json_args: str = "[]", block: str = "latest", - address: str = None, + address: str | None = None, raw_output: bool = False ): """Call Function of Contract""" @@ -396,7 +396,7 @@ async def call( args = [args] v = await rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) except Exception as err: - await interaction.followup.send(content=f"Exception: ```{repr(err)}```") + await interaction.followup.send(content=f"Exception: ```{err!r}```") return try: g = await rp.estimate_gas_for_call(function, *args, block=block) @@ -413,7 +413,7 @@ async def call( text += "too long, attached as file`" await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) else: - text += f"{str(v)}`" + text += f"{v!s}`" await interaction.followup.send(content=text) # --------- OTHERS --------- # diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index c6db8322..21add9d9 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -2,8 +2,7 @@ import contextlib import io import logging -from datetime import datetime, timedelta, timezone -from typing import Optional 
+from datetime import UTC, datetime, timedelta from urllib import parse import regex as re @@ -161,7 +160,7 @@ def _get_message_content(message: Message, *, preserve_formatting: bool = False) return text - async def _generate_message_report(self, message: Message, reason: str) -> Optional[tuple[Embed, Embed, File]]: + async def _generate_message_report(self, message: Message, reason: str) -> tuple[Embed, Embed, File] | None: try: message = await message.channel.fetch_message(message.id) if isinstance(message, DeletedReferencedMessage): @@ -210,7 +209,7 @@ async def _generate_message_report(self, message: Message, reason: str) -> Optio }) return warning, report, contents - async def _generate_thread_report(self, thread: Thread, reason: str) -> Optional[tuple[Embed, Embed]]: + async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Embed, Embed] | None: try: thread = await thread.guild.fetch_channel(thread.id) except (errors.NotFound, errors.Forbidden): @@ -307,14 +306,14 @@ async def manual_message_report(self, interaction: Interaction, message: Message await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"warning_id": warning_msg.id}}) await interaction.followup.send(content="Thanks for reporting!") - def _markdown_link_trick(self, message: Message) -> Optional[str]: + def _markdown_link_trick(self, message: Message) -> str | None: txt = self._get_message_content(message) for m in self.markdown_link_pattern.findall(txt): if "." 
in m[0] and m[0] != m[1]: return "Markdown link with possible domain in visible portion that does not match the actual domain" return None - def _discord_invite(self, message: Message) -> Optional[str]: + def _discord_invite(self, message: Message) -> str | None: txt = self._get_message_content(message) if match := self.invite_pattern.search(txt): link = match.group(0) @@ -326,14 +325,14 @@ def _discord_invite(self, message: Message) -> Optional[str]: return "Invite to external server" return None - def _tap_on_this(self, message: Message) -> Optional[str]: + def _tap_on_this(self, message: Message) -> str | None: txt = self._get_message_content(message) keywords = ( [("tap on", "click on"), "proper"] ) return "Tap on deez nuts nerd" if self.__txt_contains(txt, keywords) else None - def _ticket_system(self, message: Message) -> Optional[str]: + def _ticket_system(self, message: Message) -> str | None: # message contains one of the relevant keyword combinations and a link txt = self._get_message_content(message) if not self.basic_url_pattern.search(txt): @@ -376,7 +375,7 @@ def __txt_contains(txt: str, kw: list | tuple | str) -> bool: return all(map(lambda w: DetectScam.__txt_contains(txt, w), kw)) return False - def _paperhands(self, message: Message) -> Optional[str]: + def _paperhands(self, message: Message) -> str | None: # message contains the word "paperhand" and a link txt = self._get_message_content(message) if "http" not in txt: @@ -392,13 +391,13 @@ def _paperhands(self, message: Message) -> Optional[str]: return None # contains @here or @everyone but doesn't actually have the permission to do so - def _mention_everyone(self, message: Message) -> Optional[str]: + def _mention_everyone(self, message: Message) -> str | None: txt = self._get_message_content(message) if ("@here" in txt or "@everyone" in txt) and not message.author.guild_permissions.mention_everyone: return "Mentioned @here or @everyone without permission" return None - async def 
_reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: + async def _reaction_spam(self, reaction: Reaction, user: User) -> str | None: # user reacts to their own message multiple times in quick succession to draw attention # check if user is a bot if user.bot: @@ -411,7 +410,7 @@ async def _reaction_spam(self, reaction: Reaction, user: User) -> Optional[str]: return None # check if the message is new enough (we ignore any reactions on messages older than 5 minutes) - if (reaction.message.created_at - datetime.now(timezone.utc)) > timedelta(minutes=5): + if (reaction.message.created_at - datetime.now(UTC)) > timedelta(minutes=5): log.debug(f"Ignoring reaction on old message {reaction.message.id}") return None @@ -556,7 +555,7 @@ async def on_thread_create(self, thread: Thread) -> None: return keywords = ("support", "tick", "assistance", "error", "🎫", "🎟️") - if any(kw in thread.name.lower() for kw in keywords) or re.search(r"(-|–|—)\d{3,}", thread.name): + if any(kw in thread.name.lower() for kw in keywords) or re.search(r"(-|–|—)\d{3,}", thread.name): # noqa: RUF001 await self.report_thread(thread, "Illegitimate support thread") return names = (".", "!", "///") @@ -609,7 +608,7 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No ) await interaction.followup.send(content="Thanks for reporting!") - async def _generate_user_report(self, user: Member, reason: str) -> Optional[Embed]: + async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: if not isinstance(user, Member): return None diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 1fad9507..037e6382 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -4,7 +4,7 @@ import time from datetime import datetime, timedelta from enum import Enum -from typing import Any, Optional +from typing import Any import discord import pymongo 
@@ -178,7 +178,7 @@ async def process_event_queue(self) -> None: log.debug("No pending events in queue") return - def try_load(_entry: dict, _key: str) -> Optional[Any]: + def try_load(_entry: dict, _key: str) -> Any | None: try: serialized = _entry.get(_key) return pickle.loads(serialized) if serialized else None @@ -200,7 +200,7 @@ def try_load(_entry: dict, _key: str) -> Optional[Any]: await self.bot.db.state_messages.delete_one({"channel_id": channel_id}) for event_entry in db_events: - embed: Optional[Embed] = try_load(event_entry, "embed") + embed: Embed | None = try_load(event_entry, "embed") files = [] if embed and (image := try_load(event_entry, "image")): @@ -268,8 +268,8 @@ async def show_service_interrupt(self) -> None: async def _replace_or_add_status( self, target_channel: str, - embed: Optional[Embed], - prev_status: Optional[dict] + embed: Embed | None, + prev_status: dict | None ) -> None: target_channel_id = self.channels.get(target_channel) or self.channels["default"] diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index a0e19ada..f3985639 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -2,8 +2,8 @@ import json import logging import warnings -from collections.abc import Coroutine -from typing import Callable, Literal, Optional +from collections.abc import Callable, Coroutine +from typing import Literal from discord import Interaction from discord.app_commands import command, guilds @@ -216,7 +216,7 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) self._partial_filters, self.event_map, self.topic_map = old_config raise err - async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], Optional[BlockNumber]]: + async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], BlockNumber | None]: events.sort(key=lambda e: (e.blockNumber, e.logIndex)) messages = [] 
upgrade_block = None @@ -233,10 +233,10 @@ async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[li args_hash = hashlib.md5() - def hash_args(_args: aDict) -> None: + def hash_args(_args: aDict, _hash=args_hash) -> None: for k, v in sorted(_args.items()): if not ("time" in k.lower() or "block" in k.lower()): - args_hash.update(f"{k}:{v}".encode()) + _hash.update(f"{k}:{v}".encode()) event_name, embed = None, None if (n := rp.get_name_by_address(event.address)) and "topics" in event: @@ -346,10 +346,10 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: tx_aggregates[full_event_name] = amount + _event["args"]["amountOfStETH"] elif full_event_name == "rocketTokenRETH.Transfer": conflicting_events = ["rocketTokenRETH.TokensBurned", "rocketDepositPool.DepositReceived"] - if any((event in events_by_name for event in conflicting_events)): + if any(event in events_by_name for event in conflicting_events): events.remove(event) continue - if prev_event := tx_aggregates.get(full_event_name, None): + if prev_event := tx_aggregates.get(full_event_name): # only keep largest rETH transfer contract = await rp.get_contract_by_address(event["address"]) _event = aDict(contract.events[event_name]().process_log(event)) @@ -403,7 +403,7 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: return events - async def handle_global_event(self, event_name: str, event: aDict) -> Optional[Embed]: + async def handle_global_event(self, event_name: str, event: aDict) -> Embed | None: receipt = await w3.eth.get_transaction_receipt(event.transactionHash) is_minipool_event = await rp.is_minipool(event.address) or await rp.is_minipool(receipt.to) @@ -459,7 +459,7 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Optional[E return await self.handle_event(event_name, event) - async def handle_event(self, event_name: str, event: aDict) -> Optional[Embed]: + async def handle_event(self, event_name: str, 
event: aDict) -> Embed | None: args = aDict(event.args) if "negative_rETH_ratio_update_event" in event_name: @@ -566,7 +566,7 @@ def share_repr(percentage: float) -> str: args.newRatio = 100 * solidity.to_float(args.newRatio) if "submission" in args: - args.submission = aDict(dict(zip(SUBMISSION_KEYS, args.submission))) + args.submission = aDict(dict(zip(SUBMISSION_KEYS, args.submission, strict=False))) if "otc_swap" in event_name: # signer = seller @@ -802,8 +802,7 @@ def share_repr(percentage: float) -> str: for withdraw_event in processed_logs: # event.logindex 44, withdraw_event.logindex 50, rough distance like that # reminder order is different than the previous example - if event.logIndex - 7 < withdraw_event.logIndex < event.logIndex: - if withdraw_event.args["by"] == deposit_contract: + if event.logIndex - 7 < withdraw_event.logIndex < event.logIndex and withdraw_event.args["by"] == deposit_contract: args.balanceAmount = withdraw_event.args["amount"] args.creditAmount -= args.balanceAmount break diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 903d7e62..c121f6a4 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -73,7 +73,7 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", labels[i] = "" ax.set_title(f"{bond} ETH") - ax.pie(sizes, labels=labels, autopct=lambda p: f"{p * total / 100:.0f}" if (p >= 5) else "") + ax.pie(sizes, labels=labels, autopct=lambda p, _total=total: f"{p * _total / 100:.0f}" if (p >= 5) else "") if mode == "tree": e.description = f"```\n{render_tree_legacy(tree, 'Minipools')}\n```" diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 470b584b..19514007 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -1,7 +1,7 @@ import logging from dataclasses import 
dataclass from datetime import datetime -from typing import Literal, Optional, cast +from typing import Literal, cast import aiohttp from discord import Interaction @@ -46,7 +46,7 @@ def __str__(self) -> str: class User: id: int username: str - name: Optional[str] + name: str | None topic_count: int post_count: int likes_received: int diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 468aad05..d4cc1aa5 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -58,7 +58,7 @@ async def load_sync_committee(self, period): InsertOne({"index": i, "validator": int(validator)}) for i, validator in enumerate(validators) ] - async with self.bot.db.client.start_session() as session: + async with self.bot.db.client.start_session() as session: # noqa: SIM117 async with await session.start_transaction(): await col.delete_many({}) await col.bulk_write(payload) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index f445fdc2..820cd52c 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -119,7 +119,7 @@ async def metrics_chart(self, interaction: Interaction): ])).to_list(None) # create a new figure - fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) + _fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) # plot the command usage as bars ax1.bar([f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in command_usage], [x['total'] for x in command_usage]) diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 42da3b14..9268c21e 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -153,7 +153,7 @@ async def node_gini(self, interaction: Interaction, raw: bool = False): if raw: description = "" # count 
number of nodes in 5% intervals + significant thresholds - ticks = list(np.arange(0.05, 1, 0.05)) + [1 / 3, 2 / 3, 1.0] + ticks = [*list(np.arange(0.05, 1, 0.05)), 1 / 3, 2 / 3, 1.0] for threshold in sorted(ticks): index = y.searchsorted(threshold) num_nodes = x[index] diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index b1c2fe7d..49507bff 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -51,7 +51,7 @@ async def run_loop(self): # delete and resend message channel = self.bot.get_channel(message["channel_id"]) # check if we have message sent already and if its the latest message in the channel - if "message_id" in message and message["message_id"]: + if message.get("message_id"): messages = [message async for message in channel.history(limit=5)] # if it isnt within the last 5 messages, we need to resend it if any(m.id == message["message_id"] for m in messages): diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 4f6e3c34..b5bf4383 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -347,7 +347,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): # use plt.stackplot to stack the data x = list(data.keys()) y = {v: [] for v in versions} - for date, value_ in data.items(): + for _date, value_ in data.items(): for version in versions: y[version].append(value_.get(version, 0)) @@ -407,11 +407,11 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = unobserved_minipools -= data["remove_from_total"]["validator_count"] minipools.insert(0, ("No proposals yet", unobserved_minipools)) # move "Unknown" to be before "No proposals yet" - minipools.insert(1, minipools.pop([i for i, (x, y) in enumerate(minipools) if x == "Unknown"][0])) + minipools.insert(1, 
minipools.pop(next(i for i, (x, y) in enumerate(minipools) if x == "Unknown"))) # move "External (if it exists) to be before "Unknown" # minipools is a list of tuples (name, count) if "External" in [x for x, y in minipools]: - minipools.insert(2, minipools.pop([i for i, (x, y) in enumerate(minipools) if x == "External"][0])) + minipools.insert(2, minipools.pop(next(i for i, (x, y) in enumerate(minipools) if x == "External"))) # get node operators node_operators = [(x, y["count"]) for x, y in data.items() if x != "remove_from_total"] @@ -426,17 +426,17 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = unobserved_node_operators -= data["remove_from_total"]["count"] node_operators.insert(0, ("No proposals yet", unobserved_node_operators)) # move "Unknown" to be before "No proposals yet" - node_operators.insert(1, node_operators.pop([i for i, (x, y) in enumerate(node_operators) if x == "Unknown"][0])) + node_operators.insert(1, node_operators.pop(next(i for i, (x, y) in enumerate(node_operators) if x == "Unknown"))) # move "External (if it exists) to be before "Unknown" # node_operators is a list of tuples (name, count) if "External" in [x for x, y in node_operators]: - node_operators.insert(2, node_operators.pop([i for i, (x, y) in enumerate(node_operators) if x == "External"][0])) + node_operators.insert(2, node_operators.pop(next(i for i, (x, y) in enumerate(node_operators) if x == "External"))) # sort data ax1.pie( [x[1] for x in minipools], colors=[COLORS.get(x[0], "red") for x in minipools], - autopct=lambda pct: ('%.1f%%' % pct) if pct > 5 else '', + autopct=lambda pct: (f'{pct:.1f}%') if pct > 5 else '', startangle=90, textprops={'fontsize': '12'}, ) @@ -454,7 +454,7 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = ax2.pie( [x[1] for x in node_operators], colors=[COLORS.get(x[0], "red") for x in node_operators], - autopct=lambda pct: ('%.1f%%' % pct) if pct > 5 else '', + autopct=lambda pct: 
(f'{pct:.1f}%') if pct > 5 else '', startangle=90, textprops={'fontsize': '12'}, ) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index ddeaeb0d..2785a3be 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -52,9 +52,8 @@ async def burn_reason(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) url = "https://ultrasound.money/api/fees/grouped-analysis-1" # get data from url using aiohttp - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - data = await resp.json() + async with aiohttp.ClientSession() as session, session.get(url) as resp: + data = await resp.json() e = Embed() e.set_author(name="🔗 Data from ultrasound.money", url="https://ultrasound.money") @@ -125,7 +124,7 @@ async def dev_time(self, interaction: Interaction): await interaction.followup.send(embed=e) @command() - async def sea_creatures(self, interaction: Interaction, address: str = None): + async def sea_creatures(self, interaction: Interaction, address: str | None = None): """List all sea creatures with their required minimum holding.""" await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() @@ -143,7 +142,7 @@ async def sea_creatures(self, interaction: Interaction, address: str = None): e.description = f"No sea creature for {address}" else: # get the required holding from the dictionary - required_holding = [h for h, c in sea_creatures.items() if c == creature[0]][0] + required_holding = next(h for h, c in sea_creatures.items() if c == creature[0]) e.add_field(name="Visualization", value=await el_explorer_url(address, prefix=creature), inline=False) e.add_field(name="Required holding for emoji", value=f"{required_holding * len(creature)} ETH", inline=False) holding = await get_holding_for_address(address) diff --git a/rocketwatch/plugins/reloader/reloader.py 
b/rocketwatch/plugins/reloader/reloader.py index 43af48de..8605775a 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -19,11 +19,11 @@ def __init__(self, bot: RocketWatch): self.bot = bot async def _get_loaded_extensions(self, interaction: Interaction, current: str) -> list[Choice[str]]: - loaded = {ext.split(".")[-1] for ext in self.bot.extensions.keys()} + loaded = {ext.split(".")[-1] for ext in self.bot.extensions} return [Choice(name=plugin, value=plugin) for plugin in loaded if current.lower() in plugin.lower()][:25] async def _get_unloaded_extensions(self, interaction: Interaction, current: str) -> list[Choice[str]]: - loaded = {ext.split(".")[-1] for ext in self.bot.extensions.keys()} + loaded = {ext.split(".")[-1] for ext in self.bot.extensions} all = {path.stem for path in Path("plugins").glob('**/*.py')} return [Choice(name=plugin, value=plugin) for plugin in (all - loaded) if current.lower() in plugin.lower()][:25] diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index c73472f4..7409bde8 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -1,7 +1,6 @@ import logging from dataclasses import dataclass from io import BytesIO -from typing import Optional import aiohttp import matplotlib.pyplot as plt @@ -44,7 +43,7 @@ async def _make_request(self, address) -> dict: response = await session.get(f"https://sprocketpool.net/api/node/{address}") return await response.json() - async def get_estimated_rewards(self, interaction: Interaction, address: str) -> Optional[RewardEstimate]: + async def get_estimated_rewards(self, interaction: Interaction, address: str) -> RewardEstimate | None: if not await rp.call("rocketNodeManager.getNodeExists", address): await interaction.followup.send(f"{address} is not a registered node.") return None @@ -160,7 +159,7 @@ async def simulate_rewards( total_supply: int = await 
rp.call("rocketTokenRPL.totalSupply", block=reward_start_block) period_inflation: int = total_supply - for i in range(num_inflation_intervals): + for _i in range(num_inflation_intervals): period_inflation = solidity.to_int(period_inflation * inflation_rate) period_inflation -= total_supply @@ -190,7 +189,7 @@ def rewards_at(_stake: float, _borrowed_eth: float) -> float: cur_color, cur_label, cur_ls = "#eb8e55", "current", "solid" sim_color, sim_label, sim_ls = "darkred", "simulated", "dashed" - def draw_reward_curve(_color: str, _label: Optional[str], _line_style: str, _borrowed_eth: float) -> None: + def draw_reward_curve(_color: str, _label: str | None, _line_style: str, _borrowed_eth: float) -> None: step_size = max(1, (x_max - x_min) // 1000) x = np.arange(x_min, x_max, step_size, dtype=int) y = np.array([rewards_at(x, _borrowed_eth) for x in x]) @@ -242,7 +241,7 @@ def formatter(_x, _pos) -> str: ax.set_ylim((y_min, y_max)) handles, labels = ax.get_legend_handles_labels() - by_label = dict(zip(labels, handles)) + by_label = dict(zip(labels, handles, strict=False)) plt.legend(by_label.values(), by_label.keys(), loc="lower right") fig.tight_layout() diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index c0adbba9..101b2e4e 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -1,7 +1,6 @@ import logging from datetime import datetime, timedelta from io import BytesIO -from typing import Optional import matplotlib.pyplot as plt from discord import File, Interaction @@ -53,7 +52,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: updates.append((ts, assets)) db_operations.append(InsertOne({"time": ts, "assets": assets})) - async with self.bot.db.client.start_session() as session: + async with self.bot.db.client.start_session() as session: # noqa: SIM117 async with await session.start_transaction(): if db_operations: await 
self.bot.db.rocksolid.bulk_write(db_operations) @@ -82,7 +81,7 @@ async def get_eth_rate(block_number: int) -> int: current_eth_rate = await get_eth_rate(current_block) - async def get_apy(days: int) -> Optional[float]: + async def get_apy(days: int) -> float | None: reference_block = await ts_to_block(now - timedelta(days=days).total_seconds()) if reference_block < self.deployment_block: return None diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 0f824d35..d4b08ffc 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -50,7 +50,7 @@ async def rpip(self, interaction: Interaction, name: str): await interaction.followup.send(embed=embed) class RPIP: - __slots__ = ("title", "number", "status") + __slots__ = ("number", "status", "title") def __init__(self, title: str, number: int, status: str): self.title = title @@ -63,9 +63,8 @@ def __str__(self) -> str: @cached(ttl=300, key_builder=lambda _, rpip: rpip.number) @retry_async(tries=3, delay=1) async def fetch_details(self) -> dict: - async with aiohttp.ClientSession() as session: - async with session.get(self.url) as resp: - html = await resp.text() + async with aiohttp.ClientSession() as session, session.get(self.url) as resp: + html = await resp.text() soup = BeautifulSoup(html, "html.parser") metadata = {} @@ -107,12 +106,11 @@ async def _get_rpip_names(self, interaction: Interaction, current: str) -> list[ @cached(ttl=60) @retry_async(tries=3, delay=1) async def get_all_rpips() -> list['RPIPs.RPIP']: - async with aiohttp.ClientSession() as session: - async with session.get("https://rpips.rocketpool.net/all") as resp: - html = await resp.text() + async with aiohttp.ClientSession() as session, session.get("https://rpips.rocketpool.net/all") as resp: + html = await resp.text() soup = BeautifulSoup(html, "html.parser") - rpips: list['RPIPs.RPIP'] = [] + rpips: list[RPIPs.RPIP] = [] for row in soup.table.find_all("tr", recursive=False): 
title = row.find("td", {"class": "title"}).text.strip() diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 9daf79ea..0c9fa483 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -150,7 +150,7 @@ async def withdrawable_rpl(self, interaction: Interaction): current_withdrawable_rpl = liquid_rpl # break the tuples into lists to plot - x, y = zip(*list(free_rpl_liquidity.values())) + x, y = zip(*list(free_rpl_liquidity.values()), strict=False) embed = Embed() @@ -169,8 +169,8 @@ async def withdrawable_rpl(self, interaction: Interaction): ax = plt.gca() ax.set_ylabel("Withdrawable RPL") ax.set_xlabel("RPL / ETH ratio") - ax.yaxis.set_major_formatter(lambda x, _: "{:.1f}m".format(x / 1000000)) - ax.xaxis.set_major_formatter(lambda x, _: "{:.4f}".format(x)) + ax.yaxis.set_major_formatter(lambda x, _: f"{x / 1000000:.1f}m") + ax.xaxis.set_major_formatter(lambda x, _: f"{x:.4f}") img = BytesIO() plt.tight_layout() diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 275277db..371d507b 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -37,12 +37,11 @@ def __init__(self, bot: RocketWatch): @staticmethod @retry_async(tries=3, delay=1) - async def _query_api(query: Query) -> list[dict] | Optional[dict]: + async def _query_api(query: Query) -> list[dict] | dict | None: query_json = {"query": Operation(type="query", queries=[query]).render()} log.debug(f"Snapshot query: {query_json}") - async with aiohttp.ClientSession() as session: - async with session.get("https://hub.snapshot.org/graphql", json=query_json) as resp: - response = await resp.json() + async with aiohttp.ClientSession() as session, session.get("https://hub.snapshot.org/graphql", json=query_json) as resp: + response = await resp.json() if "errors" in response: raise Exception(response["errors"]) return response["data"][query.name] @@ -162,7 +161,7 @@ 
def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - proposal_height += self._TITLE_SIZE + self._V_SPACE_LARGE # order (choice, score) pairs by score - choice_scores = list(zip(self.choices, self.scores)) + choice_scores = list(zip(self.choices, self.scores, strict=False)) choice_scores.sort(key=lambda x: x[1], reverse=True) for choice, score in choice_scores: proposal_height += render_choice(choice, score, x_offset, y_offset + proposal_height) @@ -266,7 +265,7 @@ def create_reached_quorum_event(self, block_number: BlockNumber) -> Event: async def create_end_event(self) -> Event: max_for, max_against = 0, 0 - for choice, score in zip(self.choices, self.scores): + for choice, score in zip(self.choices, self.scores, strict=False): if "against" in choice.lower(): max_against = max(max_against, score) elif "abstain" not in choice.lower(): @@ -303,7 +302,7 @@ class Vote: choice: Choice reason: str - def pretty_print(self) -> Optional[str]: + def pretty_print(self) -> str | None: match (raw_choice := self.choice): case int(): return self._format_single_choice(raw_choice) @@ -350,7 +349,7 @@ def _format_weighted_choice(self, choice: WeightedChoice) -> str: ) return "```" + graph.get_string().replace("]", "%]") + "```" - async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[Event]: + async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | None: node = await rp.call("rocketSignerRegistry.signerToNode", self.voter) signer = await el_explorer_url(self.voter) voter = signer if (node == ADDRESS_ZERO) else await el_explorer_url(node) @@ -414,13 +413,13 @@ async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Optional[E ) @staticmethod - async def fetch_proposal(proposal_id: str) -> Optional[Proposal]: + async def fetch_proposal(proposal_id: str) -> Proposal | None: query = Query( name="proposal", arguments=[Argument(name="id", value=f"\"{proposal_id}\"")], fields=["id", "title", "choices", 
"start", "end", "scores", "quorum"] ) - response: Optional[dict] = await Snapshot._query_api(query) + response: dict | None = await Snapshot._query_api(query) return Snapshot.Proposal(**response) if response else None @staticmethod @@ -591,8 +590,8 @@ async def snapshot_votes(self, interaction: Interaction): return await interaction.followup.send(embed=embed) num_proposals = len(proposals) - num_cols = min(int(math.ceil(math.sqrt(num_proposals))), 4) - num_rows = int(math.ceil(num_proposals / num_cols)) + num_cols = min(math.ceil(math.sqrt(num_proposals)), 4) + num_rows = math.ceil(num_proposals / num_cols) v_spacing = 120 h_spacing = 80 diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index e3665898..6d20880a 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -1,6 +1,6 @@ import io import logging -from datetime import datetime, timezone +from datetime import UTC, datetime from bson import CodecOptions from discord import ButtonStyle, File, Interaction, TextStyle, User, app_commands, ui @@ -104,7 +104,7 @@ async def on_submit(self, interaction: Interaction) -> None: try: await self.db.support_bot_dumps.insert_one( { - "ts" : datetime.now(timezone.utc), + "ts" : datetime.now(UTC), "template": self.template_name, "prev" : template, "new" : { diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 51eccf48..aa0b9c26 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -124,9 +124,9 @@ async def get_events_for_block(self, block_number: BlockIdentifier) -> list[Even if "to" in tnx: events.extend(await self.process_transaction(block, tnx, tnx.to, tnx.input)) else: - log.debug(( + log.debug( f"Skipping transaction {tnx.hash.hex()} as it has no `to` parameter. 
" - f"Possible contract creation.") + f"Possible contract creation." ) return events diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index be647f63..db96612e 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -226,8 +226,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "_val"] += contract_balance contract_balance = 0 # if there is still a refund balance, we try to pay it off using the beacon balance - if refund_balance > 0: - if beacon_balance > 0: + if refund_balance > 0 and beacon_balance > 0: if beacon_balance >= refund_balance: beacon_balance -= refund_balance data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"][ diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 05425115..c39303f3 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -100,9 +100,9 @@ def _classify_collection(docs, done_fn): def _collapse_tree(data: dict) -> dict: collapsed_data = {} - for status in data.keys(): + for status in data: if isinstance(data[status], dict) and len(data[status]) == 1: - sub_status = list(data[status].keys())[0] + sub_status = next(iter(data[status].keys())) collapsed_data[status] = data[status][sub_status] else: collapsed_data[status] = data[status] diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 6912d923..e26fc394 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -2,7 +2,7 @@ import logging from collections import OrderedDict from io import BytesIO -from typing import Literal, Optional, cast +from typing import Literal, cast import aiohttp import numpy as np @@ -73,7 +73,7 @@ def __init__(self, bot: RocketWatch): Bitrue("RPL", ["USDT"]), CoinTR("RPL", ["USDT"]), } - self.dex: Optional[set[DEX]] = None + self.dex: 
set[DEX] | None = None async def _get_dex(self) -> set[DEX]: if self.dex is None: @@ -112,7 +112,7 @@ async def _get_cex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[CEX, liquidity: dict[CEX, float] = {} async with aiohttp.ClientSession() as session: requests = [cex.get_liquidity(session) for cex in self.cex] - for result in zip(self.cex, await asyncio.gather(*requests, return_exceptions=True)): + for result in zip(self.cex, await asyncio.gather(*requests, return_exceptions=True), strict=False): if not isinstance(result, Exception): cex, markets = result depth[cex], liquidity[cex] = self._get_market_depth_and_liquidity(markets, x, rpl_usd) @@ -178,7 +178,7 @@ def _plot_data( y_offset = 0.0 max_label_length: int = np.max([len(t[1]) for t in (cex_data_aggr + dex_data_aggr)]) - def add_data(_data: list[tuple[np.ndarray, str, str]], _name: Optional[str]) -> None: + def add_data(_data: list[tuple[np.ndarray, str, str]], _name: str | None) -> None: labels, handles = [], [] for y_values, label, color in _data: y.append(y_values) @@ -258,7 +258,7 @@ async def wall( self, interaction: Interaction, min_price: float = 0.0, - max_price: float = None, + max_price: float | None = None, sources: Literal["All", "CEX", "DEX"] = "All" ) -> None: """Show the current RPL market depth across exchanges""" @@ -273,7 +273,7 @@ async def on_fail() -> None: try: async with aiohttp.ClientSession() as session: # use Binance as USD price oracle - rpl_usd = list((await Binance("RPL", ["USDT"]).get_liquidity(session)).values())[0].price + rpl_usd = next(iter((await Binance("RPL", ["USDT"]).get_liquidity(session)).values())).price eth_usd = await rp.get_eth_usdc_price() rpl_eth = rpl_usd / eth_usd except Exception as e: diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 4fd4d74f..a9dd4935 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -2,7 +2,6 @@ import logging import traceback from pathlib import Path -from typing import Optional 
from discord import ( File, @@ -123,7 +122,7 @@ async def get_or_fetch_channel(self, channel_id: int) -> GuildChannel | PrivateC async def get_or_fetch_user(self, user_id: int) -> User: return self.get_user(user_id) or await self.fetch_user(user_id) - async def report_error(self, exception: Exception, interaction: Optional[Interaction] = None, *args) -> None: + async def report_error(self, exception: Exception, interaction: Interaction | None = None, *args) -> None: err_description = f"`{repr(exception)[:150]}`" if args: diff --git a/rocketwatch/utils/cached_ens.py b/rocketwatch/utils/cached_ens.py index 8d024ddb..2c093faa 100644 --- a/rocketwatch/utils/cached_ens.py +++ b/rocketwatch/utils/cached_ens.py @@ -1,5 +1,4 @@ import logging -from typing import Optional from aiocache import cached from ens import AsyncENS @@ -17,11 +16,11 @@ def __init__(self): self.ens = AsyncENS.from_web3(w3_mainnet) @cached(key_builder=lambda _, _self, address: address) - async def get_name(self, address: ChecksumAddress) -> Optional[str]: + async def get_name(self, address: ChecksumAddress) -> str | None: log.debug(f"Retrieving ENS name for {address}") return await self.ens.name(address) @cached(key_builder=lambda _, _self, name: name) - async def resolve_name(self, name: str) -> Optional[ChecksumAddress]: + async def resolve_name(self, name: str) -> ChecksumAddress | None: log.debug(f"Resolving ENS name {name}") return await self.ens.address(name) diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index ef32107f..1dc57a34 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -129,7 +129,7 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: ]) proposals = {state: [] for state in DefaultDAO.ProposalState} - for proposal_id, state in zip(relevant_proposals, proposal_states): + for proposal_id, state in zip(relevant_proposals, proposal_states, strict=False): proposals[state].append(proposal_id) return proposals diff --git 
a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index f7f51ded..89ad40c1 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -2,7 +2,8 @@ import datetime import logging import math -from typing import Callable, Literal, Optional +from collections.abc import Callable +from typing import Literal import aiohttp import discord @@ -84,9 +85,8 @@ async def resolve_ens(interaction, node_address): async def get_pdao_delegates() -> dict[str, str]: global _pdao_delegates try: - async with aiohttp.ClientSession() as session: - async with session.get("https://delegates.rocketpool.net/api/delegates") as resp: - _pdao_delegates = {d["nodeAddress"]: d["name"] for d in await resp.json()} + async with aiohttp.ClientSession() as session, session.get("https://delegates.rocketpool.net/api/delegates") as resp: + _pdao_delegates = {d["nodeAddress"]: d["name"] for d in await resp.json()} except Exception: log.warning("Failed to fetch pDAO delegates.") return _pdao_delegates @@ -96,7 +96,7 @@ async def el_explorer_url( target: str, name: str = "", prefix: str | Literal[-1] = "", - name_fmt: Optional[Callable[[str], str]] = None, + name_fmt: Callable[[str], str] | None = None, block="latest" ): if w3.is_address(target): @@ -490,13 +490,13 @@ async def assemble(args) -> Embed: e.add_field(name="Smoothing Pool Balance", value=f"||{args.smoothie_amount}|| ETH") - if "reason" in args and args["reason"]: + if args.get("reason"): e.add_field(name="Likely Revert Reason", value=f"`{args.reason}`", inline=False) # show timestamp - if "time" in args.keys(): + if "time" in args: times = [args["time"]] else: times = [value for key, value in args.items() if "time" in key.lower()] diff --git a/rocketwatch/utils/etherscan.py b/rocketwatch/utils/etherscan.py index fc0645da..55c6497f 100644 --- a/rocketwatch/utils/etherscan.py +++ b/rocketwatch/utils/etherscan.py @@ -26,23 +26,21 @@ async def get_recent_account_transactions(address, block_count=44800): "startblock": 
lowest_block, "endblock" : highest_block}) - if not resp.status == 200: + if resp.status != 200: log.debug( - f"Error querying etherscan, unexpected HTTP {str(resp.status)}") + f"Error querying etherscan, unexpected HTTP {resp.status!s}") return parsed = await resp.json() - if "message" not in parsed or not parsed["message"].lower() == "ok": - error = parsed["message"] if "message" in parsed else "" - r = parsed["result"] if "result" in parsed else "" + if "message" not in parsed or parsed["message"].lower() != "ok": + error = parsed.get("message", "") + r = parsed.get("result", "") log.debug(f"Error querying {resp.url} - {error} - {r}") return def valid_tx(tx): - if not tx["to"] == address.lower(): + if tx["to"] != address.lower(): return False - if not int(tx["isError"]) == 0: - return False - return True + return int(tx["isError"]) == 0 return {result["hash"]: result for result in parsed["result"] if valid_tx(result)} diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index d142c9a8..2b4c0b07 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -1,7 +1,6 @@ from abc import abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta -from typing import Optional from discord.ext import commands from eth_typing import BlockNumber @@ -22,8 +21,8 @@ class Event: block_number: BlockNumber transaction_index: int = 999 event_index: int = 999 - image: Optional[Image] = None - thumbnail: Optional[Image] = None + image: Image | None = None + thumbnail: Image | None = None def get_score(self): return (10**9 * self.block_number) + (10**5 * self.transaction_index) + self.event_index @@ -34,8 +33,8 @@ def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot self.rate_limit = rate_limit self.lookback_distance: int = cfg.events.lookback_distance - self.last_served_block: Optional[int] = None - self._pending_block: Optional[int] = None + self.last_served_block: int | None = None + 
self._pending_block: int | None = None self._last_run = datetime.now() - rate_limit async def _ensure_genesis_block(self): diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 05ad4a76..9d6b9045 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Optional +from typing import Any from eth_typing import BlockNumber from web3.contract.contract import ContractEvent @@ -15,7 +15,7 @@ def get_logs( event: ContractEvent, from_block: BlockNumber, to_block: BlockNumber, - arg_filters: Optional[dict[str, Any]] = None + arg_filters: dict[str, Any] | None = None ) -> list[LogReceipt]: start_block = from_block end_block = to_block diff --git a/rocketwatch/utils/image.py b/rocketwatch/utils/image.py index 83d36b7d..eddc2282 100644 --- a/rocketwatch/utils/image.py +++ b/rocketwatch/utils/image.py @@ -1,8 +1,7 @@ import math -from enum import Enum +from enum import StrEnum from functools import cache from io import BytesIO -from typing import Optional from discord import File from PIL import Image as PillowImage @@ -23,11 +22,11 @@ def to_file(self, name: str) -> File: return File(buffer, name) -class Font(str, Enum): +class Font(StrEnum): INTER = "Inter" -class FontVariant(str, Enum): +class FontVariant(StrEnum): REGULAR = "Regular" BOLD = "Bold" @@ -72,8 +71,9 @@ def progress_bar( angle = 90 * (2 * math.acos(fill_perc) / math.pi) self.chord((x + width - 2 * radius, y, x + width, y + height), angle, 360 - angle, fill_color) + @staticmethod @cache - def _get_font(self, name: str, variant: FontVariant, size: float) -> ImageFont: + def _get_font(name: str, variant: FontVariant, size: float) -> ImageFont.FreeTypeFont: return ImageFont.truetype(f"fonts/{name}-{variant}.ttf", size) def dynamic_text( @@ -84,7 +84,7 @@ def dynamic_text( font_name: Font = Font.INTER, font_variant: FontVariant = FontVariant.REGULAR, color: Color = (255, 255, 255), - max_width: 
Optional[float] = None, + max_width: float | None = None, anchor: str = "lt" ) -> None: font = self._get_font(font_name, font_variant, font_size) diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 93d363b9..524d9d35 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -2,8 +2,8 @@ import math from abc import ABC, abstractmethod from collections import OrderedDict +from collections.abc import Callable from dataclasses import dataclass -from typing import Callable, Optional import aiohttp import numpy as np @@ -87,7 +87,7 @@ async def _get_order_book( asks = OrderedDict(sorted(self._get_asks(data).items())) return bids, asks - async def _get_liquidity(self, market: Market, session: aiohttp.ClientSession) -> Optional[Liquidity]: + async def _get_liquidity(self, market: Market, session: aiohttp.ClientSession) -> Liquidity | None: bids, asks = await self._get_order_book(market, session) if not (bids and asks): log.warning("Empty order book") @@ -361,10 +361,10 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"pair": f"{market.major}{market.minor}", "count": 500} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in list(api_response["result"].values())[0]["bids"]} + return {float(price): float(size) for price, size, _ in next(iter(api_response["result"].values()))["bids"]} def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in list(api_response["result"].values())[0]["asks"]} + return {float(price): float(size) for price, size, _ in next(iter(api_response["result"].values()))["asks"]} class Kucoin(CEX): @@ -617,7 +617,7 @@ async def get_normalized_price(self) -> float: pass @abstractmethod - async def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Liquidity | None: pass def __init__(self, pools: list[LiquidityPool]): @@ 
-654,7 +654,7 @@ async def get_price(self) -> float: async def get_normalized_price(self) -> float: return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) - async def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Liquidity | None: balance_0, balance_1 = (await self.vault.functions.getPoolTokens(self.id).call())[1] if (balance_0 == 0) or (balance_1 == 0): log.warning("Empty token balances") @@ -728,7 +728,7 @@ def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: async def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: results = await rp.multicall([self.contract.functions.ticks(tick) for tick in ticks]) - return dict(zip(ticks, [r[1] for r in results])) + return dict(zip(ticks, [r[1] for r in results], strict=False)) async def get_initialized_ticks(self, current_tick: int) -> list[int]: ticks = [] @@ -739,7 +739,7 @@ async def get_initialized_ticks(self, current_tick: int) -> list[int]: self.contract.functions.tickBitmap(word) for word in word_range ]) - for word, tick_bitmap in zip(word_range, bitmaps): + for word, tick_bitmap in zip(word_range, bitmaps, strict=False): if not tick_bitmap: continue @@ -769,7 +769,7 @@ async def get_price(self) -> float: async def get_normalized_price(self) -> float: return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) - async def get_liquidity(self) -> Optional[Liquidity]: + async def get_liquidity(self) -> Liquidity | None: price = await self.get_price() initial_liquidity = await self.contract.functions.liquidity().call() diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 3851e37d..c6290f02 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -27,18 +27,18 @@ def uptime(time, highres=False): days, time = time // units.days, time % units.days if days: - parts.append('%d day%s' % (days, 's' if days != 1 else '')) + parts.append(f'{days} day{"s" if days 
!= 1 else ""}') hours, time = time // units.hours, time % units.hours if hours: - parts.append('%d hour%s' % (hours, 's' if hours != 1 else '')) + parts.append(f'{hours} hour{"s" if hours != 1 else ""}') minutes, time = time // units.minutes, time % units.minutes if minutes: - parts.append('%d minute%s' % (minutes, 's' if minutes != 1 else '')) + parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}') if time or not parts: - parts.append('%.0f seconds' % time) + parts.append(f'{time:.0f} seconds') return " ".join(parts[:2] if not highres else parts) @@ -100,7 +100,7 @@ def render_branch(_data: dict[str, dict | int]) -> tuple[list, list, int]: max_right_len = max(len(v) for v in fmt_values) lines = [] - for s, v in zip(strings, fmt_values): + for s, v in zip(strings, fmt_values, strict=False): # right align all values lines.append(s.ljust(max_left_len) + v.rjust(max_right_len)) @@ -134,20 +134,20 @@ def render_branch(k, v, prefix, current_depth=0, max_depth=0, reverse=False, m_p def render_tree(data: dict, name: str, max_depth: int = 0) -> str: # remove empty states data = {k: v for k, v in data.items() if v} - lines, values, depths = map(list, zip(*list(reversed(render_branch(name, data, "", max_depth=max_depth, reverse=True))))) + lines, values, depths = map(list, zip(*list(reversed(render_branch(name, data, "", max_depth=max_depth, reverse=True))), strict=False)) max_right_len, max_left_len = [], [] # longest string offset per depth - max_left_len = max(max(len(s) for s, d in zip(lines, depths) if d == depth) for depth in set(depths)) + max_left_len = max(max(len(s) for s, d in zip(lines, depths, strict=False) if d == depth) for depth in set(depths)) # same for right - max_right_len = max(max(len(str(v)) for v, d in zip(values, depths) if d == depth) for depth in set(depths)) + max_right_len = max(max(len(str(v)) for v, d in zip(values, depths, strict=False) if d == depth) for depth in set(depths)) max_right_len += 2 COLORS = [Style.BRIGHT, Style.BRIGHT, 
Fore.RESET, Fore.BLACK, Fore.BLACK, Fore.BLACK] - for i, (v, d) in enumerate(zip(values, depths)): + for i, (v, d) in enumerate(zip(values, depths, strict=False)): _v = v _v = f"{COLORS[d]}{v}{Style.RESET_ALL}" lines[i] = f"{lines[i].ljust(max_left_len, ' ')}{' ' * (max_right_len - len(str(v)))}{_v}" # replace all spaces with non-breaking spaces - lines = [line.replace(" ", " ") for line in lines] + lines = [line.replace(" ", "\u00a0") for line in lines] return "\n".join(lines) diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 441a4f66..03daf3ff 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -1,4 +1,5 @@ -from typing import Any, Callable +from collections.abc import Callable +from typing import Any from retry_async.api import EXCEPTIONS from retry_async.api import retry as __retry @@ -9,7 +10,7 @@ def retry( *, tries: int = -1, delay: float = 0, - max_delay: float = None, + max_delay: float | None = None, backoff: float = 1 ) -> Callable[..., Any]: return __retry(exceptions, is_async=False, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) @@ -20,7 +21,7 @@ def retry_async( *, tries: int = -1, delay: float = 0, - max_delay: float = None, + max_delay: float | None = None, backoff: float = 1 ) -> Callable[..., Any]: return __retry(exceptions, is_async=True, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index eec028a8..6f85534c 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -109,7 +109,7 @@ def _normalize_calls(calls, default_require_success): async def multicall(self, calls, require_success=True) -> list: """Multicall accepting ContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) - encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags)] + encoded = [(fn.address, af, 
fn._encode_transaction_data()) for fn, af in zip(fns, flags, strict=False)] results = await self._multicall.functions.aggregate3(encoded).call() return [ RocketPool._decode_fn_output(fns[i], data) if success else None @@ -206,7 +206,7 @@ async def assemble_contract(self, name, address=None, historical=False, mainnet= abi_path = f"./contracts/{name}.abi.json" if os.path.exists(abi_path): - with open(abi_path, "r") as f: + with open(abi_path) as f: abi = f.read() else: abi = await self.get_abi_by_name(name) diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 7db2d9bd..933c9be0 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict +from typing import Any from web3 import AsyncWeb3 from web3.beacon import AsyncBeacon @@ -22,13 +22,13 @@ class Bacon(AsyncBeacon): - async def get_validators_by_ids(self, state_id: str, ids: list[int]) -> Dict[str, Any]: + async def get_validators_by_ids(self, state_id: str, ids: list[int]) -> dict[str, Any]: id_str = ','.join(map(str, ids)) return await self._async_make_get_request( f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" ) - async def get_sync_committee(self, epoch: int) -> Dict[str, Any]: + async def get_sync_committee(self, epoch: int) -> dict[str, Any]: return await self._async_make_get_request( f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" ) From 5bdcf20a77e5244a9eff41a697f67143af8573d8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 05:47:56 +0000 Subject: [PATCH 175/279] remove guilds field from config --- rocketwatch/utils/cfg.py | 1 - 1 file changed, 1 deletion(-) diff --git a/rocketwatch/utils/cfg.py b/rocketwatch/utils/cfg.py index bd4f4d52..3a85954b 100644 --- a/rocketwatch/utils/cfg.py +++ b/rocketwatch/utils/cfg.py @@ -11,7 +11,6 @@ class DiscordOwner(BaseModel): class DiscordConfig(BaseModel): secret: str owner: DiscordOwner - 
guilds: list[int] channels: dict[str, int] From 4ae03ac76e408016630b6b2197b2c9dbec4f984e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 10:41:56 +0000 Subject: [PATCH 176/279] add simple tests and new README --- .claude/settings.json | 8 + .github/workflows/test.yml | 17 + .gitignore | 2 - README.md | 168 +- pyproject.toml | 10 + rocketwatch/__main__.py | 7 +- rocketwatch/config.toml.sample | 5 - rocketwatch/plugins/about/about.py | 5 +- rocketwatch/plugins/activity/activity.py | 5 +- rocketwatch/plugins/apr/apr.py | 4 +- .../plugins/beacon_events/beacon_events.py | 5 +- .../plugins/chat_summary/chat_summary.py | 5 +- rocketwatch/plugins/collateral/collateral.py | 4 +- .../plugins/commissions/commissions.py | 4 +- rocketwatch/plugins/cow_orders/cow_orders.py | 4 +- rocketwatch/plugins/dao/dao.py | 4 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 5 +- rocketwatch/plugins/debug/debug.py | 5 +- .../delegate_contracts/delegate_contracts.py | 4 +- .../plugins/deposit_pool/deposit_pool.py | 5 +- .../plugins/detect_scam/detect_scam.py | 5 +- rocketwatch/plugins/event_core/event_core.py | 5 +- rocketwatch/plugins/events/events.py | 5 +- .../fee_distribution/fee_distribution.py | 4 +- rocketwatch/plugins/forum/forum.py | 4 +- rocketwatch/plugins/governance/governance.py | 5 +- rocketwatch/plugins/lottery/lottery.py | 4 +- rocketwatch/plugins/metrics/metrics.py | 4 +- rocketwatch/plugins/milestones/milestones.py | 4 +- .../minipool_distribution.py | 4 +- .../pinned_messages/pinned_messages.py | 5 +- rocketwatch/plugins/proposals/proposals.py | 5 +- rocketwatch/plugins/queue/queue.py | 4 +- rocketwatch/plugins/random/random.py | 5 +- rocketwatch/plugins/releases/releases.py | 4 +- rocketwatch/plugins/reloader/reloader.py | 2 +- rocketwatch/plugins/rewards/rewards.py | 4 +- rocketwatch/plugins/rocksolid/rocksolid.py | 4 +- rocketwatch/plugins/rpips/rpips.py | 4 +- rocketwatch/plugins/rpl/rpl.py | 4 +- 
.../plugins/scam_warning/scam_warning.py | 5 +- rocketwatch/plugins/snapshot/snapshot.py | 4 +- .../plugins/support_utils/support_utils.py | 5 +- .../plugins/transactions/transactions.py | 5 +- rocketwatch/plugins/tvl/tvl.py | 4 +- .../user_distribute/user_distribute.py | 5 +- .../validator_states/validator_states.py | 4 +- rocketwatch/plugins/wall/wall.py | 4 +- rocketwatch/rocketwatch.py | 5 +- rocketwatch/utils/block_time.py | 4 +- rocketwatch/utils/cached_ens.py | 4 +- rocketwatch/utils/command_tree.py | 5 +- rocketwatch/utils/{cfg.py => config.py} | 21 +- rocketwatch/utils/dao.py | 4 +- rocketwatch/utils/embeds.py | 5 +- rocketwatch/utils/etherscan.py | 5 +- rocketwatch/utils/event.py | 2 +- rocketwatch/utils/event_logs.py | 5 +- rocketwatch/utils/liquidity.py | 4 +- rocketwatch/utils/readable.py | 2 +- rocketwatch/utils/rocketpool.py | 5 +- rocketwatch/utils/shared_w3.py | 30 +- rocketwatch/utils/time_debug.py | 5 +- tests/__init__.py | 0 tests/conftest.py | 18 + tests/test_cfg.py | 149 + tests/test_readable.py | 96 + tests/test_solidity.py | 107 + uv.lock | 2658 +++++++++++++++++ 69 files changed, 3308 insertions(+), 223 deletions(-) create mode 100644 .claude/settings.json create mode 100644 .github/workflows/test.yml rename rocketwatch/utils/{cfg.py => config.py} (80%) create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_cfg.py create mode 100644 tests/test_readable.py create mode 100644 tests/test_solidity.py create mode 100644 uv.lock diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..fadf8157 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,8 @@ +{ + "permissions": { + "allow": [ + "Bash(python3 -m pip install pytest pytest-asyncio)", + "Bash(ruff check rocketwatch/ --fix)" + ] + } +} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..899f41d5 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 
+1,17 @@ +name: Test + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: astral-sh/setup-uv@v6 + - run: uv python install 3.14 + - run: uv pip install --system -r pyproject.toml --extra test + - run: pytest diff --git a/.gitignore b/.gitignore index a153c178..9100057f 100644 --- a/.gitignore +++ b/.gitignore @@ -21,7 +21,6 @@ sdist/ var/ wheels/ *.egg-info/ -.installed.cfg *.egg MANIFEST @@ -118,7 +117,6 @@ dmypy.json # state state.db -*/main.cfg */config.toml mongodb/ diff --git a/README.md b/README.md index d67a0443..b6995c1d 100644 --- a/README.md +++ b/README.md @@ -1,35 +1,137 @@ # Rocket Watch -A Discord bot that tracks Rocket Pool Events - -[![wakatime](https://wakatime.com/badge/github/InvisibleSymbol/rocketwatch.svg)](https://wakatime.com/badge/github/InvisibleSymbol/rocketwatch) - -- Ability to track Proposals (Description/Vote Count read from Contract) -- Ability to track oDAO Member Activity (uses Nicknames of oDAO Members if available) -- Ability to track Deposit Poll Activity -- Ability to track Minipool Activity (Provides Link to Validator if feasible) -- Supports ENS Addresses -- Automatically retrieves Addresses from Storage Contract at start-up. (Easy support for Upgrades) -- Supports dual-channel setup to separate oDAO Events from the rest. -- Deduplication-Logic (prevents duplicated Messages caused by Chain-Reorgs). -- Easy Extendability (Almost no hard-coded Events, most are loaded from a `.json` File) - -## Donate: -[0xinvis.eth](https://etherscan.io/address/0xf0138d2e4037957d7b37de312a16a88a7f83a32a) +A Discord bot that monitors and reports on [Rocket Pool](https://rocketpool.net) protocol activity across the Ethereum execution and consensus layers. + +## Features + +- **On-chain event tracking** — monitors Rocket Pool smart contract events (deposits, minipools, rewards, governance votes, etc.) 
and posts formatted embeds to Discord +- **Beacon chain integration** — tracks validator proposals, sync committees, and consensus layer activity +- **Governance monitoring** — follows on-chain DAO votes (pDAO, oDAO, Security Council) and Snapshot proposals +- **Data visualization** — generates APR charts, collateral distributions, fee breakdowns, and TVL calculations using matplotlib +- **ENS resolution** — resolves and caches ENS names for readable address display +- **Multi-channel support** — split event tracking and status messages across multiple channels +- **Deduplication** — prevents duplicate messages caused by chain reorgs or bot restarts +- **Dynamic contract loading** — retrieves contract addresses from the Rocket Pool storage contract at startup, automatically supporting protocol upgrades +- **Plugin system** — 40+ plugins that can be individually enabled or disabled + +## Architecture + +``` +rocketwatch/ +├── __main__.py # Entry point +├── rocketwatch.py # Bot class, plugin loader, error handling +├── config.toml.sample # Configuration template +├── Dockerfile +├── plugins/ # 40+ plugin modules +│ ├── event_core/ # Main event tracking logic +│ ├── dao/ # On-chain governance +│ ├── snapshot/ # Off-chain governance +│ ├── apr/ # APR calculations & charts +│ ├── rewards/ # Reward estimation +│ ├── tvl/ # Total Value Locked +│ ├── proposals/ # Block proposals +│ └── ... +└── utils/ + ├── config.py # Pydantic config models + ├── rocketpool.py # Contract interface with caching + ├── shared_w3.py # Web3 client instances + ├── embeds.py # Discord embed formatting + ├── solidity.py # Unit conversions + ├── readable.py # Human-readable formatting + └── ... 
+``` + +## Prerequisites + +- Python 3.14+ +- MongoDB 8.x +- Ethereum execution and consensus layer RPC endpoints +- Discord bot token + +## Setup + +### Configuration + +Copy the sample config and fill in your values: + +```sh +cp rocketwatch/config.toml.sample rocketwatch/config.toml +``` + +Key configuration sections: + +| Section | Purpose | +|---|---| +| `discord` | Bot token, owner/server IDs, channel mappings | +| `execution_layer` | RPC endpoints (current, mainnet, archive) and Etherscan API key | +| `consensus_layer` | Beacon API endpoint and beaconcha.in API key | +| `mongodb` | Database connection URI | +| `rocketpool` | Chain, contract addresses, DAO multisigs, support settings | +| `modules` | Plugin include/exclude lists | +| `events` | Event tracking setup | + +### Docker (recommended) + +```sh +docker compose up -d +``` + +This starts the bot, MongoDB, and [Watchtower](https://containrrr.dev/watchtower/) for automatic updates. + +### Manual + +```sh +# Install uv (https://docs.astral.sh/uv/) +uv python install 3.14 +uv pip install --python 3.14 -r pyproject.toml +cd rocketwatch +uv run python . +``` + +## Development + +### Linting + +```sh +uv run ruff check rocketwatch/ +``` + +Configured rules: `B` (bugbear), `E` (pycodestyle), `F` (pyflakes), `I` (isort), `RUF`, `SIM`, `UP` (pyupgrade), `W` (warnings). + +### Testing + +```sh +uv pip install -r pyproject.toml --extra test +uv run pytest +``` + +### Plugin structure + +Each plugin lives in `rocketwatch/plugins//` and follows this pattern: + +```python +from discord.ext import commands + +class MyPlugin(commands.Cog): + def __init__(self, bot): + self.bot = bot + + # slash commands, event listeners, background tasks, etc. + +async def setup(bot): + await bot.add_cog(MyPlugin(bot)) +``` + +Plugins that track on-chain events extend `EventPlugin` from `utils/event.py`. Plugins can be selectively loaded via the `modules.include` / `modules.exclude` config fields. 
+ +## CI/CD + +| Workflow | Trigger | Purpose | +|---|---|---| +| [Lint](.github/workflows/lint.yml) | Push & PR to main | Ruff linting | +| [Test](.github/workflows/test.yml) | Push & PR to main | pytest suite | +| [Docker CI](.github/workflows/docker-ci.yml) | Push to main | Build & push image to DockerHub | + +## License + +[GNU General Public License v3](LICENSE) diff --git a/pyproject.toml b/pyproject.toml index 5550b2e3..f27b2d50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,16 @@ dependencies = [ "web3>=7.0.0,<8.0.0", ] +[project.optional-dependencies] +test = [ + "pytest>=8.0", + "pytest-asyncio>=1.0", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] + [tool.ruff] target-version = "py312" diff --git a/rocketwatch/__main__.py b/rocketwatch/__main__.py index 529093a0..b1c1d9f6 100644 --- a/rocketwatch/__main__.py +++ b/rocketwatch/__main__.py @@ -3,14 +3,13 @@ from discord import Intents from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg logging.basicConfig(format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s") logging.getLogger().setLevel("INFO") -logging.getLogger("discord.client").setLevel(cfg.log_level) +logging.getLogger("rocketwatch").setLevel(cfg.log_level) -log = logging.getLogger("discord_bot") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.main") def main() -> None: diff --git a/rocketwatch/config.toml.sample b/rocketwatch/config.toml.sample index 5980e780..d8a2800b 100644 --- a/rocketwatch/config.toml.sample +++ b/rocketwatch/config.toml.sample @@ -79,11 +79,6 @@ plugin = "DepositPool" cooldown = 60 fields = [] -[events.status_message.dao] -plugin = "Governance" -cooldown = 300 -fields = [] - [other] mev_hashes = [] diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 0e932f2c..cb4bef59 100644 --- a/rocketwatch/plugins/about/about.py +++ 
b/rocketwatch/plugins/about/about.py @@ -12,15 +12,14 @@ from rocketwatch import RocketWatch from utils import readable -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed, el_explorer_url from utils.visibility import is_hidden_weak psutil.getloadavg() BOOT_TIME = time.time() -log = logging.getLogger("about") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.about") class About(commands.Cog): diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 68eaf18c..3d0e7c38 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -5,10 +5,9 @@ from discord.ext import commands, tasks from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg -log = logging.getLogger("rich_activity") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.rich_activity") class RichActivity(commands.Cog): diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index db68255d..aa6d230b 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -11,14 +11,12 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import w3, w3_archive from utils.visibility import is_hidden_weak -log = logging.getLogger("apr") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.apr") def to_apr(d1, d2, effective=True): diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 054d1ad3..3b5f2ce8 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -9,7 +9,7 @@ from rocketwatch import RocketWatch from utils import solidity from utils.block_time import ts_to_block -from utils.cfg import cfg +from utils.config 
import cfg from utils.embeds import assemble, prepare_args from utils.event import Event, EventPlugin from utils.readable import cl_explorer_url @@ -18,8 +18,7 @@ from utils.shared_w3 import bacon, w3 from utils.solidity import beacon_block_to_date, date_to_beacon_block -log = logging.getLogger("beacon_events") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.beacon_events") class BeaconEvents(EventPlugin): diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index 70ffb752..b7659ad4 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -13,11 +13,10 @@ from discord.ext.commands import is_owner from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("chat_summary") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.chat_summary") class ChatSummary(commands.Cog): diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 98e9a21a..de4adf59 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -16,13 +16,11 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import Embed, resolve_ens from utils.rocketpool import rp from utils.visibility import is_hidden_weak -log = logging.getLogger("collateral") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.collateral") p = inflect.engine() diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index f5f5c053..198bb2a6 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -9,12 +9,10 @@ from matplotlib import pyplot as plt from rocketwatch import RocketWatch -from utils.cfg import cfg 
from utils.embeds import Embed from utils.visibility import is_hidden -log = logging.getLogger("commissions") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.commissions") class Commissions(commands.Cog): diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index b5e3efa8..7429daba 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -9,15 +9,13 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import Embed, assemble, prepare_args from utils.event import Event, EventPlugin from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.visibility import is_hidden_weak -log = logging.getLogger("cow_orders") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.cow_orders") class CowOrders(EventPlugin): diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 30121be2..7aaf01ad 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -12,7 +12,6 @@ from rocketwatch import RocketWatch from utils import solidity from utils.block_time import ts_to_block -from utils.cfg import cfg from utils.dao import DefaultDAO, OracleDAO, ProtocolDAO, SecurityCouncil from utils.embeds import Embed, el_explorer_url from utils.event_logs import get_logs @@ -20,8 +19,7 @@ from utils.views import PageView from utils.visibility import is_hidden_weak -log = logging.getLogger("dao") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.dao") class OnchainDAO(Cog): diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 3b3457da..5490d050 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -16,14 +16,13 @@ from rocketwatch import RocketWatch from utils import solidity from 
utils.block_time import ts_to_block -from utils.cfg import cfg +from utils.config import cfg from utils.event_logs import get_logs from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 from utils.time_debug import timerun, timerun_async -log = logging.getLogger("db_upkeep_task") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.db_upkeep_task") def is_true(v) -> bool: diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 31e6c59d..3b174a3b 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -15,15 +15,14 @@ from rocketwatch import RocketWatch from utils import solidity from utils.block_time import block_to_ts, ts_to_block -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed, el_explorer_url from utils.readable import prettify_json_string from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.visibility import is_hidden, is_hidden_role_controlled, is_hidden_weak -log = logging.getLogger("debug") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.debug") class Debug(Cog): diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index 8866e492..0c87baa7 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -6,14 +6,12 @@ from pymongo.asynchronous.collection import AsyncCollection from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.readable import s_hex from utils.rocketpool import rp from utils.shared_w3 import w3 -log = logging.getLogger("delegate_contracts") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.delegate_contracts") class DelegateContracts(commands.Cog): diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py 
b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 97fcfb11..a6a9bc74 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -6,14 +6,13 @@ from plugins.queue.queue import Queue from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed from utils.rocketpool import rp from utils.status import StatusPlugin from utils.visibility import is_hidden_weak -log = logging.getLogger("deposit_pool") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.deposit_pool") class DepositPool(StatusPlugin): diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/detect_scam/detect_scam.py index 21add9d9..0b55acd8 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/detect_scam/detect_scam.py @@ -32,11 +32,10 @@ from discord.ext.commands import Cog from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("detect_scam") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.detect_scam") class DetectScam(Cog): diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 037e6382..d5db9590 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -15,14 +15,13 @@ from plugins.support_utils.support_utils import generate_template_embed from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed, assemble from utils.event import EventPlugin from utils.shared_w3 import w3 from utils.status import StatusPlugin -log = logging.getLogger("event_core") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.event_core") class EventCore(commands.Cog): diff --git 
a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index f3985639..18a13ace 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -17,7 +17,7 @@ from rocketwatch import RocketWatch from utils import solidity from utils.block_time import block_to_ts -from utils.cfg import cfg +from utils.config import cfg from utils.dao import DefaultDAO, ProtocolDAO from utils.embeds import Embed, assemble, el_explorer_url, prepare_args from utils.event import Event, EventPlugin @@ -25,8 +25,7 @@ from utils.shared_w3 import bacon, w3 from utils.solidity import SUBMISSION_KEYS -log = logging.getLogger("events") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.events") PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Coroutine[None, None, list[LogReceipt | EventData]]] diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index c121f6a4..44399e58 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -8,13 +8,11 @@ from matplotlib import pyplot as plt from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.readable import render_tree_legacy from utils.visibility import is_hidden_weak -log = logging.getLogger("fee_distribution") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.fee_distribution") class FeeDistribution(commands.Cog): diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 19514007..d7a509c2 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -9,13 +9,11 @@ from discord.ext import commands from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.retry import retry_async from utils.visibility import is_hidden_weak -log = 
logging.getLogger("forum") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.forum") class Forum(commands.Cog): diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 7a046f97..5c014deb 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -11,14 +11,13 @@ from plugins.rpips.rpips import RPIPs from plugins.snapshot.snapshot import Snapshot from utils.block_time import ts_to_block -from utils.cfg import cfg +from utils.config import cfg from utils.dao import DAO, DefaultDAO, OracleDAO, ProtocolDAO, SecurityCouncil from utils.embeds import Embed from utils.status import StatusPlugin from utils.visibility import is_hidden_weak -log = logging.getLogger("governance") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.governance") class Governance(StatusPlugin): diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index d4cc1aa5..39e9a60d 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -6,15 +6,13 @@ from pymongo import InsertOne from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.shared_w3 import bacon from utils.solidity import BEACON_EPOCH_LENGTH, BEACON_START_DATE from utils.time_debug import timerun_async from utils.visibility import is_hidden -log = logging.getLogger("lottery") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.lottery") class Lottery(commands.Cog): diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 820cd52c..8315cdf5 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -9,12 +9,10 @@ from matplotlib import pyplot as plt from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.visibility 
import is_hidden -log = logging.getLogger("metrics") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.metrics") class Metrics(commands.Cog): diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 021fc74e..ad273db2 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -5,13 +5,11 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import assemble from utils.event import Event, EventPlugin from utils.rocketpool import rp -log = logging.getLogger("milestones") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.milestones") class Milestones(EventPlugin): diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 9268c21e..b269635f 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -10,12 +10,10 @@ from discord.ext import commands from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.visibility import is_hidden -log = logging.getLogger("minipool_distribution") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.minipool_distribution") p = inflect.engine() diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 49507bff..3857c932 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -7,11 +7,10 @@ from discord.ext.commands import is_owner from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("rich_activity") -log.setLevel(cfg.log_level) +log = 
logging.getLogger("rocketwatch.rich_activity") class PinnedMessages(commands.Cog): diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index b5bf4383..57d6c0b7 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -16,7 +16,7 @@ from pymongo import ASCENDING, DESCENDING from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed from utils.shared_w3 import bacon from utils.solidity import beacon_block_to_date, date_to_beacon_block @@ -24,8 +24,7 @@ from utils.visibility import is_hidden_weak cog_id = "proposals" -log = logging.getLogger(cog_id) -log.setLevel(cfg.log_level) +log = logging.getLogger(f"rocketwatch.{cog_id}") LOOKUP = { "consensus": { diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 1e1b9fb3..cdea691d 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -8,15 +8,13 @@ from eth_typing import BlockIdentifier, ChecksumAddress from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import el_explorer_url from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.views import PageView from utils.visibility import is_hidden_weak -log = logging.getLogger("queue") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.queue") class Queue(Cog): diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 2785a3be..6c449682 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -12,7 +12,7 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed, el_explorer_url, ens from utils.readable import s_hex, uptime from utils.rocketpool import rp @@ -24,8 +24,7 @@ from utils.shared_w3 import bacon, w3 from 
utils.visibility import is_hidden, is_hidden_weak -log = logging.getLogger("random") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.random") class Random(commands.Cog): diff --git a/rocketwatch/plugins/releases/releases.py b/rocketwatch/plugins/releases/releases.py index 22b99ad4..8318a710 100644 --- a/rocketwatch/plugins/releases/releases.py +++ b/rocketwatch/plugins/releases/releases.py @@ -6,12 +6,10 @@ from discord.ext import commands from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.visibility import is_hidden -log = logging.getLogger("releases") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.releases") class Releases(commands.Cog): diff --git a/rocketwatch/plugins/reloader/reloader.py b/rocketwatch/plugins/reloader/reloader.py index 8605775a..5f102d91 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -11,7 +11,7 @@ ) from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg class Reloader(Cog): diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 7409bde8..8981f60c 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -12,13 +12,11 @@ from rocketwatch import RocketWatch from utils import solidity from utils.block_time import ts_to_block -from utils.cfg import cfg from utils.embeds import Embed, resolve_ens from utils.retry import retry_async from utils.rocketpool import rp -log = logging.getLogger("rewards") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.rewards") class Rewards(commands.Cog): diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 101b2e4e..4266d382 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -12,7 +12,6 @@ from rocketwatch import 
RocketWatch from utils import solidity from utils.block_time import block_to_ts, ts_to_block -from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.event_logs import get_logs from utils.rocketpool import rp @@ -20,8 +19,7 @@ from utils.visibility import is_hidden_weak cog_id = "rocksolid" -log = logging.getLogger(cog_id) -log.setLevel(cfg.log_level) +log = logging.getLogger(f"rocketwatch.{cog_id}") class RockSolid(Cog): diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index d4b08ffc..796df2de 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -8,12 +8,10 @@ from discord.ext.commands import Cog from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.retry import retry_async -log = logging.getLogger("rpips") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.rpips") class RPIPs(Cog): diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 0c9fa483..5e49cca6 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -8,13 +8,11 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import Embed from utils.rocketpool import rp from utils.visibility import is_hidden_weak -log = logging.getLogger("rpl") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.rpl") class RPL(commands.Cog): diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index 38f0d65d..b659dde5 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -5,11 +5,10 @@ from discord.ext import commands from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("scam_warning") -log.setLevel(cfg.log_level) +log 
= logging.getLogger("rocketwatch.scam_warning") class ScamWarning(commands.Cog): diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 371d507b..060e339a 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -16,7 +16,6 @@ from rocketwatch import RocketWatch from utils.block_time import ts_to_block -from utils.cfg import cfg from utils.embeds import Embed, el_explorer_url from utils.event import Event, EventPlugin from utils.image import Color, FontVariant, Image, ImageCanvas @@ -25,8 +24,7 @@ from utils.rocketpool import rp from utils.visibility import is_hidden_weak -log = logging.getLogger("snapshot") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.snapshot") class Snapshot(EventPlugin): diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 6d20880a..dd96d4fe 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -8,11 +8,10 @@ from discord.ext.commands import Cog, GroupCog from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("support_utils") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.support_utils") async def generate_template_embed(db, template_name: str): diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index aa0b9c26..12665511 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -11,15 +11,14 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg +from utils.config import cfg from utils.dao import DefaultDAO, ProtocolDAO from utils.embeds import Embed, assemble, el_explorer_url, prepare_args from utils.event import Event, 
EventPlugin from utils.rocketpool import rp from utils.shared_w3 import w3 -log = logging.getLogger("transactions") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.transactions") class Transactions(EventPlugin): diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index db96612e..f8d1285e 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -8,15 +8,13 @@ from rocketwatch import RocketWatch from utils import solidity -from utils.cfg import cfg from utils.embeds import Embed from utils.readable import render_tree from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.visibility import is_hidden -log = logging.getLogger("tvl") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.tvl") def split_rewards_logic(balance, node_share, commission, force_base=False): diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index aa03f45a..2491fbee 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -10,14 +10,13 @@ from pymongo import ASCENDING from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 from utils.visibility import is_hidden_weak -log = logging.getLogger("user_distribute") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.user_distribute") class InstructionsView(ui.View): diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index c39303f3..ef2162d5 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -5,14 +5,12 @@ from discord.ext import commands from rocketwatch import RocketWatch -from utils.cfg 
import cfg from utils.embeds import Embed, el_explorer_url from utils.readable import render_tree_legacy from utils.shared_w3 import w3 from utils.visibility import is_hidden_weak -log = logging.getLogger("validator_states") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.validator_states") _BEACON_PENDING = {"in_queue": "unassigned", "prestaked": "prestaked", "staking": "staked"} diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index e26fc394..12ba2162 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -15,7 +15,6 @@ from matplotlib import pyplot as plt from rocketwatch import RocketWatch -from utils.cfg import cfg from utils.embeds import Embed from utils.liquidity import ( CEX, @@ -47,8 +46,7 @@ from utils.time_debug import timerun, timerun_async from utils.visibility import is_hidden_weak -log = logging.getLogger("wall") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.wall") class Wall(commands.Cog): diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index a9dd4935..c5b89867 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -17,13 +17,12 @@ from discord.ext.commands import Bot from pymongo import AsyncMongoClient -from utils.cfg import cfg from utils.command_tree import RWCommandTree +from utils.config import cfg from utils.retry import retry_async from utils.rocketpool import rp -log = logging.getLogger("rocketwatch") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.bot") class RocketWatch(Bot): diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index bda7c260..23dc38e7 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -3,11 +3,9 @@ from aiocache import cached -from utils.cfg import cfg from utils.shared_w3 import w3 -log = logging.getLogger("block_time") -log.setLevel(cfg.log_level) +log = 
logging.getLogger("rocketwatch.block_time") @cached() diff --git a/rocketwatch/utils/cached_ens.py b/rocketwatch/utils/cached_ens.py index 2c093faa..f55d5aa4 100644 --- a/rocketwatch/utils/cached_ens.py +++ b/rocketwatch/utils/cached_ens.py @@ -4,11 +4,9 @@ from ens import AsyncENS from eth_typing import ChecksumAddress -from utils.cfg import cfg from utils.shared_w3 import w3_mainnet -log = logging.getLogger("cached_ens") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.cached_ens") class CachedEns: diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index 7406bad2..2d45f631 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -4,10 +4,7 @@ from discord import Interaction from discord.app_commands import AppCommandError, CommandTree -from utils.cfg import cfg - -log = logging.getLogger("command_tree") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.command_tree") class RWCommandTree(CommandTree): diff --git a/rocketwatch/utils/cfg.py b/rocketwatch/utils/config.py similarity index 80% rename from rocketwatch/utils/cfg.py rename to rocketwatch/utils/config.py index 3a85954b..7c297776 100644 --- a/rocketwatch/utils/cfg.py +++ b/rocketwatch/utils/config.py @@ -98,10 +98,21 @@ class Config(BaseModel): other: OtherConfig = OtherConfig() -def load_config(path: str = "config.toml") -> Config: - with open(path, "rb") as f: - data = tomllib.load(f) - return Config(**data) +class _ConfigProxy: + _instance: Config | None = None + def __init__(self, path: str = "config.toml") -> None: + self.__path = path -cfg = load_config() + def __load_config(self) -> None: + with open(self.__path, "rb") as f: + data = tomllib.load(f) + cfg._instance = Config(**data) + + def __getattr__(self, name: str): + if self._instance is None: + self.__load_config() + return getattr(self._instance, name) + + +cfg = _ConfigProxy() diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py 
index 1dc57a34..80dfa08a 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -9,11 +9,9 @@ from eth_typing import ChecksumAddress from utils import solidity -from utils.cfg import cfg from utils.rocketpool import rp -log = logging.getLogger("dao") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.dao") class DAO(ABC): diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 89ad40c1..cb49bba1 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -17,7 +17,7 @@ from utils import solidity from utils.block_time import block_to_ts from utils.cached_ens import CachedEns -from utils.cfg import cfg +from utils.config import cfg from utils.readable import advanced_tnx_url, cl_explorer_url, s_hex from utils.retry import retry_async from utils.rocketpool import rp @@ -26,8 +26,7 @@ ens = CachedEns() -log = logging.getLogger("embeds") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.embeds") class Embed(discord.Embed): diff --git a/rocketwatch/utils/etherscan.py b/rocketwatch/utils/etherscan.py index 55c6497f..7749cfb5 100644 --- a/rocketwatch/utils/etherscan.py +++ b/rocketwatch/utils/etherscan.py @@ -2,11 +2,10 @@ import aiohttp -from utils.cfg import cfg +from utils.config import cfg from utils.shared_w3 import w3 -log = logging.getLogger("etherscan") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.etherscan") async def get_recent_account_transactions(address, block_count=44800): diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 2b4c0b07..2b4434b9 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -6,7 +6,7 @@ from eth_typing import BlockNumber from rocketwatch import RocketWatch -from utils.cfg import cfg +from utils.config import cfg from utils.embeds import Embed from utils.image import Image from utils.shared_w3 import w3 diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py 
index 9d6b9045..df763e4b 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -5,10 +5,7 @@ from web3.contract.contract import ContractEvent from web3.types import LogReceipt -from utils.cfg import cfg - -log = logging.getLogger("event_logs") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.event_logs") def get_logs( diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 524d9d35..744c1c73 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -9,13 +9,11 @@ import numpy as np from eth_typing import ChecksumAddress, HexStr -from utils.cfg import cfg from utils.retry import retry_async from utils.rocketpool import rp from utils.shared_w3 import w3 -log = logging.getLogger("liquidity") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.liquidity") class Liquidity: diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index c6290f02..2a72e9f1 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -6,7 +6,7 @@ from colorama import Fore, Style import utils.solidity as units -from utils.cfg import cfg +from utils.config import cfg from utils.shared_w3 import bacon diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 6f85534c..fa56d62c 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -10,12 +10,11 @@ from web3.exceptions import ContractLogicError from utils import solidity -from utils.cfg import cfg +from utils.config import cfg from utils.readable import decode_abi from utils.shared_w3 import w3, w3_archive, w3_mainnet -log = logging.getLogger("rocketpool") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.rocketpool") class NoAddressFound(Exception): diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 933c9be0..8ecd9e5e 100644 --- a/rocketwatch/utils/shared_w3.py +++ 
b/rocketwatch/utils/shared_w3.py @@ -1,24 +1,10 @@ -import logging from typing import Any from web3 import AsyncWeb3 from web3.beacon import AsyncBeacon from web3.providers import AsyncHTTPProvider -from utils.cfg import cfg - -log = logging.getLogger("shared_w3") -log.setLevel(cfg.log_level) - -w3 = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.current, request_kwargs={'timeout': 60})) -w3_mainnet = w3 - -if cfg.rocketpool.chain != "mainnet": - w3_mainnet = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.mainnet)) - -w3_archive = None -if cfg.execution_layer.endpoint.archive is not None: - w3_archive = AsyncWeb3(AsyncHTTPProvider(cfg.execution_layer.endpoint.archive)) +from utils.config import cfg class Bacon(AsyncBeacon): @@ -33,5 +19,19 @@ async def get_sync_committee(self, epoch: int) -> dict[str, Any]: f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" ) +def _get_web3(endpoint: str): + provider = AsyncHTTPProvider(endpoint, request_kwargs={'timeout': 60}) + return AsyncWeb3(provider) + + +w3 = _get_web3(cfg.execution_layer.endpoint.current) +w3_mainnet = w3 +w3_archive = w3 + +if cfg.rocketpool.chain.lower() != "mainnet": + w3_mainnet = _get_web3(cfg.execution_layer.endpoint.mainnet) + +if cfg.execution_layer.endpoint.archive is not None: + w3_archive =_get_web3(cfg.execution_layer.endpoint.archive) bacon = Bacon(cfg.consensus_layer.endpoint) diff --git a/rocketwatch/utils/time_debug.py b/rocketwatch/utils/time_debug.py index c6666679..3e37aea2 100644 --- a/rocketwatch/utils/time_debug.py +++ b/rocketwatch/utils/time_debug.py @@ -2,10 +2,7 @@ import logging import time -from utils.cfg import cfg - -log = logging.getLogger("time_debug") -log.setLevel(cfg.log_level) +log = logging.getLogger("rocketwatch.time_debug") def timerun(func): diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..1950bf11 --- 
/dev/null +++ b/tests/conftest.py @@ -0,0 +1,18 @@ +import sys +from pathlib import Path +from types import ModuleType +from unittest.mock import MagicMock + +# Add rocketwatch source to path +sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "rocketwatch")) + +# Stub out shared_w3 which connects to RPC endpoints at import time. +_shared_w3_stub = ModuleType("utils.shared_w3") +_shared_w3_stub.w3 = MagicMock() +_shared_w3_stub.w3_mainnet = MagicMock() +_shared_w3_stub.w3_archive = MagicMock() +_shared_w3_stub.bacon = MagicMock() +sys.modules["utils.shared_w3"] = _shared_w3_stub + +# With the lazy proxy in utils.config, cfg is importable without loading a file. +# No stubbing needed — tests that need a real Config can set cfg._instance directly. diff --git a/tests/test_cfg.py b/tests/test_cfg.py new file mode 100644 index 00000000..e71b73eb --- /dev/null +++ b/tests/test_cfg.py @@ -0,0 +1,149 @@ +import tomllib +from pathlib import Path + +import pytest + +from utils.config import ( + Config, + ConsensusLayerConfig, + DiscordConfig, + DiscordOwner, + DmWarningConfig, + EventsConfig, + ExecutionLayerConfig, + ExecutionLayerEndpoint, + ModulesConfig, + MongoDBConfig, + OtherConfig, + RocketPoolConfig, + RocketPoolSupport, + SecretsConfig, + StatusMessageConfig, +) + + +def _minimal_config(**overrides) -> Config: + defaults = { + "discord": DiscordConfig( + secret="test-secret", + owner=DiscordOwner(user_id=1, server_id=2), + channels={"default": 100}, + ), + "execution_layer": ExecutionLayerConfig( + explorer="https://etherscan.io", + endpoint=ExecutionLayerEndpoint(current="http://localhost:8545", mainnet="http://localhost:8545"), + etherscan_secret="test", + ), + "consensus_layer": ConsensusLayerConfig( + explorer="https://beaconcha.in", + endpoint="http://localhost:5052", + beaconcha_secret="test", + ), + "mongodb": MongoDBConfig(uri="mongodb://localhost:27017"), + "rocketpool": RocketPoolConfig( + manual_addresses={"rocketStorage": "0x1234"}, + 
dao_multisigs=["0xabcd"], + support=RocketPoolSupport(user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5), + dm_warning=DmWarningConfig(channels=[100]), + ), + "events": EventsConfig(lookback_distance=100, genesis=0, block_batch_size=50), + } + defaults.update(overrides) + return Config(**defaults) + + +class TestConfigConstruction: + def test_minimal_config(self): + cfg = _minimal_config() + assert cfg.discord.secret == "test-secret" + assert cfg.log_level == "DEBUG" + + def test_defaults(self): + cfg = _minimal_config() + assert cfg.modules == ModulesConfig() + assert cfg.modules.include == [] + assert cfg.modules.exclude == [] + assert cfg.modules.enable_commands is True + assert cfg.other == OtherConfig() + assert cfg.other.secrets.wakatime == "" + assert cfg.rocketpool.chain == "mainnet" + + def test_override_defaults(self): + cfg = _minimal_config(log_level="INFO") + assert cfg.log_level == "INFO" + + def test_archive_endpoint_optional(self): + cfg = _minimal_config() + assert cfg.execution_layer.endpoint.archive is None + + def test_archive_endpoint_set(self): + cfg = _minimal_config( + execution_layer=ExecutionLayerConfig( + explorer="https://etherscan.io", + endpoint=ExecutionLayerEndpoint( + current="http://localhost:8545", + mainnet="http://localhost:8545", + archive="http://localhost:8546", + ), + etherscan_secret="test", + ) + ) + assert cfg.execution_layer.endpoint.archive == "http://localhost:8546" + + +class TestConfigValidation: + def test_missing_required_field(self): + with pytest.raises(Exception): + Config(discord=DiscordConfig( + secret="test", + owner=DiscordOwner(user_id=1, server_id=2), + channels={}, + )) + + def test_wrong_type_user_id(self): + with pytest.raises(Exception): + DiscordOwner(user_id="not_an_int", server_id=2) + + def test_int_coercion(self): + owner = DiscordOwner(user_id="123", server_id="456") + assert owner.user_id == 123 + assert owner.server_id == 456 + + +class TestStatusMessageConfig: + def 
test_basic(self): + smc = StatusMessageConfig(plugin="test_plugin", cooldown=60) + assert smc.plugin == "test_plugin" + assert smc.cooldown == 60 + assert smc.fields == [] + + def test_with_fields(self): + smc = StatusMessageConfig( + plugin="test_plugin", + cooldown=30, + fields=[{"name": "field1", "value": "val1"}], + ) + assert len(smc.fields) == 1 + + +class TestSecretsConfig: + def test_all_default_empty(self): + s = SecretsConfig() + assert s.wakatime == "" + assert s.cronitor == "" + assert s.anthropic == "" + + def test_partial_override(self): + s = SecretsConfig(wakatime="my-key") + assert s.wakatime == "my-key" + assert s.cronitor == "" + + +class TestSampleConfig: + def test_sample_config_validates(self): + sample_path = Path(__file__).resolve().parent.parent / "rocketwatch" / "config.toml.sample" + with open(sample_path, "rb") as f: + data = tomllib.load(f) + cfg = Config(**data) + assert cfg.log_level == "INFO" + assert cfg.rocketpool.chain diff --git a/tests/test_readable.py b/tests/test_readable.py new file mode 100644 index 00000000..bc3472ca --- /dev/null +++ b/tests/test_readable.py @@ -0,0 +1,96 @@ +import base64 +import zlib + +from utils.readable import ( + decode_abi, + prettify_json_string, + render_tree_legacy, + s_hex, + uptime, +) + + +class TestUptime: + def test_zero_seconds(self): + assert uptime(0) == "0 seconds" + + def test_seconds_only(self): + assert uptime(45) == "45 seconds" + + def test_one_minute(self): + assert uptime(60) == "1 minute" + + def test_minutes_and_seconds(self): + assert uptime(90) == "1 minute 30 seconds" + + def test_one_hour(self): + assert uptime(3600) == "1 hour" + + def test_hours_and_minutes(self): + assert uptime(3660) == "1 hour 1 minute" + + def test_one_day(self): + assert uptime(86400) == "1 day" + + def test_plural_days(self): + assert uptime(2 * 86400) == "2 days" + + def test_lowres_truncates_to_two(self): + # 1 day, 2 hours, 3 minutes, 4 seconds -> only "1 day 2 hours" + t = 86400 + 7200 + 180 + 4 
+ assert uptime(t) == "1 day 2 hours" + + def test_highres_shows_all(self): + t = 86400 + 7200 + 180 + 4 + result = uptime(t, highres=True) + assert "1 day" in result + assert "2 hours" in result + assert "3 minutes" in result + assert "4 seconds" in result + + +class TestPrettifyJsonString: + def test_basic(self): + result = prettify_json_string('{"a":1,"b":2}') + assert '"a": 1' in result + assert '"b": 2' in result + assert "\n" in result + + +class TestDecodeAbi: + def test_roundtrip(self): + original = '[{"type":"function","name":"test"}]' + compressed = base64.b64encode(zlib.compress(original.encode("ascii"), wbits=15)) + assert decode_abi(compressed) == original + + +class TestSHex: + def test_truncates_to_10(self): + assert s_hex("0x1234567890abcdef") == "0x12345678" + + def test_short_string(self): + assert s_hex("0x12") == "0x12" + + +class TestRenderTreeLegacy: + def test_flat_tree(self): + data = {"active": 10, "inactive": 5} + result = render_tree_legacy(data, "Minipools") + assert "Minipools:" in result + assert "15" in result # total + assert "10" in result + assert "5" in result + + def test_nested_tree(self): + data = { + "staking": {"8 ETH": 100, "16 ETH": 50}, + "dissolved": 3, + } + result = render_tree_legacy(data, "Minipools") + assert "Minipools:" in result + assert "153" in result # total + + def test_empty_branches_filtered(self): + data = {"active": 10, "empty": 0} + result = render_tree_legacy(data, "Test") + assert "Empty" not in result diff --git a/tests/test_solidity.py b/tests/test_solidity.py new file mode 100644 index 00000000..bdd09fa3 --- /dev/null +++ b/tests/test_solidity.py @@ -0,0 +1,107 @@ +from utils import solidity +from utils.solidity import ( + BEACON_START_DATE, + beacon_block_to_date, + date_to_beacon_block, + mp_state_to_str, + slot_to_beacon_day_epoch_slot, + to_float, + to_int, +) + + +class TestToFloat: + def test_wei_to_ether(self): + assert to_float(10**18) == 1.0 + + def test_zero(self): + assert to_float(0) == 
0.0 + + def test_fractional(self): + assert to_float(5 * 10**17) == 0.5 + + def test_custom_decimals(self): + assert to_float(1_000_000, decimals=6) == 1.0 + + def test_string_input(self): + assert to_float("1000000000000000000") == 1.0 + + def test_large_value(self): + assert to_float(32 * 10**18) == 32.0 + + +class TestToInt: + def test_wei_to_ether(self): + assert to_int(10**18) == 1 + + def test_truncates(self): + assert to_int(15 * 10**17) == 1 + + def test_zero(self): + assert to_int(0) == 0 + + def test_custom_decimals(self): + assert to_int(1_500_000, decimals=6) == 1 + + +class TestBeaconBlockDate: + def test_block_zero(self): + assert beacon_block_to_date(0) == BEACON_START_DATE + + def test_block_one(self): + assert beacon_block_to_date(1) == BEACON_START_DATE + 12 + + def test_roundtrip(self): + block = 1_000_000 + date = beacon_block_to_date(block) + assert date_to_beacon_block(date) == block + + def test_date_to_block_truncates(self): + date = BEACON_START_DATE + 13 # not a clean 12-second boundary + assert date_to_beacon_block(date) == 1 + + +class TestSlotToBeaconDayEpochSlot: + def test_slot_zero(self): + assert slot_to_beacon_day_epoch_slot(0) == (0, 0, 0) + + def test_slot_32(self): + # slot 32 = epoch 1, slot 0 within epoch, day 0 + assert slot_to_beacon_day_epoch_slot(32) == (0, 1, 0) + + def test_full_day(self): + # 225 epochs per day, 32 slots per epoch = 7200 slots per day + slots_per_day = 225 * 32 + assert slot_to_beacon_day_epoch_slot(slots_per_day) == (1, 0, 0) + + +class TestMpStateToStr: + def test_all_known_states(self): + assert mp_state_to_str(0) == "initialised" + assert mp_state_to_str(1) == "prelaunch" + assert mp_state_to_str(2) == "staking" + assert mp_state_to_str(3) == "withdrawable" + assert mp_state_to_str(4) == "dissolved" + + def test_unknown_state(self): + assert mp_state_to_str(99) == "99" + + +class TestTimeConstants: + def test_seconds(self): + assert solidity.seconds == 1 + + def test_minutes(self): + assert 
solidity.minutes == 60 + + def test_hours(self): + assert solidity.hours == 3600 + + def test_days(self): + assert solidity.days == 86400 + + def test_weeks(self): + assert solidity.weeks == 604800 + + def test_years(self): + assert solidity.years == 365 * 86400 diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..44d8fd4f --- /dev/null +++ b/uv.lock @@ -0,0 +1,2658 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.13' and sys_platform == 'win32'", + "python_full_version < '3.13' and sys_platform == 'emscripten'", + "python_full_version < '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] + +[[package]] +name = "aiocache" +version = "0.12.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/64/b945b8025a9d1e6e2138845f4022165d3b337f55f50984fbc6a4c0a1e355/aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713", size = 132196, upload-time = "2024-09-25T13:20:23.823Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/d7/15d67e05b235d1ed8c3ce61688fe4d84130e72af1657acadfaac3479f4cf/aiocache-0.12.3-py2.py3-none-any.whl", hash = "sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d", size = 28199, upload-time = "2024-09-25T13:20:22.688Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, + { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, + { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, + { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" }, + { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, + { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" }, + { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" }, + { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, + { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, + { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, + { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" }, + { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = 
"2026-01-03T17:31:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, + { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, + { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, + { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, + { url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, + { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, + { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" }, + { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" 
}, + { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, + { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, + { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, + { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, + { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, + { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, + { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, + { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, 
upload-time = "2026-01-03T17:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anthropic" +version = "0.84.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "docstring-parser" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/ea/0869d6df9ef83dcf393aeefc12dd81677d091c6ffc86f783e51cf44062f2/anthropic-0.84.0.tar.gz", hash = 
"sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37", size = 539457, upload-time = "2026-02-25T05:22:38.54Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/ca/218fa25002a332c0aa149ba18ffc0543175998b1f65de63f6d106689a345/anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7", size = 455156, upload-time = "2026-02-25T05:22:40.468Z" }, +] + +[[package]] +name = "anyascii" +version = "0.3.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/ba/edebda727008390936da4a9bf677c19cd63b32d51e864656d2cbd1028e25/anyascii-0.3.3.tar.gz", hash = "sha256:c94e9dd9d47b3d9494eca305fef9447d00b4bf1a32aff85aa746fa3ec7fb95c3", size = 264680, upload-time = "2025-06-29T03:33:30.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/76/783b75a21ce3563b8709050de030ae253853b147bd52e141edc1025aa268/anyascii-0.3.3-py3-none-any.whl", hash = "sha256:f5ab5e53c8781a36b5a40e1296a0eeda2f48c649ef10c3921c1381b1d00dee7a", size = 345090, upload-time = "2025-06-29T03:33:28.356Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "attrs" +version = 
"25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "audioop-lts" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/53/946db57842a50b2da2e0c1e34bd37f36f5aadba1a929a3971c5d7841dbca/audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0", size = 30686, upload-time = "2025-08-05T16:43:17.409Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/d4/94d277ca941de5a507b07f0b592f199c22454eeaec8f008a286b3fbbacd6/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800", size = 46523, upload-time = "2025-08-05T16:42:20.836Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5a/656d1c2da4b555920ce4177167bfeb8623d98765594af59702c8873f60ec/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303", size = 27455, upload-time = "2025-08-05T16:42:22.283Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/ea581e364ce7b0d41456fb79d6ee0ad482beda61faf0cab20cbd4c63a541/audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75", size = 26997, upload-time = "2025-08-05T16:42:23.849Z" 
}, + { url = "https://files.pythonhosted.org/packages/b8/3b/e8964210b5e216e5041593b7d33e97ee65967f17c282e8510d19c666dab4/audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d", size = 85844, upload-time = "2025-08-05T16:42:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2e/0a1c52faf10d51def20531a59ce4c706cb7952323b11709e10de324d6493/audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b", size = 85056, upload-time = "2025-08-05T16:42:26.559Z" }, + { url = "https://files.pythonhosted.org/packages/75/e8/cd95eef479656cb75ab05dfece8c1f8c395d17a7c651d88f8e6e291a63ab/audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8", size = 93892, upload-time = "2025-08-05T16:42:27.902Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1e/a0c42570b74f83efa5cca34905b3eef03f7ab09fe5637015df538a7f3345/audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc", size = 96660, upload-time = "2025-08-05T16:42:28.9Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/8a0ae607ca07dbb34027bac8db805498ee7bfecc05fd2c148cc1ed7646e7/audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3", size = 79143, upload-time = "2025-08-05T16:42:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/17/0d28c46179e7910bfb0bb62760ccb33edb5de973052cb2230b662c14ca2e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6", size = 84313, upload-time = "2025-08-05T16:42:30.949Z" }, + { url = "https://files.pythonhosted.org/packages/84/ba/bd5d3806641564f2024e97ca98ea8f8811d4e01d9b9f9831474bc9e14f9e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a", size = 93044, upload-time = "2025-08-05T16:42:31.959Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5e/435ce8d5642f1f7679540d1e73c1c42d933331c0976eb397d1717d7f01a3/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl", hash = "sha256:3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623", size = 78766, upload-time = "2025-08-05T16:42:33.302Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/b909e76b606cbfd53875693ec8c156e93e15a1366a012f0b7e4fb52d3c34/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7", size = 87640, upload-time = "2025-08-05T16:42:34.854Z" }, + { url = "https://files.pythonhosted.org/packages/30/e7/8f1603b4572d79b775f2140d7952f200f5e6c62904585d08a01f0a70393a/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449", size = 86052, upload-time = "2025-08-05T16:42:35.839Z" }, + { url = "https://files.pythonhosted.org/packages/b5/96/c37846df657ccdda62ba1ae2b6534fa90e2e1b1742ca8dcf8ebd38c53801/audioop_lts-0.2.2-cp313-abi3-win32.whl", hash = "sha256:3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636", size = 26185, upload-time = "2025-08-05T16:42:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/9d78fdb5b844a83da8a71226c7bdae7cc638861085fff7a1d707cb4823fa/audioop_lts-0.2.2-cp313-abi3-win_amd64.whl", hash = "sha256:a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e", size = 30503, upload-time = "2025-08-05T16:42:38.427Z" }, + { url 
= "https://files.pythonhosted.org/packages/34/25/20d8fde083123e90c61b51afb547bb0ea7e77bab50d98c0ab243d02a0e43/audioop_lts-0.2.2-cp313-abi3-win_arm64.whl", hash = "sha256:5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f", size = 24173, upload-time = "2025-08-05T16:42:39.704Z" }, + { url = "https://files.pythonhosted.org/packages/58/a7/0a764f77b5c4ac58dc13c01a580f5d32ae8c74c92020b961556a43e26d02/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09", size = 47096, upload-time = "2025-08-05T16:42:40.684Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ed/ebebedde1a18848b085ad0fa54b66ceb95f1f94a3fc04f1cd1b5ccb0ed42/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58", size = 27748, upload-time = "2025-08-05T16:42:41.992Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6e/11ca8c21af79f15dbb1c7f8017952ee8c810c438ce4e2b25638dfef2b02c/audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19", size = 27329, upload-time = "2025-08-05T16:42:42.987Z" }, + { url = "https://files.pythonhosted.org/packages/84/52/0022f93d56d85eec5da6b9da6a958a1ef09e80c39f2cc0a590c6af81dcbb/audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911", size = 92407, upload-time = "2025-08-05T16:42:44.336Z" }, + { url = "https://files.pythonhosted.org/packages/87/1d/48a889855e67be8718adbc7a01f3c01d5743c325453a5e81cf3717664aad/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9", size = 91811, upload-time = "2025-08-05T16:42:45.325Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/a6/94b7213190e8077547ffae75e13ed05edc488653c85aa5c41472c297d295/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe", size = 100470, upload-time = "2025-08-05T16:42:46.468Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/78450d7cb921ede0cfc33426d3a8023a3bda755883c95c868ee36db8d48d/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132", size = 103878, upload-time = "2025-08-05T16:42:47.576Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e2/cd5439aad4f3e34ae1ee852025dc6aa8f67a82b97641e390bf7bd9891d3e/audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753", size = 84867, upload-time = "2025-08-05T16:42:49.003Z" }, + { url = "https://files.pythonhosted.org/packages/68/4b/9d853e9076c43ebba0d411e8d2aa19061083349ac695a7d082540bad64d0/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb", size = 90001, upload-time = "2025-08-05T16:42:50.038Z" }, + { url = "https://files.pythonhosted.org/packages/58/26/4bae7f9d2f116ed5593989d0e521d679b0d583973d203384679323d8fa85/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093", size = 99046, upload-time = "2025-08-05T16:42:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/b2/67/a9f4fb3e250dda9e9046f8866e9fa7d52664f8985e445c6b4ad6dfb55641/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7", size = 84788, upload-time = 
"2025-08-05T16:42:52.198Z" }, + { url = "https://files.pythonhosted.org/packages/70/f7/3de86562db0121956148bcb0fe5b506615e3bcf6e63c4357a612b910765a/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c", size = 94472, upload-time = "2025-08-05T16:42:53.59Z" }, + { url = "https://files.pythonhosted.org/packages/f1/32/fd772bf9078ae1001207d2df1eef3da05bea611a87dd0e8217989b2848fa/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5", size = 92279, upload-time = "2025-08-05T16:42:54.632Z" }, + { url = "https://files.pythonhosted.org/packages/4f/41/affea7181592ab0ab560044632571a38edaf9130b84928177823fbf3176a/audioop_lts-0.2.2-cp313-cp313t-win32.whl", hash = "sha256:d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917", size = 26568, upload-time = "2025-08-05T16:42:55.627Z" }, + { url = "https://files.pythonhosted.org/packages/28/2b/0372842877016641db8fc54d5c88596b542eec2f8f6c20a36fb6612bf9ee/audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547", size = 30942, upload-time = "2025-08-05T16:42:56.674Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/baf2b9cc7e96c179bb4a54f30fcd83e6ecb340031bde68f486403f943768/audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969", size = 24603, upload-time = "2025-08-05T16:42:57.571Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/413b5a2804091e2c7d5def1d618e4837f1cb82464e230f827226278556b7/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6", size = 47104, upload-time = "2025-08-05T16:42:58.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8c/daa3308dc6593944410c2c68306a5e217f5c05b70a12e70228e7dd42dc5c/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a", size = 27754, upload-time = "2025-08-05T16:43:00.132Z" }, + { url = "https://files.pythonhosted.org/packages/4e/86/c2e0f627168fcf61781a8f72cab06b228fe1da4b9fa4ab39cfb791b5836b/audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b", size = 27332, upload-time = "2025-08-05T16:43:01.666Z" }, + { url = "https://files.pythonhosted.org/packages/c7/bd/35dce665255434f54e5307de39e31912a6f902d4572da7c37582809de14f/audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6", size = 92396, upload-time = "2025-08-05T16:43:02.991Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d2/deeb9f51def1437b3afa35aeb729d577c04bcd89394cb56f9239a9f50b6f/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf", size = 91811, upload-time = "2025-08-05T16:43:04.096Z" }, + { url = "https://files.pythonhosted.org/packages/76/3b/09f8b35b227cee28cc8231e296a82759ed80c1a08e349811d69773c48426/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd", size = 100483, upload-time = "2025-08-05T16:43:05.085Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/05b48a935cf3b130c248bfdbdea71ce6437f5394ee8533e0edd7cfd93d5e/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a", 
size = 103885, upload-time = "2025-08-05T16:43:06.197Z" }, + { url = "https://files.pythonhosted.org/packages/83/80/186b7fce6d35b68d3d739f228dc31d60b3412105854edb975aa155a58339/audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e", size = 84899, upload-time = "2025-08-05T16:43:07.291Z" }, + { url = "https://files.pythonhosted.org/packages/49/89/c78cc5ac6cb5828f17514fb12966e299c850bc885e80f8ad94e38d450886/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7", size = 89998, upload-time = "2025-08-05T16:43:08.335Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4b/6401888d0c010e586c2ca50fce4c903d70a6bb55928b16cfbdfd957a13da/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5", size = 99046, upload-time = "2025-08-05T16:43:09.367Z" }, + { url = "https://files.pythonhosted.org/packages/de/f8/c874ca9bb447dae0e2ef2e231f6c4c2b0c39e31ae684d2420b0f9e97ee68/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9", size = 84843, upload-time = "2025-08-05T16:43:10.749Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c0/0323e66f3daebc13fd46b36b30c3be47e3fc4257eae44f1e77eb828c703f/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602", size = 94490, upload-time = "2025-08-05T16:43:12.131Z" }, + { url = "https://files.pythonhosted.org/packages/98/6b/acc7734ac02d95ab791c10c3f17ffa3584ccb9ac5c18fd771c638ed6d1f5/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0", size = 92297, upload-time = "2025-08-05T16:43:13.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/c3/c3dc3f564ce6877ecd2a05f8d751b9b27a8c320c2533a98b0c86349778d0/audioop_lts-0.2.2-cp314-cp314t-win32.whl", hash = "sha256:068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3", size = 27331, upload-time = "2025-08-05T16:43:14.19Z" }, + { url = "https://files.pythonhosted.org/packages/72/bb/b4608537e9ffcb86449091939d52d24a055216a36a8bf66b936af8c3e7ac/audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b", size = 31697, upload-time = "2025-08-05T16:43:15.193Z" }, + { url = "https://files.pythonhosted.org/packages/f6/22/91616fe707a5c5510de2cac9b046a30defe7007ba8a0c04f9c08f27df312/audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd", size = 25206, upload-time = "2025-08-05T16:43:16.444Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, +] + +[[package]] +name = "bidict" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = 
"sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" }, +] + +[[package]] +name = "bitarray" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/06/92fdc84448d324ab8434b78e65caf4fb4c6c90b4f8ad9bdd4c8021bfaf1e/bitarray-3.8.0.tar.gz", hash = "sha256:3eae38daffd77c9621ae80c16932eea3fb3a4af141fb7cc724d4ad93eff9210d", size = 151991, upload-time = "2025-11-02T21:41:15.117Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/a0/0c41d893eda756315491adfdbf9bc928aee3d377a7f97a8834d453aa5de1/bitarray-3.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2fcbe9b3a5996b417e030aa33a562e7e20dfc86271e53d7e841fc5df16268b8", size = 148575, upload-time = "2025-11-02T21:39:25.718Z" }, + { url = "https://files.pythonhosted.org/packages/0e/30/12ab2f4a4429bd844b419c37877caba93d676d18be71354fbbeb21d9f4cc/bitarray-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd761d158f67e288fd0ebe00c3b158095ce80a4bc7c32b60c7121224003ba70d", size = 145454, upload-time = "2025-11-02T21:39:26.695Z" }, + { url = "https://files.pythonhosted.org/packages/26/58/314b3e3f219533464e120f0c51ac5123e7b1c1b91f725a4073fb70c5a858/bitarray-3.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c394a3f055b49f92626f83c1a0b6d6cd2c628f1ccd72481c3e3c6aa4695f3b20", size = 332949, upload-time = "2025-11-02T21:39:27.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/ce/ca8c706bd8341c7a22dd92d2a528af71f7e5f4726085d93f81fd768cb03b/bitarray-3.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:969fd67de8c42affdb47b38b80f1eaa79ac0ef17d65407cdd931db1675315af1", size = 360599, upload-time = "2025-11-02T21:39:28.964Z" }, + { url = "https://files.pythonhosted.org/packages/ef/dc/aa181df85f933052d962804906b282acb433cb9318b08ec2aceb4ee34faf/bitarray-3.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99d25aff3745c54e61ab340b98400c52ebec04290a62078155e0d7eb30380220", size = 371972, upload-time = "2025-11-02T21:39:30.228Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d9/b805bfa158c7bcf4df0ac19b1be581b47e1ddb792c11023aed80a7058e78/bitarray-3.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e645b4c365d6f1f9e0799380ad6395268f3c3b898244a650aaeb8d9d27b74c35", size = 340303, upload-time = "2025-11-02T21:39:31.342Z" }, + { url = "https://files.pythonhosted.org/packages/1f/42/5308cc97ea929e30727292617a3a88293470166851e13c9e3f16f395da55/bitarray-3.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2fa23fdb3beab313950bbb49674e8a161e61449332d3997089fe3944953f1b77", size = 330494, upload-time = "2025-11-02T21:39:32.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/89/64f1596cb80433323efdbc8dcd0d6e57c40dfbe6ea3341623f34ec397edd/bitarray-3.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:165052a0e61c880f7093808a0c524ce1b3555bfa114c0dfb5c809cd07918a60d", size = 358123, upload-time = "2025-11-02T21:39:34.331Z" }, + { url = "https://files.pythonhosted.org/packages/27/fd/f3d49c5443b57087f888b5e118c8dd78bb7c8e8cfeeed250f8e92128a05f/bitarray-3.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:337c8cd46a4c6568d367ed676cbf2d7de16f890bb31dbb54c44c1d6bb6d4a1de", size = 356046, upload-time = "2025-11-02T21:39:35.449Z" 
}, + { url = "https://files.pythonhosted.org/packages/aa/db/1fd0b402bd2b47142e958b6930dbb9445235d03fa703c9a24caa6e576ae2/bitarray-3.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21ca6a47bf20db9e7ad74ca04b3d479e4d76109b68333eb23535553d2705339e", size = 336872, upload-time = "2025-11-02T21:39:36.891Z" }, + { url = "https://files.pythonhosted.org/packages/58/73/680b47718f1313b4538af479c4732eaca0aeda34d93fc5b869f87932d57d/bitarray-3.8.0-cp312-cp312-win32.whl", hash = "sha256:178c5a4c7fdfb5cd79e372ae7f675390e670f3732e5bc68d327e01a5b3ff8d55", size = 143025, upload-time = "2025-11-02T21:39:38.303Z" }, + { url = "https://files.pythonhosted.org/packages/f8/11/7792587c19c79a8283e8838f44709fa4338a8f7d2a3091dfd81c07ae89c7/bitarray-3.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:75a3b6e9c695a6570ea488db75b84bb592ff70a944957efa1c655867c575018b", size = 149969, upload-time = "2025-11-02T21:39:39.715Z" }, + { url = "https://files.pythonhosted.org/packages/9a/00/9df64b5d8a84e8e9ec392f6f9ce93f50626a5b301cb6c6b3fe3406454d66/bitarray-3.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:5591daf81313096909d973fb2612fccd87528fdfdd39f6478bdce54543178954", size = 146907, upload-time = "2025-11-02T21:39:40.815Z" }, + { url = "https://files.pythonhosted.org/packages/3e/35/480364d4baf1e34c79076750914664373f561c58abb5c31c35b3fae613ff/bitarray-3.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18214bac86341f1cc413772e66447d6cca10981e2880b70ecaf4e826c04f95e9", size = 148582, upload-time = "2025-11-02T21:39:42.268Z" }, + { url = "https://files.pythonhosted.org/packages/5e/a8/718b95524c803937f4edbaaf6480f39c80f6ed189d61357b345e8361ffb6/bitarray-3.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:01c5f0dc080b0ebb432f7a68ee1e88a76bd34f6d89c9568fcec65fb16ed71f0e", size = 145433, upload-time = "2025-11-02T21:39:43.552Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/66/4a10f30dc9e2e01e3b4ecd44a511219f98e63c86b0e0f704c90fac24059b/bitarray-3.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:86685fa04067f7175f9718489ae755f6acde03593a1a9ca89305554af40e14fd", size = 332986, upload-time = "2025-11-02T21:39:44.656Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/4c08774d847f80a1166e4c704b4e0f1c417c0afe6306eae0bc5e70d35faa/bitarray-3.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56896ceeffe25946c4010320629e2d858ca763cd8ded273c81672a5edbcb1e0a", size = 360634, upload-time = "2025-11-02T21:39:45.798Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/bf8ad26169ebd0b2746d5c7564db734453ca467f8aab87e9d43b0a794383/bitarray-3.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9858dcbc23ba7eaadcd319786b982278a1a2b2020720b19db43e309579ff76fb", size = 371992, upload-time = "2025-11-02T21:39:46.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/16/ce166754e7c9d10650e02914552fa637cf3b2591f7ed16632bbf6b783312/bitarray-3.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa7dec53c25f1949513457ef8b0ea1fb40e76c672cc4d2daa8ad3c8d6b73491a", size = 340315, upload-time = "2025-11-02T21:39:48.182Z" }, + { url = "https://files.pythonhosted.org/packages/de/2a/fbba3a106ddd260e84b9a624f730257c32ba51a8a029565248dfedfdf6f2/bitarray-3.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15a2eff91f54d2b1f573cca8ca6fb58763ce8fea80e7899ab028f3987ef71cd5", size = 330473, upload-time = "2025-11-02T21:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/68/97/56cf3c70196e7307ad32318a9d6ed969dbdc6a4534bbe429112fa7dfe42e/bitarray-3.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b1572ee0eb1967e71787af636bb7d1eb9c6735d5337762c450650e7f51844594", size = 
358129, upload-time = "2025-11-02T21:39:51.189Z" }, + { url = "https://files.pythonhosted.org/packages/fd/be/afd391a5c0896d3339613321b2f94af853f29afc8bd3fbc327431244c642/bitarray-3.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5bfac7f236ba1a4d402644bdce47fb9db02a7cf3214a1f637d3a88390f9e5428", size = 356005, upload-time = "2025-11-02T21:39:52.355Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a8e1a371babba29bad3378bb3a2cdca2b012170711e7fe1f22031a6b7b95/bitarray-3.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f0a55cf02d2cdd739b40ce10c09bbdd520e141217696add7a48b56e67bdfdfe6", size = 336862, upload-time = "2025-11-02T21:39:54.345Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/6dc1d0fdc06991c8dc3b1fcfe1ae49fbaced42064cd1b5f24278e73fe05f/bitarray-3.8.0-cp313-cp313-win32.whl", hash = "sha256:a2ba92f59e30ce915e9e79af37649432e3a212ddddf416d4d686b1b4825bcdb2", size = 143018, upload-time = "2025-11-02T21:39:56.361Z" }, + { url = "https://files.pythonhosted.org/packages/2e/72/76e13f5cd23b8b9071747909663ce3b02da24a5e7e22c35146338625db35/bitarray-3.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f2a5d8006db5a555e06f9437e76bf52537d3dfd130cb8ae2b30866aca32c9", size = 149977, upload-time = "2025-11-02T21:39:57.718Z" }, + { url = "https://files.pythonhosted.org/packages/01/37/60f336c32336cc3ec03b0c61076f16ea2f05d5371c8a56e802161d218b77/bitarray-3.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:50ddbe3a7b4b6ab96812f5a4d570f401a2cdb95642fd04c062f98939610bbeee", size = 146930, upload-time = "2025-11-02T21:39:59.308Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b0/411327a6c7f6b2bead64bb06fe60b92e0344957ec1ab0645d5ccc25fdafe/bitarray-3.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8cbd4bfc933b33b85c43ef4c1f4d5e3e9d91975ea6368acf5fbac02bac06ea89", size = 148563, upload-time = "2025-11-02T21:40:01.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/bc/ff80d97c627d774f879da0ea93223adb1267feab7e07d5c17580ffe6d632/bitarray-3.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9d35d8f8a1c9ed4e2b08187b513f8a3c71958600129db3aa26d85ea3abfd1310", size = 145422, upload-time = "2025-11-02T21:40:02.535Z" }, + { url = "https://files.pythonhosted.org/packages/66/e7/b4cb6c5689aacd0a32f3aa8a507155eaa33528c63de2f182b60843fbf700/bitarray-3.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f55e14e7c56f4fafe1343480c32b110ef03836c21ff7c48bae7add6818f77c", size = 332852, upload-time = "2025-11-02T21:40:03.645Z" }, + { url = "https://files.pythonhosted.org/packages/e7/91/fbd1b047e3e2f4b65590f289c8151df1d203d75b005f5aae4e072fe77d76/bitarray-3.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dfbe2aa45b273f49e715c5345d94874cb65a28482bf231af408891c260601b8d", size = 360801, upload-time = "2025-11-02T21:40:04.827Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4a/63064c593627bac8754fdafcb5343999c93ab2aeb27bcd9d270a010abea5/bitarray-3.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64af877116edf051375b45f0bda648143176a017b13803ec7b3a3111dc05f4c5", size = 371408, upload-time = "2025-11-02T21:40:05.985Z" }, + { url = "https://files.pythonhosted.org/packages/46/97/ddc07723767bdafd170f2ff6e173c940fa874192783ee464aa3c1dedf07d/bitarray-3.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cdfbb27f2c46bb5bbdcee147530cbc5ca8ab858d7693924e88e30ada21b2c5e2", size = 340033, upload-time = "2025-11-02T21:40:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1e/e1ea9f1146fd4af032817069ff118918d73e5de519854ce3860e2ed560ff/bitarray-3.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4d73d4948dcc5591d880db8933004e01f1dd2296df9de815354d53469beb26fe", size = 
330774, upload-time = "2025-11-02T21:40:08.496Z" }, + { url = "https://files.pythonhosted.org/packages/cf/9f/8242296c124a48d1eab471fd0838aeb7ea9c6fd720302d99ab7855d3e6d3/bitarray-3.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:28a85b056c0eb7f5d864c0ceef07034117e8ebfca756f50648c71950a568ba11", size = 358337, upload-time = "2025-11-02T21:40:10.035Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6b/9095d75264c67d479f298c80802422464ce18c3cdd893252eeccf4997611/bitarray-3.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:79ec4498a545733ecace48d780d22407411b07403a2e08b9a4d7596c0b97ebd7", size = 355639, upload-time = "2025-11-02T21:40:11.485Z" }, + { url = "https://files.pythonhosted.org/packages/a0/af/c93c0ae5ef824136e90ac7ddf6cceccb1232f34240b2f55a922f874da9b4/bitarray-3.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:33af25c4ff7723363cb8404dfc2eefeab4110b654f6c98d26aba8a08c745d860", size = 336999, upload-time = "2025-11-02T21:40:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/81/0f/72c951f5997b2876355d5e671f78dd2362493254876675cf22dbd24389ae/bitarray-3.8.0-cp314-cp314-win32.whl", hash = "sha256:2c3bb96b6026643ce24677650889b09073f60b9860a71765f843c99f9ab38b25", size = 142169, upload-time = "2025-11-02T21:40:14.031Z" }, + { url = "https://files.pythonhosted.org/packages/8a/55/ef1b4de8107bf13823da8756c20e1fbc9452228b4e837f46f6d9ddba3eb3/bitarray-3.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:847c7f61964225fc489fe1d49eda7e0e0d253e98862c012cecf845f9ad45cdf4", size = 148737, upload-time = "2025-11-02T21:40:15.436Z" }, + { url = "https://files.pythonhosted.org/packages/5f/26/bc0784136775024ac56cc67c0d6f9aa77a7770de7f82c3a7c9be11c217cd/bitarray-3.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:a2cb35a6efaa0e3623d8272471371a12c7e07b51a33e5efce9b58f655d864b4e", size = 146083, upload-time = "2025-11-02T21:40:17.135Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/64/57984e64264bf43d93a1809e645972771566a2d0345f4896b041ce20b000/bitarray-3.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:15e8d0597cc6e8496de6f4dea2a6880c57e1251502a7072f5631108a1aa28521", size = 149455, upload-time = "2025-11-02T21:40:18.558Z" }, + { url = "https://files.pythonhosted.org/packages/81/c0/0d5f2eaef1867f462f764bdb07d1e116c33a1bf052ea21889aefe4282f5b/bitarray-3.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8ffe660e963ae711cb9e2b8d8461c9b1ad6167823837fc17d59d5e539fb898fa", size = 146491, upload-time = "2025-11-02T21:40:19.665Z" }, + { url = "https://files.pythonhosted.org/packages/65/c6/bc1261f7a8862c0c59220a484464739e52235fd1e2afcb24d7f7d3fb5702/bitarray-3.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4779f356083c62e29b4198d290b7b17a39a69702d150678b7efff0fdddf494a8", size = 339721, upload-time = "2025-11-02T21:40:21.277Z" }, + { url = "https://files.pythonhosted.org/packages/81/d8/289ca55dd2939ea17b1108dc53bffc0fdc5160ba44f77502dfaae35d08c6/bitarray-3.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:025d133bf4ca8cf75f904eeb8ea946228d7c043231866143f31946a6f4dd0bf3", size = 367823, upload-time = "2025-11-02T21:40:22.463Z" }, + { url = "https://files.pythonhosted.org/packages/91/a2/61e7461ca9ac0fcb70f327a2e84b006996d2a840898e69037a39c87c6d06/bitarray-3.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:451f9958850ea98440d542278368c8d1e1ea821e2494b204570ba34a340759df", size = 377341, upload-time = "2025-11-02T21:40:23.789Z" }, + { url = "https://files.pythonhosted.org/packages/6c/87/4a0c9c8bdb13916d443e04d8f8542eef9190f31425da3c17c3478c40173f/bitarray-3.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6d79f659965290af60d6acc8e2716341865fe74609a7ede2a33c2f86ad893b8f", size = 
344985, upload-time = "2025-11-02T21:40:25.261Z" }, + { url = "https://files.pythonhosted.org/packages/17/4c/ff9259b916efe53695b631772e5213699c738efc2471b5ffe273f4000994/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fbf05678c2ae0064fb1b8de7e9e8f0fc30621b73c8477786dd0fb3868044a8c8", size = 336796, upload-time = "2025-11-02T21:40:26.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/4b/51b2468bbddbade5e2f3b8d5db08282c5b309e8687b0f02f75a8b5ff559c/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:c396358023b876cff547ce87f4e8ff8a2280598873a137e8cc69e115262260b8", size = 365085, upload-time = "2025-11-02T21:40:28.224Z" }, + { url = "https://files.pythonhosted.org/packages/bf/79/53473bfc2e052c6dbb628cdc1b156be621c77aaeb715918358b01574be55/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ed3493a369fe849cce98542d7405c88030b355e4d2e113887cb7ecc86c205773", size = 361012, upload-time = "2025-11-02T21:40:29.635Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b1/242bf2e44bfc69e73fa2b954b425d761a8e632f78ea31008f1c3cfad0854/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c764fb167411d5afaef88138542a4bfa28bd5e5ded5e8e42df87cef965efd6e9", size = 340644, upload-time = "2025-11-02T21:40:31.089Z" }, + { url = "https://files.pythonhosted.org/packages/cf/01/12e5ecf30a5de28a32485f226cad4b8a546845f65f755ce0365057ab1e92/bitarray-3.8.0-cp314-cp314t-win32.whl", hash = "sha256:e12769d3adcc419e65860de946df8d2ed274932177ac1cdb05186e498aaa9149", size = 143630, upload-time = "2025-11-02T21:40:32.351Z" }, + { url = "https://files.pythonhosted.org/packages/b6/92/6b6ade587b08024a8a890b07724775d29da9cf7497be5c3cbe226185e463/bitarray-3.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0ca70ccf789446a6dfde40b482ec21d28067172cd1f8efd50d5548159fccad9e", size = 150250, upload-time = "2025-11-02T21:40:33.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/40/be3858ffed004e47e48a2cefecdbf9b950d41098b780f9dc3aa609a88351/bitarray-3.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2a3d1b05ffdd3e95687942ae7b13c63689f85d3f15c39b33329e3cb9ce6c015f", size = 147015, upload-time = "2025-11-02T21:40:35.064Z" }, +] + +[[package]] +name = "cachetools" +version = "7.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/5c/3b882b82e9af737906539a2eafb62f96a229f1fa80255bede0c7b554cbc4/cachetools-7.0.3.tar.gz", hash = "sha256:8c246313b95849964e54a909c03b327a87ab0428b068fac10da7b105ca275ef6", size = 37187, upload-time = "2026-03-05T21:00:57.918Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/4a/573185481c50a8841331f54ddae44e4a3469c46aa0b397731c53a004369a/cachetools-7.0.3-py3-none-any.whl", hash = "sha256:c128ffca156eef344c25fcd08a96a5952803786fa33097f5f2d49edf76f79d53", size = 13907, upload-time = "2026-03-05T21:00:56.486Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = 
"sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/b6/9ee9c1a608916ca5feae81a344dffbaa53b26b90be58cc2159e3332d44ec/charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade", size = 280976, upload-time = "2026-03-06T06:01:15.276Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d8/a54f7c0b96f1df3563e9190f04daf981e365a9b397eedfdfb5dbef7e5c6c/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54", size = 189356, upload-time = "2026-03-06T06:01:16.511Z" }, + { url = "https://files.pythonhosted.org/packages/42/69/2bf7f76ce1446759a5787cb87d38f6a61eb47dbbdf035cfebf6347292a65/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467", size = 206369, upload-time = "2026-03-06T06:01:17.853Z" }, + { url = "https://files.pythonhosted.org/packages/10/9c/949d1a46dab56b959d9a87272482195f1840b515a3380e39986989a893ae/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60", size = 203285, upload-time = "2026-03-06T06:01:19.473Z" }, + { url = "https://files.pythonhosted.org/packages/67/5c/ae30362a88b4da237d71ea214a8c7eb915db3eec941adda511729ac25fa2/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d", size = 196274, upload-time = "2026-03-06T06:01:20.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/07/c9f2cb0e46cb6d64fdcc4f95953747b843bb2181bda678dc4e699b8f0f9a/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e", size = 184715, upload-time = "2026-03-06T06:01:22.194Z" }, + { url = "https://files.pythonhosted.org/packages/36/64/6b0ca95c44fddf692cd06d642b28f63009d0ce325fad6e9b2b4d0ef86a52/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f", size = 193426, upload-time = "2026-03-06T06:01:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/50/bc/a730690d726403743795ca3f5bb2baf67838c5fea78236098f324b965e40/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc", size = 191780, upload-time = "2026-03-06T06:01:25.053Z" }, + { url = "https://files.pythonhosted.org/packages/97/4f/6c0bc9af68222b22951552d73df4532b5be6447cee32d58e7e8c74ecbb7b/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95", size = 185805, upload-time = "2026-03-06T06:01:26.294Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b9/a523fb9b0ee90814b503452b2600e4cbc118cd68714d57041564886e7325/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a", size = 208342, upload-time = "2026-03-06T06:01:27.55Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/c59e761dee4464050713e50e27b58266cc8e209e518c0b378c1580c959ba/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac", size = 193661, upload-time = "2026-03-06T06:01:29.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/43/729fa30aad69783f755c5ad8649da17ee095311ca42024742701e202dc59/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1", size = 204819, upload-time = "2026-03-06T06:01:30.298Z" }, + { url = "https://files.pythonhosted.org/packages/87/33/d9b442ce5a91b96fc0840455a9e49a611bbadae6122778d0a6a79683dd31/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98", size = 198080, upload-time = "2026-03-06T06:01:31.478Z" }, + { url = "https://files.pythonhosted.org/packages/56/5a/b8b5a23134978ee9885cee2d6995f4c27cc41f9baded0a9685eabc5338f0/charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262", size = 132630, upload-time = "2026-03-06T06:01:33.056Z" }, + { url = "https://files.pythonhosted.org/packages/70/53/e44a4c07e8904500aec95865dc3f6464dc3586a039ef0df606eb3ac38e35/charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636", size = 142856, upload-time = "2026-03-06T06:01:34.489Z" }, + { url = "https://files.pythonhosted.org/packages/ea/aa/c5628f7cad591b1cf45790b7a61483c3e36cf41349c98af7813c483fd6e8/charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02", size = 132982, upload-time = "2026-03-06T06:01:35.641Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, + { url = "https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, + { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, + { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, + { url = "https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, + { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, upload-time = "2026-03-06T06:02:01.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, upload-time = "2026-03-06T06:02:14.83Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, + { url = "https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, + { url = "https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, +] + +[[package]] +name = "ckzg" +version = "2.1.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/b8/9add33a0be636e2d4467ea4497b47e124677a0478d9be40ef6473d4ec29b/ckzg-2.1.6.tar.gz", hash = "sha256:49df31684283dfcfd1eeca638d84c03788ebdd48e8afc0643bf5188ec023dc8d", size = 1127792, upload-time = "2026-02-26T17:19:49.805Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/61/2be9ebc6677505b693f3026003e319f1afafd9deef85233ad011cebf61f0/ckzg-2.1.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93b350b0f3d074df84f8836df0db2fb0978403565477b6e25415c48251c5c7a1", size = 96390, upload-time = "2026-02-26T17:18:52.013Z" }, + { url = "https://files.pythonhosted.org/packages/79/1f/b96709267c309ff9638bfac7ccfbc255c9590922504f4501aba31f80ff55/ckzg-2.1.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:c1e3cf33671cd35d86d7a7f68ef1f40381a3315a61db8861858247cfda46ca6d", size = 180446, upload-time = "2026-02-26T17:18:53.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/16/e015e0d897a7af1f5fcaccf343adc264adfb73b1fa9181edce7965c7bbfd/ckzg-2.1.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cfe71caa4f667ded6c87f496ac1783f004c3f5ab29f695f8d3163c75df51398f", size = 166243, upload-time = "2026-02-26T17:18:54.102Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ee/cd8206f1005566aa6f31f226d009dfc08bca71b883aeea010108151df7a7/ckzg-2.1.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bf835249b20d58de28b097da7c06c3a6b3b5f184120b0ace55373d6b044c9445", size = 176019, upload-time = "2026-02-26T17:18:55.077Z" }, + { url = "https://files.pythonhosted.org/packages/5c/09/1b2215ba11cad28e17eed1644849aaa7caa463dbfc96024670b96c8cf6c8/ckzg-2.1.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a82f4bfd4fb1d3b378af859a1d0dc1febb83634981d8d50635afec0c7d10a372", size = 173682, upload-time = "2026-02-26T17:18:56.095Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e7/771182e7fdf331da81d4917741e91537f2de50b9dd12b8530241be699018/ckzg-2.1.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:36fd682e34c47befb7f28324793a92bb7fb14f8f2845d0b39abbcb6444e9565f", size = 188872, upload-time = "2026-02-26T17:18:58.122Z" }, + { url = "https://files.pythonhosted.org/packages/08/7c/1eca8c4abe8f83d15de7c3c8de6cc7cc42067502ed8591e70a03ef0e6857/ckzg-2.1.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9efcf3359bf12b128b4e0d86ed663946699fecaeb2d1298594c14a7cf14a7feb", size = 183566, upload-time = "2026-02-26T17:18:59.211Z" }, + { url = "https://files.pythonhosted.org/packages/fb/91/163b08eb84acaa1bcee2a1509bfc856fa833def7e2077f9127256c2b570c/ckzg-2.1.6-cp312-cp312-win_amd64.whl", hash = "sha256:e1c705a96c0ac99669f3691613b6eecd1d36c75fe433322b12293c906f8d8ae2", size = 99807, upload-time = "2026-02-26T17:19:00.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/34/0cc58fa7907ea5c3961f6c9dd086b2d75ffb7897aeff4baddf1ee868ac60/ckzg-2.1.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:616cd69938d0d79b13e128f4706ea48c21866c3f7c52547d4f185837d5568d69", size = 96390, upload-time = "2026-02-26T17:19:01.532Z" }, + { url = "https://files.pythonhosted.org/packages/11/f1/dc6a25d3ba37531e2b9838ad875d061348685b50ff6759261c9831942a77/ckzg-2.1.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:8d3056cd48f97041f98b73404f397c29aebd04b7f8f3bbc012180680d295a464", size = 180486, upload-time = "2026-02-26T17:19:02.768Z" }, + { url = "https://files.pythonhosted.org/packages/d5/95/17c7407af8a5070cf05ed8ff1156d9b62babecf74c84b2d61ed03efc72a2/ckzg-2.1.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c732e429b50dee04cd51fb601fc9cb4ba4d853e2e29a9914b3fdd36b576b0211", size = 166304, upload-time = "2026-02-26T17:19:03.825Z" }, + { url = "https://files.pythonhosted.org/packages/9a/31/8d7012523edea81d54f2f634f512f3a0705dd3dca99fdfe1281b09bc96ca/ckzg-2.1.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0f9933b6e06e6560b4b8980e2385ec4d639cfdebb03bffaadde75a5c61edb45", size = 176058, upload-time = "2026-02-26T17:19:04.879Z" }, + { url = "https://files.pythonhosted.org/packages/f9/4d/f1a73fee7b2b2212691acf2231a8df717b19f95412ca236549f4d4a21932/ckzg-2.1.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:be65a7c00d445cf07adea7679842df469989e6790df1d846944f9885a4a788be", size = 173687, upload-time = "2026-02-26T17:19:05.919Z" }, + { url = "https://files.pythonhosted.org/packages/03/ed/cc0866735571f4e55d8e0edd09d34aab1ba1a4b83288bafa398651df4d88/ckzg-2.1.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:36e2e198c9e0a94498db32b760b446a1c29ba7e01aaec17404237ef6ae1705df", size = 188907, upload-time = "2026-02-26T17:19:06.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/5b/154c5a3ebd6fe97e1bf5de60cb3d3bc4f9ff42565dab87957292d7918eb8/ckzg-2.1.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5ce6aaac6ad4d70cc6e8ef61b430957150e1eb3370fd898cebd074db85cde987", size = 183602, upload-time = "2026-02-26T17:19:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/81/8d/01bc02cfd24bbe641da36e5cbc50549db505b404a096ea501dcc1920f572/ckzg-2.1.6-cp313-cp313-win_amd64.whl", hash = "sha256:e897650e650fd090b97136103963a0bd338ff8582442b6e4b2bd660b0b81ff2e", size = 99810, upload-time = "2026-02-26T17:19:09.911Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/4f4449d60daf573ef4f14ab963e73dbd9803774fba40e839368af503b7de/ckzg-2.1.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b10f2b50369d95c2d3707293f958a73cc4a505f53d1dfeadb9534aad4dd33ec9", size = 96402, upload-time = "2026-02-26T17:19:11.147Z" }, + { url = "https://files.pythonhosted.org/packages/24/91/85eb888653ad9c8872b017ae765ec331eb7bac6c49b5815d8f8b687b7928/ckzg-2.1.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:c1642c7c1fd9225155660ee5bf96117b1d94a639a7f495c3b655ad7640bbb5c1", size = 180495, upload-time = "2026-02-26T17:19:12.368Z" }, + { url = "https://files.pythonhosted.org/packages/b7/a0/e42dd754e825ca0aac733993d6c60d202a6c7e4608e0ef75467bba6c1fb8/ckzg-2.1.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb3d119e5008385ec3d47e81965bf1c644f50077fa9aa890d49ee1a0963fbfb3", size = 166328, upload-time = "2026-02-26T17:19:13.512Z" }, + { url = "https://files.pythonhosted.org/packages/7d/35/6d94c0cecf02bec72a5b5e3f61e7987a428abb3af714cda25ebb1f2a3681/ckzg-2.1.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e81244ae23f27a6f85dc69838adcd3c5618acef57aec7ed87db8070cd6995bf", size = 176069, upload-time = "2026-02-26T17:19:15.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/69/9e6eb717dc9477374e28e5c5b56f210a708bbaa6b9660f09302138776488/ckzg-2.1.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:92b60f5f9eb880c595680af52d609e06dedee2bcdd109597ce58bb5422639b1a", size = 173743, upload-time = "2026-02-26T17:19:17.172Z" }, + { url = "https://files.pythonhosted.org/packages/1c/42/34cb744193163d33c348ce12f0155296bde1cbe733a139bef102c0ff7fec/ckzg-2.1.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1f9c9b2fd7d5f303eb2420130c1a1ee44a071308e227a8f9e238aeb4e2194ae", size = 188921, upload-time = "2026-02-26T17:19:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/14/a6/69a2c0e3d17e3e6d1ae40a7b8a75c354ffeb4b604e716daf25c4a743fb18/ckzg-2.1.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:eaf30b4719199f1d243bd761caaec3582bdad70a6797475c6cd5c03c5ce3cd1d", size = 183603, upload-time = "2026-02-26T17:19:19.579Z" }, + { url = "https://files.pythonhosted.org/packages/3a/21/ea282898caa22622aab9ccd0212f4a5fd9254a949323a406a5c38aee1406/ckzg-2.1.6-cp314-cp314-win_amd64.whl", hash = "sha256:30964b9fac452746db7e60c9c324957c8dc7bc815b72bb09eea88409decc33ed", size = 102520, upload-time = "2026-02-26T17:19:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/43/4c/ef4177450ccb31c8ff49ffd154e9266390b2f632caced121ec51f9172e4d/ckzg-2.1.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4fd1c8e20c52ce77f9ad7b004440b0ba46d22328af07a5eb095ea4f252d22644", size = 96611, upload-time = "2026-02-26T17:19:21.81Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ad/6e684af6b29744012befcb88db688234abc172d261ed4f5819df49ff55a4/ckzg-2.1.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:502bb5e5bbbf1bc14b324d8e012c06fc30c24840d35a7933b80b839869280491", size = 183330, upload-time = "2026-02-26T17:19:22.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/8e/469ab3b856215a7542792c2bae10dbf5e8e051fef2c50545070977acc5db/ckzg-2.1.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c859c8b93e82b9839a5bb443511a0b0631e93cb9275e755f54781693a3afc246", size = 169465, upload-time = "2026-02-26T17:19:23.821Z" }, + { url = "https://files.pythonhosted.org/packages/ef/41/3a5b27f0d8204dd3ed375c3348d462feedc24ef9db9df576e53cb53191b7/ckzg-2.1.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0330b7a7e0aca5622a31089c1d56a1a7040a52075803d31983fa9101fc45dddc", size = 178846, upload-time = "2026-02-26T17:19:25.452Z" }, + { url = "https://files.pythonhosted.org/packages/52/bc/4f15d4642b7c83bdc7c7868f6e809e56ebafc02c1ed43ae541f686185d47/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:646078c085edc4c92361f6277cb8b6aac978287306e664e3c29de2f26ad206d2", size = 176486, upload-time = "2026-02-26T17:19:26.876Z" }, + { url = "https://files.pythonhosted.org/packages/51/8b/f046442413da4bd294d3ec6de04adb54af47b1e149f85c127955e10a78cd/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:1224f2477fc794f7719bbe7650f735188120351b9511a7dd928b2fe8d74911c3", size = 191686, upload-time = "2026-02-26T17:19:27.889Z" }, + { url = "https://files.pythonhosted.org/packages/c0/8d/46d383414040cc3f4453c047b2268ef1548e846e5be732fdaf1b20dd5a79/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b33131a9674d9dd509eb9fbb59f65c66dc14bfe85bc3dc93af5140274741c12", size = 186202, upload-time = "2026-02-26T17:19:29.115Z" }, + { url = "https://files.pythonhosted.org/packages/bb/43/4d68277e83da239df32096209b0d27626c2d829bae8d9c757abc1687fc13/ckzg-2.1.6-cp314-cp314t-win_amd64.whl", hash = "sha256:73301ca29c29255960ebcee8bf52151cd3ac8de214c31a4e29dbcde8c44e0571", size = 102667, upload-time = "2026-02-26T17:19:30.111Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "compress-json" +version = "1.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/ba/1a503870491972aaa7281d52d294a1c1f97cd598e52d34cc4280c028db58/compress_json-1.0.5.tar.gz", hash = "sha256:8cd15b09413f402a08faa09255baa44261f20cad76956a18d2581b0792c69523", size = 5031, upload-time = "2022-04-07T21:22:04.694Z" } + +[[package]] +name = "contourpy" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, + { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, + { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" 
}, + { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, + { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, + { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, + { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, + { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, + { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, + { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, + { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, + { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, + { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, + { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, + { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, + { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, + { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, + { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, + { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, + { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, + { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, + { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, + { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, + { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, +] + +[[package]] +name = "cronitor" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanize" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/10/4dd1042302403f8b6cac685ee6138c902179ac7f33a5c85df02d040827be/cronitor-4.9.0.tar.gz", hash = "sha256:3ad4e4c796671f98fa1be5ee6ac49d476e67d8e61f863fe34b616b4b9484c5cb", size = 23005, 
upload-time = "2025-10-08T01:51:35.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/d8/1c6eb7e1f94575cc83cc938db53acf24c7000faf87cc884e8a83a0d37b0d/cronitor-4.9.0-py2.py3-none-any.whl", hash = "sha256:79f16e3b8745bcf78daa1c9686d46dea9c45a8546d03ea7a1c52f15aab11bfdc", size = 22010, upload-time = "2025-10-08T01:51:34.16Z" }, +] + +[[package]] +name = "cycler" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, +] + +[[package]] +name = "cytoolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "toolz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/d4/16916f3dc20a3f5455b63c35dcb260b3716f59ce27a93586804e70e431d5/cytoolz-1.1.0.tar.gz", hash = "sha256:13a7bf254c3c0d28b12e2290b82aed0f0977a4c2a2bf84854fcdc7796a29f3b0", size = 642510, upload-time = "2025-10-19T00:44:56.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ec/01426224f7acf60183d3921b25e1a8e71713d3d39cb464d64ac7aace6ea6/cytoolz-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:99f8e134c9be11649342853ec8c90837af4089fc8ff1e8f9a024a57d1fa08514", size = 1327800, upload-time = "2025-10-19T00:40:48.674Z" }, + { url = "https://files.pythonhosted.org/packages/b4/07/e07e8fedd332ac9626ad58bea31416dda19bfd14310731fa38b16a97e15f/cytoolz-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:0a6f44cf9319c30feb9a50aa513d777ef51efec16f31c404409e7deb8063df64", size = 997118, upload-time = "2025-10-19T00:40:50.919Z" }, + { url = "https://files.pythonhosted.org/packages/ab/72/c0f766d63ed2f9ea8dc8e1628d385d99b41fb834ce17ac3669e3f91e115d/cytoolz-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:945580dc158c557172fca899a35a99a16fbcebf6db0c77cb6621084bc82189f9", size = 991169, upload-time = "2025-10-19T00:40:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/df/4b/1f757353d1bf33e56a7391ecc9bc49c1e529803b93a9d2f67fe5f92906fe/cytoolz-1.1.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:257905ec050d04f2f856854620d1e25556fd735064cebd81b460f54939b9f9d5", size = 2700680, upload-time = "2025-10-19T00:40:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/25/73/9b25bb7ed8d419b9d6ff2ae0b3d06694de79a3f98f5169a1293ff7ad3a3f/cytoolz-1.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82779049f352fb3ab5e8c993ab45edbb6e02efb1f17f0b50f4972c706cc51d76", size = 2824951, upload-time = "2025-10-19T00:40:56.137Z" }, + { url = "https://files.pythonhosted.org/packages/0c/93/9c787f7c909e75670fff467f2504725d06d8c3f51d6dfe22c55a08c8ccd4/cytoolz-1.1.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7d3e405e435320e08c5a1633afaf285a392e2d9cef35c925d91e2a31dfd7a688", size = 2679635, upload-time = "2025-10-19T00:40:57.799Z" }, + { url = "https://files.pythonhosted.org/packages/50/aa/9ee92c302cccf7a41a7311b325b51ebeff25d36c1f82bdc1bbe3f58dc947/cytoolz-1.1.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:923df8f5591e0d20543060c29909c149ab1963a7267037b39eee03a83dbc50a8", size = 2938352, upload-time = "2025-10-19T00:40:59.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/a3/3b58c5c1692c3bacd65640d0d5c7267a7ebb76204f7507aec29de7063d2f/cytoolz-1.1.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:25db9e4862f22ea0ae2e56c8bec9fc9fd756b655ae13e8c7b5625d7ed1c582d4", size = 3022121, upload-time = "2025-10-19T00:41:01.209Z" }, + { url = "https://files.pythonhosted.org/packages/e1/93/c647bc3334355088c57351a536c2d4a83dd45f7de591fab383975e45bff9/cytoolz-1.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7a98deb11ccd8e5d9f9441ef2ff3352aab52226a2b7d04756caaa53cd612363", size = 2857656, upload-time = "2025-10-19T00:41:03.456Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c2/43fea146bf4141deea959e19dcddf268c5ed759dec5c2ed4a6941d711933/cytoolz-1.1.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:dce4ee9fc99104bc77efdea80f32ca5a650cd653bcc8a1d984a931153d3d9b58", size = 2551284, upload-time = "2025-10-19T00:41:05.347Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/cdc7a81ce5cfcde7ef523143d545635fc37e80ccacce140ae58483a21da3/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80d6da158f7d20c15819701bbda1c041f0944ede2f564f5c739b1bc80a9ffb8b", size = 2721673, upload-time = "2025-10-19T00:41:07.528Z" }, + { url = "https://files.pythonhosted.org/packages/45/be/f8524bb9ad8812ad375e61238dcaa3177628234d1b908ad0b74e3657cafd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3b5c5a192abda123ad45ef716ec9082b4cf7d95e9ada8291c5c2cc5558be858b", size = 2722884, upload-time = "2025-10-19T00:41:09.698Z" }, + { url = "https://files.pythonhosted.org/packages/23/e6/6bb8e4f9c267ad42d1ff77b6d2e4984665505afae50a216290e1d7311431/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5b399ce7d967b1cb6280250818b786be652aa8ddffd3c0bb5c48c6220d945ab5", size = 2685486, upload-time = "2025-10-19T00:41:11.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/dd/88619f9c8d2b682562c0c886bbb7c35720cb83fda2ac9a41bdd14073d9bd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e7e29a1a03f00b4322196cfe8e2c38da9a6c8d573566052c586df83aacc5663c", size = 2839661, upload-time = "2025-10-19T00:41:13.053Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8d/4478ebf471ee78dd496d254dc0f4ad729cd8e6ba8257de4f0a98a2838ef2/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5291b117d71652a817ec164e7011f18e6a51f8a352cc9a70ed5b976c51102fda", size = 2547095, upload-time = "2025-10-19T00:41:16.054Z" }, + { url = "https://files.pythonhosted.org/packages/e6/68/f1dea33367b0b3f64e199c230a14a6b6f243c189020effafd31e970ca527/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8caef62f846a9011676c51bda9189ae394cdd6bb17f2946ecaedc23243268320", size = 2870901, upload-time = "2025-10-19T00:41:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/33591c09dfe799b8fb692cf2ad383e2c41ab6593cc960b00d1fc8a145655/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:de425c5a8e3be7bb3a195e19191d28d9eb3c2038046064a92edc4505033ec9cb", size = 2765422, upload-time = "2025-10-19T00:41:20.075Z" }, + { url = "https://files.pythonhosted.org/packages/60/2b/a8aa233c9416df87f004e57ae4280bd5e1f389b4943d179f01020c6ec629/cytoolz-1.1.0-cp312-cp312-win32.whl", hash = "sha256:296440a870e8d1f2e1d1edf98f60f1532b9d3ab8dfbd4b25ec08cd76311e79e5", size = 901933, upload-time = "2025-10-19T00:41:21.646Z" }, + { url = "https://files.pythonhosted.org/packages/ad/33/4c9bdf8390dc01d2617c7f11930697157164a52259b6818ddfa2f94f89f4/cytoolz-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:07156987f224c6dac59aa18fb8bf91e1412f5463961862716a3381bf429c8699", size = 947989, upload-time = "2025-10-19T00:41:23.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/ac/6e2708835875f5acb52318462ed296bf94ed0cb8c7cb70e62fbd03f709e3/cytoolz-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:23e616b38f5b3160c7bb45b0f84a8f3deb4bd26b29fb2dfc716f241c738e27b8", size = 903913, upload-time = "2025-10-19T00:41:24.992Z" }, + { url = "https://files.pythonhosted.org/packages/71/4a/b3ddb3ee44fe0045e95dd973746f93f033b6f92cce1fc3cbbe24b329943c/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:76c9b58555300be6dde87a41faf1f97966d79b9a678b7a526fcff75d28ef4945", size = 976728, upload-time = "2025-10-19T00:41:26.5Z" }, + { url = "https://files.pythonhosted.org/packages/42/21/a3681434aa425875dd828bb515924b0f12c37a55c7d2bc5c0c5de3aeb0b4/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d1d638b10d3144795655e9395566ce35807df09219fd7cacd9e6acbdef67946a", size = 986057, upload-time = "2025-10-19T00:41:28.911Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cb/efc1b29e211e0670a6953222afaac84dcbba5cb940b130c0e49858978040/cytoolz-1.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:26801c1a165e84786a99e03c9c9973356caaca002d66727b761fb1042878ef06", size = 992632, upload-time = "2025-10-19T00:41:30.612Z" }, + { url = "https://files.pythonhosted.org/packages/be/b0/e50621d21e939338c97faab651f58ea7fa32101226a91de79ecfb89d71e1/cytoolz-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a9a464542912d3272f6dccc5142df057c71c6a5cbd30439389a732df401afb7", size = 1317534, upload-time = "2025-10-19T00:41:32.625Z" }, + { url = "https://files.pythonhosted.org/packages/0d/6b/25aa9739b0235a5bc4c1ea293186bc6822a4c6607acfe1422423287e7400/cytoolz-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6104fa942aa5784bf54f339563de637557e3443b105760bc4de8f16a7fc79b", size = 992336, upload-time = "2025-10-19T00:41:34.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/53/5f4deb0ff958805309d135d899c764364c1e8a632ce4994bd7c45fb98df2/cytoolz-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56161f0ab60dc4159ec343509abaf809dc88e85c7e420e354442c62e3e7cbb77", size = 986118, upload-time = "2025-10-19T00:41:35.7Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e3/f6255b76c8cc0debbe1c0779130777dc0434da6d9b28a90d9f76f8cb67cd/cytoolz-1.1.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:832bd36cc9123535f1945acf6921f8a2a15acc19cfe4065b1c9b985a28671886", size = 2679563, upload-time = "2025-10-19T00:41:37.926Z" }, + { url = "https://files.pythonhosted.org/packages/59/8a/acc6e39a84e930522b965586ad3a36694f9bf247b23188ee0eb47b1c9ed1/cytoolz-1.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1842636b6e034f229bf084c2bcdcfd36c8437e752eefd2c74ce9e2f10415cb6e", size = 2813020, upload-time = "2025-10-19T00:41:39.935Z" }, + { url = "https://files.pythonhosted.org/packages/db/f5/0083608286ad1716eda7c41f868e85ac549f6fd6b7646993109fa0bdfd98/cytoolz-1.1.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:823df012ab90d2f2a0f92fea453528539bf71ac1879e518524cd0c86aa6df7b9", size = 2669312, upload-time = "2025-10-19T00:41:41.55Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/d16080b575520fe5da00cede1ece4e0a4180ec23f88dcdc6a2f5a90a7f7f/cytoolz-1.1.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2f1fcf9e7e7b3487883ff3f815abc35b89dcc45c4cf81c72b7ee457aa72d197b", size = 2922147, upload-time = "2025-10-19T00:41:43.252Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bc/716c9c1243701e58cad511eb3937fd550e645293c5ed1907639c5d66f194/cytoolz-1.1.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:4cdb3fa1772116827f263f25b0cdd44c663b6701346a56411960534a06c082de", size = 2981602, upload-time = "2025-10-19T00:41:45.354Z" }, + { url = "https://files.pythonhosted.org/packages/14/bc/571b232996846b27f4ac0c957dc8bf60261e9b4d0d01c8d955e82329544e/cytoolz-1.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1b5c95041741b81430454db65183e133976f45ac3c03454cfa8147952568529", size = 2830103, upload-time = "2025-10-19T00:41:47.959Z" }, + { url = "https://files.pythonhosted.org/packages/5b/55/c594afb46ecd78e4b7e1fb92c947ed041807875661ceda73baaf61baba4f/cytoolz-1.1.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b2079fd9f1a65f4c61e6278c8a6d4f85edf30c606df8d5b32f1add88cbbe2286", size = 2533802, upload-time = "2025-10-19T00:41:49.683Z" }, + { url = "https://files.pythonhosted.org/packages/93/83/1edcf95832555a78fc43b975f3ebe8ceadcc9664dd47fd33747a14df5069/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a92a320d72bef1c7e2d4c6d875125cf57fc38be45feb3fac1bfa64ea401f54a4", size = 2706071, upload-time = "2025-10-19T00:41:51.386Z" }, + { url = "https://files.pythonhosted.org/packages/e2/df/035a408df87f25cfe3611557818b250126cd2281b2104cd88395de205583/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:06d1c79aa51e6a92a90b0e456ebce2288f03dd6a76c7f582bfaa3eda7692e8a5", size = 2707575, upload-time = "2025-10-19T00:41:53.305Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a4/ef78e13e16e93bf695a9331321d75fbc834a088d941f1c19e6b63314e257/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e1d7be25f6971e986a52b6d3a0da28e1941850985417c35528f6823aef2cfec5", size = 2660486, upload-time = "2025-10-19T00:41:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/2c3d60682b26058d435416c4e90d4a94db854de5be944dfd069ed1be648a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:964b248edc31efc50a65e9eaa0c845718503823439d2fa5f8d2c7e974c2b5409", size = 2819605, upload-time = "2025-10-19T00:41:58.257Z" }, + { url = "https://files.pythonhosted.org/packages/45/92/19b722a1d83cc443fbc0c16e0dc376f8a451437890d3d9ee370358cf0709/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c9ff2b3c57c79b65cb5be14a18c6fd4a06d5036fb3f33e973a9f70e9ac13ca28", size = 2533559, upload-time = "2025-10-19T00:42:00.324Z" }, + { url = "https://files.pythonhosted.org/packages/1d/15/fa3b7891da51115204416f14192081d3dea0eaee091f123fdc1347de8dd1/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:22290b73086af600042d99f5ce52a43d4ad9872c382610413176e19fc1d4fd2d", size = 2839171, upload-time = "2025-10-19T00:42:01.881Z" }, + { url = "https://files.pythonhosted.org/packages/46/40/d3519d5cd86eebebf1e8b7174ec32dfb6ecec67b48b0cfb92bf226659b5a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a2ade74fccd080ea793382968913ee38d7a35c921df435bbf0a6aeecf0d17574", size = 2743379, upload-time = "2025-10-19T00:42:03.809Z" }, + { url = "https://files.pythonhosted.org/packages/93/e2/a9e7511f0a13fdbefa5bf73cf8e4763878140de9453fd3e50d6ac57b6be7/cytoolz-1.1.0-cp313-cp313-win32.whl", hash = "sha256:db5dbcfda1c00e937426cbf9bdc63c24ebbc358c3263bfcbc1ab4a88dc52aa8e", size = 900844, upload-time = "2025-10-19T00:42:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/fb7eb403c6a4c81e5a30363f34a71adcc8bf5292dc8ea32e2440aa5668f2/cytoolz-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9e2d3fe3b45c3eb7233746f7aca37789be3dceec3e07dcc406d3e045ea0f7bdc", size = 946461, upload-time = "2025-10-19T00:42:07.983Z" }, + { url = "https://files.pythonhosted.org/packages/93/bb/1c8c33d353548d240bc6e8677ee8c3560ce5fa2f084e928facf7c35a6dcf/cytoolz-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:32c559f95ff44a9ebcbd934acaa1e6dc8f3e6ffce4762a79a88528064873d6d5", size = 902673, upload-time = "2025-10-19T00:42:09.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/ba/4a53acc60f59030fcaf48c7766e3c4c81bd997379425aa45b129396557b5/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9e2cd93b28f667c5870a070ab2b8bb4397470a85c4b204f2454b0ad001cd1ca3", size = 1372336, upload-time = "2025-10-19T00:42:12.104Z" }, + { url = "https://files.pythonhosted.org/packages/ac/90/f28fd8ad8319d8f5c8da69a2c29b8cf52a6d2c0161602d92b366d58926ab/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f494124e141a9361f31d79875fe7ea459a3be2b9dadd90480427c0c52a0943d4", size = 1011930, upload-time = "2025-10-19T00:42:14.231Z" }, + { url = "https://files.pythonhosted.org/packages/c9/95/4561c4e0ad1c944f7673d6d916405d68080f10552cfc5d69a1cf2475a9a1/cytoolz-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53a3262bf221f19437ed544bf8c0e1980c81ac8e2a53d87a9bc075dba943d36f", size = 1020610, upload-time = "2025-10-19T00:42:15.877Z" }, + { url = "https://files.pythonhosted.org/packages/c3/14/b2e1ffa4995ec36e1372e243411ff36325e4e6d7ffa34eb4098f5357d176/cytoolz-1.1.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:47663e57d3f3f124921f38055e86a1022d0844c444ede2e8f090d3bbf80deb65", size = 2917327, upload-time = "2025-10-19T00:42:17.706Z" }, + { url = "https://files.pythonhosted.org/packages/4a/29/7cab6c609b4514ac84cca2f7dca6c509977a8fc16d27c3a50e97f105fa6a/cytoolz-1.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5a8755c4104ee4e3d5ba434c543b5f85fdee6a1f1df33d93f518294da793a60", size = 3108951, upload-time = "2025-10-19T00:42:19.363Z" }, + { url = "https://files.pythonhosted.org/packages/9a/71/1d1103b819458679277206ad07d78ca6b31c4bb88d6463fd193e19bfb270/cytoolz-1.1.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4d96ff3d381423af1b105295f97de86d1db51732c9566eb37378bab6670c5010", size = 2807149, upload-time = "2025-10-19T00:42:20.964Z" }, + 
{ url = "https://files.pythonhosted.org/packages/1a/d4/3d83a05a21e7d2ed2b9e6daf489999c29934b005de9190272b8a2e3735d0/cytoolz-1.1.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0ec96b3d537cdf47d4e76ded199f7440715f4c71029b45445cff92c1248808c2", size = 3111608, upload-time = "2025-10-19T00:42:22.684Z" }, + { url = "https://files.pythonhosted.org/packages/51/88/96f68354c3d4af68de41f0db4fe41a23b96a50a4a416636cea325490cfeb/cytoolz-1.1.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:208e2f2ef90a32b0acbff3303d90d89b13570a228d491d2e622a7883a3c68148", size = 3179373, upload-time = "2025-10-19T00:42:24.395Z" }, + { url = "https://files.pythonhosted.org/packages/ce/50/ed87a5cd8e6f27ffbb64c39e9730e18ec66c37631db2888ae711909f10c9/cytoolz-1.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d416a81bb0bd517558668e49d30a7475b5445f9bbafaab7dcf066f1e9adba36", size = 3003120, upload-time = "2025-10-19T00:42:26.18Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a7/acde155b050d6eaa8e9c7845c98fc5fb28501568e78e83ebbf44f8855274/cytoolz-1.1.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f32e94c91ffe49af04835ee713ebd8e005c85ebe83e7e1fdcc00f27164c2d636", size = 2703225, upload-time = "2025-10-19T00:42:27.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b6/9d518597c5bdea626b61101e8d2ff94124787a42259dafd9f5fc396f346a/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15d0c6405efc040499c46df44056a5c382f551a7624a41cf3e4c84a96b988a15", size = 2956033, upload-time = "2025-10-19T00:42:29.993Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/93e5f860926165538c85e1c5e1670ad3424f158df810f8ccd269da652138/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:bf069c5381d757debae891401b88b3a346ba3a28ca45ba9251103b282463fad8", size = 2862950, 
upload-time = "2025-10-19T00:42:31.803Z" }, + { url = "https://files.pythonhosted.org/packages/76/e6/99d6af00487bedc27597b54c9fcbfd5c833a69c6b7a9b9f0fff777bfc7aa/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d5cf15892e63411ec1bd67deff0e84317d974e6ab2cdfefdd4a7cea2989df66", size = 2861757, upload-time = "2025-10-19T00:42:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/71/ca/adfa1fb7949478135a37755cb8e88c20cd6b75c22a05f1128f05f3ab2c60/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3e3872c21170f8341656f8692f8939e8800dcee6549ad2474d4c817bdefd62cd", size = 2979049, upload-time = "2025-10-19T00:42:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/70/4c/7bf47a03a4497d500bc73d4204e2d907771a017fa4457741b2a1d7c09319/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b9ddeff8e8fd65eb1fcefa61018100b2b627e759ea6ad275d2e2a93ffac147bf", size = 2699492, upload-time = "2025-10-19T00:42:37.133Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e7/3d034b0e4817314f07aa465d5864e9b8df9d25cb260a53dd84583e491558/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:02feeeda93e1fa3b33414eb57c2b0aefd1db8f558dd33fdfcce664a0f86056e4", size = 2995646, upload-time = "2025-10-19T00:42:38.912Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/be357181c71648d9fe1d1ce91cd42c63457dcf3c158e144416fd51dced83/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d08154ad45349162b6c37f12d5d1b2e6eef338e657b85e1621e4e6a4a69d64cb", size = 2919481, upload-time = "2025-10-19T00:42:40.85Z" }, + { url = "https://files.pythonhosted.org/packages/62/d5/bf5434fde726c4f80cb99912b2d8e0afa1587557e2a2d7e0315eb942f2de/cytoolz-1.1.0-cp313-cp313t-win32.whl", hash = "sha256:10ae4718a056948d73ca3e1bb9ab1f95f897ec1e362f829b9d37cc29ab566c60", size = 951595, upload-time = "2025-10-19T00:42:42.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/29/39c161e9204a9715321ddea698cbd0abc317e78522c7c642363c20589e71/cytoolz-1.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:1bb77bc6197e5cb19784b6a42bb0f8427e81737a630d9d7dda62ed31733f9e6c", size = 1004445, upload-time = "2025-10-19T00:42:44.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/5a/7cbff5e9a689f558cb0bdf277f9562b2ac51acf7cd15e055b8c3efb0e1ef/cytoolz-1.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:563dda652c6ff52d215704fbe6b491879b78d7bbbb3a9524ec8e763483cb459f", size = 926207, upload-time = "2025-10-19T00:42:46.456Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e8/297a85ba700f437c01eba962428e6ab4572f6c3e68e8ff442ce5c9d3a496/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d542cee7c7882d2a914a33dec4d3600416fb336734df979473249d4c53d207a1", size = 980613, upload-time = "2025-10-19T00:42:47.988Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d7/2b02c9d18e9cc263a0e22690f78080809f1eafe72f26b29ccc115d3bf5c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:31922849b701b0f24bb62e56eb2488dcd3aa6ae3057694bd6b3b7c4c2bc27c2f", size = 990476, upload-time = "2025-10-19T00:42:49.653Z" }, + { url = "https://files.pythonhosted.org/packages/89/26/b6b159d2929310fca0eff8a4989cd4b1ecbdf7c46fdff46c7a20fcae55c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e68308d32afd31943314735c1335e4ab5696110e96b405f6bdb8f2a8dc771a16", size = 992712, upload-time = "2025-10-19T00:42:51.306Z" }, + { url = "https://files.pythonhosted.org/packages/42/a0/f7c572aa151ed466b0fce4a327c3cc916d3ef3c82e341be59ea4b9bee9e4/cytoolz-1.1.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fc4bb48b3b866e1867f7c6411a4229e5b44be3989060663713e10efc24c9bd5f", size = 1322596, upload-time = "2025-10-19T00:42:52.978Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/7c/a55d035e20b77b6725e85c8f1a418b3a4c23967288b8b0c2d1a40f158cbe/cytoolz-1.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:456f77207d1445025d7ef262b8370a05492dcb1490cb428b0f3bf1bd744a89b0", size = 992825, upload-time = "2025-10-19T00:42:55.026Z" }, + { url = "https://files.pythonhosted.org/packages/03/af/39d2d3db322136e12e9336a1f13bab51eab88b386bfb11f91d3faff8ba34/cytoolz-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:174ebc71ebb20a9baeffce6ee07ee2cd913754325c93f99d767380d8317930f7", size = 990525, upload-time = "2025-10-19T00:42:56.666Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bd/65d7a869d307f9b10ad45c2c1cbb40b81a8d0ed1138fa17fd904f5c83298/cytoolz-1.1.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8b3604fef602bcd53415055a4f68468339192fd17be39e687ae24f476d23d56e", size = 2672409, upload-time = "2025-10-19T00:42:58.81Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fb/74dfd844bfd67e810bd36e8e3903a143035447245828e7fcd7c81351d775/cytoolz-1.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3604b959a01f64c366e7d10ec7634d5f5cfe10301e27a8f090f6eb3b2a628a18", size = 2808477, upload-time = "2025-10-19T00:43:00.577Z" }, + { url = "https://files.pythonhosted.org/packages/d6/1f/587686c43e31c19241ec317da66438d093523921ea7749bbc65558a30df9/cytoolz-1.1.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6db2127a3c1bc2f59f08010d2ae53a760771a9de2f67423ad8d400e9ba4276e8", size = 2636881, upload-time = "2025-10-19T00:43:02.24Z" }, + { url = "https://files.pythonhosted.org/packages/bc/6d/90468cd34f77cb38a11af52c4dc6199efcc97a486395a21bef72e9b7602e/cytoolz-1.1.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56584745ac647993a016a21bc76399113b7595e312f8d0a1b140c9fcf9b58a27", size = 2937315, upload-time 
= "2025-10-19T00:43:03.954Z" }, + { url = "https://files.pythonhosted.org/packages/d9/50/7b92cd78c613b92e3509e6291d3fb7e0d72ebda999a8df806a96c40ca9ab/cytoolz-1.1.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db2c4c3a7f7bd7e03bb1a236a125c8feb86c75802f4ecda6ecfaf946610b2930", size = 2959988, upload-time = "2025-10-19T00:43:05.758Z" }, + { url = "https://files.pythonhosted.org/packages/44/d5/34b5a28a8d9bb329f984b4c2259407ca3f501d1abeb01bacea07937d85d1/cytoolz-1.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48cb8a692111a285d2b9acd16d185428176bfbffa8a7c274308525fccd01dd42", size = 2795116, upload-time = "2025-10-19T00:43:07.411Z" }, + { url = "https://files.pythonhosted.org/packages/f5/d9/5dd829e33273ec03bdc3c812e6c3281987ae2c5c91645582f6c331544a64/cytoolz-1.1.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d2f344ba5eb17dcf38ee37fdde726f69053f54927db8f8a1bed6ac61e5b1890d", size = 2535390, upload-time = "2025-10-19T00:43:09.104Z" }, + { url = "https://files.pythonhosted.org/packages/87/1f/7f9c58068a8eec2183110df051bc6b69dd621143f84473eeb6dc1b32905a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abf76b1c1abd031f098f293b6d90ee08bdaa45f8b5678430e331d991b82684b1", size = 2704834, upload-time = "2025-10-19T00:43:10.942Z" }, + { url = "https://files.pythonhosted.org/packages/d2/90/667def5665333575d01a65fe3ec0ca31b897895f6e3bc1a42d6ea3659369/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ddf9a38a5b686091265ff45b53d142e44a538cd6c2e70610d3bc6be094219032", size = 2658441, upload-time = "2025-10-19T00:43:12.655Z" }, + { url = "https://files.pythonhosted.org/packages/23/79/6615f9a14960bd29ac98b823777b6589357833f65cf1a11b5abc1587c120/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:946786755274f07bb2be0400f28adb31d7d85a7c7001873c0a8e24a503428fb3", size = 2654766, upload-time = 
"2025-10-19T00:43:14.325Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/be59c6e0ae02153ef10ae1ff0f380fb19d973c651b50cf829a731f6c9e79/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b8f78b9fed79cf185ad4ddec099abeef45951bdcb416c5835ba05f0a1242c7", size = 2827649, upload-time = "2025-10-19T00:43:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/b7/854ddcf9f9618844108677c20d48f4611b5c636956adea0f0e85e027608f/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fccde6efefdbc02e676ccb352a2ccc8a8e929f59a1c6d3d60bb78e923a49ca44", size = 2533456, upload-time = "2025-10-19T00:43:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/45/66/bfe6fbb2bdcf03c8377c8c2f542576e15f3340c905a09d78a6cb3badd39a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:717b7775313da5f51b0fbf50d865aa9c39cb241bd4cb605df3cf2246d6567397", size = 2826455, upload-time = "2025-10-19T00:43:19.561Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0c/cce4047bd927e95f59e73319c02c9bc86bd3d76392e0eb9e41a1147a479c/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5158744a09d0e0e4a4f82225e3a3c4ebf38f9ae74467aaa905467270e52f2794", size = 2714897, upload-time = "2025-10-19T00:43:21.291Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9a/061323bb289b565802bad14fb7ab59fcd8713105df142bcf4dd9ff64f8ac/cytoolz-1.1.0-cp314-cp314-win32.whl", hash = "sha256:1ed534bdbbf063b2bb28fca7d0f6723a3e5a72b086e7c7fe6d74ae8c3e4d00e2", size = 901490, upload-time = "2025-10-19T00:43:22.895Z" }, + { url = "https://files.pythonhosted.org/packages/a3/20/1f3a733d710d2a25d6f10b463bef55ada52fe6392a5d233c8d770191f48a/cytoolz-1.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:472c1c9a085f5ad973ec0ad7f0b9ba0969faea6f96c9e397f6293d386f3a25ec", size = 946730, upload-time = "2025-10-19T00:43:24.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/22/2d657db4a5d1c10a152061800f812caba9ef20d7bd2406f51a5fd800c180/cytoolz-1.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:a7ad7ca3386fa86bd301be3fa36e7f0acb024f412f665937955acfc8eb42deff", size = 905722, upload-time = "2025-10-19T00:43:26.439Z" }, + { url = "https://files.pythonhosted.org/packages/19/97/b4a8c76796a9a8b9bc90c7992840fa1589a1af8e0426562dea4ce9b384a7/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:64b63ed4b71b1ba813300ad0f06b8aff19a12cf51116e0e4f1ed837cea4debcf", size = 1372606, upload-time = "2025-10-19T00:43:28.491Z" }, + { url = "https://files.pythonhosted.org/packages/08/d4/a1bb1a32b454a2d650db8374ff3bf875ba0fc1c36e6446ec02a83b9140a1/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a60ba6f2ed9eb0003a737e1ee1e9fa2258e749da6477946008d4324efa25149f", size = 1012189, upload-time = "2025-10-19T00:43:30.177Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/2f5cbbd81588918ee7dd70cffb66731608f578a9b72166aafa991071af7d/cytoolz-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1aa58e2434d732241f7f051e6f17657e969a89971025e24578b5cbc6f1346485", size = 1020624, upload-time = "2025-10-19T00:43:31.712Z" }, + { url = "https://files.pythonhosted.org/packages/f5/99/c4954dd86cd593cd776a038b36795a259b8b5c12cbab6363edf5f6d9c909/cytoolz-1.1.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6965af3fc7214645970e312deb9bd35a213a1eaabcfef4f39115e60bf2f76867", size = 2917016, upload-time = "2025-10-19T00:43:33.531Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7c/f1f70a17e272b433232bc8a27df97e46b202d6cc07e3b0d63f7f41ba0f2d/cytoolz-1.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddd2863f321d67527d3b67a93000a378ad6f967056f68c06467fe011278a6d0e", size = 3107634, upload-time = "2025-10-19T00:43:35.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/bd/c3226a57474b4aef1f90040510cba30d0decd3515fed48dc229b37c2f898/cytoolz-1.1.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4e6b428e9eb5126053c2ae0efa62512ff4b38ed3951f4d0888ca7005d63e56f5", size = 2806221, upload-time = "2025-10-19T00:43:37.707Z" }, + { url = "https://files.pythonhosted.org/packages/c3/47/2f7bfe4aaa1e07dc9828bea228ed744faf73b26aee0c1bdf3b5520bf1909/cytoolz-1.1.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d758e5ef311d2671e0ae8c214c52e44617cf1e58bef8f022b547b9802a5a7f30", size = 3107671, upload-time = "2025-10-19T00:43:39.401Z" }, + { url = "https://files.pythonhosted.org/packages/4d/12/6ff3b04fbd1369d0fcd5f8b5910ba6e427e33bf113754c4c35ec3f747924/cytoolz-1.1.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a95416eca473e6c1179b48d86adcf528b59c63ce78f4cb9934f2e413afa9b56b", size = 3176350, upload-time = "2025-10-19T00:43:41.148Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/6691d986b728e77b5d2872743ebcd962d37a2d0f7e9ad95a81b284fbf905/cytoolz-1.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36c8ede93525cf11e2cc787b7156e5cecd7340193ef800b816a16f1404a8dc6d", size = 3001173, upload-time = "2025-10-19T00:43:42.923Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cb/f59d83a5058e1198db5a1f04e4a124c94d60390e4fa89b6d2e38ee8288a0/cytoolz-1.1.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c949755b6d8a649c5fbc888bc30915926f1b09fe42fea9f289e297c2f6ddd3", size = 2701374, upload-time = "2025-10-19T00:43:44.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f0/1ae6d28df503b0bdae094879da2072b8ba13db5919cd3798918761578411/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1b6d37545816905a76d9ed59fa4e332f929e879f062a39ea0f6f620405cdc27", size = 2953081, upload-time = "2025-10-19T00:43:47.103Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/d86fe811c6222dc32d3e08f5d88d2be598a6055b4d0590e7c1428d55c386/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:05332112d4087904842b36954cd1d3fc0e463a2f4a7ef9477bd241427c593c3b", size = 2862228, upload-time = "2025-10-19T00:43:49.353Z" }, + { url = "https://files.pythonhosted.org/packages/ae/32/978ef6f42623be44a0a03ae9de875ab54aa26c7e38c5c4cd505460b0927d/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:31538ca2fad2d688cbd962ccc3f1da847329e2258a52940f10a2ac0719e526be", size = 2861971, upload-time = "2025-10-19T00:43:51.028Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f7/74c69497e756b752b359925d1feef68b91df024a4124a823740f675dacd3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:747562aa70abf219ea16f07d50ac0157db856d447f7f498f592e097cbc77df0b", size = 2975304, upload-time = "2025-10-19T00:43:52.99Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2b/3ce0e6889a6491f3418ad4d84ae407b8456b02169a5a1f87990dbba7433b/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:3dc15c48b20c0f467e15e341e102896c8422dccf8efc6322def5c1b02f074629", size = 2697371, upload-time = "2025-10-19T00:43:55.312Z" }, + { url = "https://files.pythonhosted.org/packages/15/87/c616577f0891d97860643c845f7221e95240aa589586de727e28a5eb6e52/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3c03137ee6103ba92d5d6ad6a510e86fded69cd67050bd8a1843f15283be17ac", size = 2992436, upload-time = "2025-10-19T00:43:57.253Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9f/490c81bffb3428ab1fa114051fbb5ba18aaa2e2fe4da5bf4170ca524e6b3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:be8e298d88f88bd172b59912240558be3b7a04959375646e7fd4996401452941", size = 2917612, upload-time = 
"2025-10-19T00:43:59.423Z" }, + { url = "https://files.pythonhosted.org/packages/66/35/0fec2769660ca6472bbf3317ab634675827bb706d193e3240aaf20eab961/cytoolz-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:3d407140f5604a89578285d4aac7b18b8eafa055cf776e781aabb89c48738fad", size = 960842, upload-time = "2025-10-19T00:44:01.143Z" }, + { url = "https://files.pythonhosted.org/packages/46/b4/b7ce3d3cd20337becfec978ecfa6d0ef64884d0cf32d44edfed8700914b9/cytoolz-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56e5afb69eb6e1b3ffc34716ee5f92ffbdb5cb003b3a5ca4d4b0fe700e217162", size = 1020835, upload-time = "2025-10-19T00:44:03.246Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1f/0498009aa563a9c5d04f520aadc6e1c0942434d089d0b2f51ea986470f55/cytoolz-1.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:27b19b4a286b3ff52040efa42dbe403730aebe5fdfd2def704eb285e2125c63e", size = 927963, upload-time = "2025-10-19T00:44:04.85Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "dice" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/3e/a07b66d345678cd277df76fdf51cb4ffc1054492efcd674a7b30f49a80bf/dice-4.0.0.tar.gz", hash = 
"sha256:7a1bfd68f21abf245f333de89721cd30379b5413c774a9d34a6b4123c526815c", size = 22252, upload-time = "2023-05-18T17:23:34.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/75/bf7ef10ce7f9a8d17e127e77092505368999e6bd86deab8eb9a74b2103ec/dice-4.0.0-py3-none-any.whl", hash = "sha256:59aa1f7b23846b32a618697534429fec80be3a1087b4f934022dcdb99cf1417f", size = 22042, upload-time = "2023-05-18T17:22:59.548Z" }, +] + +[[package]] +name = "discord-py" +version = "2.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/57/9a2d9abdabdc9db8ef28ce0cf4129669e1c8717ba28d607b5ba357c4de3b/discord_py-2.7.1.tar.gz", hash = "sha256:24d5e6a45535152e4b98148a9dd6b550d25dc2c9fb41b6d670319411641249da", size = 1106326, upload-time = "2026-03-03T18:40:46.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/a7/17208c3b3f92319e7fad259f1c6d5a5baf8fd0654c54846ced329f83c3eb/discord_py-2.7.1-py3-none-any.whl", hash = "sha256:849dca2c63b171146f3a7f3f8acc04248098e9e6203412ce3cf2745f284f7439", size = 1227550, upload-time = "2026-03-03T18:40:44.492Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "dnspython" +version = 
"2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + +[[package]] +name = "eth-abi" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-typing" }, + { name = "eth-utils" }, + { name = "parsimonious" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/71/d9e1380bd77fd22f98b534699af564f189b56d539cc2b9dab908d4e4c242/eth_abi-5.2.0.tar.gz", hash = "sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0", size = 49797, upload-time = "2025-01-14T16:29:34.629Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7a/b4/2f3982c4cbcbf5eeb6aec62df1533c0e63c653b3021ff338d44944405676/eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877", size = 28511, upload-time = "2025-01-14T16:29:31.862Z" }, +] + +[[package]] +name = "eth-account" +version = "0.13.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bitarray" }, + { name = "ckzg" }, + { name = "eth-abi" }, + { name = "eth-keyfile" }, + { name = "eth-keys" }, + { name = "eth-rlp" }, + { name = "eth-utils" }, + { name = "hexbytes" }, + { name = "pydantic" }, + { name = "rlp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/cf/20f76a29be97339c969fd765f1237154286a565a1d61be98e76bb7af946a/eth_account-0.13.7.tar.gz", hash = "sha256:5853ecbcbb22e65411176f121f5f24b8afeeaf13492359d254b16d8b18c77a46", size = 935998, upload-time = "2025-04-21T21:11:21.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/18/088fb250018cbe665bc2111974301b2d59f294a565aff7564c4df6878da2/eth_account-0.13.7-py3-none-any.whl", hash = "sha256:39727de8c94d004ff61d10da7587509c04d2dc7eac71e04830135300bdfc6d24", size = 587452, upload-time = "2025-04-21T21:11:18.346Z" }, +] + +[[package]] +name = "eth-hash" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/38/577b7bc9380ef9dff0f1dffefe0c9a1ded2385e7a06c306fd95afb6f9451/eth_hash-0.7.1.tar.gz", hash = "sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5", size = 12227, upload-time = "2025-01-13T21:29:21.765Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/db/f8775490669d28aca24871c67dd56b3e72105cb3bcae9a4ec65dd70859b3/eth_hash-0.7.1-py3-none-any.whl", hash = "sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a", size = 8028, upload-time = "2025-01-13T21:29:19.365Z" }, +] + +[package.optional-dependencies] 
+pycryptodome = [ + { name = "pycryptodome" }, +] + +[[package]] +name = "eth-keyfile" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-keys" }, + { name = "eth-utils" }, + { name = "pycryptodome" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/66/dd823b1537befefbbff602e2ada88f1477c5b40ec3731e3d9bc676c5f716/eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1", size = 12267, upload-time = "2024-04-23T20:28:53.862Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/fc/48a586175f847dd9e05e5b8994d2fe8336098781ec2e9836a2ad94280281/eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64", size = 7510, upload-time = "2024-04-23T20:28:51.063Z" }, +] + +[[package]] +name = "eth-keys" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-typing" }, + { name = "eth-utils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/11/1ed831c50bd74f57829aa06e58bd82a809c37e070ee501c953b9ac1f1552/eth_keys-0.7.0.tar.gz", hash = "sha256:79d24fd876201df67741de3e3fefb3f4dbcbb6ace66e47e6fe662851a4547814", size = 30166, upload-time = "2025-04-07T17:40:21.697Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/25/0ae00f2b0095e559d61ad3dc32171bd5a29dfd95ab04b4edd641f7c75f72/eth_keys-0.7.0-py3-none-any.whl", hash = "sha256:b0cdda8ffe8e5ba69c7c5ca33f153828edcace844f67aabd4542d7de38b159cf", size = 20656, upload-time = "2025-04-07T17:40:20.441Z" }, +] + +[[package]] +name = "eth-rlp" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-utils" }, + { name = "hexbytes" }, + { name = "rlp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/ea/ad39d001fa9fed07fad66edb00af701e29b48be0ed44a3bcf58cb3adf130/eth_rlp-2.2.0.tar.gz", hash = 
"sha256:5e4b2eb1b8213e303d6a232dfe35ab8c29e2d3051b86e8d359def80cd21db83d", size = 7720, upload-time = "2025-02-04T21:51:08.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/3b/57efe2bc2df0980680d57c01a36516cd3171d2319ceb30e675de19fc2cc5/eth_rlp-2.2.0-py3-none-any.whl", hash = "sha256:5692d595a741fbaef1203db6a2fedffbd2506d31455a6ad378c8449ee5985c47", size = 4446, upload-time = "2025-02-04T21:51:05.823Z" }, +] + +[[package]] +name = "eth-typing" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/54/62aa24b9cc708f06316167ee71c362779c8ed21fc8234a5cd94a8f53b623/eth_typing-5.2.1.tar.gz", hash = "sha256:7557300dbf02a93c70fa44af352b5c4a58f94e997a0fd6797fb7d1c29d9538ee", size = 21806, upload-time = "2025-04-14T20:39:28.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/72/c370bbe4c53da7bf998d3523f5a0f38867654923a82192df88d0705013d3/eth_typing-5.2.1-py3-none-any.whl", hash = "sha256:b0c2812ff978267563b80e9d701f487dd926f1d376d674f3b535cfe28b665d3d", size = 19163, upload-time = "2025-04-14T20:39:26.571Z" }, +] + +[[package]] +name = "eth-utils" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cytoolz", marker = "implementation_name == 'cpython'" }, + { name = "eth-hash" }, + { name = "eth-typing" }, + { name = "pydantic" }, + { name = "toolz", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/e1/ee3a8728227c3558853e63ff35bd4c449abdf5022a19601369400deacd39/eth_utils-5.3.1.tar.gz", hash = "sha256:c94e2d2abd024a9a42023b4ddc1c645814ff3d6a737b33d5cfd890ebf159c2d1", size = 123506, upload-time = "2025-08-27T16:37:17.378Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/4d/257cdc01ada430b8e84b9f2385c2553f33218f5b47da9adf0a616308d4b7/eth_utils-5.3.1-py3-none-any.whl", hash = 
"sha256:1f5476d8f29588d25b8ae4987e1ffdfae6d4c09026e476c4aad13b32dda3ead0", size = 102529, upload-time = "2025-08-27T16:37:15.449Z" }, +] + +[[package]] +name = "etherscan-labels" +version = "20221015" +source = { git = "https://github.com/haloooloolo/etherscan-labels#f48e34b76d75b4421daa2a43814dd7c61c5c56f3" } +dependencies = [ + { name = "compress-json" }, +] + +[[package]] +name = "fonttools" +version = "4.61.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, + { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" }, + { url = "https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" }, + { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/4b/cf/00ba28b0990982530addb8dc3e9e6f2fa9cb5c20df2abdda7baa755e8fe1/fonttools-4.61.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c56c488ab471628ff3bfa80964372fc13504ece601e0d97a78ee74126b2045c", size = 2846454, upload-time = "2025-12-12T17:30:24.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/ca/468c9a8446a2103ae645d14fee3f610567b7042aba85031c1c65e3ef7471/fonttools-4.61.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc492779501fa723b04d0ab1f5be046797fee17d27700476edc7ee9ae535a61e", size = 2398191, upload-time = "2025-12-12T17:30:27.343Z" }, + { url = "https://files.pythonhosted.org/packages/a3/4b/d67eedaed19def5967fade3297fed8161b25ba94699efc124b14fb68cdbc/fonttools-4.61.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:64102ca87e84261419c3747a0d20f396eb024bdbeb04c2bfb37e2891f5fadcb5", size = 4928410, upload-time = "2025-12-12T17:30:29.771Z" }, + { url = "https://files.pythonhosted.org/packages/b0/8d/6fb3494dfe61a46258cd93d979cf4725ded4eb46c2a4ca35e4490d84daea/fonttools-4.61.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c1b526c8d3f615a7b1867f38a9410849c8f4aef078535742198e942fba0e9bd", size = 4984460, upload-time = "2025-12-12T17:30:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/f7/f1/a47f1d30b3dc00d75e7af762652d4cbc3dff5c2697a0dbd5203c81afd9c3/fonttools-4.61.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41ed4b5ec103bd306bb68f81dc166e77409e5209443e5773cb4ed837bcc9b0d3", size = 4925800, upload-time = "2025-12-12T17:30:34.339Z" }, + { url = "https://files.pythonhosted.org/packages/a7/01/e6ae64a0981076e8a66906fab01539799546181e32a37a0257b77e4aa88b/fonttools-4.61.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b501c862d4901792adaec7c25b1ecc749e2662543f68bb194c42ba18d6eec98d", size = 5067859, upload-time = "2025-12-12T17:30:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/73/aa/28e40b8d6809a9b5075350a86779163f074d2b617c15d22343fce81918db/fonttools-4.61.1-cp313-cp313-win32.whl", hash = "sha256:4d7092bb38c53bbc78e9255a59158b150bcdc115a1e3b3ce0b5f267dc35dd63c", size = 2267821, upload-time = "2025-12-12T17:30:38.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/59/453c06d1d83dc0951b69ef692d6b9f1846680342927df54e9a1ca91c6f90/fonttools-4.61.1-cp313-cp313-win_amd64.whl", hash = "sha256:21e7c8d76f62ab13c9472ccf74515ca5b9a761d1bde3265152a6dc58700d895b", size = 2318169, upload-time = "2025-12-12T17:30:40.951Z" }, + { url = "https://files.pythonhosted.org/packages/32/8f/4e7bf82c0cbb738d3c2206c920ca34ca74ef9dabde779030145d28665104/fonttools-4.61.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fff4f534200a04b4a36e7ae3cb74493afe807b517a09e99cb4faa89a34ed6ecd", size = 2846094, upload-time = "2025-12-12T17:30:43.511Z" }, + { url = "https://files.pythonhosted.org/packages/71/09/d44e45d0a4f3a651f23a1e9d42de43bc643cce2971b19e784cc67d823676/fonttools-4.61.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:d9203500f7c63545b4ce3799319fe4d9feb1a1b89b28d3cb5abd11b9dd64147e", size = 2396589, upload-time = "2025-12-12T17:30:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/89/18/58c64cafcf8eb677a99ef593121f719e6dcbdb7d1c594ae5a10d4997ca8a/fonttools-4.61.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa646ecec9528bef693415c79a86e733c70a4965dd938e9a226b0fc64c9d2e6c", size = 4877892, upload-time = "2025-12-12T17:30:47.709Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ec/9e6b38c7ba1e09eb51db849d5450f4c05b7e78481f662c3b79dbde6f3d04/fonttools-4.61.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f35ad7805edba3aac1a3710d104592df59f4b957e30108ae0ba6c10b11dd75", size = 4972884, upload-time = "2025-12-12T17:30:49.656Z" }, + { url = "https://files.pythonhosted.org/packages/5e/87/b5339da8e0256734ba0dbbf5b6cdebb1dd79b01dc8c270989b7bcd465541/fonttools-4.61.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b931ae8f62db78861b0ff1ac017851764602288575d65b8e8ff1963fed419063", size = 4924405, upload-time = "2025-12-12T17:30:51.735Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/47/e3409f1e1e69c073a3a6fd8cb886eb18c0bae0ee13db2c8d5e7f8495e8b7/fonttools-4.61.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b148b56f5de675ee16d45e769e69f87623a4944f7443850bf9a9376e628a89d2", size = 5035553, upload-time = "2025-12-12T17:30:54.823Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b6/1f6600161b1073a984294c6c031e1a56ebf95b6164249eecf30012bb2e38/fonttools-4.61.1-cp314-cp314-win32.whl", hash = "sha256:9b666a475a65f4e839d3d10473fad6d47e0a9db14a2f4a224029c5bfde58ad2c", size = 2271915, upload-time = "2025-12-12T17:30:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/52/7b/91e7b01e37cc8eb0e1f770d08305b3655e4f002fc160fb82b3390eabacf5/fonttools-4.61.1-cp314-cp314-win_amd64.whl", hash = "sha256:4f5686e1fe5fce75d82d93c47a438a25bf0d1319d2843a926f741140b2b16e0c", size = 2323487, upload-time = "2025-12-12T17:30:59.804Z" }, + { url = "https://files.pythonhosted.org/packages/39/5c/908ad78e46c61c3e3ed70c3b58ff82ab48437faf84ec84f109592cabbd9f/fonttools-4.61.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:e76ce097e3c57c4bcb67c5aa24a0ecdbd9f74ea9219997a707a4061fbe2707aa", size = 2929571, upload-time = "2025-12-12T17:31:02.574Z" }, + { url = "https://files.pythonhosted.org/packages/bd/41/975804132c6dea64cdbfbaa59f3518a21c137a10cccf962805b301ac6ab2/fonttools-4.61.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9cfef3ab326780c04d6646f68d4b4742aae222e8b8ea1d627c74e38afcbc9d91", size = 2435317, upload-time = "2025-12-12T17:31:04.974Z" }, + { url = "https://files.pythonhosted.org/packages/b0/5a/aef2a0a8daf1ebaae4cfd83f84186d4a72ee08fd6a8451289fcd03ffa8a4/fonttools-4.61.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a75c301f96db737e1c5ed5fd7d77d9c34466de16095a266509e13da09751bd19", size = 4882124, upload-time = "2025-12-12T17:31:07.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/33/d6db3485b645b81cea538c9d1c9219d5805f0877fda18777add4671c5240/fonttools-4.61.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91669ccac46bbc1d09e9273546181919064e8df73488ea087dcac3e2968df9ba", size = 5100391, upload-time = "2025-12-12T17:31:09.732Z" }, + { url = "https://files.pythonhosted.org/packages/6c/d6/675ba631454043c75fcf76f0ca5463eac8eb0666ea1d7badae5fea001155/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c33ab3ca9d3ccd581d58e989d67554e42d8d4ded94ab3ade3508455fe70e65f7", size = 4978800, upload-time = "2025-12-12T17:31:11.681Z" }, + { url = "https://files.pythonhosted.org/packages/7f/33/d3ec753d547a8d2bdaedd390d4a814e8d5b45a093d558f025c6b990b554c/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:664c5a68ec406f6b1547946683008576ef8b38275608e1cee6c061828171c118", size = 5006426, upload-time = "2025-12-12T17:31:13.764Z" }, + { url = "https://files.pythonhosted.org/packages/b4/40/cc11f378b561a67bea850ab50063366a0d1dd3f6d0a30ce0f874b0ad5664/fonttools-4.61.1-cp314-cp314t-win32.whl", hash = "sha256:aed04cabe26f30c1647ef0e8fbb207516fd40fe9472e9439695f5c6998e60ac5", size = 2335377, upload-time = "2025-12-12T17:31:16.49Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ff/c9a2b66b39f8628531ea58b320d66d951267c98c6a38684daa8f50fb02f8/fonttools-4.61.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2180f14c141d2f0f3da43f3a81bc8aa4684860f6b0e6f9e165a4831f24e6a23b", size = 2400613, upload-time = "2025-12-12T17:31:18.769Z" }, + { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = 
"2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 
239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" 
}, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = 
"2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", 
size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + 
{ url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "graphql-query" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/64/377beef6c10b798f2ece54cfd3577db20102176c3a155469b92b4a3e3881/graphql_query-1.4.0.tar.gz", hash = "sha256:1cfe5eeaad8b0ed67ac3d9c4023ee9743851f98c6b2f673c67088cf42ebb57bb", size = 26178, upload-time = "2024-07-31T10:50:05.249Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/ed/ec732d18dd016eb4d4fa590e392d22dd35c93f26f17f709596deeb780497/graphql_query-1.4.0-py3-none-any.whl", hash = "sha256:376ed550a7812425befbefb870daa21ce1696590fcb78c015215a43a5d7e51b7", size = 13389, upload-time = "2024-07-31T10:50:04.055Z" }, +] + 
+[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hexbytes" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/87/adf4635b4b8c050283d74e6db9a81496063229c9263e6acc1903ab79fbec/hexbytes-1.3.1.tar.gz", hash = "sha256:a657eebebdfe27254336f98d8af6e2236f3f83aed164b87466b6cf6c5f5a4765", size = 8633, upload-time = "2025-05-14T16:45:17.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/e0/3b31492b1c89da3c5a846680517871455b30c54738486fc57ac79a5761bd/hexbytes-1.3.1-py3-none-any.whl", hash = "sha256:da01ff24a1a9a2b1881c4b85f0e9f9b0f51b526b379ffa23832ae7899d29c2c7", size = 5074, upload-time = "2025-05-14T16:45:16.179Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "humanize" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/66/a3921783d54be8a6870ac4ccffcd15c4dc0dd7fcce51c6d63b8c63935276/humanize-4.15.0.tar.gz", hash = "sha256:1dd098483eb1c7ee8e32eb2e99ad1910baefa4b75c3aff3a82f4d78688993b10", size = 83599, upload-time = "2025-12-20T20:16:13.19Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/7b/bca5613a0c3b542420cf92bd5e5fb8ebd5435ce1011a091f66bb7693285e/humanize-4.15.0-py3-none-any.whl", hash = "sha256:b1186eb9f5a9749cd9cb8565aee77919dd7c8d076161cf44d70e59e3301e1769", size = 132203, upload-time = "2025-12-20T20:16:11.67Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "inflect" +version = "7.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, + { name = "typeguard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, 
upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, + { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, + { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, + { url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, + { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, + { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, + { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, + { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, + { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, + { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, + { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, + { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, + { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, + { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, + { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, + { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, + { url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, + { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, + { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, + { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, +] + +[[package]] +name = "kiwisolver" +version = "1.4.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, + { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, + { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, + { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, + { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, + { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, + { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = 
"2025-08-10T21:26:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" }, + { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" }, + { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = 
"2025-08-10T21:26:34.032Z" }, + { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" }, + { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" }, + { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" }, + { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" }, + { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" }, + { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" }, + { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" }, + { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" }, + { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" }, + { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" }, + { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" }, + { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" }, + { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" }, + { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" }, + { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" }, + { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" }, + { url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" }, + { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" }, + { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, + { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { 
url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { 
url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "matplotlib" +version = "3.10.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "contourpy" }, + { name = "cycler" }, + { name = "fonttools" }, + { name = "kiwisolver" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pillow" }, + { name = "pyparsing" }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, + { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" }, + { url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" }, + { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" }, + { url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" }, + { url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/9c0ff7a2f11615e516c3b058e1e6e8f9614ddeca53faca06da267c48345d/matplotlib-3.10.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b53285e65d4fa4c86399979e956235deb900be5baa7fc1218ea67fbfaeaadd6f", size = 8262481, upload-time = "2025-12-10T22:56:10.885Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ca/e8ae28649fcdf039fda5ef554b40a95f50592a3c47e6f7270c9561c12b07/matplotlib-3.10.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32f8dce744be5569bebe789e46727946041199030db8aeb2954d26013a0eb26b", size = 8151473, upload-time = "2025-12-10T22:56:12.377Z" }, + { url = "https://files.pythonhosted.org/packages/f1/6f/009d129ae70b75e88cbe7e503a12a4c0670e08ed748a902c2568909e9eb5/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf267add95b1c88300d96ca837833d4112756045364f5c734a2276038dae27d", size = 9553896, upload-time = "2025-12-10T22:56:14.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/26/4221a741eb97967bc1fd5e4c52b9aa5a91b2f4ec05b59f6def4d820f9df9/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf5bd12cecf46908f286d7838b2abc6c91cda506c0445b8223a7c19a00df008", size = 9824193, upload-time = "2025-12-10T22:56:16.29Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/3abf75f38605772cf48a9daf5821cd4f563472f38b4b828c6fba6fa6d06e/matplotlib-3.10.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:41703cc95688f2516b480f7f339d8851a6035f18e100ee6a32bc0b8536a12a9c", size = 9615444, upload-time = "2025-12-10T22:56:18.155Z" }, + { url = "https://files.pythonhosted.org/packages/93/a5/de89ac80f10b8dc615807ee1133cd99ac74082581196d4d9590bea10690d/matplotlib-3.10.8-cp314-cp314-win_amd64.whl", hash = "sha256:83d282364ea9f3e52363da262ce32a09dfe241e4080dcedda3c0db059d3c1f11", size = 8272719, upload-time = "2025-12-10T22:56:20.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/ce/b006495c19ccc0a137b48083168a37bd056392dee02f87dba0472f2797fe/matplotlib-3.10.8-cp314-cp314-win_arm64.whl", hash = "sha256:2c1998e92cd5999e295a731bcb2911c75f597d937341f3030cc24ef2733d78a8", size = 8144205, upload-time = "2025-12-10T22:56:22.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/d9/b31116a3a855bd313c6fcdb7226926d59b041f26061c6c5b1be66a08c826/matplotlib-3.10.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b5a2b97dbdc7d4f353ebf343744f1d1f1cca8aa8bfddb4262fcf4306c3761d50", size = 8305785, upload-time = "2025-12-10T22:56:24.218Z" }, + { url = "https://files.pythonhosted.org/packages/1e/90/6effe8103f0272685767ba5f094f453784057072f49b393e3ea178fe70a5/matplotlib-3.10.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3f5c3e4da343bba819f0234186b9004faba952cc420fbc522dc4e103c1985908", size = 8198361, upload-time = "2025-12-10T22:56:26.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/65/a73188711bea603615fc0baecca1061429ac16940e2385433cc778a9d8e7/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f62550b9a30afde8c1c3ae450e5eb547d579dd69b25c2fc7a1c67f934c1717a", size = 9561357, upload-time = "2025-12-10T22:56:28.953Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3d/b5c5d5d5be8ce63292567f0e2c43dde9953d3ed86ac2de0a72e93c8f07a1/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:495672de149445ec1b772ff2c9ede9b769e3cb4f0d0aa7fa730d7f59e2d4e1c1", size = 9823610, upload-time = "2025-12-10T22:56:31.455Z" }, + { url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, +] + +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = 
"2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, 
upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = 
"2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = 
"2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = 
"2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = 
"2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a", size = 16667963, upload-time = "2026-01-31T23:10:52.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1", size = 14693571, upload-time = "2026-01-31T23:10:54.789Z" }, + { url = "https://files.pythonhosted.org/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e", size = 5203469, upload-time = "2026-01-31T23:10:57.343Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27", size = 6550820, upload-time = "2026-01-31T23:10:59.429Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548", size = 15663067, upload-time = "2026-01-31T23:11:01.291Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f", size = 16619782, upload-time = "2026-01-31T23:11:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460", size = 17013128, upload-time = "2026-01-31T23:11:05.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba", size = 18345324, upload-time = "2026-01-31T23:11:08.248Z" }, + { url = "https://files.pythonhosted.org/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f", size = 5960282, upload-time = "2026-01-31T23:11:10.497Z" }, + { url = "https://files.pythonhosted.org/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85", size = 12314210, upload-time = "2026-01-31T23:11:12.176Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa", size = 10220171, upload-time = "2026-01-31T23:11:14.684Z" }, + { url = "https://files.pythonhosted.org/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c", size = 16663696, upload-time = "2026-01-31T23:11:17.516Z" }, + { url = "https://files.pythonhosted.org/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979", size = 14688322, upload-time = "2026-01-31T23:11:19.883Z" }, + { url = "https://files.pythonhosted.org/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98", size = 5198157, upload-time = "2026-01-31T23:11:22.375Z" }, + { url = "https://files.pythonhosted.org/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef", size = 6546330, upload-time = "2026-01-31T23:11:23.958Z" }, + { url = "https://files.pythonhosted.org/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7", size = 15660968, upload-time = "2026-01-31T23:11:25.713Z" }, + { url = "https://files.pythonhosted.org/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499", size = 16607311, upload-time = "2026-01-31T23:11:28.117Z" }, + { url = "https://files.pythonhosted.org/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb", size = 17012850, upload-time = "2026-01-31T23:11:30.888Z" }, + { url = "https://files.pythonhosted.org/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7", size = 18334210, upload-time = "2026-01-31T23:11:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110", size = 5958199, 
upload-time = "2026-01-31T23:11:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622", size = 12310848, upload-time = "2026-01-31T23:11:38.001Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71", size = 10221082, upload-time = "2026-01-31T23:11:40.392Z" }, + { url = "https://files.pythonhosted.org/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262", size = 14815866, upload-time = "2026-01-31T23:11:42.495Z" }, + { url = "https://files.pythonhosted.org/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913", size = 5325631, upload-time = "2026-01-31T23:11:44.7Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab", size = 6646254, upload-time = "2026-01-31T23:11:46.341Z" }, + { url = "https://files.pythonhosted.org/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82", size = 15720138, upload-time = "2026-01-31T23:11:48.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f", size = 16655398, upload-time = "2026-01-31T23:11:50.293Z" }, + { url = "https://files.pythonhosted.org/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554", size = 17079064, upload-time = "2026-01-31T23:11:52.927Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257", size = 18379680, upload-time = "2026-01-31T23:11:55.22Z" }, + { url = "https://files.pythonhosted.org/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657", size = 6082433, upload-time = "2026-01-31T23:11:58.096Z" }, + { url = "https://files.pythonhosted.org/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b", size = 12451181, upload-time = "2026-01-31T23:11:59.782Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1", size = 10290756, upload-time = "2026-01-31T23:12:02.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b", size = 16663092, upload-time = "2026-01-31T23:12:04.521Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000", size = 14698770, upload-time = "2026-01-31T23:12:06.96Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1", size = 5208562, upload-time = "2026-01-31T23:12:09.632Z" }, + { url = "https://files.pythonhosted.org/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74", size = 6543710, upload-time = "2026-01-31T23:12:11.969Z" }, + { url = "https://files.pythonhosted.org/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a", size = 15677205, upload-time = "2026-01-31T23:12:14.33Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325", size = 16611738, upload-time = "2026-01-31T23:12:16.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909", size = 17028888, upload-time = "2026-01-31T23:12:19.306Z" }, + { url = "https://files.pythonhosted.org/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a", size = 18339556, upload-time = "2026-01-31T23:12:21.816Z" }, + { url = "https://files.pythonhosted.org/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a", size = 6006899, upload-time = "2026-01-31T23:12:24.14Z" }, + { url = "https://files.pythonhosted.org/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75", size = 12443072, upload-time = "2026-01-31T23:12:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05", size = 10494886, upload-time = "2026-01-31T23:12:28.488Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308", size = 14818567, upload-time = "2026-01-31T23:12:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = 
"sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef", size = 5328372, upload-time = "2026-01-31T23:12:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d", size = 6649306, upload-time = "2026-01-31T23:12:34.797Z" }, + { url = "https://files.pythonhosted.org/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8", size = 15722394, upload-time = "2026-01-31T23:12:36.565Z" }, + { url = "https://files.pythonhosted.org/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5", size = 16653343, upload-time = "2026-01-31T23:12:39.188Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e", size = 17078045, upload-time = "2026-01-31T23:12:42.041Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a", size = 18380024, upload-time = "2026-01-31T23:12:44.331Z" }, + { url = "https://files.pythonhosted.org/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443", size = 6153937, 
upload-time = "2026-01-31T23:12:47.229Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236", size = 12631844, upload-time = "2026-01-31T23:12:48.997Z" }, + { url = "https://files.pythonhosted.org/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181", size = 10565379, upload-time = "2026-01-31T23:12:51.345Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pandas" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, + { url = "https://files.pythonhosted.org/packages/3d/fe/89d77e424365280b79d99b3e1e7d606f5165af2f2ecfaf0c6d24c799d607/pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4", size = 10876435, upload-time = "2026-02-17T22:18:45.954Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a6/2a75320849dd154a793f69c951db759aedb8d1dd3939eeacda9bdcfa1629/pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1", size = 11405133, upload-time = "2026-02-17T22:18:48.533Z" }, + { url = "https://files.pythonhosted.org/packages/58/53/1d68fafb2e02d7881df66aa53be4cd748d25cbe311f3b3c85c93ea5d30ca/pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821", size = 11932065, upload-time = "2026-02-17T22:18:50.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/08/67cc404b3a966b6df27b38370ddd96b3b023030b572283d035181854aac5/pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43", size = 9741627, upload-time = "2026-02-17T22:18:53.905Z" }, + { url = "https://files.pythonhosted.org/packages/86/4f/caf9952948fb00d23795f09b893d11f1cacb384e666854d87249530f7cbe/pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7", size = 9052483, upload-time = "2026-02-17T22:18:57.31Z" }, + { url = "https://files.pythonhosted.org/packages/0b/48/aad6ec4f8d007534c091e9a7172b3ec1b1ee6d99a9cbb936b5eab6c6cf58/pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262", size = 10317509, upload-time = "2026-02-17T22:18:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/a8/14/5990826f779f79148ae9d3a2c39593dc04d61d5d90541e71b5749f35af95/pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56", size = 9860561, upload-time = "2026-02-17T22:19:02.265Z" }, + { url = "https://files.pythonhosted.org/packages/fa/80/f01ff54664b6d70fed71475543d108a9b7c888e923ad210795bef04ffb7d/pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e", size = 10365506, upload-time = "2026-02-17T22:19:05.017Z" }, + { url = "https://files.pythonhosted.org/packages/f2/85/ab6d04733a7d6ff32bfc8382bf1b07078228f5d6ebec5266b91bfc5c4ff7/pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791", size = 10873196, upload-time = "2026-02-17T22:19:07.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/a9/9301c83d0b47c23ac5deab91c6b39fd98d5b5db4d93b25df8d381451828f/pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a", size = 11370859, upload-time = "2026-02-17T22:19:09.436Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/0c1fc5bd2d29c7db2ab372330063ad555fb83e08422829c785f5ec2176ca/pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8", size = 11924584, upload-time = "2026-02-17T22:19:11.562Z" }, + { url = "https://files.pythonhosted.org/packages/d6/7d/216a1588b65a7aa5f4535570418a599d943c85afb1d95b0876fc00aa1468/pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25", size = 9742769, upload-time = "2026-02-17T22:19:13.926Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cb/810a22a6af9a4e97c8ab1c946b47f3489c5bca5adc483ce0ffc84c9cc768/pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59", size = 9043855, upload-time = "2026-02-17T22:19:16.09Z" }, + { url = "https://files.pythonhosted.org/packages/92/fa/423c89086cca1f039cf1253c3ff5b90f157b5b3757314aa635f6bf3e30aa/pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06", size = 10752673, upload-time = "2026-02-17T22:19:18.304Z" }, + { url = "https://files.pythonhosted.org/packages/22/23/b5a08ec1f40020397f0faba72f1e2c11f7596a6169c7b3e800abff0e433f/pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f", size = 10404967, upload-time = "2026-02-17T22:19:20.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/81/94841f1bb4afdc2b52a99daa895ac2c61600bb72e26525ecc9543d453ebc/pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324", size = 10320575, upload-time = "2026-02-17T22:19:24.919Z" }, + { url = "https://files.pythonhosted.org/packages/0a/8b/2ae37d66a5342a83adadfd0cb0b4bf9c3c7925424dd5f40d15d6cfaa35ee/pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9", size = 10710921, upload-time = "2026-02-17T22:19:27.181Z" }, + { url = "https://files.pythonhosted.org/packages/a2/61/772b2e2757855e232b7ccf7cb8079a5711becb3a97f291c953def15a833f/pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76", size = 11334191, upload-time = "2026-02-17T22:19:29.411Z" }, + { url = "https://files.pythonhosted.org/packages/1b/08/b16c6df3ef555d8495d1d265a7963b65be166785d28f06a350913a4fac78/pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098", size = 11782256, upload-time = "2026-02-17T22:19:32.34Z" }, + { url = "https://files.pythonhosted.org/packages/55/80/178af0594890dee17e239fca96d3d8670ba0f5ff59b7d0439850924a9c09/pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35", size = 10485047, upload-time = "2026-02-17T22:19:34.605Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/4bb774a998b97e6c2fd62a9e6cfdaae133b636fd1c468f92afb4ae9a447a/pandas-3.0.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:99d0f92ed92d3083d140bf6b97774f9f13863924cf3f52a70711f4e7588f9d0a", size = 10322465, upload-time = "2026-02-17T22:19:36.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/3a/5b39b51c64159f470f1ca3b1c2a87da290657ca022f7cd11442606f607d1/pandas-3.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3b66857e983208654294bb6477b8a63dee26b37bdd0eb34d010556e91261784f", size = 9910632, upload-time = "2026-02-17T22:19:39.001Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f7/b449ffb3f68c11da12fc06fbf6d2fa3a41c41e17d0284d23a79e1c13a7e4/pandas-3.0.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56cf59638bf24dc9bdf2154c81e248b3289f9a09a6d04e63608c159022352749", size = 10440535, upload-time = "2026-02-17T22:19:41.157Z" }, + { url = "https://files.pythonhosted.org/packages/55/77/6ea82043db22cb0f2bbfe7198da3544000ddaadb12d26be36e19b03a2dc5/pandas-3.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1a9f55e0f46951874b863d1f3906dcb57df2d9be5c5847ba4dfb55b2c815249", size = 10893940, upload-time = "2026-02-17T22:19:43.493Z" }, + { url = "https://files.pythonhosted.org/packages/03/30/f1b502a72468c89412c1b882a08f6eed8a4ee9dc033f35f65d0663df6081/pandas-3.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1849f0bba9c8a2fb0f691d492b834cc8dadf617e29015c66e989448d58d011ee", size = 11442711, upload-time = "2026-02-17T22:19:46.074Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f0/ebb6ddd8fc049e98cabac5c2924d14d1dda26a20adb70d41ea2e428d3ec4/pandas-3.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3d288439e11b5325b02ae6e9cc83e6805a62c40c5a6220bea9beb899c073b1c", size = 11963918, upload-time = "2026-02-17T22:19:48.838Z" }, + { url = "https://files.pythonhosted.org/packages/09/f8/8ce132104074f977f907442790eaae24e27bce3b3b454e82faa3237ff098/pandas-3.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:93325b0fe372d192965f4cca88d97667f49557398bbf94abdda3bf1b591dbe66", size = 9862099, upload-time = "2026-02-17T22:19:51.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/b7/6af9aac41ef2456b768ef0ae60acf8abcebb450a52043d030a65b4b7c9bd/pandas-3.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:97ca08674e3287c7148f4858b01136f8bdfe7202ad25ad04fec602dd1d29d132", size = 9185333, upload-time = "2026-02-17T22:19:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/66/fc/848bb6710bc6061cb0c5badd65b92ff75c81302e0e31e496d00029fe4953/pandas-3.0.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:58eeb1b2e0fb322befcf2bbc9ba0af41e616abadb3d3414a6bc7167f6cbfce32", size = 10772664, upload-time = "2026-02-17T22:19:55.806Z" }, + { url = "https://files.pythonhosted.org/packages/69/5c/866a9bbd0f79263b4b0db6ec1a341be13a1473323f05c122388e0f15b21d/pandas-3.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cd9af1276b5ca9e298bd79a26bda32fa9cc87ed095b2a9a60978d2ca058eaf87", size = 10421286, upload-time = "2026-02-17T22:19:58.091Z" }, + { url = "https://files.pythonhosted.org/packages/51/a4/2058fb84fb1cfbfb2d4a6d485e1940bb4ad5716e539d779852494479c580/pandas-3.0.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f87a04984d6b63788327cd9f79dda62b7f9043909d2440ceccf709249ca988", size = 10342050, upload-time = "2026-02-17T22:20:01.376Z" }, + { url = "https://files.pythonhosted.org/packages/22/1b/674e89996cc4be74db3c4eb09240c4bb549865c9c3f5d9b086ff8fcfbf00/pandas-3.0.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85fe4c4df62e1e20f9db6ebfb88c844b092c22cd5324bdcf94bfa2fc1b391221", size = 10740055, upload-time = "2026-02-17T22:20:04.328Z" }, + { url = "https://files.pythonhosted.org/packages/d0/f8/e954b750764298c22fa4614376531fe63c521ef517e7059a51f062b87dca/pandas-3.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:331ca75a2f8672c365ae25c0b29e46f5ac0c6551fdace8eec4cd65e4fac271ff", size = 11357632, upload-time = "2026-02-17T22:20:06.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/02/c6e04b694ffd68568297abd03588b6d30295265176a5c01b7459d3bc35a3/pandas-3.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15860b1fdb1973fffade772fdb931ccf9b2f400a3f5665aef94a00445d7d8dd5", size = 11810974, upload-time = "2026-02-17T22:20:08.946Z" }, + { url = "https://files.pythonhosted.org/packages/89/41/d7dfb63d2407f12055215070c42fc6ac41b66e90a2946cdc5e759058398b/pandas-3.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:44f1364411d5670efa692b146c748f4ed013df91ee91e9bec5677fb1fd58b937", size = 10884622, upload-time = "2026-02-17T22:20:11.711Z" }, + { url = "https://files.pythonhosted.org/packages/68/b0/34937815889fa982613775e4b97fddd13250f11012d769949c5465af2150/pandas-3.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:108dd1790337a494aa80e38def654ca3f0968cf4f362c85f44c15e471667102d", size = 9452085, upload-time = "2026-02-17T22:20:14.331Z" }, +] + +[[package]] +name = "parsimonious" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/91/abdc50c4ef06fdf8d047f60ee777ca9b2a7885e1a9cea81343fbecda52d7/parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c", size = 52172, upload-time = "2022-09-03T17:01:17.004Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/0f/c8b64d9b54ea631fcad4e9e3c8dbe8c11bb32a623be94f22974c88e71eaf/parsimonious-0.10.0-py3-none-any.whl", hash = "sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f", size = 48427, upload-time = "2022-09-03T17:01:13.814Z" }, +] + +[[package]] +name = "pillow" +version = "12.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = 
"sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, + { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, + { url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012, upload-time = "2026-02-11T04:20:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638, upload-time = "2026-02-11T04:20:54.444Z" }, + { url = "https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540, upload-time = "2026-02-11T04:20:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613, upload-time = "2026-02-11T04:20:57.542Z" }, + { url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745, upload-time = "2026-02-11T04:20:59.196Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" }, + { url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" }, + { url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" }, + { url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" }, + { url = "https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" }, + { url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" }, + { url = "https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" }, + { url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" }, + { url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" }, + { url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" }, + { url = "https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" }, + { url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" }, + { url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" }, + { url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" }, + { url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" }, + { url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" }, + { url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" }, + { url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" }, + { url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" }, + { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = 
"2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", 
size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", 
size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "psutil" +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, +] + +[[package]] +name = "pycryptodome" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152, upload-time = "2025-05-17T17:20:20.833Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348, upload-time = "2025-05-17T17:20:23.171Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033, upload-time = "2025-05-17T17:20:25.424Z" }, + { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142, upload-time = "2025-05-17T17:20:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384, upload-time = "2025-05-17T17:20:30.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237, upload-time = 
"2025-05-17T17:20:33.736Z" }, + { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898, upload-time = "2025-05-17T17:20:36.086Z" }, + { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197, upload-time = "2025-05-17T17:20:38.414Z" }, + { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600, upload-time = "2025-05-17T17:20:40.688Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740, upload-time = "2025-05-17T17:20:42.413Z" }, + { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685, upload-time = "2025-05-17T17:20:44.388Z" }, + { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = "2025-05-17T17:20:50.392Z" }, + { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = "2025-05-17T17:21:06.72Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" }, + { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" }, + { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = 
"sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url 
= "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, 
upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { 
url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { 
url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pymongo" +version = "4.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/9c/a4895c4b785fc9865a84a56e14b5bd21ca75aadc3dab79c14187cdca189b/pymongo-4.16.0.tar.gz", hash = "sha256:8ba8405065f6e258a6f872fe62d797a28f383a12178c7153c01ed04e845c600c", size = 2495323, upload-time = "2026-01-07T18:05:48.107Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6a/03/6dd7c53cbde98de469a3e6fb893af896dca644c476beb0f0c6342bcc368b/pymongo-4.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bd4911c40a43a821dfd93038ac824b756b6e703e26e951718522d29f6eb166a8", size = 917619, upload-time = "2026-01-07T18:04:19.173Z" }, + { url = "https://files.pythonhosted.org/packages/73/e1/328915f2734ea1f355dc9b0e98505ff670f5fab8be5e951d6ed70971c6aa/pymongo-4.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25a6b03a68f9907ea6ec8bc7cf4c58a1b51a18e23394f962a6402f8e46d41211", size = 917364, upload-time = "2026-01-07T18:04:20.861Z" }, + { url = "https://files.pythonhosted.org/packages/41/fe/4769874dd9812a1bc2880a9785e61eba5340da966af888dd430392790ae0/pymongo-4.16.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:91ac0cb0fe2bf17616c2039dac88d7c9a5088f5cb5829b27c9d250e053664d31", size = 1686901, upload-time = "2026-01-07T18:04:22.219Z" }, + { url = "https://files.pythonhosted.org/packages/fa/8d/15707b9669fdc517bbc552ac60da7124dafe7ac1552819b51e97ed4038b4/pymongo-4.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf0ec79e8ca7077f455d14d915d629385153b6a11abc0b93283ed73a8013e376", size = 1723034, upload-time = "2026-01-07T18:04:24.055Z" }, + { url = "https://files.pythonhosted.org/packages/5b/af/3d5d16ff11d447d40c1472da1b366a31c7380d7ea2922a449c7f7f495567/pymongo-4.16.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2d0082631a7510318befc2b4fdab140481eb4b9dd62d9245e042157085da2a70", size = 1797161, upload-time = "2026-01-07T18:04:25.964Z" }, + { url = "https://files.pythonhosted.org/packages/fb/04/725ab8664eeec73ec125b5a873448d80f5d8cf2750aaaf804cbc538a50a5/pymongo-4.16.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85dc2f3444c346ea019a371e321ac868a4fab513b7a55fe368f0cc78de8177cc", size = 1780938, 
upload-time = "2026-01-07T18:04:28.745Z" }, + { url = "https://files.pythonhosted.org/packages/22/50/dd7e9095e1ca35f93c3c844c92eb6eb0bc491caeb2c9bff3b32fe3c9b18f/pymongo-4.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dabbf3c14de75a20cc3c30bf0c6527157224a93dfb605838eabb1a2ee3be008d", size = 1714342, upload-time = "2026-01-07T18:04:30.331Z" }, + { url = "https://files.pythonhosted.org/packages/03/c9/542776987d5c31ae8e93e92680ea2b6e5a2295f398b25756234cabf38a39/pymongo-4.16.0-cp312-cp312-win32.whl", hash = "sha256:60307bb91e0ab44e560fe3a211087748b2b5f3e31f403baf41f5b7b0a70bd104", size = 887868, upload-time = "2026-01-07T18:04:32.124Z" }, + { url = "https://files.pythonhosted.org/packages/2e/d4/b4045a7ccc5680fb496d01edf749c7a9367cc8762fbdf7516cf807ef679b/pymongo-4.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:f513b2c6c0d5c491f478422f6b5b5c27ac1af06a54c93ef8631806f7231bd92e", size = 907554, upload-time = "2026-01-07T18:04:33.685Z" }, + { url = "https://files.pythonhosted.org/packages/60/4c/33f75713d50d5247f2258405142c0318ff32c6f8976171c4fcae87a9dbdf/pymongo-4.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:dfc320f08ea9a7ec5b2403dc4e8150636f0d6150f4b9792faaae539c88e7db3b", size = 892971, upload-time = "2026-01-07T18:04:35.594Z" }, + { url = "https://files.pythonhosted.org/packages/47/84/148d8b5da8260f4679d6665196ae04ab14ffdf06f5fe670b0ab11942951f/pymongo-4.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d15f060bc6d0964a8bb70aba8f0cb6d11ae99715438f640cff11bbcf172eb0e8", size = 972009, upload-time = "2026-01-07T18:04:38.303Z" }, + { url = "https://files.pythonhosted.org/packages/1e/5e/9f3a8daf583d0adaaa033a3e3e58194d2282737dc164014ff33c7a081103/pymongo-4.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a19ea46a0fe71248965305a020bc076a163311aefbaa1d83e47d06fa30ac747", size = 971784, upload-time = "2026-01-07T18:04:39.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/f2/b6c24361fcde24946198573c0176406bfd5f7b8538335f3d939487055322/pymongo-4.16.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:311d4549d6bf1f8c61d025965aebb5ba29d1481dc6471693ab91610aaffbc0eb", size = 1947174, upload-time = "2026-01-07T18:04:41.368Z" }, + { url = "https://files.pythonhosted.org/packages/47/1a/8634192f98cf740b3d174e1018dd0350018607d5bd8ac35a666dc49c732b/pymongo-4.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46ffb728d92dd5b09fc034ed91acf5595657c7ca17d4cf3751322cd554153c17", size = 1991727, upload-time = "2026-01-07T18:04:42.965Z" }, + { url = "https://files.pythonhosted.org/packages/5a/2f/0c47ac84572b28e23028a23a3798a1f725e1c23b0cf1c1424678d16aff42/pymongo-4.16.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:acda193f440dd88c2023cb00aa8bd7b93a9df59978306d14d87a8b12fe426b05", size = 2082497, upload-time = "2026-01-07T18:04:44.652Z" }, + { url = "https://files.pythonhosted.org/packages/ba/57/9f46ef9c862b2f0cf5ce798f3541c201c574128d31ded407ba4b3918d7b6/pymongo-4.16.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d9fdb386cf958e6ef6ff537d6149be7edb76c3268cd6833e6c36aa447e4443f", size = 2064947, upload-time = "2026-01-07T18:04:46.228Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/5421c0998f38e32288100a07f6cb2f5f9f352522157c901910cb2927e211/pymongo-4.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91899dd7fb9a8c50f09c3c1cf0cb73bfbe2737f511f641f19b9650deb61c00ca", size = 1980478, upload-time = "2026-01-07T18:04:48.017Z" }, + { url = "https://files.pythonhosted.org/packages/92/93/bfc448d025e12313a937d6e1e0101b50cc9751636b4b170e600fe3203063/pymongo-4.16.0-cp313-cp313-win32.whl", hash = 
"sha256:2cd60cd1e05de7f01927f8e25ca26b3ea2c09de8723241e5d3bcfdc70eaff76b", size = 934672, upload-time = "2026-01-07T18:04:49.538Z" }, + { url = "https://files.pythonhosted.org/packages/96/10/12710a5e01218d50c3dd165fd72c5ed2699285f77348a3b1a119a191d826/pymongo-4.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ead8a0050c53eaa55935895d6919d393d0328ec24b2b9115bdbe881aa222673", size = 959237, upload-time = "2026-01-07T18:04:51.382Z" }, + { url = "https://files.pythonhosted.org/packages/0c/56/d288bcd1d05bc17ec69df1d0b1d67bc710c7c5dbef86033a5a4d2e2b08e6/pymongo-4.16.0-cp313-cp313-win_arm64.whl", hash = "sha256:dbbc5b254c36c37d10abb50e899bc3939bbb7ab1e7c659614409af99bd3e7675", size = 940909, upload-time = "2026-01-07T18:04:52.904Z" }, + { url = "https://files.pythonhosted.org/packages/30/9e/4d343f8d0512002fce17915a89477b9f916bda1205729e042d8f23acf194/pymongo-4.16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:8a254d49a9ffe9d7f888e3c677eed3729b14ce85abb08cd74732cead6ccc3c66", size = 1026634, upload-time = "2026-01-07T18:04:54.359Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e3/341f88c5535df40c0450fda915f582757bb7d988cdfc92990a5e27c4c324/pymongo-4.16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a1bf44e13cf2d44d2ea2e928a8140d5d667304abe1a61c4d55b4906f389fbe64", size = 1026252, upload-time = "2026-01-07T18:04:56.642Z" }, + { url = "https://files.pythonhosted.org/packages/af/64/9471b22eb98f0a2ca0b8e09393de048502111b2b5b14ab1bd9e39708aab5/pymongo-4.16.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f1c5f1f818b669875d191323a48912d3fcd2e4906410e8297bb09ac50c4d5ccc", size = 2207399, upload-time = "2026-01-07T18:04:58.255Z" }, + { url = "https://files.pythonhosted.org/packages/87/ac/47c4d50b25a02f21764f140295a2efaa583ee7f17992a5e5fa542b3a690f/pymongo-4.16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:77cfd37a43a53b02b7bd930457c7994c924ad8bbe8dff91817904bcbf291b371", size = 2260595, upload-time = "2026-01-07T18:04:59.788Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1b/0ce1ce9dd036417646b2fe6f63b58127acff3cf96eeb630c34ec9cd675ff/pymongo-4.16.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:36ef2fee50eee669587d742fb456e349634b4fcf8926208766078b089054b24b", size = 2366958, upload-time = "2026-01-07T18:05:01.942Z" }, + { url = "https://files.pythonhosted.org/packages/3e/3c/a5a17c0d413aa9d6c17bc35c2b472e9e79cda8068ba8e93433b5f43028e9/pymongo-4.16.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55f8d5a6fe2fa0b823674db2293f92d74cd5f970bc0360f409a1fc21003862d3", size = 2346081, upload-time = "2026-01-07T18:05:03.576Z" }, + { url = "https://files.pythonhosted.org/packages/65/19/f815533d1a88fb8a3b6c6e895bb085ffdae68ccb1e6ed7102202a307f8e2/pymongo-4.16.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9caacac0dd105e2555521002e2d17afc08665187017b466b5753e84c016628e6", size = 2246053, upload-time = "2026-01-07T18:05:05.459Z" }, + { url = "https://files.pythonhosted.org/packages/c6/88/4be3ec78828dc64b212c123114bd6ae8db5b7676085a7b43cc75d0131bd2/pymongo-4.16.0-cp314-cp314-win32.whl", hash = "sha256:c789236366525c3ee3cd6e4e450a9ff629a7d1f4d88b8e18a0aea0615fd7ecf8", size = 989461, upload-time = "2026-01-07T18:05:07.018Z" }, + { url = "https://files.pythonhosted.org/packages/af/5a/ab8d5af76421b34db483c9c8ebc3a2199fb80ae63dc7e18f4cf1df46306a/pymongo-4.16.0-cp314-cp314-win_amd64.whl", hash = "sha256:2b0714d7764efb29bf9d3c51c964aed7c4c7237b341f9346f15ceaf8321fdb35", size = 1017803, upload-time = "2026-01-07T18:05:08.499Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f4/98d68020728ac6423cf02d17cfd8226bf6cce5690b163d30d3f705e8297e/pymongo-4.16.0-cp314-cp314-win_arm64.whl", hash = 
"sha256:12762e7cc0f8374a8cae3b9f9ed8dabb5d438c7b33329232dd9b7de783454033", size = 997184, upload-time = "2026-01-07T18:05:09.944Z" }, + { url = "https://files.pythonhosted.org/packages/50/00/dc3a271daf06401825b9c1f4f76f018182c7738281ea54b9762aea0560c1/pymongo-4.16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1c01e8a7cd0ea66baf64a118005535ab5bf9f9eb63a1b50ac3935dccf9a54abe", size = 1083303, upload-time = "2026-01-07T18:05:11.702Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4b/b5375ee21d12eababe46215011ebc63801c0d2c5ffdf203849d0d79f9852/pymongo-4.16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4c4872299ebe315a79f7f922051061634a64fda95b6b17677ba57ef00b2ba2a4", size = 1083233, upload-time = "2026-01-07T18:05:13.182Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e3/52efa3ca900622c7dcb56c5e70f15c906816d98905c22d2ee1f84d9a7b60/pymongo-4.16.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:78037d02389745e247fe5ab0bcad5d1ab30726eaac3ad79219c7d6bbb07eec53", size = 2527438, upload-time = "2026-01-07T18:05:14.981Z" }, + { url = "https://files.pythonhosted.org/packages/cb/96/43b1be151c734e7766c725444bcbfa1de6b60cc66bfb406203746839dd25/pymongo-4.16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c126fb72be2518395cc0465d4bae03125119136462e1945aea19840e45d89cfc", size = 2600399, upload-time = "2026-01-07T18:05:16.794Z" }, + { url = "https://files.pythonhosted.org/packages/e7/62/fa64a5045dfe3a1cd9217232c848256e7bc0136cffb7da4735c5e0d30e40/pymongo-4.16.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f3867dc225d9423c245a51eaac2cfcd53dde8e0a8d8090bb6aed6e31bd6c2d4f", size = 2720960, upload-time = "2026-01-07T18:05:18.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/7b/01577eb97e605502821273a5bc16ce0fb0be5c978fe03acdbff471471202/pymongo-4.16.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f25001a955073b80510c0c3db0e043dbbc36904fd69e511c74e3d8640b8a5111", size = 2699344, upload-time = "2026-01-07T18:05:20.073Z" }, + { url = "https://files.pythonhosted.org/packages/55/68/6ef6372d516f703479c3b6cbbc45a5afd307173b1cbaccd724e23919bb1a/pymongo-4.16.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d9885aad05f82fd7ea0c9ca505d60939746b39263fa273d0125170da8f59098", size = 2577133, upload-time = "2026-01-07T18:05:22.052Z" }, + { url = "https://files.pythonhosted.org/packages/15/c7/b5337093bb01da852f945802328665f85f8109dbe91d81ea2afe5ff059b9/pymongo-4.16.0-cp314-cp314t-win32.whl", hash = "sha256:948152b30eddeae8355495f9943a3bf66b708295c0b9b6f467de1c620f215487", size = 1040560, upload-time = "2026-01-07T18:05:23.888Z" }, + { url = "https://files.pythonhosted.org/packages/96/8c/5b448cd1b103f3889d5713dda37304c81020ff88e38a826e8a75ddff4610/pymongo-4.16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f6e42c1bc985d9beee884780ae6048790eb4cd565c46251932906bdb1630034a", size = 1075081, upload-time = "2026-01-07T18:05:26.874Z" }, + { url = "https://files.pythonhosted.org/packages/32/cd/ddc794cdc8500f6f28c119c624252fb6dfb19481c6d7ed150f13cf468a6d/pymongo-4.16.0-cp314-cp314t-win_arm64.whl", hash = "sha256:6b2a20edb5452ac8daa395890eeb076c570790dfce6b7a44d788af74c2f8cf96", size = 1047725, upload-time = "2026-01-07T18:05:28.47Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = 
"2026-01-21T03:57:59.36Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 
15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-i18n" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/32/d9ba976458c9503ec22db4eb677a5d919edaecd73d893effeaa92a67b84b/python-i18n-0.3.9.tar.gz", hash = "sha256:df97f3d2364bf3a7ebfbd6cbefe8e45483468e52a9e30b909c6078f5f471e4e8", size = 11778, upload-time = "2020-08-26T14:31:27.512Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/73/9a0b2974dd9a3d50788d235f10c4d73c2efcd22926036309645fc2f0db0c/python_i18n-0.3.9-py3-none-any.whl", hash = "sha256:bda5b8d889ebd51973e22e53746417bd32783c9bd6780fd27cadbb733915651d", size = 13750, upload-time = "2020-08-26T14:31:26.266Z" }, +] + +[[package]] +name = "pytz" +version = "2026.1.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, +] + +[[package]] +name = "pyunormalize" +version = "17.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ab/b912c484cfb96ba4834efe050bbf10c9e157bd8189eb859aefba8712b136/pyunormalize-17.0.0.tar.gz", hash = "sha256:0949a3e56817e287febcaf1b0cc4b5adf0bb107628d379335938040947eec792", size = 53121, upload-time = "2025-09-28T20:53:06.141Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/80/61512483dc509e3ae8a42fb143479d1e406ce1d91f8f08d538a3dde39c6d/pyunormalize-17.0.0-py3-none-any.whl", hash = "sha256:f0d93b076f938db2b26d319d04f2b58505d1cd7a80b5b72badbe7d1aa4d2a31c", size = 51358, upload-time = "2025-09-28T20:53:04.876Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = 
"2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = 
"sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = 
"2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time 
= "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "regex" +version = "2026.2.28" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/71/41455aa99a5a5ac1eaf311f5d8efd9ce6433c03ac1e0962de163350d0d97/regex-2026.2.28.tar.gz", hash = "sha256:a729e47d418ea11d03469f321aaf67cdee8954cde3ff2cf8403ab87951ad10f2", size = 415184, upload-time = "2026-02-28T02:19:42.792Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/42/9061b03cf0fc4b5fa2c3984cbbaed54324377e440a5c5a29d29a72518d62/regex-2026.2.28-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fcf26c3c6d0da98fada8ae4ef0aa1c3405a431c0a77eb17306d38a89b02adcd7", size = 489574, upload-time = "2026-02-28T02:16:50.455Z" }, + { url = "https://files.pythonhosted.org/packages/77/83/0c8a5623a233015595e3da499c5a1c13720ac63c107897a6037bb97af248/regex-2026.2.28-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02473c954af35dd2defeb07e44182f5705b30ea3f351a7cbffa9177beb14da5d", size = 291426, upload-time = "2026-02-28T02:16:52.52Z" }, + { url = "https://files.pythonhosted.org/packages/9e/06/3ef1ac6910dc3295ebd71b1f9bfa737e82cfead211a18b319d45f85ddd09/regex-2026.2.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b65d33a17101569f86d9c5966a8b1d7fbf8afdda5a8aa219301b0a80f58cf7d", size = 289200, upload-time = "2026-02-28T02:16:54.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/c9/8cc8d850b35ab5650ff6756a1cb85286e2000b66c97520b29c1587455344/regex-2026.2.28-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e71dcecaa113eebcc96622c17692672c2d104b1d71ddf7adeda90da7ddeb26fc", size = 796765, upload-time = "2026-02-28T02:16:55.905Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5d/57702597627fc23278ebf36fbb497ac91c0ce7fec89ac6c81e420ca3e38c/regex-2026.2.28-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:481df4623fa4969c8b11f3433ed7d5e3dc9cec0f008356c3212b3933fb77e3d8", size = 863093, upload-time = "2026-02-28T02:16:58.094Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/f3ecad537ca2811b4d26b54ca848cf70e04fcfc138667c146a9f3157779c/regex-2026.2.28-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64e7c6ad614573e0640f271e811a408d79a9e1fe62a46adb602f598df42a818d", size = 909455, upload-time = "2026-02-28T02:17:00.918Z" }, + { url = "https://files.pythonhosted.org/packages/9e/40/bb226f203caa22c1043c1ca79b36340156eca0f6a6742b46c3bb222a3a57/regex-2026.2.28-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6b08a06976ff4fb0d83077022fde3eca06c55432bb997d8c0495b9a4e9872f4", size = 802037, upload-time = "2026-02-28T02:17:02.842Z" }, + { url = "https://files.pythonhosted.org/packages/44/7c/c6d91d8911ac6803b45ca968e8e500c46934e58c0903cbc6d760ee817a0a/regex-2026.2.28-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:864cdd1a2ef5716b0ab468af40139e62ede1b3a53386b375ec0786bb6783fc05", size = 775113, upload-time = "2026-02-28T02:17:04.506Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8d/4a9368d168d47abd4158580b8c848709667b1cd293ff0c0c277279543bd0/regex-2026.2.28-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:511f7419f7afab475fd4d639d4aedfc54205bcb0800066753ef68a59f0f330b5", size = 784194, upload-time = "2026-02-28T02:17:06.888Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bf/2c72ab5d8b7be462cb1651b5cc333da1d0068740342f350fcca3bca31947/regex-2026.2.28-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b42f7466e32bf15a961cf09f35fa6323cc72e64d3d2c990b10de1274a5da0a59", size = 856846, upload-time = "2026-02-28T02:17:09.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f4/6b65c979bb6d09f51bb2d2a7bc85de73c01ec73335d7ddd202dcb8cd1c8f/regex-2026.2.28-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8710d61737b0c0ce6836b1da7109f20d495e49b3809f30e27e9560be67a257bf", size = 763516, upload-time = "2026-02-28T02:17:11.004Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/29ea5e27400ee86d2cc2b4e80aa059df04eaf78b4f0c18576ae077aeff68/regex-2026.2.28-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4390c365fd2d45278f45afd4673cb90f7285f5701607e3ad4274df08e36140ae", size = 849278, upload-time = "2026-02-28T02:17:12.693Z" }, + { url = "https://files.pythonhosted.org/packages/1d/91/3233d03b5f865111cd517e1c95ee8b43e8b428d61fa73764a80c9bb6f537/regex-2026.2.28-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cb3b1db8ff6c7b8bf838ab05583ea15230cb2f678e569ab0e3a24d1e8320940b", size = 790068, upload-time = "2026-02-28T02:17:14.9Z" }, + { url = "https://files.pythonhosted.org/packages/76/92/abc706c1fb03b4580a09645b206a3fc032f5a9f457bc1a8038ac555658ab/regex-2026.2.28-cp312-cp312-win32.whl", hash = "sha256:f8ed9a5d4612df9d4de15878f0bc6aa7a268afbe5af21a3fdd97fa19516e978c", size = 266416, upload-time = "2026-02-28T02:17:17.15Z" }, + { url = "https://files.pythonhosted.org/packages/fa/06/2a6f7dff190e5fa9df9fb4acf2fdf17a1aa0f7f54596cba8de608db56b3a/regex-2026.2.28-cp312-cp312-win_amd64.whl", hash = "sha256:01d65fd24206c8e1e97e2e31b286c59009636c022eb5d003f52760b0f42155d4", size = 277297, upload-time = "2026-02-28T02:17:18.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/f0/58a2484851fadf284458fdbd728f580d55c1abac059ae9f048c63b92f427/regex-2026.2.28-cp312-cp312-win_arm64.whl", hash = "sha256:c0b5ccbb8ffb433939d248707d4a8b31993cb76ab1a0187ca886bf50e96df952", size = 270408, upload-time = "2026-02-28T02:17:20.328Z" }, + { url = "https://files.pythonhosted.org/packages/87/f6/dc9ef48c61b79c8201585bf37fa70cd781977da86e466cd94e8e95d2443b/regex-2026.2.28-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6d63a07e5ec8ce7184452cb00c41c37b49e67dc4f73b2955b5b8e782ea970784", size = 489311, upload-time = "2026-02-28T02:17:22.591Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/c20390f2232d3f7956f420f4ef1852608ad57aa26c3dd78516cb9f3dc913/regex-2026.2.28-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e59bc8f30414d283ae8ee1617b13d8112e7135cb92830f0ec3688cb29152585a", size = 291285, upload-time = "2026-02-28T02:17:24.355Z" }, + { url = "https://files.pythonhosted.org/packages/d2/a6/ba1068a631ebd71a230e7d8013fcd284b7c89c35f46f34a7da02082141b1/regex-2026.2.28-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de0cf053139f96219ccfabb4a8dd2d217c8c82cb206c91d9f109f3f552d6b43d", size = 289051, upload-time = "2026-02-28T02:17:26.722Z" }, + { url = "https://files.pythonhosted.org/packages/1d/1b/7cc3b7af4c244c204b7a80924bd3d85aecd9ba5bc82b485c5806ee8cda9e/regex-2026.2.28-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb4db2f17e6484904f986c5a657cec85574c76b5c5e61c7aae9ffa1bc6224f95", size = 796842, upload-time = "2026-02-28T02:17:29.064Z" }, + { url = "https://files.pythonhosted.org/packages/24/87/26bd03efc60e0d772ac1e7b60a2e6325af98d974e2358f659c507d3c76db/regex-2026.2.28-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52b017b35ac2214d0db5f4f90e303634dc44e4aba4bd6235a27f97ecbe5b0472", size = 863083, upload-time = "2026-02-28T02:17:31.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/54/aeaf4afb1aa0a65e40de52a61dc2ac5b00a83c6cb081c8a1d0dda74f3010/regex-2026.2.28-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69fc560ccbf08a09dc9b52ab69cacfae51e0ed80dc5693078bdc97db2f91ae96", size = 909412, upload-time = "2026-02-28T02:17:33.248Z" }, + { url = "https://files.pythonhosted.org/packages/12/2f/049901def913954e640d199bbc6a7ca2902b6aeda0e5da9d17f114100ec2/regex-2026.2.28-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e61eea47230eba62a31f3e8a0e3164d0f37ef9f40529fb2c79361bc6b53d2a92", size = 802101, upload-time = "2026-02-28T02:17:35.053Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/512fb9ff7f5b15ea204bb1967ebb649059446decacccb201381f9fa6aad4/regex-2026.2.28-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4f5c0b182ad4269e7381b7c27fdb0408399881f7a92a4624fd5487f2971dfc11", size = 775260, upload-time = "2026-02-28T02:17:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/9a92935878aba19bd72706b9db5646a6f993d99b3f6ed42c02ec8beb1d61/regex-2026.2.28-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:96f6269a2882fbb0ee76967116b83679dc628e68eaea44e90884b8d53d833881", size = 784311, upload-time = "2026-02-28T02:17:39.855Z" }, + { url = "https://files.pythonhosted.org/packages/09/d3/fc51a8a738a49a6b6499626580554c9466d3ea561f2b72cfdc72e4149773/regex-2026.2.28-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5acd4b6a95f37c3c3828e5d053a7d4edaedb85de551db0153754924cb7c83e3", size = 856876, upload-time = "2026-02-28T02:17:42.317Z" }, + { url = "https://files.pythonhosted.org/packages/08/b7/2e641f3d084b120ca4c52e8c762a78da0b32bf03ef546330db3e2635dc5f/regex-2026.2.28-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2234059cfe33d9813a3677ef7667999caea9eeaa83fef98eb6ce15c6cf9e0215", size = 763632, upload-time = "2026-02-28T02:17:45.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/6d/0009021d97e79ee99f3d8641f0a8d001eed23479ade4c3125a5480bf3e2d/regex-2026.2.28-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c15af43c72a7fb0c97cbc66fa36a43546eddc5c06a662b64a0cbf30d6ac40944", size = 849320, upload-time = "2026-02-28T02:17:47.192Z" }, + { url = "https://files.pythonhosted.org/packages/05/7a/51cfbad5758f8edae430cb21961a9c8d04bce1dae4d2d18d4186eec7cfa1/regex-2026.2.28-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9185cc63359862a6e80fe97f696e04b0ad9a11c4ac0a4a927f979f611bfe3768", size = 790152, upload-time = "2026-02-28T02:17:49.067Z" }, + { url = "https://files.pythonhosted.org/packages/90/3d/a83e2b6b3daa142acb8c41d51de3876186307d5cb7490087031747662500/regex-2026.2.28-cp313-cp313-win32.whl", hash = "sha256:fb66e5245db9652abd7196ace599b04d9c0e4aa7c8f0e2803938377835780081", size = 266398, upload-time = "2026-02-28T02:17:50.744Z" }, + { url = "https://files.pythonhosted.org/packages/85/4f/16e9ebb1fe5425e11b9596c8d57bf8877dcb32391da0bfd33742e3290637/regex-2026.2.28-cp313-cp313-win_amd64.whl", hash = "sha256:71a911098be38c859ceb3f9a9ce43f4ed9f4c6720ad8684a066ea246b76ad9ff", size = 277282, upload-time = "2026-02-28T02:17:53.074Z" }, + { url = "https://files.pythonhosted.org/packages/07/b4/92851335332810c5a89723bf7a7e35c7209f90b7d4160024501717b28cc9/regex-2026.2.28-cp313-cp313-win_arm64.whl", hash = "sha256:39bb5727650b9a0275c6a6690f9bb3fe693a7e6cc5c3155b1240aedf8926423e", size = 270382, upload-time = "2026-02-28T02:17:54.888Z" }, + { url = "https://files.pythonhosted.org/packages/24/07/6c7e4cec1e585959e96cbc24299d97e4437a81173217af54f1804994e911/regex-2026.2.28-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:97054c55db06ab020342cc0d35d6f62a465fa7662871190175f1ad6c655c028f", size = 492541, upload-time = "2026-02-28T02:17:56.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/13/55eb22ada7f43d4f4bb3815b6132183ebc331c81bd496e2d1f3b8d862e0d/regex-2026.2.28-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d25a10811de831c2baa6aef3c0be91622f44dd8d31dd12e69f6398efb15e48b", size = 292984, upload-time = "2026-02-28T02:17:58.538Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/c301f8cb29ce9644a5ef85104c59244e6e7e90994a0f458da4d39baa8e17/regex-2026.2.28-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d6cfe798d8da41bb1862ed6e0cba14003d387c3c0c4a5d45591076ae9f0ce2f8", size = 291509, upload-time = "2026-02-28T02:18:00.208Z" }, + { url = "https://files.pythonhosted.org/packages/b5/43/aabe384ec1994b91796e903582427bc2ffaed9c4103819ed3c16d8e749f3/regex-2026.2.28-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd0ce43e71d825b7c0661f9c54d4d74bd97c56c3fd102a8985bcfea48236bacb", size = 809429, upload-time = "2026-02-28T02:18:02.328Z" }, + { url = "https://files.pythonhosted.org/packages/04/b8/8d2d987a816720c4f3109cee7c06a4b24ad0e02d4fc74919ab619e543737/regex-2026.2.28-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00945d007fd74a9084d2ab79b695b595c6b7ba3698972fadd43e23230c6979c1", size = 869422, upload-time = "2026-02-28T02:18:04.23Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ad/2c004509e763c0c3719f97c03eca26473bffb3868d54c5f280b8cd4f9e3d/regex-2026.2.28-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bec23c11cbbf09a4df32fe50d57cbdd777bc442269b6e39a1775654f1c95dee2", size = 915175, upload-time = "2026-02-28T02:18:06.791Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/fd429066da487ef555a9da73bf214894aec77fc8c66a261ee355a69871a8/regex-2026.2.28-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5cdcc17d935c8f9d3f4db5c2ebe2640c332e3822ad5d23c2f8e0228e6947943a", size 
= 812044, upload-time = "2026-02-28T02:18:08.736Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ca/feedb7055c62a3f7f659971bf45f0e0a87544b6b0cf462884761453f97c5/regex-2026.2.28-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a448af01e3d8031c89c5d902040b124a5e921a25c4e5e07a861ca591ce429341", size = 782056, upload-time = "2026-02-28T02:18:10.777Z" }, + { url = "https://files.pythonhosted.org/packages/95/30/1aa959ed0d25c1dd7dd5047ea8ba482ceaef38ce363c401fd32a6b923e60/regex-2026.2.28-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:10d28e19bd4888e4abf43bd3925f3c134c52fdf7259219003588a42e24c2aa25", size = 798743, upload-time = "2026-02-28T02:18:13.025Z" }, + { url = "https://files.pythonhosted.org/packages/3b/1f/dadb9cf359004784051c897dcf4d5d79895f73a1bbb7b827abaa4814ae80/regex-2026.2.28-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:99985a2c277dcb9ccb63f937451af5d65177af1efdeb8173ac55b61095a0a05c", size = 864633, upload-time = "2026-02-28T02:18:16.84Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f1/b9a25eb24e1cf79890f09e6ec971ee5b511519f1851de3453bc04f6c902b/regex-2026.2.28-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:e1e7b24cb3ae9953a560c563045d1ba56ee4749fbd05cf21ba571069bd7be81b", size = 770862, upload-time = "2026-02-28T02:18:18.892Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/c5cb10b7aa6f182f9247a30cc9527e326601f46f4df864ac6db588d11fcd/regex-2026.2.28-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d8511a01d0e4ee1992eb3ba19e09bc1866fe03f05129c3aec3fdc4cbc77aad3f", size = 854788, upload-time = "2026-02-28T02:18:21.475Z" }, + { url = "https://files.pythonhosted.org/packages/0a/50/414ba0731c4bd40b011fa4703b2cc86879ec060c64f2a906e65a56452589/regex-2026.2.28-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aaffaecffcd2479ce87aa1e74076c221700b7c804e48e98e62500ee748f0f550", size = 800184, upload-time = "2026-02-28T02:18:23.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/50/0c7290987f97e7e6830b0d853f69dc4dc5852c934aae63e7fdcd76b4c383/regex-2026.2.28-cp313-cp313t-win32.whl", hash = "sha256:ef77bdde9c9eba3f7fa5b58084b29bbcc74bcf55fdbeaa67c102a35b5bd7e7cc", size = 269137, upload-time = "2026-02-28T02:18:25.375Z" }, + { url = "https://files.pythonhosted.org/packages/68/80/ef26ff90e74ceb4051ad6efcbbb8a4be965184a57e879ebcbdef327d18fa/regex-2026.2.28-cp313-cp313t-win_amd64.whl", hash = "sha256:98adf340100cbe6fbaf8e6dc75e28f2c191b1be50ffefe292fb0e6f6eefdb0d8", size = 280682, upload-time = "2026-02-28T02:18:27.205Z" }, + { url = "https://files.pythonhosted.org/packages/69/8b/fbad9c52e83ffe8f97e3ed1aa0516e6dff6bb633a41da9e64645bc7efdc5/regex-2026.2.28-cp313-cp313t-win_arm64.whl", hash = "sha256:2fb950ac1d88e6b6a9414381f403797b236f9fa17e1eee07683af72b1634207b", size = 271735, upload-time = "2026-02-28T02:18:29.015Z" }, + { url = "https://files.pythonhosted.org/packages/cf/03/691015f7a7cb1ed6dacb2ea5de5682e4858e05a4c5506b2839cd533bbcd6/regex-2026.2.28-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:78454178c7df31372ea737996fb7f36b3c2c92cccc641d251e072478afb4babc", size = 489497, upload-time = "2026-02-28T02:18:30.889Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ba/8db8fd19afcbfa0e1036eaa70c05f20ca8405817d4ad7a38a6b4c2f031ac/regex-2026.2.28-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:5d10303dd18cedfd4d095543998404df656088240bcfd3cd20a8f95b861f74bd", size = 291295, upload-time = "2026-02-28T02:18:33.426Z" }, + { url = "https://files.pythonhosted.org/packages/5a/79/9aa0caf089e8defef9b857b52fc53801f62ff868e19e5c83d4a96612eba1/regex-2026.2.28-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:19a9c9e0a8f24f39d575a6a854d516b48ffe4cbdcb9de55cb0570a032556ecff", size = 289275, upload-time = "2026-02-28T02:18:35.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/26/ee53117066a30ef9c883bf1127eece08308ccf8ccd45c45a966e7a665385/regex-2026.2.28-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09500be324f49b470d907b3ef8af9afe857f5cca486f853853f7945ddbf75911", size = 797176, upload-time = "2026-02-28T02:18:37.15Z" }, + { url = "https://files.pythonhosted.org/packages/05/1b/67fb0495a97259925f343ae78b5d24d4a6624356ae138b57f18bd43006e4/regex-2026.2.28-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fb1c4ff62277d87a7335f2c1ea4e0387b8f2b3ad88a64efd9943906aafad4f33", size = 863813, upload-time = "2026-02-28T02:18:39.478Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/93ac9bbafc53618091c685c7ed40239a90bf9f2a82c983f0baa97cb7ae07/regex-2026.2.28-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b8b3f1be1738feadc69f62daa250c933e85c6f34fa378f54a7ff43807c1b9117", size = 908678, upload-time = "2026-02-28T02:18:41.619Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/a8f5e0561702b25239846a16349feece59712ae20598ebb205580332a471/regex-2026.2.28-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc8ed8c3f41c27acb83f7b6a9eb727a73fc6663441890c5cb3426a5f6a91ce7d", size = 801528, upload-time = "2026-02-28T02:18:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/96/5d/ed6d4cbde80309854b1b9f42d9062fee38ade15f7eb4909f6ef2440403b5/regex-2026.2.28-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa539be029844c0ce1114762d2952ab6cfdd7c7c9bd72e0db26b94c3c36dcc5a", size = 775373, upload-time = "2026-02-28T02:18:46.102Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e9/6e53c34e8068b9deec3e87210086ecb5b9efebdefca6b0d3fa43d66dcecb/regex-2026.2.28-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:7900157786428a79615a8264dac1f12c9b02957c473c8110c6b1f972dcecaddf", size = 784859, upload-time = "2026-02-28T02:18:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/48/3c/736e1c7ca7f0dcd2ae33819888fdc69058a349b7e5e84bc3e2f296bbf794/regex-2026.2.28-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0b1d2b07614d95fa2bf8a63fd1e98bd8fa2b4848dc91b1efbc8ba219fdd73952", size = 857813, upload-time = "2026-02-28T02:18:50.576Z" }, + { url = "https://files.pythonhosted.org/packages/6e/7c/48c4659ad9da61f58e79dbe8c05223e0006696b603c16eb6b5cbfbb52c27/regex-2026.2.28-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:b389c61aa28a79c2e0527ac36da579869c2e235a5b208a12c5b5318cda2501d8", size = 763705, upload-time = "2026-02-28T02:18:52.59Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a1/bc1c261789283128165f71b71b4b221dd1b79c77023752a6074c102f18d8/regex-2026.2.28-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f467cb602f03fbd1ab1908f68b53c649ce393fde056628dc8c7e634dab6bfc07", size = 848734, upload-time = "2026-02-28T02:18:54.595Z" }, + { url = "https://files.pythonhosted.org/packages/10/d8/979407faf1397036e25a5ae778157366a911c0f382c62501009f4957cf86/regex-2026.2.28-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e8c8cb2deba42f5ec1ede46374e990f8adc5e6456a57ac1a261b19be6f28e4e6", size = 789871, upload-time = "2026-02-28T02:18:57.34Z" }, + { url = "https://files.pythonhosted.org/packages/03/23/da716821277115fcb1f4e3de1e5dc5023a1e6533598c486abf5448612579/regex-2026.2.28-cp314-cp314-win32.whl", hash = "sha256:9036b400b20e4858d56d117108d7813ed07bb7803e3eed766675862131135ca6", size = 271825, upload-time = "2026-02-28T02:18:59.202Z" }, + { url = "https://files.pythonhosted.org/packages/91/ff/90696f535d978d5f16a52a419be2770a8d8a0e7e0cfecdbfc31313df7fab/regex-2026.2.28-cp314-cp314-win_amd64.whl", hash = "sha256:1d367257cd86c1cbb97ea94e77b373a0bbc2224976e247f173d19e8f18b4afa7", size = 280548, upload-time = "2026-02-28T02:19:01.049Z" }, + { url 
= "https://files.pythonhosted.org/packages/69/f9/5e1b5652fc0af3fcdf7677e7df3ad2a0d47d669b34ac29a63bb177bb731b/regex-2026.2.28-cp314-cp314-win_arm64.whl", hash = "sha256:5e68192bb3a1d6fb2836da24aa494e413ea65853a21505e142e5b1064a595f3d", size = 273444, upload-time = "2026-02-28T02:19:03.255Z" }, + { url = "https://files.pythonhosted.org/packages/d3/eb/8389f9e940ac89bcf58d185e230a677b4fd07c5f9b917603ad5c0f8fa8fe/regex-2026.2.28-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a5dac14d0872eeb35260a8e30bac07ddf22adc1e3a0635b52b02e180d17c9c7e", size = 492546, upload-time = "2026-02-28T02:19:05.378Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/09441d27ce2a6fa6a61ea3150ea4639c1dcda9b31b2ea07b80d6937b24dd/regex-2026.2.28-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ec0c608b7a7465ffadb344ed7c987ff2f11ee03f6a130b569aa74d8a70e8333c", size = 292986, upload-time = "2026-02-28T02:19:07.24Z" }, + { url = "https://files.pythonhosted.org/packages/fb/69/4144b60ed7760a6bd235e4087041f487aa4aa62b45618ce018b0c14833ea/regex-2026.2.28-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7815afb0ca45456613fdaf60ea9c993715511c8d53a83bc468305cbc0ee23c7", size = 291518, upload-time = "2026-02-28T02:19:09.698Z" }, + { url = "https://files.pythonhosted.org/packages/2d/be/77e5426cf5948c82f98c53582009ca9e94938c71f73a8918474f2e2990bb/regex-2026.2.28-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b059e71ec363968671693a78c5053bd9cb2fe410f9b8e4657e88377ebd603a2e", size = 809464, upload-time = "2026-02-28T02:19:12.494Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/2c8c5ac90dc7d05c6e7d8e72c6a3599dc08cd577ac476898e91ca787d7f1/regex-2026.2.28-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8cf76f1a29f0e99dcfd7aef1551a9827588aae5a737fe31442021165f1920dc", size = 869553, upload-time = "2026-02-28T02:19:15.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/34/daa66a342f0271e7737003abf6c3097aa0498d58c668dbd88362ef94eb5d/regex-2026.2.28-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:180e08a435a0319e6a4821c3468da18dc7001987e1c17ae1335488dfe7518dd8", size = 915289, upload-time = "2026-02-28T02:19:17.331Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c7/e22c2aaf0a12e7e22ab19b004bb78d32ca1ecc7ef245949935463c5567de/regex-2026.2.28-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e496956106fd59ba6322a8ea17141a27c5040e5ee8f9433ae92d4e5204462a0", size = 812156, upload-time = "2026-02-28T02:19:20.011Z" }, + { url = "https://files.pythonhosted.org/packages/7f/bb/2dc18c1efd9051cf389cd0d7a3a4d90f6804b9fff3a51b5dc3c85b935f71/regex-2026.2.28-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bba2b18d70eeb7b79950f12f633beeecd923f7c9ad6f6bae28e59b4cb3ab046b", size = 782215, upload-time = "2026-02-28T02:19:22.047Z" }, + { url = "https://files.pythonhosted.org/packages/17/1e/9e4ec9b9013931faa32226ec4aa3c71fe664a6d8a2b91ac56442128b332f/regex-2026.2.28-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6db7bfae0f8a2793ff1f7021468ea55e2699d0790eb58ee6ab36ae43aa00bc5b", size = 798925, upload-time = "2026-02-28T02:19:24.173Z" }, + { url = "https://files.pythonhosted.org/packages/71/57/a505927e449a9ccb41e2cc8d735e2abe3444b0213d1cf9cb364a8c1f2524/regex-2026.2.28-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d0b02e8b7e5874b48ae0f077ecca61c1a6a9f9895e9c6dfb191b55b242862033", size = 864701, upload-time = "2026-02-28T02:19:26.376Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ad/c62cb60cdd93e13eac5b3d9d6bd5d284225ed0e3329426f94d2552dd7cca/regex-2026.2.28-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:25b6eb660c5cf4b8c3407a1ed462abba26a926cc9965e164268a3267bcc06a43", size = 770899, upload-time = "2026-02-28T02:19:29.38Z" }, + { url 
= "https://files.pythonhosted.org/packages/3c/5a/874f861f5c3d5ab99633e8030dee1bc113db8e0be299d1f4b07f5b5ec349/regex-2026.2.28-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:5a932ea8ad5d0430351ff9c76c8db34db0d9f53c1d78f06022a21f4e290c5c18", size = 854727, upload-time = "2026-02-28T02:19:31.494Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ca/d2c03b0efde47e13db895b975b2be6a73ed90b8ba963677927283d43bf74/regex-2026.2.28-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1c2c95e1a2b0f89d01e821ff4de1be4b5d73d1f4b0bf679fa27c1ad8d2327f1a", size = 800366, upload-time = "2026-02-28T02:19:34.248Z" }, + { url = "https://files.pythonhosted.org/packages/14/bd/ee13b20b763b8989f7c75d592bfd5de37dc1181814a2a2747fedcf97e3ba/regex-2026.2.28-cp314-cp314t-win32.whl", hash = "sha256:bbb882061f742eb5d46f2f1bd5304055be0a66b783576de3d7eef1bed4778a6e", size = 274936, upload-time = "2026-02-28T02:19:36.313Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e7/d8020e39414c93af7f0d8688eabcecece44abfd5ce314b21dfda0eebd3d8/regex-2026.2.28-cp314-cp314t-win_amd64.whl", hash = "sha256:6591f281cb44dc13de9585b552cec6fc6cf47fb2fe7a48892295ee9bc4a612f9", size = 284779, upload-time = "2026-02-28T02:19:38.625Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/ad225f4a405827486f1955283407cf758b6d2fb966712644c5f5aef33d1b/regex-2026.2.28-cp314-cp314t-win_arm64.whl", hash = "sha256:dee50f1be42222f89767b64b283283ef963189da0dda4a515aa54a5563c62dec", size = 275010, upload-time = "2026-02-28T02:19:40.65Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 
134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "retry-async" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/c0/4e06ba064b7d648f33dadd92f298e9e39960af73c5f3bdfb58f602d78710/retry_async-0.1.4.tar.gz", hash = "sha256:8414d69b20920a1d700de34b68c0f972fa36a0158450a6f6abc5b45a241ac6b6", size = 2189, upload-time = "2024-01-09T02:32:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/fb/14f39d3a6e91b6c3ba6e8b1c06ca25c0b421a8806912a3fa02bbe699a002/retry_async-0.1.4-py3-none-any.whl", hash = "sha256:21b383c7bc52013478337b894f476c9f106485cfeeb5d449abe5f745be2da219", size = 2706, upload-time = "2024-01-09T02:32:23.851Z" }, +] + +[[package]] +name = "rlp" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-utils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/2d/439b0728a92964a04d9c88ea1ca9ebb128893fbbd5834faa31f987f2fd4c/rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9", size = 33429, upload-time = "2025-02-04T22:05:59.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/fb/e4c0ced9893b84ac95b7181d69a9786ce5879aeb3bbbcbba80a164f85d6a/rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f", size = 19973, upload-time = "2025-02-04T22:05:57.05Z" }, +] + +[[package]] +name = "rocketwatch" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "aiocache" }, + { name = "aiohttp" }, + { name = "anthropic" }, + { name = "anyascii" }, + { name = "beautifulsoup4" }, + { name = "bidict" }, + { name = "cachetools" }, + { name = "colorama" }, + { name = "cronitor" }, + { name = "dice" }, + { name = "discord-py" }, + { name = "eth-typing" }, + { name = "eth-utils" }, + { name = "etherscan-labels" }, + { name = "graphql-query" }, + { name = "hexbytes" }, + { name = "humanize" }, + { name = "inflect" }, + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pillow" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "pymongo" }, + { name = "python-i18n" }, + { name = "pytz" }, + { name = "regex" }, + { name = "retry-async" }, + { name = "seaborn" }, + { name = "tabulate" }, + { name = "termplotlib" }, + { name = "tiktoken" }, + { name = "uptime" }, + { name = "web3" }, +] + +[package.optional-dependencies] +test = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiocache", specifier = "==0.12.3" }, + { name = "aiohttp", specifier = "==3.13.3" }, + { name = "anthropic", specifier = "==0.84.0" }, + { name = "anyascii", specifier = "==0.3.3" }, + { name = "beautifulsoup4", specifier = "==4.14.3" }, + { name = "bidict", specifier = "==0.23.1" }, + { name = "cachetools", specifier = "==7.0.3" }, + { name = "colorama", specifier = "==0.4.6" }, + { name = "cronitor", specifier = "==4.9.0" }, + { name = "dice", specifier = "==4.0.0" }, + { name = "discord-py", specifier = "==2.7.1" }, + { name = "eth-typing", specifier = "==5.2.1" }, + { name = "eth-utils", specifier = "==5.3.1" }, + { name = "etherscan-labels", git = "https://github.com/haloooloolo/etherscan-labels" }, + { name = "graphql-query", specifier = "==1.4.0" }, + { name = "hexbytes", specifier = "==1.3.1" }, + { name = "humanize", specifier = "==4.15.0" }, + { name = "inflect", specifier = "==7.5.0" }, + { name = "matplotlib", specifier = "==3.10.8" }, + 
{ name = "numpy", specifier = "==2.4.2" }, + { name = "pillow", specifier = "==12.1.1" }, + { name = "psutil", specifier = "==7.2.2" }, + { name = "pydantic", specifier = ">=2.0.0,<3.0.0" }, + { name = "pymongo", specifier = "==4.16.0" }, + { name = "pytest", marker = "extra == 'test'", specifier = ">=8.0" }, + { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=1.0" }, + { name = "python-i18n", specifier = "==0.3.9" }, + { name = "pytz", specifier = "==2026.1.post1" }, + { name = "regex", specifier = "==2026.2.28" }, + { name = "retry-async", specifier = "==0.1.4" }, + { name = "seaborn", specifier = "==0.13.2" }, + { name = "tabulate", specifier = "==0.10.0" }, + { name = "termplotlib", specifier = "==0.3.9" }, + { name = "tiktoken", specifier = "==0.12.0" }, + { name = "uptime", specifier = "==3.0.1" }, + { name = "web3", specifier = ">=7.0.0,<8.0.0" }, +] +provides-extras = ["test"] + +[[package]] +name = "seaborn" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pandas" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696, upload-time = "2024-01-25T13:21:52.551Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914, upload-time = "2024-01-25T13:21:49.598Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, +] + +[[package]] +name = "tabulate" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/46/58/8c37dea7bbf769b20d58e7ace7e5edfe65b849442b00ffcdd56be88697c6/tabulate-0.10.0.tar.gz", hash = "sha256:e2cfde8f79420f6deeffdeda9aaec3b6bc5abce947655d17ac662b126e48a60d", size = 91754, upload-time = "2026-03-04T18:55:34.402Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/55/db07de81b5c630da5cbf5c7df646580ca26dfaefa593667fc6f2fe016d2e/tabulate-0.10.0-py3-none-any.whl", hash = "sha256:f0b0622e567335c8fabaaa659f1b33bcb6ddfe2e496071b743aa113f8774f2d3", size = 39814, upload-time = "2026-03-04T18:55:31.284Z" }, +] + +[[package]] +name = "termplotlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b4/60/8a74d2503dd64975402c7b8d00f6e201e8cbba5348282433fa5fb8d41b67/termplotlib-0.3.9.tar.gz", hash = "sha256:c04cbd67ac61753eac9162a99cbe87c379d4c5daf720af1df55f4423c094203e", size = 24517, upload-time = "2021-09-23T09:11:38.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/d0/ea24907a6d1e3c5e40ff5b58920552c3e1e4e73181a8583d5bd9d5217305/termplotlib-0.3.9-py3-none-any.whl", hash = "sha256:827bec59e0de24dfe265b9d9a4adc4df8335aa98f49c1122bd53ced9b72c5206", size = 21535, upload-time = "2021-09-23T09:11:37.108Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = 
"2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +] + +[[package]] +name = "toolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, +] + +[[package]] +name = "typeguard" +version = "4.5.1" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/e8/66e25efcc18542d58706ce4e50415710593721aae26e794ab1dec34fb66f/typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274", size = 80121, upload-time = "2026-02-19T16:09:03.392Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40", size = 36745, upload-time = "2026-02-19T16:09:01.6Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.4.20260107" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/f3/a0663907082280664d745929205a89d41dffb29e89a50f753af7d57d0a96/types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f", size = 23165, upload-time = "2026-01-07T03:20:54.091Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/12/709ea261f2bf91ef0a26a9eed20f2623227a8ed85610c1e54c5805692ecb/types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d", size = 20676, upload-time = "2026-01-07T03:20:52.929Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "uptime" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ad/53/6c420ddf6949097d6f9406358951c9322505849bea9cb79efe3acc0bb55d/uptime-3.0.1.tar.gz", hash = "sha256:7c300254775b807ce46e3dcbcda30aa3b9a204b9c57a7ac1e79ee6dbe3942973", size = 6630, upload-time = "2013-10-07T14:19:58.456Z" } + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "web3" +version = "7.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "eth-abi" }, + { name = "eth-account" }, + { name = "eth-hash", extra = ["pycryptodome"] }, + { name = "eth-typing" }, + { name = "eth-utils" }, + { name = "hexbytes" }, + { name = "pydantic" }, + { name = "pyunormalize" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "types-requests" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/41/435cb36d36fc5142428292b876d0553d35af95e1582ecb7d8bcb64039d18/web3-7.14.1.tar.gz", hash = "sha256:856dc8517f362aefa75fdc298d975894055565dc866f21279f27fe060b7fb2c3", size = 2208998, upload-time = "2026-02-03T22:56:41.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/d1/862bbf48867685de1a563de20a9bad2b8c5c5678b3f08adc0e06797783f5/web3-7.14.1-py3-none-any.whl", hash = 
"sha256:bec367ba44261f874662aed9b5e138aa7bb907700a30a7580b2264534e88ce12", size = 1371268, upload-time = "2026-02-03T22:56:36.577Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + 
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url 
= "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +] From aa484149648dd720870974fa23f9d53a43e70e81 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 10:46:31 +0000 Subject: [PATCH 177/279] fix test configuration --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 899f41d5..8ffa8f73 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,5 +13,5 @@ jobs: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v6 - run: uv python install 3.14 - - run: uv pip install --system -r pyproject.toml --extra test - - run: pytest + - run: uv pip install --python 3.14 -r pyproject.toml --extra test + - run: uv run pytest From 52cbd1e26b1baf6706fb76c2a597141eb2db41fd Mon Sep 17 00:00:00 2001 
From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 10:48:02 +0000 Subject: [PATCH 178/279] remove .claude directory --- .claude/settings.json | 8 -------- .gitignore | 4 +++- 2 files changed, 3 insertions(+), 9 deletions(-) delete mode 100644 .claude/settings.json diff --git a/.claude/settings.json b/.claude/settings.json deleted file mode 100644 index fadf8157..00000000 --- a/.claude/settings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(python3 -m pip install pytest pytest-asyncio)", - "Bash(ruff check rocketwatch/ --fix)" - ] - } -} diff --git a/.gitignore b/.gitignore index 9100057f..363f6083 100644 --- a/.gitignore +++ b/.gitignore @@ -121,4 +121,6 @@ state.db mongodb/ # helper scripts -*.sh \ No newline at end of file +*.sh + +.claude \ No newline at end of file From 22a45ca5d6ad9175d49d76dd3df156941592f9e8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 11:01:26 +0000 Subject: [PATCH 179/279] another attempt at fixing the test config --- .github/workflows/test.yml | 4 +- .gitignore | 3 +- README.md | 7 +- uv.lock | 2658 ------------------------------------ 4 files changed, 5 insertions(+), 2667 deletions(-) delete mode 100644 uv.lock diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8ffa8f73..9b4fa8f7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,6 +12,4 @@ jobs: steps: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v6 - - run: uv python install 3.14 - - run: uv pip install --python 3.14 -r pyproject.toml --extra test - - run: uv run pytest + - run: uv run --python 3.14 --extra test pytest diff --git a/.gitignore b/.gitignore index 363f6083..bb4570d1 100644 --- a/.gitignore +++ b/.gitignore @@ -123,4 +123,5 @@ mongodb/ # helper scripts *.sh -.claude \ No newline at end of file +.claude +uv.lock \ No newline at end of file diff --git a/README.md b/README.md index b6995c1d..e78945cf 100644 --- 
a/README.md +++ b/README.md @@ -82,10 +82,8 @@ This starts the bot, MongoDB, and [Watchtower](https://containrrr.dev/watchtower ```sh # Install uv (https://docs.astral.sh/uv/) -uv python install 3.14 -uv pip install --python 3.14 -r pyproject.toml cd rocketwatch -uv run python . +uv run . ``` ## Development @@ -101,8 +99,7 @@ Configured rules: `B` (bugbear), `E` (pycodestyle), `F` (pyflakes), `I` (isort), ### Testing ```sh -uv pip install -r pyproject.toml --extra test -uv run pytest +uv run --extra test pytest ``` ### Plugin structure diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 44d8fd4f..00000000 --- a/uv.lock +++ /dev/null @@ -1,2658 +0,0 @@ -version = 1 -revision = 3 -requires-python = ">=3.12" -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", - "python_full_version < '3.13' and sys_platform == 'win32'", - "python_full_version < '3.13' and sys_platform == 'emscripten'", - "python_full_version < '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'", -] - -[[package]] -name = "aiocache" -version = "0.12.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/64/b945b8025a9d1e6e2138845f4022165d3b337f55f50984fbc6a4c0a1e355/aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713", size = 132196, upload-time = "2024-09-25T13:20:23.823Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/d7/15d67e05b235d1ed8c3ce61688fe4d84130e72af1657acadfaac3479f4cf/aiocache-0.12.3-py2.py3-none-any.whl", 
hash = "sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d", size = 28199, upload-time = "2024-09-25T13:20:22.688Z" }, -] - -[[package]] -name = "aiohappyeyeballs" -version = "2.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, -] - -[[package]] -name = "aiohttp" -version = "3.13.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohappyeyeballs" }, - { name = "aiosignal" }, - { name = "attrs" }, - { name = "frozenlist" }, - { name = "multidict" }, - { name = "propcache" }, - { name = "yarl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, - { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, - { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, - { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, - { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, - { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, - { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, - { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, - { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, - { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" }, - { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" }, - { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, - { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, - { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, - { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, - { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, - { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, - { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, - { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, - { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, - { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 
1763673, upload-time = "2026-01-03T17:31:10.676Z" }, - { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, - { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, - { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, - { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, - { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, - { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, - { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, - { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, - { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, - { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, - { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, - { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, - { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = 
"2026-01-03T17:31:44.984Z" }, - { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" }, - { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, - { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, - { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, - { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, - { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, - { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, - { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, - { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, - { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" }, - { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, - { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, - { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = 
"2026-01-03T17:32:22.215Z" }, - { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, -] - -[[package]] -name = "aiosignal" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "frozenlist" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, -] - -[[package]] -name = "anthropic" -version = "0.84.0" -source = { registry = "https://pypi.org/simple" } -dependencies = 
[ - { name = "anyio" }, - { name = "distro" }, - { name = "docstring-parser" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/04/ea/0869d6df9ef83dcf393aeefc12dd81677d091c6ffc86f783e51cf44062f2/anthropic-0.84.0.tar.gz", hash = "sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37", size = 539457, upload-time = "2026-02-25T05:22:38.54Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/ca/218fa25002a332c0aa149ba18ffc0543175998b1f65de63f6d106689a345/anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7", size = 455156, upload-time = "2026-02-25T05:22:40.468Z" }, -] - -[[package]] -name = "anyascii" -version = "0.3.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/db/ba/edebda727008390936da4a9bf677c19cd63b32d51e864656d2cbd1028e25/anyascii-0.3.3.tar.gz", hash = "sha256:c94e9dd9d47b3d9494eca305fef9447d00b4bf1a32aff85aa746fa3ec7fb95c3", size = 264680, upload-time = "2025-06-29T03:33:30.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/76/783b75a21ce3563b8709050de030ae253853b147bd52e141edc1025aa268/anyascii-0.3.3-py3-none-any.whl", hash = "sha256:f5ab5e53c8781a36b5a40e1296a0eeda2f48c649ef10c3921c1381b1d00dee7a", size = 345090, upload-time = "2025-06-29T03:33:28.356Z" }, -] - -[[package]] -name = "anyio" -version = "4.12.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = 
"2026-01-06T11:45:21.246Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, -] - -[[package]] -name = "attrs" -version = "25.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, -] - -[[package]] -name = "audioop-lts" -version = "0.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/53/946db57842a50b2da2e0c1e34bd37f36f5aadba1a929a3971c5d7841dbca/audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0", size = 30686, upload-time = "2025-08-05T16:43:17.409Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/d4/94d277ca941de5a507b07f0b592f199c22454eeaec8f008a286b3fbbacd6/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800", size = 46523, upload-time = "2025-08-05T16:42:20.836Z" }, - { url = "https://files.pythonhosted.org/packages/f8/5a/656d1c2da4b555920ce4177167bfeb8623d98765594af59702c8873f60ec/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = 
"sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303", size = 27455, upload-time = "2025-08-05T16:42:22.283Z" }, - { url = "https://files.pythonhosted.org/packages/1b/83/ea581e364ce7b0d41456fb79d6ee0ad482beda61faf0cab20cbd4c63a541/audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75", size = 26997, upload-time = "2025-08-05T16:42:23.849Z" }, - { url = "https://files.pythonhosted.org/packages/b8/3b/e8964210b5e216e5041593b7d33e97ee65967f17c282e8510d19c666dab4/audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d", size = 85844, upload-time = "2025-08-05T16:42:25.208Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2e/0a1c52faf10d51def20531a59ce4c706cb7952323b11709e10de324d6493/audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b", size = 85056, upload-time = "2025-08-05T16:42:26.559Z" }, - { url = "https://files.pythonhosted.org/packages/75/e8/cd95eef479656cb75ab05dfece8c1f8c395d17a7c651d88f8e6e291a63ab/audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8", size = 93892, upload-time = "2025-08-05T16:42:27.902Z" }, - { url = "https://files.pythonhosted.org/packages/5c/1e/a0c42570b74f83efa5cca34905b3eef03f7ab09fe5637015df538a7f3345/audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc", size = 96660, upload-time = "2025-08-05T16:42:28.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/d5/8a0ae607ca07dbb34027bac8db805498ee7bfecc05fd2c148cc1ed7646e7/audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3", size = 79143, upload-time = "2025-08-05T16:42:29.929Z" }, - { url = "https://files.pythonhosted.org/packages/12/17/0d28c46179e7910bfb0bb62760ccb33edb5de973052cb2230b662c14ca2e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6", size = 84313, upload-time = "2025-08-05T16:42:30.949Z" }, - { url = "https://files.pythonhosted.org/packages/84/ba/bd5d3806641564f2024e97ca98ea8f8811d4e01d9b9f9831474bc9e14f9e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a", size = 93044, upload-time = "2025-08-05T16:42:31.959Z" }, - { url = "https://files.pythonhosted.org/packages/f9/5e/435ce8d5642f1f7679540d1e73c1c42d933331c0976eb397d1717d7f01a3/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl", hash = "sha256:3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623", size = 78766, upload-time = "2025-08-05T16:42:33.302Z" }, - { url = "https://files.pythonhosted.org/packages/ae/3b/b909e76b606cbfd53875693ec8c156e93e15a1366a012f0b7e4fb52d3c34/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7", size = 87640, upload-time = "2025-08-05T16:42:34.854Z" }, - { url = "https://files.pythonhosted.org/packages/30/e7/8f1603b4572d79b775f2140d7952f200f5e6c62904585d08a01f0a70393a/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449", size = 86052, upload-time = "2025-08-05T16:42:35.839Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/96/c37846df657ccdda62ba1ae2b6534fa90e2e1b1742ca8dcf8ebd38c53801/audioop_lts-0.2.2-cp313-abi3-win32.whl", hash = "sha256:3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636", size = 26185, upload-time = "2025-08-05T16:42:37.04Z" }, - { url = "https://files.pythonhosted.org/packages/34/a5/9d78fdb5b844a83da8a71226c7bdae7cc638861085fff7a1d707cb4823fa/audioop_lts-0.2.2-cp313-abi3-win_amd64.whl", hash = "sha256:a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e", size = 30503, upload-time = "2025-08-05T16:42:38.427Z" }, - { url = "https://files.pythonhosted.org/packages/34/25/20d8fde083123e90c61b51afb547bb0ea7e77bab50d98c0ab243d02a0e43/audioop_lts-0.2.2-cp313-abi3-win_arm64.whl", hash = "sha256:5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f", size = 24173, upload-time = "2025-08-05T16:42:39.704Z" }, - { url = "https://files.pythonhosted.org/packages/58/a7/0a764f77b5c4ac58dc13c01a580f5d32ae8c74c92020b961556a43e26d02/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09", size = 47096, upload-time = "2025-08-05T16:42:40.684Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ed/ebebedde1a18848b085ad0fa54b66ceb95f1f94a3fc04f1cd1b5ccb0ed42/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58", size = 27748, upload-time = "2025-08-05T16:42:41.992Z" }, - { url = "https://files.pythonhosted.org/packages/cb/6e/11ca8c21af79f15dbb1c7f8017952ee8c810c438ce4e2b25638dfef2b02c/audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19", size = 27329, upload-time = "2025-08-05T16:42:42.987Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/52/0022f93d56d85eec5da6b9da6a958a1ef09e80c39f2cc0a590c6af81dcbb/audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911", size = 92407, upload-time = "2025-08-05T16:42:44.336Z" }, - { url = "https://files.pythonhosted.org/packages/87/1d/48a889855e67be8718adbc7a01f3c01d5743c325453a5e81cf3717664aad/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9", size = 91811, upload-time = "2025-08-05T16:42:45.325Z" }, - { url = "https://files.pythonhosted.org/packages/98/a6/94b7213190e8077547ffae75e13ed05edc488653c85aa5c41472c297d295/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe", size = 100470, upload-time = "2025-08-05T16:42:46.468Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e9/78450d7cb921ede0cfc33426d3a8023a3bda755883c95c868ee36db8d48d/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132", size = 103878, upload-time = "2025-08-05T16:42:47.576Z" }, - { url = "https://files.pythonhosted.org/packages/4f/e2/cd5439aad4f3e34ae1ee852025dc6aa8f67a82b97641e390bf7bd9891d3e/audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753", size = 84867, upload-time = "2025-08-05T16:42:49.003Z" }, - { url = "https://files.pythonhosted.org/packages/68/4b/9d853e9076c43ebba0d411e8d2aa19061083349ac695a7d082540bad64d0/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb", size = 90001, upload-time = "2025-08-05T16:42:50.038Z" }, - { url = "https://files.pythonhosted.org/packages/58/26/4bae7f9d2f116ed5593989d0e521d679b0d583973d203384679323d8fa85/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093", size = 99046, upload-time = "2025-08-05T16:42:51.111Z" }, - { url = "https://files.pythonhosted.org/packages/b2/67/a9f4fb3e250dda9e9046f8866e9fa7d52664f8985e445c6b4ad6dfb55641/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7", size = 84788, upload-time = "2025-08-05T16:42:52.198Z" }, - { url = "https://files.pythonhosted.org/packages/70/f7/3de86562db0121956148bcb0fe5b506615e3bcf6e63c4357a612b910765a/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c", size = 94472, upload-time = "2025-08-05T16:42:53.59Z" }, - { url = "https://files.pythonhosted.org/packages/f1/32/fd772bf9078ae1001207d2df1eef3da05bea611a87dd0e8217989b2848fa/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5", size = 92279, upload-time = "2025-08-05T16:42:54.632Z" }, - { url = "https://files.pythonhosted.org/packages/4f/41/affea7181592ab0ab560044632571a38edaf9130b84928177823fbf3176a/audioop_lts-0.2.2-cp313-cp313t-win32.whl", hash = "sha256:d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917", size = 26568, upload-time = "2025-08-05T16:42:55.627Z" }, - { url = "https://files.pythonhosted.org/packages/28/2b/0372842877016641db8fc54d5c88596b542eec2f8f6c20a36fb6612bf9ee/audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547", size = 30942, upload-time = "2025-08-05T16:42:56.674Z" 
}, - { url = "https://files.pythonhosted.org/packages/ee/ca/baf2b9cc7e96c179bb4a54f30fcd83e6ecb340031bde68f486403f943768/audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969", size = 24603, upload-time = "2025-08-05T16:42:57.571Z" }, - { url = "https://files.pythonhosted.org/packages/5c/73/413b5a2804091e2c7d5def1d618e4837f1cb82464e230f827226278556b7/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6", size = 47104, upload-time = "2025-08-05T16:42:58.518Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8c/daa3308dc6593944410c2c68306a5e217f5c05b70a12e70228e7dd42dc5c/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a", size = 27754, upload-time = "2025-08-05T16:43:00.132Z" }, - { url = "https://files.pythonhosted.org/packages/4e/86/c2e0f627168fcf61781a8f72cab06b228fe1da4b9fa4ab39cfb791b5836b/audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b", size = 27332, upload-time = "2025-08-05T16:43:01.666Z" }, - { url = "https://files.pythonhosted.org/packages/c7/bd/35dce665255434f54e5307de39e31912a6f902d4572da7c37582809de14f/audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6", size = 92396, upload-time = "2025-08-05T16:43:02.991Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d2/deeb9f51def1437b3afa35aeb729d577c04bcd89394cb56f9239a9f50b6f/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf", size = 91811, upload-time = "2025-08-05T16:43:04.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/3b/09f8b35b227cee28cc8231e296a82759ed80c1a08e349811d69773c48426/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd", size = 100483, upload-time = "2025-08-05T16:43:05.085Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/05b48a935cf3b130c248bfdbdea71ce6437f5394ee8533e0edd7cfd93d5e/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a", size = 103885, upload-time = "2025-08-05T16:43:06.197Z" }, - { url = "https://files.pythonhosted.org/packages/83/80/186b7fce6d35b68d3d739f228dc31d60b3412105854edb975aa155a58339/audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e", size = 84899, upload-time = "2025-08-05T16:43:07.291Z" }, - { url = "https://files.pythonhosted.org/packages/49/89/c78cc5ac6cb5828f17514fb12966e299c850bc885e80f8ad94e38d450886/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7", size = 89998, upload-time = "2025-08-05T16:43:08.335Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4b/6401888d0c010e586c2ca50fce4c903d70a6bb55928b16cfbdfd957a13da/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5", size = 99046, upload-time = "2025-08-05T16:43:09.367Z" }, - { url = "https://files.pythonhosted.org/packages/de/f8/c874ca9bb447dae0e2ef2e231f6c4c2b0c39e31ae684d2420b0f9e97ee68/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9", size = 84843, upload-time = 
"2025-08-05T16:43:10.749Z" }, - { url = "https://files.pythonhosted.org/packages/3e/c0/0323e66f3daebc13fd46b36b30c3be47e3fc4257eae44f1e77eb828c703f/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602", size = 94490, upload-time = "2025-08-05T16:43:12.131Z" }, - { url = "https://files.pythonhosted.org/packages/98/6b/acc7734ac02d95ab791c10c3f17ffa3584ccb9ac5c18fd771c638ed6d1f5/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0", size = 92297, upload-time = "2025-08-05T16:43:13.139Z" }, - { url = "https://files.pythonhosted.org/packages/13/c3/c3dc3f564ce6877ecd2a05f8d751b9b27a8c320c2533a98b0c86349778d0/audioop_lts-0.2.2-cp314-cp314t-win32.whl", hash = "sha256:068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3", size = 27331, upload-time = "2025-08-05T16:43:14.19Z" }, - { url = "https://files.pythonhosted.org/packages/72/bb/b4608537e9ffcb86449091939d52d24a055216a36a8bf66b936af8c3e7ac/audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b", size = 31697, upload-time = "2025-08-05T16:43:15.193Z" }, - { url = "https://files.pythonhosted.org/packages/f6/22/91616fe707a5c5510de2cac9b046a30defe7007ba8a0c04f9c08f27df312/audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd", size = 25206, upload-time = "2025-08-05T16:43:16.444Z" }, -] - -[[package]] -name = "beautifulsoup4" -version = "4.14.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "soupsieve" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = 
"sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, -] - -[[package]] -name = "bidict" -version = "0.23.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" }, -] - -[[package]] -name = "bitarray" -version = "3.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/06/92fdc84448d324ab8434b78e65caf4fb4c6c90b4f8ad9bdd4c8021bfaf1e/bitarray-3.8.0.tar.gz", hash = "sha256:3eae38daffd77c9621ae80c16932eea3fb3a4af141fb7cc724d4ad93eff9210d", size = 151991, upload-time = "2025-11-02T21:41:15.117Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/82/a0/0c41d893eda756315491adfdbf9bc928aee3d377a7f97a8834d453aa5de1/bitarray-3.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2fcbe9b3a5996b417e030aa33a562e7e20dfc86271e53d7e841fc5df16268b8", size = 148575, upload-time = "2025-11-02T21:39:25.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/30/12ab2f4a4429bd844b419c37877caba93d676d18be71354fbbeb21d9f4cc/bitarray-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd761d158f67e288fd0ebe00c3b158095ce80a4bc7c32b60c7121224003ba70d", size = 145454, upload-time = "2025-11-02T21:39:26.695Z" }, - { url = "https://files.pythonhosted.org/packages/26/58/314b3e3f219533464e120f0c51ac5123e7b1c1b91f725a4073fb70c5a858/bitarray-3.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c394a3f055b49f92626f83c1a0b6d6cd2c628f1ccd72481c3e3c6aa4695f3b20", size = 332949, upload-time = "2025-11-02T21:39:27.801Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ce/ca8c706bd8341c7a22dd92d2a528af71f7e5f4726085d93f81fd768cb03b/bitarray-3.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:969fd67de8c42affdb47b38b80f1eaa79ac0ef17d65407cdd931db1675315af1", size = 360599, upload-time = "2025-11-02T21:39:28.964Z" }, - { url = "https://files.pythonhosted.org/packages/ef/dc/aa181df85f933052d962804906b282acb433cb9318b08ec2aceb4ee34faf/bitarray-3.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99d25aff3745c54e61ab340b98400c52ebec04290a62078155e0d7eb30380220", size = 371972, upload-time = "2025-11-02T21:39:30.228Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d9/b805bfa158c7bcf4df0ac19b1be581b47e1ddb792c11023aed80a7058e78/bitarray-3.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e645b4c365d6f1f9e0799380ad6395268f3c3b898244a650aaeb8d9d27b74c35", size = 340303, upload-time = "2025-11-02T21:39:31.342Z" }, - { url = "https://files.pythonhosted.org/packages/1f/42/5308cc97ea929e30727292617a3a88293470166851e13c9e3f16f395da55/bitarray-3.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2fa23fdb3beab313950bbb49674e8a161e61449332d3997089fe3944953f1b77", size = 
330494, upload-time = "2025-11-02T21:39:32.769Z" }, - { url = "https://files.pythonhosted.org/packages/4c/89/64f1596cb80433323efdbc8dcd0d6e57c40dfbe6ea3341623f34ec397edd/bitarray-3.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:165052a0e61c880f7093808a0c524ce1b3555bfa114c0dfb5c809cd07918a60d", size = 358123, upload-time = "2025-11-02T21:39:34.331Z" }, - { url = "https://files.pythonhosted.org/packages/27/fd/f3d49c5443b57087f888b5e118c8dd78bb7c8e8cfeeed250f8e92128a05f/bitarray-3.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:337c8cd46a4c6568d367ed676cbf2d7de16f890bb31dbb54c44c1d6bb6d4a1de", size = 356046, upload-time = "2025-11-02T21:39:35.449Z" }, - { url = "https://files.pythonhosted.org/packages/aa/db/1fd0b402bd2b47142e958b6930dbb9445235d03fa703c9a24caa6e576ae2/bitarray-3.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21ca6a47bf20db9e7ad74ca04b3d479e4d76109b68333eb23535553d2705339e", size = 336872, upload-time = "2025-11-02T21:39:36.891Z" }, - { url = "https://files.pythonhosted.org/packages/58/73/680b47718f1313b4538af479c4732eaca0aeda34d93fc5b869f87932d57d/bitarray-3.8.0-cp312-cp312-win32.whl", hash = "sha256:178c5a4c7fdfb5cd79e372ae7f675390e670f3732e5bc68d327e01a5b3ff8d55", size = 143025, upload-time = "2025-11-02T21:39:38.303Z" }, - { url = "https://files.pythonhosted.org/packages/f8/11/7792587c19c79a8283e8838f44709fa4338a8f7d2a3091dfd81c07ae89c7/bitarray-3.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:75a3b6e9c695a6570ea488db75b84bb592ff70a944957efa1c655867c575018b", size = 149969, upload-time = "2025-11-02T21:39:39.715Z" }, - { url = "https://files.pythonhosted.org/packages/9a/00/9df64b5d8a84e8e9ec392f6f9ce93f50626a5b301cb6c6b3fe3406454d66/bitarray-3.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:5591daf81313096909d973fb2612fccd87528fdfdd39f6478bdce54543178954", size = 146907, upload-time = "2025-11-02T21:39:40.815Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/35/480364d4baf1e34c79076750914664373f561c58abb5c31c35b3fae613ff/bitarray-3.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18214bac86341f1cc413772e66447d6cca10981e2880b70ecaf4e826c04f95e9", size = 148582, upload-time = "2025-11-02T21:39:42.268Z" }, - { url = "https://files.pythonhosted.org/packages/5e/a8/718b95524c803937f4edbaaf6480f39c80f6ed189d61357b345e8361ffb6/bitarray-3.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:01c5f0dc080b0ebb432f7a68ee1e88a76bd34f6d89c9568fcec65fb16ed71f0e", size = 145433, upload-time = "2025-11-02T21:39:43.552Z" }, - { url = "https://files.pythonhosted.org/packages/03/66/4a10f30dc9e2e01e3b4ecd44a511219f98e63c86b0e0f704c90fac24059b/bitarray-3.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:86685fa04067f7175f9718489ae755f6acde03593a1a9ca89305554af40e14fd", size = 332986, upload-time = "2025-11-02T21:39:44.656Z" }, - { url = "https://files.pythonhosted.org/packages/53/25/4c08774d847f80a1166e4c704b4e0f1c417c0afe6306eae0bc5e70d35faa/bitarray-3.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56896ceeffe25946c4010320629e2d858ca763cd8ded273c81672a5edbcb1e0a", size = 360634, upload-time = "2025-11-02T21:39:45.798Z" }, - { url = "https://files.pythonhosted.org/packages/a5/8f/bf8ad26169ebd0b2746d5c7564db734453ca467f8aab87e9d43b0a794383/bitarray-3.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9858dcbc23ba7eaadcd319786b982278a1a2b2020720b19db43e309579ff76fb", size = 371992, upload-time = "2025-11-02T21:39:46.968Z" }, - { url = "https://files.pythonhosted.org/packages/a9/16/ce166754e7c9d10650e02914552fa637cf3b2591f7ed16632bbf6b783312/bitarray-3.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa7dec53c25f1949513457ef8b0ea1fb40e76c672cc4d2daa8ad3c8d6b73491a", size = 340315, 
upload-time = "2025-11-02T21:39:48.182Z" }, - { url = "https://files.pythonhosted.org/packages/de/2a/fbba3a106ddd260e84b9a624f730257c32ba51a8a029565248dfedfdf6f2/bitarray-3.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15a2eff91f54d2b1f573cca8ca6fb58763ce8fea80e7899ab028f3987ef71cd5", size = 330473, upload-time = "2025-11-02T21:39:49.705Z" }, - { url = "https://files.pythonhosted.org/packages/68/97/56cf3c70196e7307ad32318a9d6ed969dbdc6a4534bbe429112fa7dfe42e/bitarray-3.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b1572ee0eb1967e71787af636bb7d1eb9c6735d5337762c450650e7f51844594", size = 358129, upload-time = "2025-11-02T21:39:51.189Z" }, - { url = "https://files.pythonhosted.org/packages/fd/be/afd391a5c0896d3339613321b2f94af853f29afc8bd3fbc327431244c642/bitarray-3.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5bfac7f236ba1a4d402644bdce47fb9db02a7cf3214a1f637d3a88390f9e5428", size = 356005, upload-time = "2025-11-02T21:39:52.355Z" }, - { url = "https://files.pythonhosted.org/packages/ae/08/a8e1a371babba29bad3378bb3a2cdca2b012170711e7fe1f22031a6b7b95/bitarray-3.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f0a55cf02d2cdd739b40ce10c09bbdd520e141217696add7a48b56e67bdfdfe6", size = 336862, upload-time = "2025-11-02T21:39:54.345Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/6dc1d0fdc06991c8dc3b1fcfe1ae49fbaced42064cd1b5f24278e73fe05f/bitarray-3.8.0-cp313-cp313-win32.whl", hash = "sha256:a2ba92f59e30ce915e9e79af37649432e3a212ddddf416d4d686b1b4825bcdb2", size = 143018, upload-time = "2025-11-02T21:39:56.361Z" }, - { url = "https://files.pythonhosted.org/packages/2e/72/76e13f5cd23b8b9071747909663ce3b02da24a5e7e22c35146338625db35/bitarray-3.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f2a5d8006db5a555e06f9437e76bf52537d3dfd130cb8ae2b30866aca32c9", size = 149977, upload-time = "2025-11-02T21:39:57.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/37/60f336c32336cc3ec03b0c61076f16ea2f05d5371c8a56e802161d218b77/bitarray-3.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:50ddbe3a7b4b6ab96812f5a4d570f401a2cdb95642fd04c062f98939610bbeee", size = 146930, upload-time = "2025-11-02T21:39:59.308Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b0/411327a6c7f6b2bead64bb06fe60b92e0344957ec1ab0645d5ccc25fdafe/bitarray-3.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8cbd4bfc933b33b85c43ef4c1f4d5e3e9d91975ea6368acf5fbac02bac06ea89", size = 148563, upload-time = "2025-11-02T21:40:01.006Z" }, - { url = "https://files.pythonhosted.org/packages/2a/bc/ff80d97c627d774f879da0ea93223adb1267feab7e07d5c17580ffe6d632/bitarray-3.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9d35d8f8a1c9ed4e2b08187b513f8a3c71958600129db3aa26d85ea3abfd1310", size = 145422, upload-time = "2025-11-02T21:40:02.535Z" }, - { url = "https://files.pythonhosted.org/packages/66/e7/b4cb6c5689aacd0a32f3aa8a507155eaa33528c63de2f182b60843fbf700/bitarray-3.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f55e14e7c56f4fafe1343480c32b110ef03836c21ff7c48bae7add6818f77c", size = 332852, upload-time = "2025-11-02T21:40:03.645Z" }, - { url = "https://files.pythonhosted.org/packages/e7/91/fbd1b047e3e2f4b65590f289c8151df1d203d75b005f5aae4e072fe77d76/bitarray-3.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dfbe2aa45b273f49e715c5345d94874cb65a28482bf231af408891c260601b8d", size = 360801, upload-time = "2025-11-02T21:40:04.827Z" }, - { url = "https://files.pythonhosted.org/packages/ef/4a/63064c593627bac8754fdafcb5343999c93ab2aeb27bcd9d270a010abea5/bitarray-3.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64af877116edf051375b45f0bda648143176a017b13803ec7b3a3111dc05f4c5", size = 371408, upload-time = "2025-11-02T21:40:05.985Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/97/ddc07723767bdafd170f2ff6e173c940fa874192783ee464aa3c1dedf07d/bitarray-3.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cdfbb27f2c46bb5bbdcee147530cbc5ca8ab858d7693924e88e30ada21b2c5e2", size = 340033, upload-time = "2025-11-02T21:40:07.189Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1e/e1ea9f1146fd4af032817069ff118918d73e5de519854ce3860e2ed560ff/bitarray-3.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4d73d4948dcc5591d880db8933004e01f1dd2296df9de815354d53469beb26fe", size = 330774, upload-time = "2025-11-02T21:40:08.496Z" }, - { url = "https://files.pythonhosted.org/packages/cf/9f/8242296c124a48d1eab471fd0838aeb7ea9c6fd720302d99ab7855d3e6d3/bitarray-3.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:28a85b056c0eb7f5d864c0ceef07034117e8ebfca756f50648c71950a568ba11", size = 358337, upload-time = "2025-11-02T21:40:10.035Z" }, - { url = "https://files.pythonhosted.org/packages/b5/6b/9095d75264c67d479f298c80802422464ce18c3cdd893252eeccf4997611/bitarray-3.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:79ec4498a545733ecace48d780d22407411b07403a2e08b9a4d7596c0b97ebd7", size = 355639, upload-time = "2025-11-02T21:40:11.485Z" }, - { url = "https://files.pythonhosted.org/packages/a0/af/c93c0ae5ef824136e90ac7ddf6cceccb1232f34240b2f55a922f874da9b4/bitarray-3.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:33af25c4ff7723363cb8404dfc2eefeab4110b654f6c98d26aba8a08c745d860", size = 336999, upload-time = "2025-11-02T21:40:12.709Z" }, - { url = "https://files.pythonhosted.org/packages/81/0f/72c951f5997b2876355d5e671f78dd2362493254876675cf22dbd24389ae/bitarray-3.8.0-cp314-cp314-win32.whl", hash = "sha256:2c3bb96b6026643ce24677650889b09073f60b9860a71765f843c99f9ab38b25", size = 142169, upload-time = "2025-11-02T21:40:14.031Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/55/ef1b4de8107bf13823da8756c20e1fbc9452228b4e837f46f6d9ddba3eb3/bitarray-3.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:847c7f61964225fc489fe1d49eda7e0e0d253e98862c012cecf845f9ad45cdf4", size = 148737, upload-time = "2025-11-02T21:40:15.436Z" }, - { url = "https://files.pythonhosted.org/packages/5f/26/bc0784136775024ac56cc67c0d6f9aa77a7770de7f82c3a7c9be11c217cd/bitarray-3.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:a2cb35a6efaa0e3623d8272471371a12c7e07b51a33e5efce9b58f655d864b4e", size = 146083, upload-time = "2025-11-02T21:40:17.135Z" }, - { url = "https://files.pythonhosted.org/packages/6e/64/57984e64264bf43d93a1809e645972771566a2d0345f4896b041ce20b000/bitarray-3.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:15e8d0597cc6e8496de6f4dea2a6880c57e1251502a7072f5631108a1aa28521", size = 149455, upload-time = "2025-11-02T21:40:18.558Z" }, - { url = "https://files.pythonhosted.org/packages/81/c0/0d5f2eaef1867f462f764bdb07d1e116c33a1bf052ea21889aefe4282f5b/bitarray-3.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8ffe660e963ae711cb9e2b8d8461c9b1ad6167823837fc17d59d5e539fb898fa", size = 146491, upload-time = "2025-11-02T21:40:19.665Z" }, - { url = "https://files.pythonhosted.org/packages/65/c6/bc1261f7a8862c0c59220a484464739e52235fd1e2afcb24d7f7d3fb5702/bitarray-3.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4779f356083c62e29b4198d290b7b17a39a69702d150678b7efff0fdddf494a8", size = 339721, upload-time = "2025-11-02T21:40:21.277Z" }, - { url = "https://files.pythonhosted.org/packages/81/d8/289ca55dd2939ea17b1108dc53bffc0fdc5160ba44f77502dfaae35d08c6/bitarray-3.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:025d133bf4ca8cf75f904eeb8ea946228d7c043231866143f31946a6f4dd0bf3", size = 367823, upload-time = "2025-11-02T21:40:22.463Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/a2/61e7461ca9ac0fcb70f327a2e84b006996d2a840898e69037a39c87c6d06/bitarray-3.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:451f9958850ea98440d542278368c8d1e1ea821e2494b204570ba34a340759df", size = 377341, upload-time = "2025-11-02T21:40:23.789Z" }, - { url = "https://files.pythonhosted.org/packages/6c/87/4a0c9c8bdb13916d443e04d8f8542eef9190f31425da3c17c3478c40173f/bitarray-3.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6d79f659965290af60d6acc8e2716341865fe74609a7ede2a33c2f86ad893b8f", size = 344985, upload-time = "2025-11-02T21:40:25.261Z" }, - { url = "https://files.pythonhosted.org/packages/17/4c/ff9259b916efe53695b631772e5213699c738efc2471b5ffe273f4000994/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fbf05678c2ae0064fb1b8de7e9e8f0fc30621b73c8477786dd0fb3868044a8c8", size = 336796, upload-time = "2025-11-02T21:40:26.942Z" }, - { url = "https://files.pythonhosted.org/packages/0f/4b/51b2468bbddbade5e2f3b8d5db08282c5b309e8687b0f02f75a8b5ff559c/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:c396358023b876cff547ce87f4e8ff8a2280598873a137e8cc69e115262260b8", size = 365085, upload-time = "2025-11-02T21:40:28.224Z" }, - { url = "https://files.pythonhosted.org/packages/bf/79/53473bfc2e052c6dbb628cdc1b156be621c77aaeb715918358b01574be55/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ed3493a369fe849cce98542d7405c88030b355e4d2e113887cb7ecc86c205773", size = 361012, upload-time = "2025-11-02T21:40:29.635Z" }, - { url = "https://files.pythonhosted.org/packages/c4/b1/242bf2e44bfc69e73fa2b954b425d761a8e632f78ea31008f1c3cfad0854/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c764fb167411d5afaef88138542a4bfa28bd5e5ded5e8e42df87cef965efd6e9", size = 340644, upload-time = "2025-11-02T21:40:31.089Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/01/12e5ecf30a5de28a32485f226cad4b8a546845f65f755ce0365057ab1e92/bitarray-3.8.0-cp314-cp314t-win32.whl", hash = "sha256:e12769d3adcc419e65860de946df8d2ed274932177ac1cdb05186e498aaa9149", size = 143630, upload-time = "2025-11-02T21:40:32.351Z" }, - { url = "https://files.pythonhosted.org/packages/b6/92/6b6ade587b08024a8a890b07724775d29da9cf7497be5c3cbe226185e463/bitarray-3.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0ca70ccf789446a6dfde40b482ec21d28067172cd1f8efd50d5548159fccad9e", size = 150250, upload-time = "2025-11-02T21:40:33.596Z" }, - { url = "https://files.pythonhosted.org/packages/ed/40/be3858ffed004e47e48a2cefecdbf9b950d41098b780f9dc3aa609a88351/bitarray-3.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2a3d1b05ffdd3e95687942ae7b13c63689f85d3f15c39b33329e3cb9ce6c015f", size = 147015, upload-time = "2025-11-02T21:40:35.064Z" }, -] - -[[package]] -name = "cachetools" -version = "7.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/5c/3b882b82e9af737906539a2eafb62f96a229f1fa80255bede0c7b554cbc4/cachetools-7.0.3.tar.gz", hash = "sha256:8c246313b95849964e54a909c03b327a87ab0428b068fac10da7b105ca275ef6", size = 37187, upload-time = "2026-03-05T21:00:57.918Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/4a/573185481c50a8841331f54ddae44e4a3469c46aa0b397731c53a004369a/cachetools-7.0.3-py3-none-any.whl", hash = "sha256:c128ffca156eef344c25fcd08a96a5952803786fa33097f5f2d49edf76f79d53", size = 13907, upload-time = "2026-03-05T21:00:56.486Z" }, -] - -[[package]] -name = "certifi" -version = "2026.2.25" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" 
} -wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/b6/9ee9c1a608916ca5feae81a344dffbaa53b26b90be58cc2159e3332d44ec/charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade", size = 280976, upload-time = "2026-03-06T06:01:15.276Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d8/a54f7c0b96f1df3563e9190f04daf981e365a9b397eedfdfb5dbef7e5c6c/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54", size = 189356, upload-time = "2026-03-06T06:01:16.511Z" }, - { url = "https://files.pythonhosted.org/packages/42/69/2bf7f76ce1446759a5787cb87d38f6a61eb47dbbdf035cfebf6347292a65/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467", size = 206369, upload-time = "2026-03-06T06:01:17.853Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/9c/949d1a46dab56b959d9a87272482195f1840b515a3380e39986989a893ae/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60", size = 203285, upload-time = "2026-03-06T06:01:19.473Z" }, - { url = "https://files.pythonhosted.org/packages/67/5c/ae30362a88b4da237d71ea214a8c7eb915db3eec941adda511729ac25fa2/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d", size = 196274, upload-time = "2026-03-06T06:01:20.728Z" }, - { url = "https://files.pythonhosted.org/packages/b2/07/c9f2cb0e46cb6d64fdcc4f95953747b843bb2181bda678dc4e699b8f0f9a/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e", size = 184715, upload-time = "2026-03-06T06:01:22.194Z" }, - { url = "https://files.pythonhosted.org/packages/36/64/6b0ca95c44fddf692cd06d642b28f63009d0ce325fad6e9b2b4d0ef86a52/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f", size = 193426, upload-time = "2026-03-06T06:01:23.795Z" }, - { url = "https://files.pythonhosted.org/packages/50/bc/a730690d726403743795ca3f5bb2baf67838c5fea78236098f324b965e40/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc", size = 191780, upload-time = "2026-03-06T06:01:25.053Z" }, - { url = "https://files.pythonhosted.org/packages/97/4f/6c0bc9af68222b22951552d73df4532b5be6447cee32d58e7e8c74ecbb7b/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95", size = 185805, 
upload-time = "2026-03-06T06:01:26.294Z" }, - { url = "https://files.pythonhosted.org/packages/dd/b9/a523fb9b0ee90814b503452b2600e4cbc118cd68714d57041564886e7325/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a", size = 208342, upload-time = "2026-03-06T06:01:27.55Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/c59e761dee4464050713e50e27b58266cc8e209e518c0b378c1580c959ba/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac", size = 193661, upload-time = "2026-03-06T06:01:29.051Z" }, - { url = "https://files.pythonhosted.org/packages/1c/43/729fa30aad69783f755c5ad8649da17ee095311ca42024742701e202dc59/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1", size = 204819, upload-time = "2026-03-06T06:01:30.298Z" }, - { url = "https://files.pythonhosted.org/packages/87/33/d9b442ce5a91b96fc0840455a9e49a611bbadae6122778d0a6a79683dd31/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98", size = 198080, upload-time = "2026-03-06T06:01:31.478Z" }, - { url = "https://files.pythonhosted.org/packages/56/5a/b8b5a23134978ee9885cee2d6995f4c27cc41f9baded0a9685eabc5338f0/charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262", size = 132630, upload-time = "2026-03-06T06:01:33.056Z" }, - { url = "https://files.pythonhosted.org/packages/70/53/e44a4c07e8904500aec95865dc3f6464dc3586a039ef0df606eb3ac38e35/charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636", size = 142856, upload-time = "2026-03-06T06:01:34.489Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/aa/c5628f7cad591b1cf45790b7a61483c3e36cf41349c98af7813c483fd6e8/charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02", size = 132982, upload-time = "2026-03-06T06:01:35.641Z" }, - { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, - { url = "https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, - { url = "https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, - { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, - { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, - { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, - { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, - { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, - { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, - { url = "https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, - { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, - { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, - { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, - { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, - { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, - { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, 
upload-time = "2026-03-06T06:02:01.508Z" }, - { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, - { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, - { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, - { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, - { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, - { url = "https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, upload-time = "2026-03-06T06:02:14.83Z" }, - { url = "https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, - { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, - { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, - { url = "https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, - { url = "https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, - { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, -] - -[[package]] -name = "ckzg" -version = "2.1.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/b8/9add33a0be636e2d4467ea4497b47e124677a0478d9be40ef6473d4ec29b/ckzg-2.1.6.tar.gz", hash = "sha256:49df31684283dfcfd1eeca638d84c03788ebdd48e8afc0643bf5188ec023dc8d", size = 1127792, upload-time = "2026-02-26T17:19:49.805Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/34/61/2be9ebc6677505b693f3026003e319f1afafd9deef85233ad011cebf61f0/ckzg-2.1.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93b350b0f3d074df84f8836df0db2fb0978403565477b6e25415c48251c5c7a1", size = 96390, upload-time = "2026-02-26T17:18:52.013Z" }, - { url = "https://files.pythonhosted.org/packages/79/1f/b96709267c309ff9638bfac7ccfbc255c9590922504f4501aba31f80ff55/ckzg-2.1.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:c1e3cf33671cd35d86d7a7f68ef1f40381a3315a61db8861858247cfda46ca6d", size = 180446, upload-time = "2026-02-26T17:18:53.009Z" }, - { url = "https://files.pythonhosted.org/packages/8e/16/e015e0d897a7af1f5fcaccf343adc264adfb73b1fa9181edce7965c7bbfd/ckzg-2.1.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cfe71caa4f667ded6c87f496ac1783f004c3f5ab29f695f8d3163c75df51398f", size = 166243, upload-time = "2026-02-26T17:18:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ee/cd8206f1005566aa6f31f226d009dfc08bca71b883aeea010108151df7a7/ckzg-2.1.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bf835249b20d58de28b097da7c06c3a6b3b5f184120b0ace55373d6b044c9445", size = 176019, upload-time = "2026-02-26T17:18:55.077Z" }, - { url = "https://files.pythonhosted.org/packages/5c/09/1b2215ba11cad28e17eed1644849aaa7caa463dbfc96024670b96c8cf6c8/ckzg-2.1.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a82f4bfd4fb1d3b378af859a1d0dc1febb83634981d8d50635afec0c7d10a372", size = 173682, upload-time = "2026-02-26T17:18:56.095Z" }, - { url = "https://files.pythonhosted.org/packages/4c/e7/771182e7fdf331da81d4917741e91537f2de50b9dd12b8530241be699018/ckzg-2.1.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:36fd682e34c47befb7f28324793a92bb7fb14f8f2845d0b39abbcb6444e9565f", size = 188872, upload-time = "2026-02-26T17:18:58.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/7c/1eca8c4abe8f83d15de7c3c8de6cc7cc42067502ed8591e70a03ef0e6857/ckzg-2.1.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9efcf3359bf12b128b4e0d86ed663946699fecaeb2d1298594c14a7cf14a7feb", size = 183566, upload-time = "2026-02-26T17:18:59.211Z" }, - { url = "https://files.pythonhosted.org/packages/fb/91/163b08eb84acaa1bcee2a1509bfc856fa833def7e2077f9127256c2b570c/ckzg-2.1.6-cp312-cp312-win_amd64.whl", hash = "sha256:e1c705a96c0ac99669f3691613b6eecd1d36c75fe433322b12293c906f8d8ae2", size = 99807, upload-time = "2026-02-26T17:19:00.271Z" }, - { url = "https://files.pythonhosted.org/packages/90/34/0cc58fa7907ea5c3961f6c9dd086b2d75ffb7897aeff4baddf1ee868ac60/ckzg-2.1.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:616cd69938d0d79b13e128f4706ea48c21866c3f7c52547d4f185837d5568d69", size = 96390, upload-time = "2026-02-26T17:19:01.532Z" }, - { url = "https://files.pythonhosted.org/packages/11/f1/dc6a25d3ba37531e2b9838ad875d061348685b50ff6759261c9831942a77/ckzg-2.1.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:8d3056cd48f97041f98b73404f397c29aebd04b7f8f3bbc012180680d295a464", size = 180486, upload-time = "2026-02-26T17:19:02.768Z" }, - { url = "https://files.pythonhosted.org/packages/d5/95/17c7407af8a5070cf05ed8ff1156d9b62babecf74c84b2d61ed03efc72a2/ckzg-2.1.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c732e429b50dee04cd51fb601fc9cb4ba4d853e2e29a9914b3fdd36b576b0211", size = 166304, upload-time = "2026-02-26T17:19:03.825Z" }, - { url = "https://files.pythonhosted.org/packages/9a/31/8d7012523edea81d54f2f634f512f3a0705dd3dca99fdfe1281b09bc96ca/ckzg-2.1.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0f9933b6e06e6560b4b8980e2385ec4d639cfdebb03bffaadde75a5c61edb45", size = 176058, upload-time = "2026-02-26T17:19:04.879Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/4d/f1a73fee7b2b2212691acf2231a8df717b19f95412ca236549f4d4a21932/ckzg-2.1.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:be65a7c00d445cf07adea7679842df469989e6790df1d846944f9885a4a788be", size = 173687, upload-time = "2026-02-26T17:19:05.919Z" }, - { url = "https://files.pythonhosted.org/packages/03/ed/cc0866735571f4e55d8e0edd09d34aab1ba1a4b83288bafa398651df4d88/ckzg-2.1.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:36e2e198c9e0a94498db32b760b446a1c29ba7e01aaec17404237ef6ae1705df", size = 188907, upload-time = "2026-02-26T17:19:06.934Z" }, - { url = "https://files.pythonhosted.org/packages/fd/5b/154c5a3ebd6fe97e1bf5de60cb3d3bc4f9ff42565dab87957292d7918eb8/ckzg-2.1.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5ce6aaac6ad4d70cc6e8ef61b430957150e1eb3370fd898cebd074db85cde987", size = 183602, upload-time = "2026-02-26T17:19:08.415Z" }, - { url = "https://files.pythonhosted.org/packages/81/8d/01bc02cfd24bbe641da36e5cbc50549db505b404a096ea501dcc1920f572/ckzg-2.1.6-cp313-cp313-win_amd64.whl", hash = "sha256:e897650e650fd090b97136103963a0bd338ff8582442b6e4b2bd660b0b81ff2e", size = 99810, upload-time = "2026-02-26T17:19:09.911Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/4f4449d60daf573ef4f14ab963e73dbd9803774fba40e839368af503b7de/ckzg-2.1.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b10f2b50369d95c2d3707293f958a73cc4a505f53d1dfeadb9534aad4dd33ec9", size = 96402, upload-time = "2026-02-26T17:19:11.147Z" }, - { url = "https://files.pythonhosted.org/packages/24/91/85eb888653ad9c8872b017ae765ec331eb7bac6c49b5815d8f8b687b7928/ckzg-2.1.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:c1642c7c1fd9225155660ee5bf96117b1d94a639a7f495c3b655ad7640bbb5c1", size = 180495, upload-time = "2026-02-26T17:19:12.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/a0/e42dd754e825ca0aac733993d6c60d202a6c7e4608e0ef75467bba6c1fb8/ckzg-2.1.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb3d119e5008385ec3d47e81965bf1c644f50077fa9aa890d49ee1a0963fbfb3", size = 166328, upload-time = "2026-02-26T17:19:13.512Z" }, - { url = "https://files.pythonhosted.org/packages/7d/35/6d94c0cecf02bec72a5b5e3f61e7987a428abb3af714cda25ebb1f2a3681/ckzg-2.1.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e81244ae23f27a6f85dc69838adcd3c5618acef57aec7ed87db8070cd6995bf", size = 176069, upload-time = "2026-02-26T17:19:15.647Z" }, - { url = "https://files.pythonhosted.org/packages/6a/69/9e6eb717dc9477374e28e5c5b56f210a708bbaa6b9660f09302138776488/ckzg-2.1.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:92b60f5f9eb880c595680af52d609e06dedee2bcdd109597ce58bb5422639b1a", size = 173743, upload-time = "2026-02-26T17:19:17.172Z" }, - { url = "https://files.pythonhosted.org/packages/1c/42/34cb744193163d33c348ce12f0155296bde1cbe733a139bef102c0ff7fec/ckzg-2.1.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1f9c9b2fd7d5f303eb2420130c1a1ee44a071308e227a8f9e238aeb4e2194ae", size = 188921, upload-time = "2026-02-26T17:19:18.457Z" }, - { url = "https://files.pythonhosted.org/packages/14/a6/69a2c0e3d17e3e6d1ae40a7b8a75c354ffeb4b604e716daf25c4a743fb18/ckzg-2.1.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:eaf30b4719199f1d243bd761caaec3582bdad70a6797475c6cd5c03c5ce3cd1d", size = 183603, upload-time = "2026-02-26T17:19:19.579Z" }, - { url = "https://files.pythonhosted.org/packages/3a/21/ea282898caa22622aab9ccd0212f4a5fd9254a949323a406a5c38aee1406/ckzg-2.1.6-cp314-cp314-win_amd64.whl", hash = "sha256:30964b9fac452746db7e60c9c324957c8dc7bc815b72bb09eea88409decc33ed", size = 102520, upload-time = "2026-02-26T17:19:20.834Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/4c/ef4177450ccb31c8ff49ffd154e9266390b2f632caced121ec51f9172e4d/ckzg-2.1.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4fd1c8e20c52ce77f9ad7b004440b0ba46d22328af07a5eb095ea4f252d22644", size = 96611, upload-time = "2026-02-26T17:19:21.81Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ad/6e684af6b29744012befcb88db688234abc172d261ed4f5819df49ff55a4/ckzg-2.1.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:502bb5e5bbbf1bc14b324d8e012c06fc30c24840d35a7933b80b839869280491", size = 183330, upload-time = "2026-02-26T17:19:22.749Z" }, - { url = "https://files.pythonhosted.org/packages/ff/8e/469ab3b856215a7542792c2bae10dbf5e8e051fef2c50545070977acc5db/ckzg-2.1.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c859c8b93e82b9839a5bb443511a0b0631e93cb9275e755f54781693a3afc246", size = 169465, upload-time = "2026-02-26T17:19:23.821Z" }, - { url = "https://files.pythonhosted.org/packages/ef/41/3a5b27f0d8204dd3ed375c3348d462feedc24ef9db9df576e53cb53191b7/ckzg-2.1.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0330b7a7e0aca5622a31089c1d56a1a7040a52075803d31983fa9101fc45dddc", size = 178846, upload-time = "2026-02-26T17:19:25.452Z" }, - { url = "https://files.pythonhosted.org/packages/52/bc/4f15d4642b7c83bdc7c7868f6e809e56ebafc02c1ed43ae541f686185d47/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:646078c085edc4c92361f6277cb8b6aac978287306e664e3c29de2f26ad206d2", size = 176486, upload-time = "2026-02-26T17:19:26.876Z" }, - { url = "https://files.pythonhosted.org/packages/51/8b/f046442413da4bd294d3ec6de04adb54af47b1e149f85c127955e10a78cd/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:1224f2477fc794f7719bbe7650f735188120351b9511a7dd928b2fe8d74911c3", size = 191686, upload-time = "2026-02-26T17:19:27.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/c0/8d/46d383414040cc3f4453c047b2268ef1548e846e5be732fdaf1b20dd5a79/ckzg-2.1.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b33131a9674d9dd509eb9fbb59f65c66dc14bfe85bc3dc93af5140274741c12", size = 186202, upload-time = "2026-02-26T17:19:29.115Z" }, - { url = "https://files.pythonhosted.org/packages/bb/43/4d68277e83da239df32096209b0d27626c2d829bae8d9c757abc1687fc13/ckzg-2.1.6-cp314-cp314t-win_amd64.whl", hash = "sha256:73301ca29c29255960ebcee8bf52151cd3ac8de214c31a4e29dbcde8c44e0571", size = 102667, upload-time = "2026-02-26T17:19:30.111Z" }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, -] - -[[package]] -name = "compress-json" -version = "1.0.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/ba/1a503870491972aaa7281d52d294a1c1f97cd598e52d34cc4280c028db58/compress_json-1.0.5.tar.gz", hash = "sha256:8cd15b09413f402a08faa09255baa44261f20cad76956a18d2581b0792c69523", size = 5031, upload-time = "2022-04-07T21:22:04.694Z" } - -[[package]] -name = "contourpy" -version = "1.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, - { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, - { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, - { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, - { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, - { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, - { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, - { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, - { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, - { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, - { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, - { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" }, - { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, - { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, - { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, - { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, - { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, - { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, - { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, - { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, - { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, - { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, - { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, - { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, - { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, - { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, - { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, - { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, - { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, - { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, - { url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, - { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, - { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, - { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, - { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, - { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, - { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, - { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, - { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, -] - -[[package]] -name = "cronitor" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "humanize" }, - { name = "pyyaml" }, - { name = "requests" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/10/4dd1042302403f8b6cac685ee6138c902179ac7f33a5c85df02d040827be/cronitor-4.9.0.tar.gz", hash = "sha256:3ad4e4c796671f98fa1be5ee6ac49d476e67d8e61f863fe34b616b4b9484c5cb", size = 23005, upload-time = "2025-10-08T01:51:35.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/d8/1c6eb7e1f94575cc83cc938db53acf24c7000faf87cc884e8a83a0d37b0d/cronitor-4.9.0-py2.py3-none-any.whl", hash = "sha256:79f16e3b8745bcf78daa1c9686d46dea9c45a8546d03ea7a1c52f15aab11bfdc", size = 22010, upload-time = "2025-10-08T01:51:34.16Z" }, -] - -[[package]] -name = "cycler" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = 
"sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, -] - -[[package]] -name = "cytoolz" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "toolz" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bd/d4/16916f3dc20a3f5455b63c35dcb260b3716f59ce27a93586804e70e431d5/cytoolz-1.1.0.tar.gz", hash = "sha256:13a7bf254c3c0d28b12e2290b82aed0f0977a4c2a2bf84854fcdc7796a29f3b0", size = 642510, upload-time = "2025-10-19T00:44:56.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/ec/01426224f7acf60183d3921b25e1a8e71713d3d39cb464d64ac7aace6ea6/cytoolz-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:99f8e134c9be11649342853ec8c90837af4089fc8ff1e8f9a024a57d1fa08514", size = 1327800, upload-time = "2025-10-19T00:40:48.674Z" }, - { url = "https://files.pythonhosted.org/packages/b4/07/e07e8fedd332ac9626ad58bea31416dda19bfd14310731fa38b16a97e15f/cytoolz-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a6f44cf9319c30feb9a50aa513d777ef51efec16f31c404409e7deb8063df64", size = 997118, upload-time = "2025-10-19T00:40:50.919Z" }, - { url = "https://files.pythonhosted.org/packages/ab/72/c0f766d63ed2f9ea8dc8e1628d385d99b41fb834ce17ac3669e3f91e115d/cytoolz-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:945580dc158c557172fca899a35a99a16fbcebf6db0c77cb6621084bc82189f9", size = 991169, upload-time = "2025-10-19T00:40:52.887Z" }, - { url = "https://files.pythonhosted.org/packages/df/4b/1f757353d1bf33e56a7391ecc9bc49c1e529803b93a9d2f67fe5f92906fe/cytoolz-1.1.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:257905ec050d04f2f856854620d1e25556fd735064cebd81b460f54939b9f9d5", size = 2700680, upload-time = "2025-10-19T00:40:54.597Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/73/9b25bb7ed8d419b9d6ff2ae0b3d06694de79a3f98f5169a1293ff7ad3a3f/cytoolz-1.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82779049f352fb3ab5e8c993ab45edbb6e02efb1f17f0b50f4972c706cc51d76", size = 2824951, upload-time = "2025-10-19T00:40:56.137Z" }, - { url = "https://files.pythonhosted.org/packages/0c/93/9c787f7c909e75670fff467f2504725d06d8c3f51d6dfe22c55a08c8ccd4/cytoolz-1.1.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7d3e405e435320e08c5a1633afaf285a392e2d9cef35c925d91e2a31dfd7a688", size = 2679635, upload-time = "2025-10-19T00:40:57.799Z" }, - { url = "https://files.pythonhosted.org/packages/50/aa/9ee92c302cccf7a41a7311b325b51ebeff25d36c1f82bdc1bbe3f58dc947/cytoolz-1.1.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:923df8f5591e0d20543060c29909c149ab1963a7267037b39eee03a83dbc50a8", size = 2938352, upload-time = "2025-10-19T00:40:59.49Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a3/3b58c5c1692c3bacd65640d0d5c7267a7ebb76204f7507aec29de7063d2f/cytoolz-1.1.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:25db9e4862f22ea0ae2e56c8bec9fc9fd756b655ae13e8c7b5625d7ed1c582d4", size = 3022121, upload-time = "2025-10-19T00:41:01.209Z" }, - { url = "https://files.pythonhosted.org/packages/e1/93/c647bc3334355088c57351a536c2d4a83dd45f7de591fab383975e45bff9/cytoolz-1.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7a98deb11ccd8e5d9f9441ef2ff3352aab52226a2b7d04756caaa53cd612363", size = 2857656, upload-time = "2025-10-19T00:41:03.456Z" }, - { url = "https://files.pythonhosted.org/packages/b2/c2/43fea146bf4141deea959e19dcddf268c5ed759dec5c2ed4a6941d711933/cytoolz-1.1.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:dce4ee9fc99104bc77efdea80f32ca5a650cd653bcc8a1d984a931153d3d9b58", size = 2551284, upload-time = "2025-10-19T00:41:05.347Z" }, - { url = "https://files.pythonhosted.org/packages/6f/df/cdc7a81ce5cfcde7ef523143d545635fc37e80ccacce140ae58483a21da3/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80d6da158f7d20c15819701bbda1c041f0944ede2f564f5c739b1bc80a9ffb8b", size = 2721673, upload-time = "2025-10-19T00:41:07.528Z" }, - { url = "https://files.pythonhosted.org/packages/45/be/f8524bb9ad8812ad375e61238dcaa3177628234d1b908ad0b74e3657cafd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3b5c5a192abda123ad45ef716ec9082b4cf7d95e9ada8291c5c2cc5558be858b", size = 2722884, upload-time = "2025-10-19T00:41:09.698Z" }, - { url = "https://files.pythonhosted.org/packages/23/e6/6bb8e4f9c267ad42d1ff77b6d2e4984665505afae50a216290e1d7311431/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5b399ce7d967b1cb6280250818b786be652aa8ddffd3c0bb5c48c6220d945ab5", size = 2685486, upload-time = "2025-10-19T00:41:11.349Z" }, - { url = "https://files.pythonhosted.org/packages/d7/dd/88619f9c8d2b682562c0c886bbb7c35720cb83fda2ac9a41bdd14073d9bd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e7e29a1a03f00b4322196cfe8e2c38da9a6c8d573566052c586df83aacc5663c", size = 2839661, upload-time = "2025-10-19T00:41:13.053Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8d/4478ebf471ee78dd496d254dc0f4ad729cd8e6ba8257de4f0a98a2838ef2/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5291b117d71652a817ec164e7011f18e6a51f8a352cc9a70ed5b976c51102fda", size = 2547095, upload-time = "2025-10-19T00:41:16.054Z" }, - { url = "https://files.pythonhosted.org/packages/e6/68/f1dea33367b0b3f64e199c230a14a6b6f243c189020effafd31e970ca527/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8caef62f846a9011676c51bda9189ae394cdd6bb17f2946ecaedc23243268320", size = 2870901, upload-time = 
"2025-10-19T00:41:17.727Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9a/33591c09dfe799b8fb692cf2ad383e2c41ab6593cc960b00d1fc8a145655/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:de425c5a8e3be7bb3a195e19191d28d9eb3c2038046064a92edc4505033ec9cb", size = 2765422, upload-time = "2025-10-19T00:41:20.075Z" }, - { url = "https://files.pythonhosted.org/packages/60/2b/a8aa233c9416df87f004e57ae4280bd5e1f389b4943d179f01020c6ec629/cytoolz-1.1.0-cp312-cp312-win32.whl", hash = "sha256:296440a870e8d1f2e1d1edf98f60f1532b9d3ab8dfbd4b25ec08cd76311e79e5", size = 901933, upload-time = "2025-10-19T00:41:21.646Z" }, - { url = "https://files.pythonhosted.org/packages/ad/33/4c9bdf8390dc01d2617c7f11930697157164a52259b6818ddfa2f94f89f4/cytoolz-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:07156987f224c6dac59aa18fb8bf91e1412f5463961862716a3381bf429c8699", size = 947989, upload-time = "2025-10-19T00:41:23.288Z" }, - { url = "https://files.pythonhosted.org/packages/35/ac/6e2708835875f5acb52318462ed296bf94ed0cb8c7cb70e62fbd03f709e3/cytoolz-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:23e616b38f5b3160c7bb45b0f84a8f3deb4bd26b29fb2dfc716f241c738e27b8", size = 903913, upload-time = "2025-10-19T00:41:24.992Z" }, - { url = "https://files.pythonhosted.org/packages/71/4a/b3ddb3ee44fe0045e95dd973746f93f033b6f92cce1fc3cbbe24b329943c/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:76c9b58555300be6dde87a41faf1f97966d79b9a678b7a526fcff75d28ef4945", size = 976728, upload-time = "2025-10-19T00:41:26.5Z" }, - { url = "https://files.pythonhosted.org/packages/42/21/a3681434aa425875dd828bb515924b0f12c37a55c7d2bc5c0c5de3aeb0b4/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d1d638b10d3144795655e9395566ce35807df09219fd7cacd9e6acbdef67946a", size = 986057, upload-time = "2025-10-19T00:41:28.911Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/cb/efc1b29e211e0670a6953222afaac84dcbba5cb940b130c0e49858978040/cytoolz-1.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:26801c1a165e84786a99e03c9c9973356caaca002d66727b761fb1042878ef06", size = 992632, upload-time = "2025-10-19T00:41:30.612Z" }, - { url = "https://files.pythonhosted.org/packages/be/b0/e50621d21e939338c97faab651f58ea7fa32101226a91de79ecfb89d71e1/cytoolz-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a9a464542912d3272f6dccc5142df057c71c6a5cbd30439389a732df401afb7", size = 1317534, upload-time = "2025-10-19T00:41:32.625Z" }, - { url = "https://files.pythonhosted.org/packages/0d/6b/25aa9739b0235a5bc4c1ea293186bc6822a4c6607acfe1422423287e7400/cytoolz-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6104fa942aa5784bf54f339563de637557e3443b105760bc4de8f16a7fc79b", size = 992336, upload-time = "2025-10-19T00:41:34.073Z" }, - { url = "https://files.pythonhosted.org/packages/e1/53/5f4deb0ff958805309d135d899c764364c1e8a632ce4994bd7c45fb98df2/cytoolz-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56161f0ab60dc4159ec343509abaf809dc88e85c7e420e354442c62e3e7cbb77", size = 986118, upload-time = "2025-10-19T00:41:35.7Z" }, - { url = "https://files.pythonhosted.org/packages/1c/e3/f6255b76c8cc0debbe1c0779130777dc0434da6d9b28a90d9f76f8cb67cd/cytoolz-1.1.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:832bd36cc9123535f1945acf6921f8a2a15acc19cfe4065b1c9b985a28671886", size = 2679563, upload-time = "2025-10-19T00:41:37.926Z" }, - { url = "https://files.pythonhosted.org/packages/59/8a/acc6e39a84e930522b965586ad3a36694f9bf247b23188ee0eb47b1c9ed1/cytoolz-1.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1842636b6e034f229bf084c2bcdcfd36c8437e752eefd2c74ce9e2f10415cb6e", size = 2813020, upload-time = "2025-10-19T00:41:39.935Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/f5/0083608286ad1716eda7c41f868e85ac549f6fd6b7646993109fa0bdfd98/cytoolz-1.1.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:823df012ab90d2f2a0f92fea453528539bf71ac1879e518524cd0c86aa6df7b9", size = 2669312, upload-time = "2025-10-19T00:41:41.55Z" }, - { url = "https://files.pythonhosted.org/packages/47/a8/d16080b575520fe5da00cede1ece4e0a4180ec23f88dcdc6a2f5a90a7f7f/cytoolz-1.1.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2f1fcf9e7e7b3487883ff3f815abc35b89dcc45c4cf81c72b7ee457aa72d197b", size = 2922147, upload-time = "2025-10-19T00:41:43.252Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bc/716c9c1243701e58cad511eb3937fd550e645293c5ed1907639c5d66f194/cytoolz-1.1.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4cdb3fa1772116827f263f25b0cdd44c663b6701346a56411960534a06c082de", size = 2981602, upload-time = "2025-10-19T00:41:45.354Z" }, - { url = "https://files.pythonhosted.org/packages/14/bc/571b232996846b27f4ac0c957dc8bf60261e9b4d0d01c8d955e82329544e/cytoolz-1.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1b5c95041741b81430454db65183e133976f45ac3c03454cfa8147952568529", size = 2830103, upload-time = "2025-10-19T00:41:47.959Z" }, - { url = "https://files.pythonhosted.org/packages/5b/55/c594afb46ecd78e4b7e1fb92c947ed041807875661ceda73baaf61baba4f/cytoolz-1.1.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b2079fd9f1a65f4c61e6278c8a6d4f85edf30c606df8d5b32f1add88cbbe2286", size = 2533802, upload-time = "2025-10-19T00:41:49.683Z" }, - { url = "https://files.pythonhosted.org/packages/93/83/1edcf95832555a78fc43b975f3ebe8ceadcc9664dd47fd33747a14df5069/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:a92a320d72bef1c7e2d4c6d875125cf57fc38be45feb3fac1bfa64ea401f54a4", size = 2706071, upload-time = "2025-10-19T00:41:51.386Z" }, - { url = "https://files.pythonhosted.org/packages/e2/df/035a408df87f25cfe3611557818b250126cd2281b2104cd88395de205583/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:06d1c79aa51e6a92a90b0e456ebce2288f03dd6a76c7f582bfaa3eda7692e8a5", size = 2707575, upload-time = "2025-10-19T00:41:53.305Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a4/ef78e13e16e93bf695a9331321d75fbc834a088d941f1c19e6b63314e257/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e1d7be25f6971e986a52b6d3a0da28e1941850985417c35528f6823aef2cfec5", size = 2660486, upload-time = "2025-10-19T00:41:55.542Z" }, - { url = "https://files.pythonhosted.org/packages/30/7a/2c3d60682b26058d435416c4e90d4a94db854de5be944dfd069ed1be648a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:964b248edc31efc50a65e9eaa0c845718503823439d2fa5f8d2c7e974c2b5409", size = 2819605, upload-time = "2025-10-19T00:41:58.257Z" }, - { url = "https://files.pythonhosted.org/packages/45/92/19b722a1d83cc443fbc0c16e0dc376f8a451437890d3d9ee370358cf0709/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c9ff2b3c57c79b65cb5be14a18c6fd4a06d5036fb3f33e973a9f70e9ac13ca28", size = 2533559, upload-time = "2025-10-19T00:42:00.324Z" }, - { url = "https://files.pythonhosted.org/packages/1d/15/fa3b7891da51115204416f14192081d3dea0eaee091f123fdc1347de8dd1/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:22290b73086af600042d99f5ce52a43d4ad9872c382610413176e19fc1d4fd2d", size = 2839171, upload-time = "2025-10-19T00:42:01.881Z" }, - { url = "https://files.pythonhosted.org/packages/46/40/d3519d5cd86eebebf1e8b7174ec32dfb6ecec67b48b0cfb92bf226659b5a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a2ade74fccd080ea793382968913ee38d7a35c921df435bbf0a6aeecf0d17574", size = 2743379, upload-time = 
"2025-10-19T00:42:03.809Z" }, - { url = "https://files.pythonhosted.org/packages/93/e2/a9e7511f0a13fdbefa5bf73cf8e4763878140de9453fd3e50d6ac57b6be7/cytoolz-1.1.0-cp313-cp313-win32.whl", hash = "sha256:db5dbcfda1c00e937426cbf9bdc63c24ebbc358c3263bfcbc1ab4a88dc52aa8e", size = 900844, upload-time = "2025-10-19T00:42:05.967Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a4/fb7eb403c6a4c81e5a30363f34a71adcc8bf5292dc8ea32e2440aa5668f2/cytoolz-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9e2d3fe3b45c3eb7233746f7aca37789be3dceec3e07dcc406d3e045ea0f7bdc", size = 946461, upload-time = "2025-10-19T00:42:07.983Z" }, - { url = "https://files.pythonhosted.org/packages/93/bb/1c8c33d353548d240bc6e8677ee8c3560ce5fa2f084e928facf7c35a6dcf/cytoolz-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:32c559f95ff44a9ebcbd934acaa1e6dc8f3e6ffce4762a79a88528064873d6d5", size = 902673, upload-time = "2025-10-19T00:42:09.982Z" }, - { url = "https://files.pythonhosted.org/packages/c4/ba/4a53acc60f59030fcaf48c7766e3c4c81bd997379425aa45b129396557b5/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9e2cd93b28f667c5870a070ab2b8bb4397470a85c4b204f2454b0ad001cd1ca3", size = 1372336, upload-time = "2025-10-19T00:42:12.104Z" }, - { url = "https://files.pythonhosted.org/packages/ac/90/f28fd8ad8319d8f5c8da69a2c29b8cf52a6d2c0161602d92b366d58926ab/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f494124e141a9361f31d79875fe7ea459a3be2b9dadd90480427c0c52a0943d4", size = 1011930, upload-time = "2025-10-19T00:42:14.231Z" }, - { url = "https://files.pythonhosted.org/packages/c9/95/4561c4e0ad1c944f7673d6d916405d68080f10552cfc5d69a1cf2475a9a1/cytoolz-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53a3262bf221f19437ed544bf8c0e1980c81ac8e2a53d87a9bc075dba943d36f", size = 1020610, upload-time = "2025-10-19T00:42:15.877Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/14/b2e1ffa4995ec36e1372e243411ff36325e4e6d7ffa34eb4098f5357d176/cytoolz-1.1.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:47663e57d3f3f124921f38055e86a1022d0844c444ede2e8f090d3bbf80deb65", size = 2917327, upload-time = "2025-10-19T00:42:17.706Z" }, - { url = "https://files.pythonhosted.org/packages/4a/29/7cab6c609b4514ac84cca2f7dca6c509977a8fc16d27c3a50e97f105fa6a/cytoolz-1.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5a8755c4104ee4e3d5ba434c543b5f85fdee6a1f1df33d93f518294da793a60", size = 3108951, upload-time = "2025-10-19T00:42:19.363Z" }, - { url = "https://files.pythonhosted.org/packages/9a/71/1d1103b819458679277206ad07d78ca6b31c4bb88d6463fd193e19bfb270/cytoolz-1.1.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4d96ff3d381423af1b105295f97de86d1db51732c9566eb37378bab6670c5010", size = 2807149, upload-time = "2025-10-19T00:42:20.964Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d4/3d83a05a21e7d2ed2b9e6daf489999c29934b005de9190272b8a2e3735d0/cytoolz-1.1.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0ec96b3d537cdf47d4e76ded199f7440715f4c71029b45445cff92c1248808c2", size = 3111608, upload-time = "2025-10-19T00:42:22.684Z" }, - { url = "https://files.pythonhosted.org/packages/51/88/96f68354c3d4af68de41f0db4fe41a23b96a50a4a416636cea325490cfeb/cytoolz-1.1.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:208e2f2ef90a32b0acbff3303d90d89b13570a228d491d2e622a7883a3c68148", size = 3179373, upload-time = "2025-10-19T00:42:24.395Z" }, - { url = "https://files.pythonhosted.org/packages/ce/50/ed87a5cd8e6f27ffbb64c39e9730e18ec66c37631db2888ae711909f10c9/cytoolz-1.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0d416a81bb0bd517558668e49d30a7475b5445f9bbafaab7dcf066f1e9adba36", size = 3003120, upload-time = "2025-10-19T00:42:26.18Z" }, - { url = "https://files.pythonhosted.org/packages/d3/a7/acde155b050d6eaa8e9c7845c98fc5fb28501568e78e83ebbf44f8855274/cytoolz-1.1.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f32e94c91ffe49af04835ee713ebd8e005c85ebe83e7e1fdcc00f27164c2d636", size = 2703225, upload-time = "2025-10-19T00:42:27.93Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b6/9d518597c5bdea626b61101e8d2ff94124787a42259dafd9f5fc396f346a/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15d0c6405efc040499c46df44056a5c382f551a7624a41cf3e4c84a96b988a15", size = 2956033, upload-time = "2025-10-19T00:42:29.993Z" }, - { url = "https://files.pythonhosted.org/packages/89/7a/93e5f860926165538c85e1c5e1670ad3424f158df810f8ccd269da652138/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:bf069c5381d757debae891401b88b3a346ba3a28ca45ba9251103b282463fad8", size = 2862950, upload-time = "2025-10-19T00:42:31.803Z" }, - { url = "https://files.pythonhosted.org/packages/76/e6/99d6af00487bedc27597b54c9fcbfd5c833a69c6b7a9b9f0fff777bfc7aa/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d5cf15892e63411ec1bd67deff0e84317d974e6ab2cdfefdd4a7cea2989df66", size = 2861757, upload-time = "2025-10-19T00:42:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/71/ca/adfa1fb7949478135a37755cb8e88c20cd6b75c22a05f1128f05f3ab2c60/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3e3872c21170f8341656f8692f8939e8800dcee6549ad2474d4c817bdefd62cd", size = 2979049, upload-time = "2025-10-19T00:42:35.377Z" }, - { url = "https://files.pythonhosted.org/packages/70/4c/7bf47a03a4497d500bc73d4204e2d907771a017fa4457741b2a1d7c09319/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b9ddeff8e8fd65eb1fcefa61018100b2b627e759ea6ad275d2e2a93ffac147bf", size = 2699492, 
upload-time = "2025-10-19T00:42:37.133Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e7/3d034b0e4817314f07aa465d5864e9b8df9d25cb260a53dd84583e491558/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:02feeeda93e1fa3b33414eb57c2b0aefd1db8f558dd33fdfcce664a0f86056e4", size = 2995646, upload-time = "2025-10-19T00:42:38.912Z" }, - { url = "https://files.pythonhosted.org/packages/c1/62/be357181c71648d9fe1d1ce91cd42c63457dcf3c158e144416fd51dced83/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d08154ad45349162b6c37f12d5d1b2e6eef338e657b85e1621e4e6a4a69d64cb", size = 2919481, upload-time = "2025-10-19T00:42:40.85Z" }, - { url = "https://files.pythonhosted.org/packages/62/d5/bf5434fde726c4f80cb99912b2d8e0afa1587557e2a2d7e0315eb942f2de/cytoolz-1.1.0-cp313-cp313t-win32.whl", hash = "sha256:10ae4718a056948d73ca3e1bb9ab1f95f897ec1e362f829b9d37cc29ab566c60", size = 951595, upload-time = "2025-10-19T00:42:42.877Z" }, - { url = "https://files.pythonhosted.org/packages/64/29/39c161e9204a9715321ddea698cbd0abc317e78522c7c642363c20589e71/cytoolz-1.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:1bb77bc6197e5cb19784b6a42bb0f8427e81737a630d9d7dda62ed31733f9e6c", size = 1004445, upload-time = "2025-10-19T00:42:44.855Z" }, - { url = "https://files.pythonhosted.org/packages/e2/5a/7cbff5e9a689f558cb0bdf277f9562b2ac51acf7cd15e055b8c3efb0e1ef/cytoolz-1.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:563dda652c6ff52d215704fbe6b491879b78d7bbbb3a9524ec8e763483cb459f", size = 926207, upload-time = "2025-10-19T00:42:46.456Z" }, - { url = "https://files.pythonhosted.org/packages/b7/e8/297a85ba700f437c01eba962428e6ab4572f6c3e68e8ff442ce5c9d3a496/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d542cee7c7882d2a914a33dec4d3600416fb336734df979473249d4c53d207a1", size = 980613, upload-time = "2025-10-19T00:42:47.988Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/d7/2b02c9d18e9cc263a0e22690f78080809f1eafe72f26b29ccc115d3bf5c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:31922849b701b0f24bb62e56eb2488dcd3aa6ae3057694bd6b3b7c4c2bc27c2f", size = 990476, upload-time = "2025-10-19T00:42:49.653Z" }, - { url = "https://files.pythonhosted.org/packages/89/26/b6b159d2929310fca0eff8a4989cd4b1ecbdf7c46fdff46c7a20fcae55c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e68308d32afd31943314735c1335e4ab5696110e96b405f6bdb8f2a8dc771a16", size = 992712, upload-time = "2025-10-19T00:42:51.306Z" }, - { url = "https://files.pythonhosted.org/packages/42/a0/f7c572aa151ed466b0fce4a327c3cc916d3ef3c82e341be59ea4b9bee9e4/cytoolz-1.1.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fc4bb48b3b866e1867f7c6411a4229e5b44be3989060663713e10efc24c9bd5f", size = 1322596, upload-time = "2025-10-19T00:42:52.978Z" }, - { url = "https://files.pythonhosted.org/packages/72/7c/a55d035e20b77b6725e85c8f1a418b3a4c23967288b8b0c2d1a40f158cbe/cytoolz-1.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:456f77207d1445025d7ef262b8370a05492dcb1490cb428b0f3bf1bd744a89b0", size = 992825, upload-time = "2025-10-19T00:42:55.026Z" }, - { url = "https://files.pythonhosted.org/packages/03/af/39d2d3db322136e12e9336a1f13bab51eab88b386bfb11f91d3faff8ba34/cytoolz-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:174ebc71ebb20a9baeffce6ee07ee2cd913754325c93f99d767380d8317930f7", size = 990525, upload-time = "2025-10-19T00:42:56.666Z" }, - { url = "https://files.pythonhosted.org/packages/a6/bd/65d7a869d307f9b10ad45c2c1cbb40b81a8d0ed1138fa17fd904f5c83298/cytoolz-1.1.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8b3604fef602bcd53415055a4f68468339192fd17be39e687ae24f476d23d56e", size = 2672409, upload-time = "2025-10-19T00:42:58.81Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/fb/74dfd844bfd67e810bd36e8e3903a143035447245828e7fcd7c81351d775/cytoolz-1.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3604b959a01f64c366e7d10ec7634d5f5cfe10301e27a8f090f6eb3b2a628a18", size = 2808477, upload-time = "2025-10-19T00:43:00.577Z" }, - { url = "https://files.pythonhosted.org/packages/d6/1f/587686c43e31c19241ec317da66438d093523921ea7749bbc65558a30df9/cytoolz-1.1.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6db2127a3c1bc2f59f08010d2ae53a760771a9de2f67423ad8d400e9ba4276e8", size = 2636881, upload-time = "2025-10-19T00:43:02.24Z" }, - { url = "https://files.pythonhosted.org/packages/bc/6d/90468cd34f77cb38a11af52c4dc6199efcc97a486395a21bef72e9b7602e/cytoolz-1.1.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56584745ac647993a016a21bc76399113b7595e312f8d0a1b140c9fcf9b58a27", size = 2937315, upload-time = "2025-10-19T00:43:03.954Z" }, - { url = "https://files.pythonhosted.org/packages/d9/50/7b92cd78c613b92e3509e6291d3fb7e0d72ebda999a8df806a96c40ca9ab/cytoolz-1.1.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db2c4c3a7f7bd7e03bb1a236a125c8feb86c75802f4ecda6ecfaf946610b2930", size = 2959988, upload-time = "2025-10-19T00:43:05.758Z" }, - { url = "https://files.pythonhosted.org/packages/44/d5/34b5a28a8d9bb329f984b4c2259407ca3f501d1abeb01bacea07937d85d1/cytoolz-1.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48cb8a692111a285d2b9acd16d185428176bfbffa8a7c274308525fccd01dd42", size = 2795116, upload-time = "2025-10-19T00:43:07.411Z" }, - { url = "https://files.pythonhosted.org/packages/f5/d9/5dd829e33273ec03bdc3c812e6c3281987ae2c5c91645582f6c331544a64/cytoolz-1.1.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d2f344ba5eb17dcf38ee37fdde726f69053f54927db8f8a1bed6ac61e5b1890d", size = 2535390, upload-time = "2025-10-19T00:43:09.104Z" }, - { url = "https://files.pythonhosted.org/packages/87/1f/7f9c58068a8eec2183110df051bc6b69dd621143f84473eeb6dc1b32905a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abf76b1c1abd031f098f293b6d90ee08bdaa45f8b5678430e331d991b82684b1", size = 2704834, upload-time = "2025-10-19T00:43:10.942Z" }, - { url = "https://files.pythonhosted.org/packages/d2/90/667def5665333575d01a65fe3ec0ca31b897895f6e3bc1a42d6ea3659369/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ddf9a38a5b686091265ff45b53d142e44a538cd6c2e70610d3bc6be094219032", size = 2658441, upload-time = "2025-10-19T00:43:12.655Z" }, - { url = "https://files.pythonhosted.org/packages/23/79/6615f9a14960bd29ac98b823777b6589357833f65cf1a11b5abc1587c120/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:946786755274f07bb2be0400f28adb31d7d85a7c7001873c0a8e24a503428fb3", size = 2654766, upload-time = "2025-10-19T00:43:14.325Z" }, - { url = "https://files.pythonhosted.org/packages/b0/99/be59c6e0ae02153ef10ae1ff0f380fb19d973c651b50cf829a731f6c9e79/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b8f78b9fed79cf185ad4ddec099abeef45951bdcb416c5835ba05f0a1242c7", size = 2827649, upload-time = "2025-10-19T00:43:16.132Z" }, - { url = "https://files.pythonhosted.org/packages/19/b7/854ddcf9f9618844108677c20d48f4611b5c636956adea0f0e85e027608f/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fccde6efefdbc02e676ccb352a2ccc8a8e929f59a1c6d3d60bb78e923a49ca44", size = 2533456, upload-time = "2025-10-19T00:43:17.764Z" }, - { url = "https://files.pythonhosted.org/packages/45/66/bfe6fbb2bdcf03c8377c8c2f542576e15f3340c905a09d78a6cb3badd39a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:717b7775313da5f51b0fbf50d865aa9c39cb241bd4cb605df3cf2246d6567397", size = 2826455, upload-time = 
"2025-10-19T00:43:19.561Z" }, - { url = "https://files.pythonhosted.org/packages/c3/0c/cce4047bd927e95f59e73319c02c9bc86bd3d76392e0eb9e41a1147a479c/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5158744a09d0e0e4a4f82225e3a3c4ebf38f9ae74467aaa905467270e52f2794", size = 2714897, upload-time = "2025-10-19T00:43:21.291Z" }, - { url = "https://files.pythonhosted.org/packages/ac/9a/061323bb289b565802bad14fb7ab59fcd8713105df142bcf4dd9ff64f8ac/cytoolz-1.1.0-cp314-cp314-win32.whl", hash = "sha256:1ed534bdbbf063b2bb28fca7d0f6723a3e5a72b086e7c7fe6d74ae8c3e4d00e2", size = 901490, upload-time = "2025-10-19T00:43:22.895Z" }, - { url = "https://files.pythonhosted.org/packages/a3/20/1f3a733d710d2a25d6f10b463bef55ada52fe6392a5d233c8d770191f48a/cytoolz-1.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:472c1c9a085f5ad973ec0ad7f0b9ba0969faea6f96c9e397f6293d386f3a25ec", size = 946730, upload-time = "2025-10-19T00:43:24.838Z" }, - { url = "https://files.pythonhosted.org/packages/f2/22/2d657db4a5d1c10a152061800f812caba9ef20d7bd2406f51a5fd800c180/cytoolz-1.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:a7ad7ca3386fa86bd301be3fa36e7f0acb024f412f665937955acfc8eb42deff", size = 905722, upload-time = "2025-10-19T00:43:26.439Z" }, - { url = "https://files.pythonhosted.org/packages/19/97/b4a8c76796a9a8b9bc90c7992840fa1589a1af8e0426562dea4ce9b384a7/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:64b63ed4b71b1ba813300ad0f06b8aff19a12cf51116e0e4f1ed837cea4debcf", size = 1372606, upload-time = "2025-10-19T00:43:28.491Z" }, - { url = "https://files.pythonhosted.org/packages/08/d4/a1bb1a32b454a2d650db8374ff3bf875ba0fc1c36e6446ec02a83b9140a1/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a60ba6f2ed9eb0003a737e1ee1e9fa2258e749da6477946008d4324efa25149f", size = 1012189, upload-time = "2025-10-19T00:43:30.177Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/4b/2f5cbbd81588918ee7dd70cffb66731608f578a9b72166aafa991071af7d/cytoolz-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1aa58e2434d732241f7f051e6f17657e969a89971025e24578b5cbc6f1346485", size = 1020624, upload-time = "2025-10-19T00:43:31.712Z" }, - { url = "https://files.pythonhosted.org/packages/f5/99/c4954dd86cd593cd776a038b36795a259b8b5c12cbab6363edf5f6d9c909/cytoolz-1.1.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6965af3fc7214645970e312deb9bd35a213a1eaabcfef4f39115e60bf2f76867", size = 2917016, upload-time = "2025-10-19T00:43:33.531Z" }, - { url = "https://files.pythonhosted.org/packages/b2/7c/f1f70a17e272b433232bc8a27df97e46b202d6cc07e3b0d63f7f41ba0f2d/cytoolz-1.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddd2863f321d67527d3b67a93000a378ad6f967056f68c06467fe011278a6d0e", size = 3107634, upload-time = "2025-10-19T00:43:35.57Z" }, - { url = "https://files.pythonhosted.org/packages/8f/bd/c3226a57474b4aef1f90040510cba30d0decd3515fed48dc229b37c2f898/cytoolz-1.1.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4e6b428e9eb5126053c2ae0efa62512ff4b38ed3951f4d0888ca7005d63e56f5", size = 2806221, upload-time = "2025-10-19T00:43:37.707Z" }, - { url = "https://files.pythonhosted.org/packages/c3/47/2f7bfe4aaa1e07dc9828bea228ed744faf73b26aee0c1bdf3b5520bf1909/cytoolz-1.1.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d758e5ef311d2671e0ae8c214c52e44617cf1e58bef8f022b547b9802a5a7f30", size = 3107671, upload-time = "2025-10-19T00:43:39.401Z" }, - { url = "https://files.pythonhosted.org/packages/4d/12/6ff3b04fbd1369d0fcd5f8b5910ba6e427e33bf113754c4c35ec3f747924/cytoolz-1.1.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:a95416eca473e6c1179b48d86adcf528b59c63ce78f4cb9934f2e413afa9b56b", size = 3176350, upload-time = "2025-10-19T00:43:41.148Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/6691d986b728e77b5d2872743ebcd962d37a2d0f7e9ad95a81b284fbf905/cytoolz-1.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36c8ede93525cf11e2cc787b7156e5cecd7340193ef800b816a16f1404a8dc6d", size = 3001173, upload-time = "2025-10-19T00:43:42.923Z" }, - { url = "https://files.pythonhosted.org/packages/7a/cb/f59d83a5058e1198db5a1f04e4a124c94d60390e4fa89b6d2e38ee8288a0/cytoolz-1.1.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c949755b6d8a649c5fbc888bc30915926f1b09fe42fea9f289e297c2f6ddd3", size = 2701374, upload-time = "2025-10-19T00:43:44.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f0/1ae6d28df503b0bdae094879da2072b8ba13db5919cd3798918761578411/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e1b6d37545816905a76d9ed59fa4e332f929e879f062a39ea0f6f620405cdc27", size = 2953081, upload-time = "2025-10-19T00:43:47.103Z" }, - { url = "https://files.pythonhosted.org/packages/f4/06/d86fe811c6222dc32d3e08f5d88d2be598a6055b4d0590e7c1428d55c386/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:05332112d4087904842b36954cd1d3fc0e463a2f4a7ef9477bd241427c593c3b", size = 2862228, upload-time = "2025-10-19T00:43:49.353Z" }, - { url = "https://files.pythonhosted.org/packages/ae/32/978ef6f42623be44a0a03ae9de875ab54aa26c7e38c5c4cd505460b0927d/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:31538ca2fad2d688cbd962ccc3f1da847329e2258a52940f10a2ac0719e526be", size = 2861971, upload-time = "2025-10-19T00:43:51.028Z" }, - { url = "https://files.pythonhosted.org/packages/ee/f7/74c69497e756b752b359925d1feef68b91df024a4124a823740f675dacd3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:747562aa70abf219ea16f07d50ac0157db856d447f7f498f592e097cbc77df0b", size = 2975304, upload-time = "2025-10-19T00:43:52.99Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2b/3ce0e6889a6491f3418ad4d84ae407b8456b02169a5a1f87990dbba7433b/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:3dc15c48b20c0f467e15e341e102896c8422dccf8efc6322def5c1b02f074629", size = 2697371, upload-time = "2025-10-19T00:43:55.312Z" }, - { url = "https://files.pythonhosted.org/packages/15/87/c616577f0891d97860643c845f7221e95240aa589586de727e28a5eb6e52/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3c03137ee6103ba92d5d6ad6a510e86fded69cd67050bd8a1843f15283be17ac", size = 2992436, upload-time = "2025-10-19T00:43:57.253Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9f/490c81bffb3428ab1fa114051fbb5ba18aaa2e2fe4da5bf4170ca524e6b3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:be8e298d88f88bd172b59912240558be3b7a04959375646e7fd4996401452941", size = 2917612, upload-time = "2025-10-19T00:43:59.423Z" }, - { url = "https://files.pythonhosted.org/packages/66/35/0fec2769660ca6472bbf3317ab634675827bb706d193e3240aaf20eab961/cytoolz-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:3d407140f5604a89578285d4aac7b18b8eafa055cf776e781aabb89c48738fad", size = 960842, upload-time = "2025-10-19T00:44:01.143Z" }, - { url = "https://files.pythonhosted.org/packages/46/b4/b7ce3d3cd20337becfec978ecfa6d0ef64884d0cf32d44edfed8700914b9/cytoolz-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56e5afb69eb6e1b3ffc34716ee5f92ffbdb5cb003b3a5ca4d4b0fe700e217162", size = 1020835, upload-time = "2025-10-19T00:44:03.246Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1f/0498009aa563a9c5d04f520aadc6e1c0942434d089d0b2f51ea986470f55/cytoolz-1.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:27b19b4a286b3ff52040efa42dbe403730aebe5fdfd2def704eb285e2125c63e", size = 927963, upload-time = "2025-10-19T00:44:04.85Z" }, -] - -[[package]] 
-name = "decorator" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, -] - -[[package]] -name = "dice" -version = "4.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyparsing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/3e/a07b66d345678cd277df76fdf51cb4ffc1054492efcd674a7b30f49a80bf/dice-4.0.0.tar.gz", hash = "sha256:7a1bfd68f21abf245f333de89721cd30379b5413c774a9d34a6b4123c526815c", size = 22252, upload-time = "2023-05-18T17:23:34.224Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/75/bf7ef10ce7f9a8d17e127e77092505368999e6bd86deab8eb9a74b2103ec/dice-4.0.0-py3-none-any.whl", hash = "sha256:59aa1f7b23846b32a618697534429fec80be3a1087b4f934022dcdb99cf1417f", size = 22042, upload-time = "2023-05-18T17:22:59.548Z" }, -] - -[[package]] -name = "discord-py" -version = "2.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp" }, - { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ef/57/9a2d9abdabdc9db8ef28ce0cf4129669e1c8717ba28d607b5ba357c4de3b/discord_py-2.7.1.tar.gz", hash = "sha256:24d5e6a45535152e4b98148a9dd6b550d25dc2c9fb41b6d670319411641249da", size = 1106326, upload-time = "2026-03-03T18:40:46.24Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f7/a7/17208c3b3f92319e7fad259f1c6d5a5baf8fd0654c54846ced329f83c3eb/discord_py-2.7.1-py3-none-any.whl", hash = "sha256:849dca2c63b171146f3a7f3f8acc04248098e9e6203412ce3cf2745f284f7439", size = 1227550, upload-time = "2026-03-03T18:40:44.492Z" }, -] - -[[package]] -name = "distro" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, -] - -[[package]] -name = "dnspython" -version = "2.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, -] - -[[package]] -name = "docstring-parser" -version = "0.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = 
"sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, -] - -[[package]] -name = "eth-abi" -version = "5.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "eth-typing" }, - { name = "eth-utils" }, - { name = "parsimonious" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/00/71/d9e1380bd77fd22f98b534699af564f189b56d539cc2b9dab908d4e4c242/eth_abi-5.2.0.tar.gz", hash = "sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0", size = 49797, upload-time = "2025-01-14T16:29:34.629Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/b4/2f3982c4cbcbf5eeb6aec62df1533c0e63c653b3021ff338d44944405676/eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877", size = 28511, upload-time = "2025-01-14T16:29:31.862Z" }, -] - -[[package]] -name = "eth-account" -version = "0.13.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bitarray" }, - { name = "ckzg" }, - { name = "eth-abi" }, - { name = "eth-keyfile" }, - { name = "eth-keys" }, - { name = "eth-rlp" }, - { name = "eth-utils" }, - { name = "hexbytes" }, - { name = "pydantic" }, - { name = "rlp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/74/cf/20f76a29be97339c969fd765f1237154286a565a1d61be98e76bb7af946a/eth_account-0.13.7.tar.gz", hash = "sha256:5853ecbcbb22e65411176f121f5f24b8afeeaf13492359d254b16d8b18c77a46", size = 935998, upload-time = "2025-04-21T21:11:21.204Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/46/18/088fb250018cbe665bc2111974301b2d59f294a565aff7564c4df6878da2/eth_account-0.13.7-py3-none-any.whl", hash = "sha256:39727de8c94d004ff61d10da7587509c04d2dc7eac71e04830135300bdfc6d24", size = 587452, upload-time = "2025-04-21T21:11:18.346Z" }, -] - -[[package]] -name = "eth-hash" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/38/577b7bc9380ef9dff0f1dffefe0c9a1ded2385e7a06c306fd95afb6f9451/eth_hash-0.7.1.tar.gz", hash = "sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5", size = 12227, upload-time = "2025-01-13T21:29:21.765Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/db/f8775490669d28aca24871c67dd56b3e72105cb3bcae9a4ec65dd70859b3/eth_hash-0.7.1-py3-none-any.whl", hash = "sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a", size = 8028, upload-time = "2025-01-13T21:29:19.365Z" }, -] - -[package.optional-dependencies] -pycryptodome = [ - { name = "pycryptodome" }, -] - -[[package]] -name = "eth-keyfile" -version = "0.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "eth-keys" }, - { name = "eth-utils" }, - { name = "pycryptodome" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/35/66/dd823b1537befefbbff602e2ada88f1477c5b40ec3731e3d9bc676c5f716/eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1", size = 12267, upload-time = "2024-04-23T20:28:53.862Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/fc/48a586175f847dd9e05e5b8994d2fe8336098781ec2e9836a2ad94280281/eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64", size = 7510, upload-time = "2024-04-23T20:28:51.063Z" }, -] - -[[package]] -name = "eth-keys" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies 
= [ - { name = "eth-typing" }, - { name = "eth-utils" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/11/1ed831c50bd74f57829aa06e58bd82a809c37e070ee501c953b9ac1f1552/eth_keys-0.7.0.tar.gz", hash = "sha256:79d24fd876201df67741de3e3fefb3f4dbcbb6ace66e47e6fe662851a4547814", size = 30166, upload-time = "2025-04-07T17:40:21.697Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/25/0ae00f2b0095e559d61ad3dc32171bd5a29dfd95ab04b4edd641f7c75f72/eth_keys-0.7.0-py3-none-any.whl", hash = "sha256:b0cdda8ffe8e5ba69c7c5ca33f153828edcace844f67aabd4542d7de38b159cf", size = 20656, upload-time = "2025-04-07T17:40:20.441Z" }, -] - -[[package]] -name = "eth-rlp" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "eth-utils" }, - { name = "hexbytes" }, - { name = "rlp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7f/ea/ad39d001fa9fed07fad66edb00af701e29b48be0ed44a3bcf58cb3adf130/eth_rlp-2.2.0.tar.gz", hash = "sha256:5e4b2eb1b8213e303d6a232dfe35ab8c29e2d3051b86e8d359def80cd21db83d", size = 7720, upload-time = "2025-02-04T21:51:08.134Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/3b/57efe2bc2df0980680d57c01a36516cd3171d2319ceb30e675de19fc2cc5/eth_rlp-2.2.0-py3-none-any.whl", hash = "sha256:5692d595a741fbaef1203db6a2fedffbd2506d31455a6ad378c8449ee5985c47", size = 4446, upload-time = "2025-02-04T21:51:05.823Z" }, -] - -[[package]] -name = "eth-typing" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/60/54/62aa24b9cc708f06316167ee71c362779c8ed21fc8234a5cd94a8f53b623/eth_typing-5.2.1.tar.gz", hash = "sha256:7557300dbf02a93c70fa44af352b5c4a58f94e997a0fd6797fb7d1c29d9538ee", size = 21806, upload-time = "2025-04-14T20:39:28.217Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/30/72/c370bbe4c53da7bf998d3523f5a0f38867654923a82192df88d0705013d3/eth_typing-5.2.1-py3-none-any.whl", hash = "sha256:b0c2812ff978267563b80e9d701f487dd926f1d376d674f3b535cfe28b665d3d", size = 19163, upload-time = "2025-04-14T20:39:26.571Z" }, -] - -[[package]] -name = "eth-utils" -version = "5.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cytoolz", marker = "implementation_name == 'cpython'" }, - { name = "eth-hash" }, - { name = "eth-typing" }, - { name = "pydantic" }, - { name = "toolz", marker = "implementation_name == 'pypy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e6/e1/ee3a8728227c3558853e63ff35bd4c449abdf5022a19601369400deacd39/eth_utils-5.3.1.tar.gz", hash = "sha256:c94e2d2abd024a9a42023b4ddc1c645814ff3d6a737b33d5cfd890ebf159c2d1", size = 123506, upload-time = "2025-08-27T16:37:17.378Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/4d/257cdc01ada430b8e84b9f2385c2553f33218f5b47da9adf0a616308d4b7/eth_utils-5.3.1-py3-none-any.whl", hash = "sha256:1f5476d8f29588d25b8ae4987e1ffdfae6d4c09026e476c4aad13b32dda3ead0", size = 102529, upload-time = "2025-08-27T16:37:15.449Z" }, -] - -[[package]] -name = "etherscan-labels" -version = "20221015" -source = { git = "https://github.com/haloooloolo/etherscan-labels#f48e34b76d75b4421daa2a43814dd7c61c5c56f3" } -dependencies = [ - { name = "compress-json" }, -] - -[[package]] -name = "fonttools" -version = "4.61.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, - { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, - { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, - { url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" }, - { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" }, - { 
url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" }, - { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" }, - { url = "https://files.pythonhosted.org/packages/4b/cf/00ba28b0990982530addb8dc3e9e6f2fa9cb5c20df2abdda7baa755e8fe1/fonttools-4.61.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c56c488ab471628ff3bfa80964372fc13504ece601e0d97a78ee74126b2045c", size = 2846454, upload-time = "2025-12-12T17:30:24.938Z" }, - { url = "https://files.pythonhosted.org/packages/5a/ca/468c9a8446a2103ae645d14fee3f610567b7042aba85031c1c65e3ef7471/fonttools-4.61.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc492779501fa723b04d0ab1f5be046797fee17d27700476edc7ee9ae535a61e", size = 2398191, upload-time = "2025-12-12T17:30:27.343Z" }, - { url = "https://files.pythonhosted.org/packages/a3/4b/d67eedaed19def5967fade3297fed8161b25ba94699efc124b14fb68cdbc/fonttools-4.61.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:64102ca87e84261419c3747a0d20f396eb024bdbeb04c2bfb37e2891f5fadcb5", size = 4928410, upload-time = "2025-12-12T17:30:29.771Z" }, - { url = "https://files.pythonhosted.org/packages/b0/8d/6fb3494dfe61a46258cd93d979cf4725ded4eb46c2a4ca35e4490d84daea/fonttools-4.61.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c1b526c8d3f615a7b1867f38a9410849c8f4aef078535742198e942fba0e9bd", size = 4984460, upload-time = "2025-12-12T17:30:32.073Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/f1/a47f1d30b3dc00d75e7af762652d4cbc3dff5c2697a0dbd5203c81afd9c3/fonttools-4.61.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41ed4b5ec103bd306bb68f81dc166e77409e5209443e5773cb4ed837bcc9b0d3", size = 4925800, upload-time = "2025-12-12T17:30:34.339Z" }, - { url = "https://files.pythonhosted.org/packages/a7/01/e6ae64a0981076e8a66906fab01539799546181e32a37a0257b77e4aa88b/fonttools-4.61.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b501c862d4901792adaec7c25b1ecc749e2662543f68bb194c42ba18d6eec98d", size = 5067859, upload-time = "2025-12-12T17:30:36.593Z" }, - { url = "https://files.pythonhosted.org/packages/73/aa/28e40b8d6809a9b5075350a86779163f074d2b617c15d22343fce81918db/fonttools-4.61.1-cp313-cp313-win32.whl", hash = "sha256:4d7092bb38c53bbc78e9255a59158b150bcdc115a1e3b3ce0b5f267dc35dd63c", size = 2267821, upload-time = "2025-12-12T17:30:38.478Z" }, - { url = "https://files.pythonhosted.org/packages/1a/59/453c06d1d83dc0951b69ef692d6b9f1846680342927df54e9a1ca91c6f90/fonttools-4.61.1-cp313-cp313-win_amd64.whl", hash = "sha256:21e7c8d76f62ab13c9472ccf74515ca5b9a761d1bde3265152a6dc58700d895b", size = 2318169, upload-time = "2025-12-12T17:30:40.951Z" }, - { url = "https://files.pythonhosted.org/packages/32/8f/4e7bf82c0cbb738d3c2206c920ca34ca74ef9dabde779030145d28665104/fonttools-4.61.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fff4f534200a04b4a36e7ae3cb74493afe807b517a09e99cb4faa89a34ed6ecd", size = 2846094, upload-time = "2025-12-12T17:30:43.511Z" }, - { url = "https://files.pythonhosted.org/packages/71/09/d44e45d0a4f3a651f23a1e9d42de43bc643cce2971b19e784cc67d823676/fonttools-4.61.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:d9203500f7c63545b4ce3799319fe4d9feb1a1b89b28d3cb5abd11b9dd64147e", size = 2396589, upload-time = "2025-12-12T17:30:45.681Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/18/58c64cafcf8eb677a99ef593121f719e6dcbdb7d1c594ae5a10d4997ca8a/fonttools-4.61.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa646ecec9528bef693415c79a86e733c70a4965dd938e9a226b0fc64c9d2e6c", size = 4877892, upload-time = "2025-12-12T17:30:47.709Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ec/9e6b38c7ba1e09eb51db849d5450f4c05b7e78481f662c3b79dbde6f3d04/fonttools-4.61.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f35ad7805edba3aac1a3710d104592df59f4b957e30108ae0ba6c10b11dd75", size = 4972884, upload-time = "2025-12-12T17:30:49.656Z" }, - { url = "https://files.pythonhosted.org/packages/5e/87/b5339da8e0256734ba0dbbf5b6cdebb1dd79b01dc8c270989b7bcd465541/fonttools-4.61.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b931ae8f62db78861b0ff1ac017851764602288575d65b8e8ff1963fed419063", size = 4924405, upload-time = "2025-12-12T17:30:51.735Z" }, - { url = "https://files.pythonhosted.org/packages/0b/47/e3409f1e1e69c073a3a6fd8cb886eb18c0bae0ee13db2c8d5e7f8495e8b7/fonttools-4.61.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b148b56f5de675ee16d45e769e69f87623a4944f7443850bf9a9376e628a89d2", size = 5035553, upload-time = "2025-12-12T17:30:54.823Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b6/1f6600161b1073a984294c6c031e1a56ebf95b6164249eecf30012bb2e38/fonttools-4.61.1-cp314-cp314-win32.whl", hash = "sha256:9b666a475a65f4e839d3d10473fad6d47e0a9db14a2f4a224029c5bfde58ad2c", size = 2271915, upload-time = "2025-12-12T17:30:57.913Z" }, - { url = "https://files.pythonhosted.org/packages/52/7b/91e7b01e37cc8eb0e1f770d08305b3655e4f002fc160fb82b3390eabacf5/fonttools-4.61.1-cp314-cp314-win_amd64.whl", hash = "sha256:4f5686e1fe5fce75d82d93c47a438a25bf0d1319d2843a926f741140b2b16e0c", size = 2323487, upload-time = "2025-12-12T17:30:59.804Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/5c/908ad78e46c61c3e3ed70c3b58ff82ab48437faf84ec84f109592cabbd9f/fonttools-4.61.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:e76ce097e3c57c4bcb67c5aa24a0ecdbd9f74ea9219997a707a4061fbe2707aa", size = 2929571, upload-time = "2025-12-12T17:31:02.574Z" }, - { url = "https://files.pythonhosted.org/packages/bd/41/975804132c6dea64cdbfbaa59f3518a21c137a10cccf962805b301ac6ab2/fonttools-4.61.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9cfef3ab326780c04d6646f68d4b4742aae222e8b8ea1d627c74e38afcbc9d91", size = 2435317, upload-time = "2025-12-12T17:31:04.974Z" }, - { url = "https://files.pythonhosted.org/packages/b0/5a/aef2a0a8daf1ebaae4cfd83f84186d4a72ee08fd6a8451289fcd03ffa8a4/fonttools-4.61.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a75c301f96db737e1c5ed5fd7d77d9c34466de16095a266509e13da09751bd19", size = 4882124, upload-time = "2025-12-12T17:31:07.456Z" }, - { url = "https://files.pythonhosted.org/packages/80/33/d6db3485b645b81cea538c9d1c9219d5805f0877fda18777add4671c5240/fonttools-4.61.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91669ccac46bbc1d09e9273546181919064e8df73488ea087dcac3e2968df9ba", size = 5100391, upload-time = "2025-12-12T17:31:09.732Z" }, - { url = "https://files.pythonhosted.org/packages/6c/d6/675ba631454043c75fcf76f0ca5463eac8eb0666ea1d7badae5fea001155/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c33ab3ca9d3ccd581d58e989d67554e42d8d4ded94ab3ade3508455fe70e65f7", size = 4978800, upload-time = "2025-12-12T17:31:11.681Z" }, - { url = "https://files.pythonhosted.org/packages/7f/33/d3ec753d547a8d2bdaedd390d4a814e8d5b45a093d558f025c6b990b554c/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:664c5a68ec406f6b1547946683008576ef8b38275608e1cee6c061828171c118", size = 5006426, upload-time = "2025-12-12T17:31:13.764Z" 
}, - { url = "https://files.pythonhosted.org/packages/b4/40/cc11f378b561a67bea850ab50063366a0d1dd3f6d0a30ce0f874b0ad5664/fonttools-4.61.1-cp314-cp314t-win32.whl", hash = "sha256:aed04cabe26f30c1647ef0e8fbb207516fd40fe9472e9439695f5c6998e60ac5", size = 2335377, upload-time = "2025-12-12T17:31:16.49Z" }, - { url = "https://files.pythonhosted.org/packages/e4/ff/c9a2b66b39f8628531ea58b320d66d951267c98c6a38684daa8f50fb02f8/fonttools-4.61.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2180f14c141d2f0f3da43f3a81bc8aa4684860f6b0e6f9e165a4831f24e6a23b", size = 2400613, upload-time = "2025-12-12T17:31:18.769Z" }, - { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, -] - -[[package]] -name = "frozenlist" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, - { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, - { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, - { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, - { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, - { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, - { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, - { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, - { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, - { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = 
"2025-10-06T05:36:23.661Z" }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, - { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, - { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, - { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, - { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, - { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, - { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, - { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, - { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, - { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, - { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, - { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = 
"2025-10-06T05:36:54.309Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, - { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, - { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, - { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = 
"2025-10-06T05:37:02.115Z" }, - { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, - { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, - { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, - { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, - { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, - { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, - { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, - { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, - { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, - { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, - { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, - { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = 
"2025-10-06T05:37:25.581Z" }, - { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, - { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, - { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, - { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, - { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, - { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, - { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, - { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, - { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, - { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, - { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, -] - -[[package]] -name = "graphql-query" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jinja2" }, - { name = "pydantic" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8b/64/377beef6c10b798f2ece54cfd3577db20102176c3a155469b92b4a3e3881/graphql_query-1.4.0.tar.gz", hash = "sha256:1cfe5eeaad8b0ed67ac3d9c4023ee9743851f98c6b2f673c67088cf42ebb57bb", size = 26178, upload-time = "2024-07-31T10:50:05.249Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/ed/ec732d18dd016eb4d4fa590e392d22dd35c93f26f17f709596deeb780497/graphql_query-1.4.0-py3-none-any.whl", hash = "sha256:376ed550a7812425befbefb870daa21ce1696590fcb78c015215a43a5d7e51b7", size = 13389, upload-time = "2024-07-31T10:50:04.055Z" }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "hexbytes" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7f/87/adf4635b4b8c050283d74e6db9a81496063229c9263e6acc1903ab79fbec/hexbytes-1.3.1.tar.gz", hash = "sha256:a657eebebdfe27254336f98d8af6e2236f3f83aed164b87466b6cf6c5f5a4765", size = 8633, upload-time = "2025-05-14T16:45:17.5Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/e0/3b31492b1c89da3c5a846680517871455b30c54738486fc57ac79a5761bd/hexbytes-1.3.1-py3-none-any.whl", hash = "sha256:da01ff24a1a9a2b1881c4b85f0e9f9b0f51b526b379ffa23832ae7899d29c2c7", size = 5074, upload-time = "2025-05-14T16:45:16.179Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - -[[package]] -name = "humanize" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/66/a3921783d54be8a6870ac4ccffcd15c4dc0dd7fcce51c6d63b8c63935276/humanize-4.15.0.tar.gz", hash = "sha256:1dd098483eb1c7ee8e32eb2e99ad1910baefa4b75c3aff3a82f4d78688993b10", size = 83599, upload-time = "2025-12-20T20:16:13.19Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/7b/bca5613a0c3b542420cf92bd5e5fb8ebd5435ce1011a091f66bb7693285e/humanize-4.15.0-py3-none-any.whl", hash = "sha256:b1186eb9f5a9749cd9cb8565aee77919dd7c8d076161cf44d70e59e3301e1769", size = 132203, upload-time = "2025-12-20T20:16:11.67Z" }, -] - -[[package]] -name = "idna" -version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, -] - -[[package]] -name = "inflect" -version = "7.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, - { name = "typeguard" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", 
hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, -] - -[[package]] -name = "iniconfig" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, -] - -[[package]] -name = "jiter" -version = "0.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = 
{ url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, - { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, - { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, - { url = "https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, - { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, - { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, - { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, - { url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, - { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, - { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, - { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, - { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, - { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, - { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, - { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, - { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, - { url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, - { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, - { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, - { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, - { url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, - { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, - { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, - { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, - { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, - { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, - { url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, - { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, - { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, - { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, - { url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, - { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, - { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, - { url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, - { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, - { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, - { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, - { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, -] - -[[package]] -name = "kiwisolver" -version = "1.4.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, - { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, - { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, 
upload-time = "2025-08-10T21:26:13.096Z" }, - { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, - { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, - { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, - { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, - { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = 
"2025-08-10T21:26:21.49Z" }, - { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, - { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, - { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, - { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" }, - { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" }, - { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" }, - { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" }, - { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" }, - { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" }, - { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" }, - { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" }, - { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" }, - { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" }, - { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" }, - { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" }, - { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" }, - { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" }, - { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" }, - { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" }, - { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" }, - { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" }, - { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" }, - { url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" }, - { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" }, - { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" }, - { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" }, - { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" }, - { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" }, - { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" }, - { url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" }, - { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" }, - { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" }, - { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" }, - { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, - { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, - { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, -] - -[[package]] -name = "markupsafe" -version = "3.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, - { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, - { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, - { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, - { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, - { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, - { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, - { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, - { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, - { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, - { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, - { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, - { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, - { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, - { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, - { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, - { 
url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, - { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, - { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, - { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, - { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, - { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, - { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, - { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, - { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, - { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, - { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, - { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, - { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, - { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, - { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, - { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, - { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, - { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, - { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, -] - -[[package]] -name = "matplotlib" -version = "3.10.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "contourpy" }, - { name = "cycler" }, - { name = "fonttools" }, - { name = "kiwisolver" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pillow" }, - { name = "pyparsing" }, - { name = "python-dateutil" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, - { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, - { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, - { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, - { url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" }, - { url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" }, - { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" }, - { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" }, - { url = "https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" }, - { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" }, - { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" }, - { url = "https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" }, - { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" }, - { url = 
"https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" }, - { url = "https://files.pythonhosted.org/packages/3c/43/9c0ff7a2f11615e516c3b058e1e6e8f9614ddeca53faca06da267c48345d/matplotlib-3.10.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b53285e65d4fa4c86399979e956235deb900be5baa7fc1218ea67fbfaeaadd6f", size = 8262481, upload-time = "2025-12-10T22:56:10.885Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ca/e8ae28649fcdf039fda5ef554b40a95f50592a3c47e6f7270c9561c12b07/matplotlib-3.10.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32f8dce744be5569bebe789e46727946041199030db8aeb2954d26013a0eb26b", size = 8151473, upload-time = "2025-12-10T22:56:12.377Z" }, - { url = "https://files.pythonhosted.org/packages/f1/6f/009d129ae70b75e88cbe7e503a12a4c0670e08ed748a902c2568909e9eb5/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf267add95b1c88300d96ca837833d4112756045364f5c734a2276038dae27d", size = 9553896, upload-time = "2025-12-10T22:56:14.432Z" }, - { url = "https://files.pythonhosted.org/packages/f5/26/4221a741eb97967bc1fd5e4c52b9aa5a91b2f4ec05b59f6def4d820f9df9/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf5bd12cecf46908f286d7838b2abc6c91cda506c0445b8223a7c19a00df008", size = 9824193, upload-time = "2025-12-10T22:56:16.29Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f3/3abf75f38605772cf48a9daf5821cd4f563472f38b4b828c6fba6fa6d06e/matplotlib-3.10.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:41703cc95688f2516b480f7f339d8851a6035f18e100ee6a32bc0b8536a12a9c", size = 9615444, upload-time = "2025-12-10T22:56:18.155Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/a5/de89ac80f10b8dc615807ee1133cd99ac74082581196d4d9590bea10690d/matplotlib-3.10.8-cp314-cp314-win_amd64.whl", hash = "sha256:83d282364ea9f3e52363da262ce32a09dfe241e4080dcedda3c0db059d3c1f11", size = 8272719, upload-time = "2025-12-10T22:56:20.366Z" }, - { url = "https://files.pythonhosted.org/packages/69/ce/b006495c19ccc0a137b48083168a37bd056392dee02f87dba0472f2797fe/matplotlib-3.10.8-cp314-cp314-win_arm64.whl", hash = "sha256:2c1998e92cd5999e295a731bcb2911c75f597d937341f3030cc24ef2733d78a8", size = 8144205, upload-time = "2025-12-10T22:56:22.239Z" }, - { url = "https://files.pythonhosted.org/packages/68/d9/b31116a3a855bd313c6fcdb7226926d59b041f26061c6c5b1be66a08c826/matplotlib-3.10.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b5a2b97dbdc7d4f353ebf343744f1d1f1cca8aa8bfddb4262fcf4306c3761d50", size = 8305785, upload-time = "2025-12-10T22:56:24.218Z" }, - { url = "https://files.pythonhosted.org/packages/1e/90/6effe8103f0272685767ba5f094f453784057072f49b393e3ea178fe70a5/matplotlib-3.10.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3f5c3e4da343bba819f0234186b9004faba952cc420fbc522dc4e103c1985908", size = 8198361, upload-time = "2025-12-10T22:56:26.787Z" }, - { url = "https://files.pythonhosted.org/packages/d7/65/a73188711bea603615fc0baecca1061429ac16940e2385433cc778a9d8e7/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f62550b9a30afde8c1c3ae450e5eb547d579dd69b25c2fc7a1c67f934c1717a", size = 9561357, upload-time = "2025-12-10T22:56:28.953Z" }, - { url = "https://files.pythonhosted.org/packages/f4/3d/b5c5d5d5be8ce63292567f0e2c43dde9953d3ed86ac2de0a72e93c8f07a1/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:495672de149445ec1b772ff2c9ede9b769e3cb4f0d0aa7fa730d7f59e2d4e1c1", size = 9823610, upload-time = "2025-12-10T22:56:31.455Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, - { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, - { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, -] - -[[package]] -name = "more-itertools" -version = "10.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, -] - -[[package]] -name = "multidict" -version = "6.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, 
upload-time = "2026-01-26T02:46:45.979Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, - { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, - { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, - { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, - { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, - { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, - { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, - { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, - { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, - { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, - { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, - { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, - { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, - { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, - { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, - { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, - { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, - { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, - { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, - { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, - { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, - { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, - { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, - { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, - { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 
250156, upload-time = "2026-01-26T02:44:33.734Z" }, - { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, - { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, - { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, - { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, - { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, - { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, - { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, - { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, - { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, - { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, - { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, - { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, - { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, - { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, - { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", 
size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, - { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, - { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, - { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, - { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, - { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, - { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, - { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, - { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, - { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, - { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, - { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, - { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, - { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, - { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 
240315, upload-time = "2026-01-26T02:45:27.487Z" }, - { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, - { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, - { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, - { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, - { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, - { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, - { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, - { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, - { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, - { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, - { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, - { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, - { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, - { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", 
size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, - { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, - { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, - { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, - { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, - { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, - { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, -] - -[[package]] -name = "numpy" -version = "2.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a", size = 16667963, upload-time = "2026-01-31T23:10:52.147Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1", size = 14693571, upload-time = "2026-01-31T23:10:54.789Z" }, - { url = "https://files.pythonhosted.org/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e", size = 5203469, upload-time = "2026-01-31T23:10:57.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27", size = 6550820, upload-time = "2026-01-31T23:10:59.429Z" }, - { url = "https://files.pythonhosted.org/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548", size = 15663067, upload-time = "2026-01-31T23:11:01.291Z" }, - { url = "https://files.pythonhosted.org/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f", size = 16619782, upload-time = "2026-01-31T23:11:03.669Z" }, - { url = "https://files.pythonhosted.org/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460", size = 17013128, upload-time = "2026-01-31T23:11:05.913Z" }, - { url = "https://files.pythonhosted.org/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba", size = 18345324, upload-time = "2026-01-31T23:11:08.248Z" }, - { url = "https://files.pythonhosted.org/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f", size = 5960282, upload-time = "2026-01-31T23:11:10.497Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85", size = 12314210, upload-time = "2026-01-31T23:11:12.176Z" }, - { url = "https://files.pythonhosted.org/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa", size = 10220171, upload-time = "2026-01-31T23:11:14.684Z" }, - { url = "https://files.pythonhosted.org/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c", size = 16663696, upload-time = "2026-01-31T23:11:17.516Z" }, - { url = "https://files.pythonhosted.org/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979", size = 14688322, upload-time = "2026-01-31T23:11:19.883Z" }, - { url = "https://files.pythonhosted.org/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98", size = 5198157, upload-time = "2026-01-31T23:11:22.375Z" }, - { url = "https://files.pythonhosted.org/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef", size = 6546330, upload-time = "2026-01-31T23:11:23.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7", size = 15660968, upload-time = "2026-01-31T23:11:25.713Z" }, - { url = "https://files.pythonhosted.org/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499", size = 16607311, upload-time = "2026-01-31T23:11:28.117Z" }, - { url = "https://files.pythonhosted.org/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb", size = 17012850, upload-time = "2026-01-31T23:11:30.888Z" }, - { url = "https://files.pythonhosted.org/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7", size = 18334210, upload-time = "2026-01-31T23:11:33.214Z" }, - { url = "https://files.pythonhosted.org/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110", size = 5958199, upload-time = "2026-01-31T23:11:35.385Z" }, - { url = "https://files.pythonhosted.org/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622", size = 12310848, upload-time = "2026-01-31T23:11:38.001Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71", size = 10221082, upload-time = "2026-01-31T23:11:40.392Z" }, - { url = "https://files.pythonhosted.org/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262", size = 14815866, upload-time = "2026-01-31T23:11:42.495Z" }, - { url = "https://files.pythonhosted.org/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913", size = 5325631, upload-time = "2026-01-31T23:11:44.7Z" }, - { url = "https://files.pythonhosted.org/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab", size = 6646254, upload-time = "2026-01-31T23:11:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82", size = 15720138, upload-time = "2026-01-31T23:11:48.082Z" }, - { url = "https://files.pythonhosted.org/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f", size = 16655398, upload-time = "2026-01-31T23:11:50.293Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554", size = 17079064, upload-time = "2026-01-31T23:11:52.927Z" }, - { url = "https://files.pythonhosted.org/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257", size = 18379680, upload-time = "2026-01-31T23:11:55.22Z" }, - { url = "https://files.pythonhosted.org/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657", size = 6082433, upload-time = "2026-01-31T23:11:58.096Z" }, - { url = "https://files.pythonhosted.org/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b", size = 12451181, upload-time = "2026-01-31T23:11:59.782Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1", size = 10290756, upload-time = "2026-01-31T23:12:02.438Z" }, - { url = "https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b", size = 16663092, upload-time = "2026-01-31T23:12:04.521Z" }, - { url = "https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000", size = 14698770, upload-time = "2026-01-31T23:12:06.96Z" }, - { url = "https://files.pythonhosted.org/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1", size = 5208562, upload-time = "2026-01-31T23:12:09.632Z" }, - { url = "https://files.pythonhosted.org/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74", size = 6543710, upload-time = "2026-01-31T23:12:11.969Z" }, - { url = "https://files.pythonhosted.org/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a", size = 15677205, upload-time = "2026-01-31T23:12:14.33Z" }, - { url = "https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325", size = 16611738, upload-time = "2026-01-31T23:12:16.525Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909", size = 17028888, upload-time = "2026-01-31T23:12:19.306Z" }, - { url = "https://files.pythonhosted.org/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a", size = 
18339556, upload-time = "2026-01-31T23:12:21.816Z" }, - { url = "https://files.pythonhosted.org/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a", size = 6006899, upload-time = "2026-01-31T23:12:24.14Z" }, - { url = "https://files.pythonhosted.org/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75", size = 12443072, upload-time = "2026-01-31T23:12:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05", size = 10494886, upload-time = "2026-01-31T23:12:28.488Z" }, - { url = "https://files.pythonhosted.org/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308", size = 14818567, upload-time = "2026-01-31T23:12:30.709Z" }, - { url = "https://files.pythonhosted.org/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef", size = 5328372, upload-time = "2026-01-31T23:12:32.962Z" }, - { url = "https://files.pythonhosted.org/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d", size = 6649306, upload-time = "2026-01-31T23:12:34.797Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8", size = 15722394, upload-time = "2026-01-31T23:12:36.565Z" }, - { url = "https://files.pythonhosted.org/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5", size = 16653343, upload-time = "2026-01-31T23:12:39.188Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e", size = 17078045, upload-time = "2026-01-31T23:12:42.041Z" }, - { url = "https://files.pythonhosted.org/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a", size = 18380024, upload-time = "2026-01-31T23:12:44.331Z" }, - { url = "https://files.pythonhosted.org/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443", size = 6153937, upload-time = "2026-01-31T23:12:47.229Z" }, - { url = "https://files.pythonhosted.org/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236", size = 12631844, upload-time = "2026-01-31T23:12:48.997Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181", size = 10565379, upload-time = "2026-01-31T23:12:51.345Z" }, -] - -[[package]] -name = "packaging" -version = "26.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, -] - -[[package]] -name = "pandas" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "python-dateutil" }, - { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, - { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, - { url = "https://files.pythonhosted.org/packages/3d/fe/89d77e424365280b79d99b3e1e7d606f5165af2f2ecfaf0c6d24c799d607/pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4", size = 10876435, upload-time = "2026-02-17T22:18:45.954Z" }, - { url = "https://files.pythonhosted.org/packages/b5/a6/2a75320849dd154a793f69c951db759aedb8d1dd3939eeacda9bdcfa1629/pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1", size = 11405133, upload-time = "2026-02-17T22:18:48.533Z" }, - { url = "https://files.pythonhosted.org/packages/58/53/1d68fafb2e02d7881df66aa53be4cd748d25cbe311f3b3c85c93ea5d30ca/pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821", size = 11932065, upload-time = "2026-02-17T22:18:50.837Z" }, - { url = "https://files.pythonhosted.org/packages/75/08/67cc404b3a966b6df27b38370ddd96b3b023030b572283d035181854aac5/pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43", size = 9741627, upload-time = "2026-02-17T22:18:53.905Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/4f/caf9952948fb00d23795f09b893d11f1cacb384e666854d87249530f7cbe/pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7", size = 9052483, upload-time = "2026-02-17T22:18:57.31Z" }, - { url = "https://files.pythonhosted.org/packages/0b/48/aad6ec4f8d007534c091e9a7172b3ec1b1ee6d99a9cbb936b5eab6c6cf58/pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262", size = 10317509, upload-time = "2026-02-17T22:18:59.498Z" }, - { url = "https://files.pythonhosted.org/packages/a8/14/5990826f779f79148ae9d3a2c39593dc04d61d5d90541e71b5749f35af95/pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56", size = 9860561, upload-time = "2026-02-17T22:19:02.265Z" }, - { url = "https://files.pythonhosted.org/packages/fa/80/f01ff54664b6d70fed71475543d108a9b7c888e923ad210795bef04ffb7d/pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e", size = 10365506, upload-time = "2026-02-17T22:19:05.017Z" }, - { url = "https://files.pythonhosted.org/packages/f2/85/ab6d04733a7d6ff32bfc8382bf1b07078228f5d6ebec5266b91bfc5c4ff7/pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791", size = 10873196, upload-time = "2026-02-17T22:19:07.204Z" }, - { url = "https://files.pythonhosted.org/packages/48/a9/9301c83d0b47c23ac5deab91c6b39fd98d5b5db4d93b25df8d381451828f/pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a", size = 11370859, upload-time = "2026-02-17T22:19:09.436Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/fe/0c1fc5bd2d29c7db2ab372330063ad555fb83e08422829c785f5ec2176ca/pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8", size = 11924584, upload-time = "2026-02-17T22:19:11.562Z" }, - { url = "https://files.pythonhosted.org/packages/d6/7d/216a1588b65a7aa5f4535570418a599d943c85afb1d95b0876fc00aa1468/pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25", size = 9742769, upload-time = "2026-02-17T22:19:13.926Z" }, - { url = "https://files.pythonhosted.org/packages/c4/cb/810a22a6af9a4e97c8ab1c946b47f3489c5bca5adc483ce0ffc84c9cc768/pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59", size = 9043855, upload-time = "2026-02-17T22:19:16.09Z" }, - { url = "https://files.pythonhosted.org/packages/92/fa/423c89086cca1f039cf1253c3ff5b90f157b5b3757314aa635f6bf3e30aa/pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06", size = 10752673, upload-time = "2026-02-17T22:19:18.304Z" }, - { url = "https://files.pythonhosted.org/packages/22/23/b5a08ec1f40020397f0faba72f1e2c11f7596a6169c7b3e800abff0e433f/pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f", size = 10404967, upload-time = "2026-02-17T22:19:20.726Z" }, - { url = "https://files.pythonhosted.org/packages/5c/81/94841f1bb4afdc2b52a99daa895ac2c61600bb72e26525ecc9543d453ebc/pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324", size = 10320575, upload-time = "2026-02-17T22:19:24.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/8b/2ae37d66a5342a83adadfd0cb0b4bf9c3c7925424dd5f40d15d6cfaa35ee/pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9", size = 10710921, upload-time = "2026-02-17T22:19:27.181Z" }, - { url = "https://files.pythonhosted.org/packages/a2/61/772b2e2757855e232b7ccf7cb8079a5711becb3a97f291c953def15a833f/pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76", size = 11334191, upload-time = "2026-02-17T22:19:29.411Z" }, - { url = "https://files.pythonhosted.org/packages/1b/08/b16c6df3ef555d8495d1d265a7963b65be166785d28f06a350913a4fac78/pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098", size = 11782256, upload-time = "2026-02-17T22:19:32.34Z" }, - { url = "https://files.pythonhosted.org/packages/55/80/178af0594890dee17e239fca96d3d8670ba0f5ff59b7d0439850924a9c09/pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35", size = 10485047, upload-time = "2026-02-17T22:19:34.605Z" }, - { url = "https://files.pythonhosted.org/packages/bb/8b/4bb774a998b97e6c2fd62a9e6cfdaae133b636fd1c468f92afb4ae9a447a/pandas-3.0.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:99d0f92ed92d3083d140bf6b97774f9f13863924cf3f52a70711f4e7588f9d0a", size = 10322465, upload-time = "2026-02-17T22:19:36.803Z" }, - { url = "https://files.pythonhosted.org/packages/72/3a/5b39b51c64159f470f1ca3b1c2a87da290657ca022f7cd11442606f607d1/pandas-3.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3b66857e983208654294bb6477b8a63dee26b37bdd0eb34d010556e91261784f", size = 9910632, upload-time = "2026-02-17T22:19:39.001Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/f7/b449ffb3f68c11da12fc06fbf6d2fa3a41c41e17d0284d23a79e1c13a7e4/pandas-3.0.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56cf59638bf24dc9bdf2154c81e248b3289f9a09a6d04e63608c159022352749", size = 10440535, upload-time = "2026-02-17T22:19:41.157Z" }, - { url = "https://files.pythonhosted.org/packages/55/77/6ea82043db22cb0f2bbfe7198da3544000ddaadb12d26be36e19b03a2dc5/pandas-3.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1a9f55e0f46951874b863d1f3906dcb57df2d9be5c5847ba4dfb55b2c815249", size = 10893940, upload-time = "2026-02-17T22:19:43.493Z" }, - { url = "https://files.pythonhosted.org/packages/03/30/f1b502a72468c89412c1b882a08f6eed8a4ee9dc033f35f65d0663df6081/pandas-3.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1849f0bba9c8a2fb0f691d492b834cc8dadf617e29015c66e989448d58d011ee", size = 11442711, upload-time = "2026-02-17T22:19:46.074Z" }, - { url = "https://files.pythonhosted.org/packages/0d/f0/ebb6ddd8fc049e98cabac5c2924d14d1dda26a20adb70d41ea2e428d3ec4/pandas-3.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3d288439e11b5325b02ae6e9cc83e6805a62c40c5a6220bea9beb899c073b1c", size = 11963918, upload-time = "2026-02-17T22:19:48.838Z" }, - { url = "https://files.pythonhosted.org/packages/09/f8/8ce132104074f977f907442790eaae24e27bce3b3b454e82faa3237ff098/pandas-3.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:93325b0fe372d192965f4cca88d97667f49557398bbf94abdda3bf1b591dbe66", size = 9862099, upload-time = "2026-02-17T22:19:51.081Z" }, - { url = "https://files.pythonhosted.org/packages/e6/b7/6af9aac41ef2456b768ef0ae60acf8abcebb450a52043d030a65b4b7c9bd/pandas-3.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:97ca08674e3287c7148f4858b01136f8bdfe7202ad25ad04fec602dd1d29d132", size = 9185333, upload-time = "2026-02-17T22:19:53.266Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/fc/848bb6710bc6061cb0c5badd65b92ff75c81302e0e31e496d00029fe4953/pandas-3.0.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:58eeb1b2e0fb322befcf2bbc9ba0af41e616abadb3d3414a6bc7167f6cbfce32", size = 10772664, upload-time = "2026-02-17T22:19:55.806Z" }, - { url = "https://files.pythonhosted.org/packages/69/5c/866a9bbd0f79263b4b0db6ec1a341be13a1473323f05c122388e0f15b21d/pandas-3.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cd9af1276b5ca9e298bd79a26bda32fa9cc87ed095b2a9a60978d2ca058eaf87", size = 10421286, upload-time = "2026-02-17T22:19:58.091Z" }, - { url = "https://files.pythonhosted.org/packages/51/a4/2058fb84fb1cfbfb2d4a6d485e1940bb4ad5716e539d779852494479c580/pandas-3.0.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f87a04984d6b63788327cd9f79dda62b7f9043909d2440ceccf709249ca988", size = 10342050, upload-time = "2026-02-17T22:20:01.376Z" }, - { url = "https://files.pythonhosted.org/packages/22/1b/674e89996cc4be74db3c4eb09240c4bb549865c9c3f5d9b086ff8fcfbf00/pandas-3.0.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85fe4c4df62e1e20f9db6ebfb88c844b092c22cd5324bdcf94bfa2fc1b391221", size = 10740055, upload-time = "2026-02-17T22:20:04.328Z" }, - { url = "https://files.pythonhosted.org/packages/d0/f8/e954b750764298c22fa4614376531fe63c521ef517e7059a51f062b87dca/pandas-3.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:331ca75a2f8672c365ae25c0b29e46f5ac0c6551fdace8eec4cd65e4fac271ff", size = 11357632, upload-time = "2026-02-17T22:20:06.647Z" }, - { url = "https://files.pythonhosted.org/packages/6d/02/c6e04b694ffd68568297abd03588b6d30295265176a5c01b7459d3bc35a3/pandas-3.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15860b1fdb1973fffade772fdb931ccf9b2f400a3f5665aef94a00445d7d8dd5", size = 11810974, upload-time = "2026-02-17T22:20:08.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/41/d7dfb63d2407f12055215070c42fc6ac41b66e90a2946cdc5e759058398b/pandas-3.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:44f1364411d5670efa692b146c748f4ed013df91ee91e9bec5677fb1fd58b937", size = 10884622, upload-time = "2026-02-17T22:20:11.711Z" }, - { url = "https://files.pythonhosted.org/packages/68/b0/34937815889fa982613775e4b97fddd13250f11012d769949c5465af2150/pandas-3.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:108dd1790337a494aa80e38def654ca3f0968cf4f362c85f44c15e471667102d", size = 9452085, upload-time = "2026-02-17T22:20:14.331Z" }, -] - -[[package]] -name = "parsimonious" -version = "0.10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "regex" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7b/91/abdc50c4ef06fdf8d047f60ee777ca9b2a7885e1a9cea81343fbecda52d7/parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c", size = 52172, upload-time = "2022-09-03T17:01:17.004Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/0f/c8b64d9b54ea631fcad4e9e3c8dbe8c11bb32a623be94f22974c88e71eaf/parsimonious-0.10.0-py3-none-any.whl", hash = "sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f", size = 48427, upload-time = "2022-09-03T17:01:13.814Z" }, -] - -[[package]] -name = "pillow" -version = "12.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, - { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, - { url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012, upload-time = "2026-02-11T04:20:52.882Z" }, - { url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638, upload-time = "2026-02-11T04:20:54.444Z" }, - { url = "https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540, upload-time = "2026-02-11T04:20:55.97Z" }, - { url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613, upload-time = "2026-02-11T04:20:57.542Z" }, - { url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745, upload-time = "2026-02-11T04:20:59.196Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" }, - { url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" }, - { url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" }, - { url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" }, - { url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" }, - { url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" }, - { url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" }, - { url = "https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" }, - { url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" }, - { url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" }, - { url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" }, - { url = "https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" }, - { url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" }, - { url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" }, - { url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" }, - { url = "https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" }, - { url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" }, - { url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" }, - { url = "https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" }, - { url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" }, - { url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" }, - { url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" }, - { url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" }, - { url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" }, - { url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" }, - { url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" }, - { url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" }, - { url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" }, - { url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" }, - { url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" }, - { url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" }, - { url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" }, - { url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" }, - { url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" }, - { url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" }, - { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, -] - -[[package]] -name = "pluggy" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, -] - -[[package]] -name = "propcache" -version = "0.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, - { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, - { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, - { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, - { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, - { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size 
= 213019, upload-time = "2025-10-08T19:46:57.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, - { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, - { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, - { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, - { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, - { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, - { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, - { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = 
"2025-10-08T19:47:14.913Z" }, - { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, - { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, - { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, - { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, - { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, - { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, - { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, - { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, - { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, - { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, - { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, - { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, - { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, - { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, - { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, - { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, - { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, - { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, - { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, - { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = 
"2025-10-08T19:47:59.317Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, - { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, - { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, - { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, - { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, - { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, - { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, - { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, - { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, - { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, - { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, - { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, - { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, - { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, - { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, - { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, - { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, - { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, - { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, -] - -[[package]] -name = "psutil" -version = "7.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, - { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, - { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, - { url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = 
"2026-01-28T18:15:04.436Z" }, - { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, - { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, - { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, - { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, - { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, - { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, - { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, - { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, - { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, - { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, - { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, - { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, - { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, -] - -[[package]] -name = "pycryptodome" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152, upload-time = "2025-05-17T17:20:20.833Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348, upload-time = "2025-05-17T17:20:23.171Z" }, - { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033, upload-time = "2025-05-17T17:20:25.424Z" }, - { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142, upload-time = "2025-05-17T17:20:27.808Z" }, - { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384, upload-time = "2025-05-17T17:20:30.765Z" }, - { url = "https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237, upload-time = "2025-05-17T17:20:33.736Z" }, - { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898, upload-time = 
"2025-05-17T17:20:36.086Z" }, - { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197, upload-time = "2025-05-17T17:20:38.414Z" }, - { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600, upload-time = "2025-05-17T17:20:40.688Z" }, - { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740, upload-time = "2025-05-17T17:20:42.413Z" }, - { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685, upload-time = "2025-05-17T17:20:44.388Z" }, - { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = "2025-05-17T17:20:50.392Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" }, - { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" }, - { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = "2025-05-17T17:21:06.72Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" }, - { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" }, - { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" }, -] - -[[package]] -name = "pydantic" -version = "2.12.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.41.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, -] - -[[package]] -name = "pygments" -version = "2.19.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, -] - -[[package]] -name = "pymongo" -version = "4.16.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/65/9c/a4895c4b785fc9865a84a56e14b5bd21ca75aadc3dab79c14187cdca189b/pymongo-4.16.0.tar.gz", hash = "sha256:8ba8405065f6e258a6f872fe62d797a28f383a12178c7153c01ed04e845c600c", size = 2495323, upload-time = "2026-01-07T18:05:48.107Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/03/6dd7c53cbde98de469a3e6fb893af896dca644c476beb0f0c6342bcc368b/pymongo-4.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bd4911c40a43a821dfd93038ac824b756b6e703e26e951718522d29f6eb166a8", size = 917619, upload-time = "2026-01-07T18:04:19.173Z" }, - { url = "https://files.pythonhosted.org/packages/73/e1/328915f2734ea1f355dc9b0e98505ff670f5fab8be5e951d6ed70971c6aa/pymongo-4.16.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:25a6b03a68f9907ea6ec8bc7cf4c58a1b51a18e23394f962a6402f8e46d41211", size = 917364, upload-time = "2026-01-07T18:04:20.861Z" }, - { url = "https://files.pythonhosted.org/packages/41/fe/4769874dd9812a1bc2880a9785e61eba5340da966af888dd430392790ae0/pymongo-4.16.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:91ac0cb0fe2bf17616c2039dac88d7c9a5088f5cb5829b27c9d250e053664d31", size = 1686901, upload-time = "2026-01-07T18:04:22.219Z" }, - { url = "https://files.pythonhosted.org/packages/fa/8d/15707b9669fdc517bbc552ac60da7124dafe7ac1552819b51e97ed4038b4/pymongo-4.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf0ec79e8ca7077f455d14d915d629385153b6a11abc0b93283ed73a8013e376", size = 1723034, upload-time = "2026-01-07T18:04:24.055Z" }, - { url = "https://files.pythonhosted.org/packages/5b/af/3d5d16ff11d447d40c1472da1b366a31c7380d7ea2922a449c7f7f495567/pymongo-4.16.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2d0082631a7510318befc2b4fdab140481eb4b9dd62d9245e042157085da2a70", size = 1797161, upload-time = "2026-01-07T18:04:25.964Z" }, - { url = "https://files.pythonhosted.org/packages/fb/04/725ab8664eeec73ec125b5a873448d80f5d8cf2750aaaf804cbc538a50a5/pymongo-4.16.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85dc2f3444c346ea019a371e321ac868a4fab513b7a55fe368f0cc78de8177cc", size = 1780938, upload-time = "2026-01-07T18:04:28.745Z" }, - { url = "https://files.pythonhosted.org/packages/22/50/dd7e9095e1ca35f93c3c844c92eb6eb0bc491caeb2c9bff3b32fe3c9b18f/pymongo-4.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dabbf3c14de75a20cc3c30bf0c6527157224a93dfb605838eabb1a2ee3be008d", size = 1714342, upload-time = "2026-01-07T18:04:30.331Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/c9/542776987d5c31ae8e93e92680ea2b6e5a2295f398b25756234cabf38a39/pymongo-4.16.0-cp312-cp312-win32.whl", hash = "sha256:60307bb91e0ab44e560fe3a211087748b2b5f3e31f403baf41f5b7b0a70bd104", size = 887868, upload-time = "2026-01-07T18:04:32.124Z" }, - { url = "https://files.pythonhosted.org/packages/2e/d4/b4045a7ccc5680fb496d01edf749c7a9367cc8762fbdf7516cf807ef679b/pymongo-4.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:f513b2c6c0d5c491f478422f6b5b5c27ac1af06a54c93ef8631806f7231bd92e", size = 907554, upload-time = "2026-01-07T18:04:33.685Z" }, - { url = "https://files.pythonhosted.org/packages/60/4c/33f75713d50d5247f2258405142c0318ff32c6f8976171c4fcae87a9dbdf/pymongo-4.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:dfc320f08ea9a7ec5b2403dc4e8150636f0d6150f4b9792faaae539c88e7db3b", size = 892971, upload-time = "2026-01-07T18:04:35.594Z" }, - { url = "https://files.pythonhosted.org/packages/47/84/148d8b5da8260f4679d6665196ae04ab14ffdf06f5fe670b0ab11942951f/pymongo-4.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d15f060bc6d0964a8bb70aba8f0cb6d11ae99715438f640cff11bbcf172eb0e8", size = 972009, upload-time = "2026-01-07T18:04:38.303Z" }, - { url = "https://files.pythonhosted.org/packages/1e/5e/9f3a8daf583d0adaaa033a3e3e58194d2282737dc164014ff33c7a081103/pymongo-4.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a19ea46a0fe71248965305a020bc076a163311aefbaa1d83e47d06fa30ac747", size = 971784, upload-time = "2026-01-07T18:04:39.669Z" }, - { url = "https://files.pythonhosted.org/packages/ad/f2/b6c24361fcde24946198573c0176406bfd5f7b8538335f3d939487055322/pymongo-4.16.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:311d4549d6bf1f8c61d025965aebb5ba29d1481dc6471693ab91610aaffbc0eb", size = 1947174, upload-time = "2026-01-07T18:04:41.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/1a/8634192f98cf740b3d174e1018dd0350018607d5bd8ac35a666dc49c732b/pymongo-4.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46ffb728d92dd5b09fc034ed91acf5595657c7ca17d4cf3751322cd554153c17", size = 1991727, upload-time = "2026-01-07T18:04:42.965Z" }, - { url = "https://files.pythonhosted.org/packages/5a/2f/0c47ac84572b28e23028a23a3798a1f725e1c23b0cf1c1424678d16aff42/pymongo-4.16.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:acda193f440dd88c2023cb00aa8bd7b93a9df59978306d14d87a8b12fe426b05", size = 2082497, upload-time = "2026-01-07T18:04:44.652Z" }, - { url = "https://files.pythonhosted.org/packages/ba/57/9f46ef9c862b2f0cf5ce798f3541c201c574128d31ded407ba4b3918d7b6/pymongo-4.16.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d9fdb386cf958e6ef6ff537d6149be7edb76c3268cd6833e6c36aa447e4443f", size = 2064947, upload-time = "2026-01-07T18:04:46.228Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/5421c0998f38e32288100a07f6cb2f5f9f352522157c901910cb2927e211/pymongo-4.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91899dd7fb9a8c50f09c3c1cf0cb73bfbe2737f511f641f19b9650deb61c00ca", size = 1980478, upload-time = "2026-01-07T18:04:48.017Z" }, - { url = "https://files.pythonhosted.org/packages/92/93/bfc448d025e12313a937d6e1e0101b50cc9751636b4b170e600fe3203063/pymongo-4.16.0-cp313-cp313-win32.whl", hash = "sha256:2cd60cd1e05de7f01927f8e25ca26b3ea2c09de8723241e5d3bcfdc70eaff76b", size = 934672, upload-time = "2026-01-07T18:04:49.538Z" }, - { url = "https://files.pythonhosted.org/packages/96/10/12710a5e01218d50c3dd165fd72c5ed2699285f77348a3b1a119a191d826/pymongo-4.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ead8a0050c53eaa55935895d6919d393d0328ec24b2b9115bdbe881aa222673", size = 959237, upload-time = 
"2026-01-07T18:04:51.382Z" }, - { url = "https://files.pythonhosted.org/packages/0c/56/d288bcd1d05bc17ec69df1d0b1d67bc710c7c5dbef86033a5a4d2e2b08e6/pymongo-4.16.0-cp313-cp313-win_arm64.whl", hash = "sha256:dbbc5b254c36c37d10abb50e899bc3939bbb7ab1e7c659614409af99bd3e7675", size = 940909, upload-time = "2026-01-07T18:04:52.904Z" }, - { url = "https://files.pythonhosted.org/packages/30/9e/4d343f8d0512002fce17915a89477b9f916bda1205729e042d8f23acf194/pymongo-4.16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:8a254d49a9ffe9d7f888e3c677eed3729b14ce85abb08cd74732cead6ccc3c66", size = 1026634, upload-time = "2026-01-07T18:04:54.359Z" }, - { url = "https://files.pythonhosted.org/packages/c3/e3/341f88c5535df40c0450fda915f582757bb7d988cdfc92990a5e27c4c324/pymongo-4.16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a1bf44e13cf2d44d2ea2e928a8140d5d667304abe1a61c4d55b4906f389fbe64", size = 1026252, upload-time = "2026-01-07T18:04:56.642Z" }, - { url = "https://files.pythonhosted.org/packages/af/64/9471b22eb98f0a2ca0b8e09393de048502111b2b5b14ab1bd9e39708aab5/pymongo-4.16.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f1c5f1f818b669875d191323a48912d3fcd2e4906410e8297bb09ac50c4d5ccc", size = 2207399, upload-time = "2026-01-07T18:04:58.255Z" }, - { url = "https://files.pythonhosted.org/packages/87/ac/47c4d50b25a02f21764f140295a2efaa583ee7f17992a5e5fa542b3a690f/pymongo-4.16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77cfd37a43a53b02b7bd930457c7994c924ad8bbe8dff91817904bcbf291b371", size = 2260595, upload-time = "2026-01-07T18:04:59.788Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1b/0ce1ce9dd036417646b2fe6f63b58127acff3cf96eeb630c34ec9cd675ff/pymongo-4.16.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:36ef2fee50eee669587d742fb456e349634b4fcf8926208766078b089054b24b", size = 2366958, upload-time = 
"2026-01-07T18:05:01.942Z" }, - { url = "https://files.pythonhosted.org/packages/3e/3c/a5a17c0d413aa9d6c17bc35c2b472e9e79cda8068ba8e93433b5f43028e9/pymongo-4.16.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55f8d5a6fe2fa0b823674db2293f92d74cd5f970bc0360f409a1fc21003862d3", size = 2346081, upload-time = "2026-01-07T18:05:03.576Z" }, - { url = "https://files.pythonhosted.org/packages/65/19/f815533d1a88fb8a3b6c6e895bb085ffdae68ccb1e6ed7102202a307f8e2/pymongo-4.16.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9caacac0dd105e2555521002e2d17afc08665187017b466b5753e84c016628e6", size = 2246053, upload-time = "2026-01-07T18:05:05.459Z" }, - { url = "https://files.pythonhosted.org/packages/c6/88/4be3ec78828dc64b212c123114bd6ae8db5b7676085a7b43cc75d0131bd2/pymongo-4.16.0-cp314-cp314-win32.whl", hash = "sha256:c789236366525c3ee3cd6e4e450a9ff629a7d1f4d88b8e18a0aea0615fd7ecf8", size = 989461, upload-time = "2026-01-07T18:05:07.018Z" }, - { url = "https://files.pythonhosted.org/packages/af/5a/ab8d5af76421b34db483c9c8ebc3a2199fb80ae63dc7e18f4cf1df46306a/pymongo-4.16.0-cp314-cp314-win_amd64.whl", hash = "sha256:2b0714d7764efb29bf9d3c51c964aed7c4c7237b341f9346f15ceaf8321fdb35", size = 1017803, upload-time = "2026-01-07T18:05:08.499Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f4/98d68020728ac6423cf02d17cfd8226bf6cce5690b163d30d3f705e8297e/pymongo-4.16.0-cp314-cp314-win_arm64.whl", hash = "sha256:12762e7cc0f8374a8cae3b9f9ed8dabb5d438c7b33329232dd9b7de783454033", size = 997184, upload-time = "2026-01-07T18:05:09.944Z" }, - { url = "https://files.pythonhosted.org/packages/50/00/dc3a271daf06401825b9c1f4f76f018182c7738281ea54b9762aea0560c1/pymongo-4.16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1c01e8a7cd0ea66baf64a118005535ab5bf9f9eb63a1b50ac3935dccf9a54abe", size = 1083303, upload-time = "2026-01-07T18:05:11.702Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/4b/b5375ee21d12eababe46215011ebc63801c0d2c5ffdf203849d0d79f9852/pymongo-4.16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4c4872299ebe315a79f7f922051061634a64fda95b6b17677ba57ef00b2ba2a4", size = 1083233, upload-time = "2026-01-07T18:05:13.182Z" }, - { url = "https://files.pythonhosted.org/packages/ee/e3/52efa3ca900622c7dcb56c5e70f15c906816d98905c22d2ee1f84d9a7b60/pymongo-4.16.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:78037d02389745e247fe5ab0bcad5d1ab30726eaac3ad79219c7d6bbb07eec53", size = 2527438, upload-time = "2026-01-07T18:05:14.981Z" }, - { url = "https://files.pythonhosted.org/packages/cb/96/43b1be151c734e7766c725444bcbfa1de6b60cc66bfb406203746839dd25/pymongo-4.16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c126fb72be2518395cc0465d4bae03125119136462e1945aea19840e45d89cfc", size = 2600399, upload-time = "2026-01-07T18:05:16.794Z" }, - { url = "https://files.pythonhosted.org/packages/e7/62/fa64a5045dfe3a1cd9217232c848256e7bc0136cffb7da4735c5e0d30e40/pymongo-4.16.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f3867dc225d9423c245a51eaac2cfcd53dde8e0a8d8090bb6aed6e31bd6c2d4f", size = 2720960, upload-time = "2026-01-07T18:05:18.498Z" }, - { url = "https://files.pythonhosted.org/packages/54/7b/01577eb97e605502821273a5bc16ce0fb0be5c978fe03acdbff471471202/pymongo-4.16.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f25001a955073b80510c0c3db0e043dbbc36904fd69e511c74e3d8640b8a5111", size = 2699344, upload-time = "2026-01-07T18:05:20.073Z" }, - { url = "https://files.pythonhosted.org/packages/55/68/6ef6372d516f703479c3b6cbbc45a5afd307173b1cbaccd724e23919bb1a/pymongo-4.16.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d9885aad05f82fd7ea0c9ca505d60939746b39263fa273d0125170da8f59098", size = 2577133, upload-time = "2026-01-07T18:05:22.052Z" }, - { url = "https://files.pythonhosted.org/packages/15/c7/b5337093bb01da852f945802328665f85f8109dbe91d81ea2afe5ff059b9/pymongo-4.16.0-cp314-cp314t-win32.whl", hash = "sha256:948152b30eddeae8355495f9943a3bf66b708295c0b9b6f467de1c620f215487", size = 1040560, upload-time = "2026-01-07T18:05:23.888Z" }, - { url = "https://files.pythonhosted.org/packages/96/8c/5b448cd1b103f3889d5713dda37304c81020ff88e38a826e8a75ddff4610/pymongo-4.16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f6e42c1bc985d9beee884780ae6048790eb4cd565c46251932906bdb1630034a", size = 1075081, upload-time = "2026-01-07T18:05:26.874Z" }, - { url = "https://files.pythonhosted.org/packages/32/cd/ddc794cdc8500f6f28c119c624252fb6dfb19481c6d7ed150f13cf468a6d/pymongo-4.16.0-cp314-cp314t-win_arm64.whl", hash = "sha256:6b2a20edb5452ac8daa395890eeb076c570790dfce6b7a44d788af74c2f8cf96", size = 1047725, upload-time = "2026-01-07T18:05:28.47Z" }, -] - -[[package]] -name = "pyparsing" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, -] - -[[package]] -name = "pytest" -version = "9.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = 
"pluggy" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, -] - -[[package]] -name = "pytest-asyncio" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, -] - -[[package]] -name = "python-i18n" -version = "0.3.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/32/d9ba976458c9503ec22db4eb677a5d919edaecd73d893effeaa92a67b84b/python-i18n-0.3.9.tar.gz", hash = "sha256:df97f3d2364bf3a7ebfbd6cbefe8e45483468e52a9e30b909c6078f5f471e4e8", size = 11778, upload-time = "2020-08-26T14:31:27.512Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/73/9a0b2974dd9a3d50788d235f10c4d73c2efcd22926036309645fc2f0db0c/python_i18n-0.3.9-py3-none-any.whl", hash = "sha256:bda5b8d889ebd51973e22e53746417bd32783c9bd6780fd27cadbb733915651d", size = 13750, upload-time = "2020-08-26T14:31:26.266Z" }, -] - -[[package]] -name = "pytz" -version = "2026.1.post1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, -] - -[[package]] -name = "pyunormalize" -version = "17.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/25/ab/b912c484cfb96ba4834efe050bbf10c9e157bd8189eb859aefba8712b136/pyunormalize-17.0.0.tar.gz", hash = 
"sha256:0949a3e56817e287febcaf1b0cc4b5adf0bb107628d379335938040947eec792", size = 53121, upload-time = "2025-09-28T20:53:06.141Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/80/61512483dc509e3ae8a42fb143479d1e406ce1d91f8f08d538a3dde39c6d/pyunormalize-17.0.0-py3-none-any.whl", hash = "sha256:f0d93b076f938db2b26d319d04f2b58505d1cd7a80b5b72badbe7d1aa4d2a31c", size = 51358, upload-time = "2025-09-28T20:53:04.876Z" }, -] - -[[package]] -name = "pywin32" -version = "311" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, - { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, - { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 
9494700, upload-time = "2025-07-14T20:13:28.243Z" }, - { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, - { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, - { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, -] - -[[package]] -name = "pyyaml" -version = "6.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, - { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, - { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, - { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, - { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, - { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, - { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, - { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, - { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, - { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, - { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, - { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, - { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, - { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, - { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, - { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, - { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, - { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, -] - -[[package]] -name = "regex" -version = "2026.2.28" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/71/41455aa99a5a5ac1eaf311f5d8efd9ce6433c03ac1e0962de163350d0d97/regex-2026.2.28.tar.gz", hash = "sha256:a729e47d418ea11d03469f321aaf67cdee8954cde3ff2cf8403ab87951ad10f2", size = 415184, upload-time = "2026-02-28T02:19:42.792Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/42/9061b03cf0fc4b5fa2c3984cbbaed54324377e440a5c5a29d29a72518d62/regex-2026.2.28-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fcf26c3c6d0da98fada8ae4ef0aa1c3405a431c0a77eb17306d38a89b02adcd7", size = 489574, upload-time = "2026-02-28T02:16:50.455Z" }, - { url = "https://files.pythonhosted.org/packages/77/83/0c8a5623a233015595e3da499c5a1c13720ac63c107897a6037bb97af248/regex-2026.2.28-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02473c954af35dd2defeb07e44182f5705b30ea3f351a7cbffa9177beb14da5d", size = 291426, upload-time = "2026-02-28T02:16:52.52Z" }, - { url = "https://files.pythonhosted.org/packages/9e/06/3ef1ac6910dc3295ebd71b1f9bfa737e82cfead211a18b319d45f85ddd09/regex-2026.2.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b65d33a17101569f86d9c5966a8b1d7fbf8afdda5a8aa219301b0a80f58cf7d", size = 289200, upload-time = "2026-02-28T02:16:54.08Z" }, - { url = "https://files.pythonhosted.org/packages/dd/c9/8cc8d850b35ab5650ff6756a1cb85286e2000b66c97520b29c1587455344/regex-2026.2.28-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e71dcecaa113eebcc96622c17692672c2d104b1d71ddf7adeda90da7ddeb26fc", size = 796765, upload-time = "2026-02-28T02:16:55.905Z" 
}, - { url = "https://files.pythonhosted.org/packages/e9/5d/57702597627fc23278ebf36fbb497ac91c0ce7fec89ac6c81e420ca3e38c/regex-2026.2.28-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:481df4623fa4969c8b11f3433ed7d5e3dc9cec0f008356c3212b3933fb77e3d8", size = 863093, upload-time = "2026-02-28T02:16:58.094Z" }, - { url = "https://files.pythonhosted.org/packages/02/6d/f3ecad537ca2811b4d26b54ca848cf70e04fcfc138667c146a9f3157779c/regex-2026.2.28-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64e7c6ad614573e0640f271e811a408d79a9e1fe62a46adb602f598df42a818d", size = 909455, upload-time = "2026-02-28T02:17:00.918Z" }, - { url = "https://files.pythonhosted.org/packages/9e/40/bb226f203caa22c1043c1ca79b36340156eca0f6a6742b46c3bb222a3a57/regex-2026.2.28-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6b08a06976ff4fb0d83077022fde3eca06c55432bb997d8c0495b9a4e9872f4", size = 802037, upload-time = "2026-02-28T02:17:02.842Z" }, - { url = "https://files.pythonhosted.org/packages/44/7c/c6d91d8911ac6803b45ca968e8e500c46934e58c0903cbc6d760ee817a0a/regex-2026.2.28-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:864cdd1a2ef5716b0ab468af40139e62ede1b3a53386b375ec0786bb6783fc05", size = 775113, upload-time = "2026-02-28T02:17:04.506Z" }, - { url = "https://files.pythonhosted.org/packages/dc/8d/4a9368d168d47abd4158580b8c848709667b1cd293ff0c0c277279543bd0/regex-2026.2.28-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:511f7419f7afab475fd4d639d4aedfc54205bcb0800066753ef68a59f0f330b5", size = 784194, upload-time = "2026-02-28T02:17:06.888Z" }, - { url = "https://files.pythonhosted.org/packages/cc/bf/2c72ab5d8b7be462cb1651b5cc333da1d0068740342f350fcca3bca31947/regex-2026.2.28-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b42f7466e32bf15a961cf09f35fa6323cc72e64d3d2c990b10de1274a5da0a59", size = 
856846, upload-time = "2026-02-28T02:17:09.11Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f4/6b65c979bb6d09f51bb2d2a7bc85de73c01ec73335d7ddd202dcb8cd1c8f/regex-2026.2.28-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8710d61737b0c0ce6836b1da7109f20d495e49b3809f30e27e9560be67a257bf", size = 763516, upload-time = "2026-02-28T02:17:11.004Z" }, - { url = "https://files.pythonhosted.org/packages/8e/32/29ea5e27400ee86d2cc2b4e80aa059df04eaf78b4f0c18576ae077aeff68/regex-2026.2.28-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4390c365fd2d45278f45afd4673cb90f7285f5701607e3ad4274df08e36140ae", size = 849278, upload-time = "2026-02-28T02:17:12.693Z" }, - { url = "https://files.pythonhosted.org/packages/1d/91/3233d03b5f865111cd517e1c95ee8b43e8b428d61fa73764a80c9bb6f537/regex-2026.2.28-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cb3b1db8ff6c7b8bf838ab05583ea15230cb2f678e569ab0e3a24d1e8320940b", size = 790068, upload-time = "2026-02-28T02:17:14.9Z" }, - { url = "https://files.pythonhosted.org/packages/76/92/abc706c1fb03b4580a09645b206a3fc032f5a9f457bc1a8038ac555658ab/regex-2026.2.28-cp312-cp312-win32.whl", hash = "sha256:f8ed9a5d4612df9d4de15878f0bc6aa7a268afbe5af21a3fdd97fa19516e978c", size = 266416, upload-time = "2026-02-28T02:17:17.15Z" }, - { url = "https://files.pythonhosted.org/packages/fa/06/2a6f7dff190e5fa9df9fb4acf2fdf17a1aa0f7f54596cba8de608db56b3a/regex-2026.2.28-cp312-cp312-win_amd64.whl", hash = "sha256:01d65fd24206c8e1e97e2e31b286c59009636c022eb5d003f52760b0f42155d4", size = 277297, upload-time = "2026-02-28T02:17:18.723Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f0/58a2484851fadf284458fdbd728f580d55c1abac059ae9f048c63b92f427/regex-2026.2.28-cp312-cp312-win_arm64.whl", hash = "sha256:c0b5ccbb8ffb433939d248707d4a8b31993cb76ab1a0187ca886bf50e96df952", size = 270408, upload-time = "2026-02-28T02:17:20.328Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/f6/dc9ef48c61b79c8201585bf37fa70cd781977da86e466cd94e8e95d2443b/regex-2026.2.28-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6d63a07e5ec8ce7184452cb00c41c37b49e67dc4f73b2955b5b8e782ea970784", size = 489311, upload-time = "2026-02-28T02:17:22.591Z" }, - { url = "https://files.pythonhosted.org/packages/95/c8/c20390f2232d3f7956f420f4ef1852608ad57aa26c3dd78516cb9f3dc913/regex-2026.2.28-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e59bc8f30414d283ae8ee1617b13d8112e7135cb92830f0ec3688cb29152585a", size = 291285, upload-time = "2026-02-28T02:17:24.355Z" }, - { url = "https://files.pythonhosted.org/packages/d2/a6/ba1068a631ebd71a230e7d8013fcd284b7c89c35f46f34a7da02082141b1/regex-2026.2.28-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de0cf053139f96219ccfabb4a8dd2d217c8c82cb206c91d9f109f3f552d6b43d", size = 289051, upload-time = "2026-02-28T02:17:26.722Z" }, - { url = "https://files.pythonhosted.org/packages/1d/1b/7cc3b7af4c244c204b7a80924bd3d85aecd9ba5bc82b485c5806ee8cda9e/regex-2026.2.28-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb4db2f17e6484904f986c5a657cec85574c76b5c5e61c7aae9ffa1bc6224f95", size = 796842, upload-time = "2026-02-28T02:17:29.064Z" }, - { url = "https://files.pythonhosted.org/packages/24/87/26bd03efc60e0d772ac1e7b60a2e6325af98d974e2358f659c507d3c76db/regex-2026.2.28-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52b017b35ac2214d0db5f4f90e303634dc44e4aba4bd6235a27f97ecbe5b0472", size = 863083, upload-time = "2026-02-28T02:17:31.363Z" }, - { url = "https://files.pythonhosted.org/packages/ae/54/aeaf4afb1aa0a65e40de52a61dc2ac5b00a83c6cb081c8a1d0dda74f3010/regex-2026.2.28-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69fc560ccbf08a09dc9b52ab69cacfae51e0ed80dc5693078bdc97db2f91ae96", size = 909412, upload-time = 
"2026-02-28T02:17:33.248Z" }, - { url = "https://files.pythonhosted.org/packages/12/2f/049901def913954e640d199bbc6a7ca2902b6aeda0e5da9d17f114100ec2/regex-2026.2.28-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e61eea47230eba62a31f3e8a0e3164d0f37ef9f40529fb2c79361bc6b53d2a92", size = 802101, upload-time = "2026-02-28T02:17:35.053Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/512fb9ff7f5b15ea204bb1967ebb649059446decacccb201381f9fa6aad4/regex-2026.2.28-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4f5c0b182ad4269e7381b7c27fdb0408399881f7a92a4624fd5487f2971dfc11", size = 775260, upload-time = "2026-02-28T02:17:37.692Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/9a92935878aba19bd72706b9db5646a6f993d99b3f6ed42c02ec8beb1d61/regex-2026.2.28-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:96f6269a2882fbb0ee76967116b83679dc628e68eaea44e90884b8d53d833881", size = 784311, upload-time = "2026-02-28T02:17:39.855Z" }, - { url = "https://files.pythonhosted.org/packages/09/d3/fc51a8a738a49a6b6499626580554c9466d3ea561f2b72cfdc72e4149773/regex-2026.2.28-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5acd4b6a95f37c3c3828e5d053a7d4edaedb85de551db0153754924cb7c83e3", size = 856876, upload-time = "2026-02-28T02:17:42.317Z" }, - { url = "https://files.pythonhosted.org/packages/08/b7/2e641f3d084b120ca4c52e8c762a78da0b32bf03ef546330db3e2635dc5f/regex-2026.2.28-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2234059cfe33d9813a3677ef7667999caea9eeaa83fef98eb6ce15c6cf9e0215", size = 763632, upload-time = "2026-02-28T02:17:45.073Z" }, - { url = "https://files.pythonhosted.org/packages/fe/6d/0009021d97e79ee99f3d8641f0a8d001eed23479ade4c3125a5480bf3e2d/regex-2026.2.28-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c15af43c72a7fb0c97cbc66fa36a43546eddc5c06a662b64a0cbf30d6ac40944", size = 849320, upload-time = "2026-02-28T02:17:47.192Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/7a/51cfbad5758f8edae430cb21961a9c8d04bce1dae4d2d18d4186eec7cfa1/regex-2026.2.28-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9185cc63359862a6e80fe97f696e04b0ad9a11c4ac0a4a927f979f611bfe3768", size = 790152, upload-time = "2026-02-28T02:17:49.067Z" }, - { url = "https://files.pythonhosted.org/packages/90/3d/a83e2b6b3daa142acb8c41d51de3876186307d5cb7490087031747662500/regex-2026.2.28-cp313-cp313-win32.whl", hash = "sha256:fb66e5245db9652abd7196ace599b04d9c0e4aa7c8f0e2803938377835780081", size = 266398, upload-time = "2026-02-28T02:17:50.744Z" }, - { url = "https://files.pythonhosted.org/packages/85/4f/16e9ebb1fe5425e11b9596c8d57bf8877dcb32391da0bfd33742e3290637/regex-2026.2.28-cp313-cp313-win_amd64.whl", hash = "sha256:71a911098be38c859ceb3f9a9ce43f4ed9f4c6720ad8684a066ea246b76ad9ff", size = 277282, upload-time = "2026-02-28T02:17:53.074Z" }, - { url = "https://files.pythonhosted.org/packages/07/b4/92851335332810c5a89723bf7a7e35c7209f90b7d4160024501717b28cc9/regex-2026.2.28-cp313-cp313-win_arm64.whl", hash = "sha256:39bb5727650b9a0275c6a6690f9bb3fe693a7e6cc5c3155b1240aedf8926423e", size = 270382, upload-time = "2026-02-28T02:17:54.888Z" }, - { url = "https://files.pythonhosted.org/packages/24/07/6c7e4cec1e585959e96cbc24299d97e4437a81173217af54f1804994e911/regex-2026.2.28-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:97054c55db06ab020342cc0d35d6f62a465fa7662871190175f1ad6c655c028f", size = 492541, upload-time = "2026-02-28T02:17:56.813Z" }, - { url = "https://files.pythonhosted.org/packages/7c/13/55eb22ada7f43d4f4bb3815b6132183ebc331c81bd496e2d1f3b8d862e0d/regex-2026.2.28-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d25a10811de831c2baa6aef3c0be91622f44dd8d31dd12e69f6398efb15e48b", size = 292984, upload-time = "2026-02-28T02:17:58.538Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/11/c301f8cb29ce9644a5ef85104c59244e6e7e90994a0f458da4d39baa8e17/regex-2026.2.28-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d6cfe798d8da41bb1862ed6e0cba14003d387c3c0c4a5d45591076ae9f0ce2f8", size = 291509, upload-time = "2026-02-28T02:18:00.208Z" }, - { url = "https://files.pythonhosted.org/packages/b5/43/aabe384ec1994b91796e903582427bc2ffaed9c4103819ed3c16d8e749f3/regex-2026.2.28-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd0ce43e71d825b7c0661f9c54d4d74bd97c56c3fd102a8985bcfea48236bacb", size = 809429, upload-time = "2026-02-28T02:18:02.328Z" }, - { url = "https://files.pythonhosted.org/packages/04/b8/8d2d987a816720c4f3109cee7c06a4b24ad0e02d4fc74919ab619e543737/regex-2026.2.28-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00945d007fd74a9084d2ab79b695b595c6b7ba3698972fadd43e23230c6979c1", size = 869422, upload-time = "2026-02-28T02:18:04.23Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ad/2c004509e763c0c3719f97c03eca26473bffb3868d54c5f280b8cd4f9e3d/regex-2026.2.28-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bec23c11cbbf09a4df32fe50d57cbdd777bc442269b6e39a1775654f1c95dee2", size = 915175, upload-time = "2026-02-28T02:18:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/55/c2/fd429066da487ef555a9da73bf214894aec77fc8c66a261ee355a69871a8/regex-2026.2.28-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5cdcc17d935c8f9d3f4db5c2ebe2640c332e3822ad5d23c2f8e0228e6947943a", size = 812044, upload-time = "2026-02-28T02:18:08.736Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ca/feedb7055c62a3f7f659971bf45f0e0a87544b6b0cf462884761453f97c5/regex-2026.2.28-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:a448af01e3d8031c89c5d902040b124a5e921a25c4e5e07a861ca591ce429341", size = 782056, upload-time = "2026-02-28T02:18:10.777Z" }, - { url = "https://files.pythonhosted.org/packages/95/30/1aa959ed0d25c1dd7dd5047ea8ba482ceaef38ce363c401fd32a6b923e60/regex-2026.2.28-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:10d28e19bd4888e4abf43bd3925f3c134c52fdf7259219003588a42e24c2aa25", size = 798743, upload-time = "2026-02-28T02:18:13.025Z" }, - { url = "https://files.pythonhosted.org/packages/3b/1f/dadb9cf359004784051c897dcf4d5d79895f73a1bbb7b827abaa4814ae80/regex-2026.2.28-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:99985a2c277dcb9ccb63f937451af5d65177af1efdeb8173ac55b61095a0a05c", size = 864633, upload-time = "2026-02-28T02:18:16.84Z" }, - { url = "https://files.pythonhosted.org/packages/a7/f1/b9a25eb24e1cf79890f09e6ec971ee5b511519f1851de3453bc04f6c902b/regex-2026.2.28-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:e1e7b24cb3ae9953a560c563045d1ba56ee4749fbd05cf21ba571069bd7be81b", size = 770862, upload-time = "2026-02-28T02:18:18.892Z" }, - { url = "https://files.pythonhosted.org/packages/02/9a/c5cb10b7aa6f182f9247a30cc9527e326601f46f4df864ac6db588d11fcd/regex-2026.2.28-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d8511a01d0e4ee1992eb3ba19e09bc1866fe03f05129c3aec3fdc4cbc77aad3f", size = 854788, upload-time = "2026-02-28T02:18:21.475Z" }, - { url = "https://files.pythonhosted.org/packages/0a/50/414ba0731c4bd40b011fa4703b2cc86879ec060c64f2a906e65a56452589/regex-2026.2.28-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aaffaecffcd2479ce87aa1e74076c221700b7c804e48e98e62500ee748f0f550", size = 800184, upload-time = "2026-02-28T02:18:23.492Z" }, - { url = "https://files.pythonhosted.org/packages/69/50/0c7290987f97e7e6830b0d853f69dc4dc5852c934aae63e7fdcd76b4c383/regex-2026.2.28-cp313-cp313t-win32.whl", hash = "sha256:ef77bdde9c9eba3f7fa5b58084b29bbcc74bcf55fdbeaa67c102a35b5bd7e7cc", size = 269137, upload-time = 
"2026-02-28T02:18:25.375Z" }, - { url = "https://files.pythonhosted.org/packages/68/80/ef26ff90e74ceb4051ad6efcbbb8a4be965184a57e879ebcbdef327d18fa/regex-2026.2.28-cp313-cp313t-win_amd64.whl", hash = "sha256:98adf340100cbe6fbaf8e6dc75e28f2c191b1be50ffefe292fb0e6f6eefdb0d8", size = 280682, upload-time = "2026-02-28T02:18:27.205Z" }, - { url = "https://files.pythonhosted.org/packages/69/8b/fbad9c52e83ffe8f97e3ed1aa0516e6dff6bb633a41da9e64645bc7efdc5/regex-2026.2.28-cp313-cp313t-win_arm64.whl", hash = "sha256:2fb950ac1d88e6b6a9414381f403797b236f9fa17e1eee07683af72b1634207b", size = 271735, upload-time = "2026-02-28T02:18:29.015Z" }, - { url = "https://files.pythonhosted.org/packages/cf/03/691015f7a7cb1ed6dacb2ea5de5682e4858e05a4c5506b2839cd533bbcd6/regex-2026.2.28-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:78454178c7df31372ea737996fb7f36b3c2c92cccc641d251e072478afb4babc", size = 489497, upload-time = "2026-02-28T02:18:30.889Z" }, - { url = "https://files.pythonhosted.org/packages/c6/ba/8db8fd19afcbfa0e1036eaa70c05f20ca8405817d4ad7a38a6b4c2f031ac/regex-2026.2.28-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:5d10303dd18cedfd4d095543998404df656088240bcfd3cd20a8f95b861f74bd", size = 291295, upload-time = "2026-02-28T02:18:33.426Z" }, - { url = "https://files.pythonhosted.org/packages/5a/79/9aa0caf089e8defef9b857b52fc53801f62ff868e19e5c83d4a96612eba1/regex-2026.2.28-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:19a9c9e0a8f24f39d575a6a854d516b48ffe4cbdcb9de55cb0570a032556ecff", size = 289275, upload-time = "2026-02-28T02:18:35.247Z" }, - { url = "https://files.pythonhosted.org/packages/eb/26/ee53117066a30ef9c883bf1127eece08308ccf8ccd45c45a966e7a665385/regex-2026.2.28-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09500be324f49b470d907b3ef8af9afe857f5cca486f853853f7945ddbf75911", size = 797176, upload-time = "2026-02-28T02:18:37.15Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/1b/67fb0495a97259925f343ae78b5d24d4a6624356ae138b57f18bd43006e4/regex-2026.2.28-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fb1c4ff62277d87a7335f2c1ea4e0387b8f2b3ad88a64efd9943906aafad4f33", size = 863813, upload-time = "2026-02-28T02:18:39.478Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1d/93ac9bbafc53618091c685c7ed40239a90bf9f2a82c983f0baa97cb7ae07/regex-2026.2.28-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b8b3f1be1738feadc69f62daa250c933e85c6f34fa378f54a7ff43807c1b9117", size = 908678, upload-time = "2026-02-28T02:18:41.619Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7a/a8f5e0561702b25239846a16349feece59712ae20598ebb205580332a471/regex-2026.2.28-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc8ed8c3f41c27acb83f7b6a9eb727a73fc6663441890c5cb3426a5f6a91ce7d", size = 801528, upload-time = "2026-02-28T02:18:43.624Z" }, - { url = "https://files.pythonhosted.org/packages/96/5d/ed6d4cbde80309854b1b9f42d9062fee38ade15f7eb4909f6ef2440403b5/regex-2026.2.28-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa539be029844c0ce1114762d2952ab6cfdd7c7c9bd72e0db26b94c3c36dcc5a", size = 775373, upload-time = "2026-02-28T02:18:46.102Z" }, - { url = "https://files.pythonhosted.org/packages/6a/e9/6e53c34e8068b9deec3e87210086ecb5b9efebdefca6b0d3fa43d66dcecb/regex-2026.2.28-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7900157786428a79615a8264dac1f12c9b02957c473c8110c6b1f972dcecaddf", size = 784859, upload-time = "2026-02-28T02:18:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/48/3c/736e1c7ca7f0dcd2ae33819888fdc69058a349b7e5e84bc3e2f296bbf794/regex-2026.2.28-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0b1d2b07614d95fa2bf8a63fd1e98bd8fa2b4848dc91b1efbc8ba219fdd73952", size = 857813, 
upload-time = "2026-02-28T02:18:50.576Z" }, - { url = "https://files.pythonhosted.org/packages/6e/7c/48c4659ad9da61f58e79dbe8c05223e0006696b603c16eb6b5cbfbb52c27/regex-2026.2.28-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:b389c61aa28a79c2e0527ac36da579869c2e235a5b208a12c5b5318cda2501d8", size = 763705, upload-time = "2026-02-28T02:18:52.59Z" }, - { url = "https://files.pythonhosted.org/packages/cf/a1/bc1c261789283128165f71b71b4b221dd1b79c77023752a6074c102f18d8/regex-2026.2.28-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f467cb602f03fbd1ab1908f68b53c649ce393fde056628dc8c7e634dab6bfc07", size = 848734, upload-time = "2026-02-28T02:18:54.595Z" }, - { url = "https://files.pythonhosted.org/packages/10/d8/979407faf1397036e25a5ae778157366a911c0f382c62501009f4957cf86/regex-2026.2.28-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e8c8cb2deba42f5ec1ede46374e990f8adc5e6456a57ac1a261b19be6f28e4e6", size = 789871, upload-time = "2026-02-28T02:18:57.34Z" }, - { url = "https://files.pythonhosted.org/packages/03/23/da716821277115fcb1f4e3de1e5dc5023a1e6533598c486abf5448612579/regex-2026.2.28-cp314-cp314-win32.whl", hash = "sha256:9036b400b20e4858d56d117108d7813ed07bb7803e3eed766675862131135ca6", size = 271825, upload-time = "2026-02-28T02:18:59.202Z" }, - { url = "https://files.pythonhosted.org/packages/91/ff/90696f535d978d5f16a52a419be2770a8d8a0e7e0cfecdbfc31313df7fab/regex-2026.2.28-cp314-cp314-win_amd64.whl", hash = "sha256:1d367257cd86c1cbb97ea94e77b373a0bbc2224976e247f173d19e8f18b4afa7", size = 280548, upload-time = "2026-02-28T02:19:01.049Z" }, - { url = "https://files.pythonhosted.org/packages/69/f9/5e1b5652fc0af3fcdf7677e7df3ad2a0d47d669b34ac29a63bb177bb731b/regex-2026.2.28-cp314-cp314-win_arm64.whl", hash = "sha256:5e68192bb3a1d6fb2836da24aa494e413ea65853a21505e142e5b1064a595f3d", size = 273444, upload-time = "2026-02-28T02:19:03.255Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/eb/8389f9e940ac89bcf58d185e230a677b4fd07c5f9b917603ad5c0f8fa8fe/regex-2026.2.28-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a5dac14d0872eeb35260a8e30bac07ddf22adc1e3a0635b52b02e180d17c9c7e", size = 492546, upload-time = "2026-02-28T02:19:05.378Z" }, - { url = "https://files.pythonhosted.org/packages/7b/c7/09441d27ce2a6fa6a61ea3150ea4639c1dcda9b31b2ea07b80d6937b24dd/regex-2026.2.28-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ec0c608b7a7465ffadb344ed7c987ff2f11ee03f6a130b569aa74d8a70e8333c", size = 292986, upload-time = "2026-02-28T02:19:07.24Z" }, - { url = "https://files.pythonhosted.org/packages/fb/69/4144b60ed7760a6bd235e4087041f487aa4aa62b45618ce018b0c14833ea/regex-2026.2.28-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7815afb0ca45456613fdaf60ea9c993715511c8d53a83bc468305cbc0ee23c7", size = 291518, upload-time = "2026-02-28T02:19:09.698Z" }, - { url = "https://files.pythonhosted.org/packages/2d/be/77e5426cf5948c82f98c53582009ca9e94938c71f73a8918474f2e2990bb/regex-2026.2.28-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b059e71ec363968671693a78c5053bd9cb2fe410f9b8e4657e88377ebd603a2e", size = 809464, upload-time = "2026-02-28T02:19:12.494Z" }, - { url = "https://files.pythonhosted.org/packages/45/99/2c8c5ac90dc7d05c6e7d8e72c6a3599dc08cd577ac476898e91ca787d7f1/regex-2026.2.28-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8cf76f1a29f0e99dcfd7aef1551a9827588aae5a737fe31442021165f1920dc", size = 869553, upload-time = "2026-02-28T02:19:15.151Z" }, - { url = "https://files.pythonhosted.org/packages/53/34/daa66a342f0271e7737003abf6c3097aa0498d58c668dbd88362ef94eb5d/regex-2026.2.28-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:180e08a435a0319e6a4821c3468da18dc7001987e1c17ae1335488dfe7518dd8", size = 915289, upload-time = 
"2026-02-28T02:19:17.331Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c7/e22c2aaf0a12e7e22ab19b004bb78d32ca1ecc7ef245949935463c5567de/regex-2026.2.28-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e496956106fd59ba6322a8ea17141a27c5040e5ee8f9433ae92d4e5204462a0", size = 812156, upload-time = "2026-02-28T02:19:20.011Z" }, - { url = "https://files.pythonhosted.org/packages/7f/bb/2dc18c1efd9051cf389cd0d7a3a4d90f6804b9fff3a51b5dc3c85b935f71/regex-2026.2.28-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bba2b18d70eeb7b79950f12f633beeecd923f7c9ad6f6bae28e59b4cb3ab046b", size = 782215, upload-time = "2026-02-28T02:19:22.047Z" }, - { url = "https://files.pythonhosted.org/packages/17/1e/9e4ec9b9013931faa32226ec4aa3c71fe664a6d8a2b91ac56442128b332f/regex-2026.2.28-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6db7bfae0f8a2793ff1f7021468ea55e2699d0790eb58ee6ab36ae43aa00bc5b", size = 798925, upload-time = "2026-02-28T02:19:24.173Z" }, - { url = "https://files.pythonhosted.org/packages/71/57/a505927e449a9ccb41e2cc8d735e2abe3444b0213d1cf9cb364a8c1f2524/regex-2026.2.28-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d0b02e8b7e5874b48ae0f077ecca61c1a6a9f9895e9c6dfb191b55b242862033", size = 864701, upload-time = "2026-02-28T02:19:26.376Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ad/c62cb60cdd93e13eac5b3d9d6bd5d284225ed0e3329426f94d2552dd7cca/regex-2026.2.28-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:25b6eb660c5cf4b8c3407a1ed462abba26a926cc9965e164268a3267bcc06a43", size = 770899, upload-time = "2026-02-28T02:19:29.38Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5a/874f861f5c3d5ab99633e8030dee1bc113db8e0be299d1f4b07f5b5ec349/regex-2026.2.28-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:5a932ea8ad5d0430351ff9c76c8db34db0d9f53c1d78f06022a21f4e290c5c18", size = 854727, upload-time = "2026-02-28T02:19:31.494Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/ca/d2c03b0efde47e13db895b975b2be6a73ed90b8ba963677927283d43bf74/regex-2026.2.28-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1c2c95e1a2b0f89d01e821ff4de1be4b5d73d1f4b0bf679fa27c1ad8d2327f1a", size = 800366, upload-time = "2026-02-28T02:19:34.248Z" }, - { url = "https://files.pythonhosted.org/packages/14/bd/ee13b20b763b8989f7c75d592bfd5de37dc1181814a2a2747fedcf97e3ba/regex-2026.2.28-cp314-cp314t-win32.whl", hash = "sha256:bbb882061f742eb5d46f2f1bd5304055be0a66b783576de3d7eef1bed4778a6e", size = 274936, upload-time = "2026-02-28T02:19:36.313Z" }, - { url = "https://files.pythonhosted.org/packages/cb/e7/d8020e39414c93af7f0d8688eabcecece44abfd5ce314b21dfda0eebd3d8/regex-2026.2.28-cp314-cp314t-win_amd64.whl", hash = "sha256:6591f281cb44dc13de9585b552cec6fc6cf47fb2fe7a48892295ee9bc4a612f9", size = 284779, upload-time = "2026-02-28T02:19:38.625Z" }, - { url = "https://files.pythonhosted.org/packages/13/c0/ad225f4a405827486f1955283407cf758b6d2fb966712644c5f5aef33d1b/regex-2026.2.28-cp314-cp314t-win_arm64.whl", hash = "sha256:dee50f1be42222f89767b64b283283ef963189da0dda4a515aa54a5563c62dec", size = 275010, upload-time = "2026-02-28T02:19:40.65Z" }, -] - -[[package]] -name = "requests" -version = "2.32.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, 
upload-time = "2025-08-18T20:46:00.542Z" }, -] - -[[package]] -name = "retry-async" -version = "0.1.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "decorator" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/c0/4e06ba064b7d648f33dadd92f298e9e39960af73c5f3bdfb58f602d78710/retry_async-0.1.4.tar.gz", hash = "sha256:8414d69b20920a1d700de34b68c0f972fa36a0158450a6f6abc5b45a241ac6b6", size = 2189, upload-time = "2024-01-09T02:32:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/fb/14f39d3a6e91b6c3ba6e8b1c06ca25c0b421a8806912a3fa02bbe699a002/retry_async-0.1.4-py3-none-any.whl", hash = "sha256:21b383c7bc52013478337b894f476c9f106485cfeeb5d449abe5f745be2da219", size = 2706, upload-time = "2024-01-09T02:32:23.851Z" }, -] - -[[package]] -name = "rlp" -version = "4.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "eth-utils" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1b/2d/439b0728a92964a04d9c88ea1ca9ebb128893fbbd5834faa31f987f2fd4c/rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9", size = 33429, upload-time = "2025-02-04T22:05:59.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/fb/e4c0ced9893b84ac95b7181d69a9786ce5879aeb3bbbcbba80a164f85d6a/rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f", size = 19973, upload-time = "2025-02-04T22:05:57.05Z" }, -] - -[[package]] -name = "rocketwatch" -version = "0.1.0" -source = { virtual = "." 
} -dependencies = [ - { name = "aiocache" }, - { name = "aiohttp" }, - { name = "anthropic" }, - { name = "anyascii" }, - { name = "beautifulsoup4" }, - { name = "bidict" }, - { name = "cachetools" }, - { name = "colorama" }, - { name = "cronitor" }, - { name = "dice" }, - { name = "discord-py" }, - { name = "eth-typing" }, - { name = "eth-utils" }, - { name = "etherscan-labels" }, - { name = "graphql-query" }, - { name = "hexbytes" }, - { name = "humanize" }, - { name = "inflect" }, - { name = "matplotlib" }, - { name = "numpy" }, - { name = "pillow" }, - { name = "psutil" }, - { name = "pydantic" }, - { name = "pymongo" }, - { name = "python-i18n" }, - { name = "pytz" }, - { name = "regex" }, - { name = "retry-async" }, - { name = "seaborn" }, - { name = "tabulate" }, - { name = "termplotlib" }, - { name = "tiktoken" }, - { name = "uptime" }, - { name = "web3" }, -] - -[package.optional-dependencies] -test = [ - { name = "pytest" }, - { name = "pytest-asyncio" }, -] - -[package.metadata] -requires-dist = [ - { name = "aiocache", specifier = "==0.12.3" }, - { name = "aiohttp", specifier = "==3.13.3" }, - { name = "anthropic", specifier = "==0.84.0" }, - { name = "anyascii", specifier = "==0.3.3" }, - { name = "beautifulsoup4", specifier = "==4.14.3" }, - { name = "bidict", specifier = "==0.23.1" }, - { name = "cachetools", specifier = "==7.0.3" }, - { name = "colorama", specifier = "==0.4.6" }, - { name = "cronitor", specifier = "==4.9.0" }, - { name = "dice", specifier = "==4.0.0" }, - { name = "discord-py", specifier = "==2.7.1" }, - { name = "eth-typing", specifier = "==5.2.1" }, - { name = "eth-utils", specifier = "==5.3.1" }, - { name = "etherscan-labels", git = "https://github.com/haloooloolo/etherscan-labels" }, - { name = "graphql-query", specifier = "==1.4.0" }, - { name = "hexbytes", specifier = "==1.3.1" }, - { name = "humanize", specifier = "==4.15.0" }, - { name = "inflect", specifier = "==7.5.0" }, - { name = "matplotlib", specifier = "==3.10.8" }, - 
{ name = "numpy", specifier = "==2.4.2" }, - { name = "pillow", specifier = "==12.1.1" }, - { name = "psutil", specifier = "==7.2.2" }, - { name = "pydantic", specifier = ">=2.0.0,<3.0.0" }, - { name = "pymongo", specifier = "==4.16.0" }, - { name = "pytest", marker = "extra == 'test'", specifier = ">=8.0" }, - { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=1.0" }, - { name = "python-i18n", specifier = "==0.3.9" }, - { name = "pytz", specifier = "==2026.1.post1" }, - { name = "regex", specifier = "==2026.2.28" }, - { name = "retry-async", specifier = "==0.1.4" }, - { name = "seaborn", specifier = "==0.13.2" }, - { name = "tabulate", specifier = "==0.10.0" }, - { name = "termplotlib", specifier = "==0.3.9" }, - { name = "tiktoken", specifier = "==0.12.0" }, - { name = "uptime", specifier = "==3.0.1" }, - { name = "web3", specifier = ">=7.0.0,<8.0.0" }, -] -provides-extras = ["test"] - -[[package]] -name = "seaborn" -version = "0.13.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "matplotlib" }, - { name = "numpy" }, - { name = "pandas" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696, upload-time = "2024-01-25T13:21:52.551Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914, upload-time = "2024-01-25T13:21:49.598Z" }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - -[[package]] -name = "soupsieve" -version = "2.8.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, -] - -[[package]] -name = "tabulate" -version = "0.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/46/58/8c37dea7bbf769b20d58e7ace7e5edfe65b849442b00ffcdd56be88697c6/tabulate-0.10.0.tar.gz", hash = "sha256:e2cfde8f79420f6deeffdeda9aaec3b6bc5abce947655d17ac662b126e48a60d", size = 91754, upload-time = "2026-03-04T18:55:34.402Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/55/db07de81b5c630da5cbf5c7df646580ca26dfaefa593667fc6f2fe016d2e/tabulate-0.10.0-py3-none-any.whl", hash = "sha256:f0b0622e567335c8fabaaa659f1b33bcb6ddfe2e496071b743aa113f8774f2d3", size = 39814, upload-time = "2026-03-04T18:55:31.284Z" }, -] - -[[package]] -name = "termplotlib" -version = "0.3.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b4/60/8a74d2503dd64975402c7b8d00f6e201e8cbba5348282433fa5fb8d41b67/termplotlib-0.3.9.tar.gz", hash = "sha256:c04cbd67ac61753eac9162a99cbe87c379d4c5daf720af1df55f4423c094203e", size = 24517, upload-time = "2021-09-23T09:11:38.42Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/d0/ea24907a6d1e3c5e40ff5b58920552c3e1e4e73181a8583d5bd9d5217305/termplotlib-0.3.9-py3-none-any.whl", hash = "sha256:827bec59e0de24dfe265b9d9a4adc4df8335aa98f49c1122bd53ced9b72c5206", size = 21535, upload-time = "2021-09-23T09:11:37.108Z" }, -] - -[[package]] -name = "tiktoken" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "regex" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, - { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, - { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, - { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, - { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, - { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = 
"2025-10-06T20:21:59.876Z" }, - { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, - { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, - { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, - { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, - { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, - { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, - { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, - { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, - { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, - { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, - { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, - { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, - { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, - { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, - { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, - { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, - { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, - { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, - { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, -] - -[[package]] -name = "toolz" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, -] - -[[package]] -name = "typeguard" -version = "4.5.1" -source 
= { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2b/e8/66e25efcc18542d58706ce4e50415710593721aae26e794ab1dec34fb66f/typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274", size = 80121, upload-time = "2026-02-19T16:09:03.392Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40", size = 36745, upload-time = "2026-02-19T16:09:01.6Z" }, -] - -[[package]] -name = "types-requests" -version = "2.32.4.20260107" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0f/f3/a0663907082280664d745929205a89d41dffb29e89a50f753af7d57d0a96/types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f", size = 23165, upload-time = "2026-01-07T03:20:54.091Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/12/709ea261f2bf91ef0a26a9eed20f2623227a8ed85610c1e54c5805692ecb/types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d", size = 20676, upload-time = "2026-01-07T03:20:52.929Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, -] - -[[package]] -name = "tzdata" -version = "2025.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, -] - -[[package]] -name = "uptime" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ad/53/6c420ddf6949097d6f9406358951c9322505849bea9cb79efe3acc0bb55d/uptime-3.0.1.tar.gz", hash = "sha256:7c300254775b807ce46e3dcbcda30aa3b9a204b9c57a7ac1e79ee6dbe3942973", size = 6630, upload-time = "2013-10-07T14:19:58.456Z" } - -[[package]] -name = "urllib3" -version = "2.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, -] - -[[package]] -name = "web3" -version = "7.14.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp" }, - { name = "eth-abi" }, - { name = "eth-account" }, - { name = "eth-hash", extra = ["pycryptodome"] }, - { name = "eth-typing" }, - { name = "eth-utils" }, - { name = "hexbytes" }, - { name = "pydantic" }, - { name = "pyunormalize" }, - { name = "pywin32", marker = "sys_platform == 'win32'" }, - { name = "requests" }, - { name = "types-requests" }, - { name = "typing-extensions" }, - { name = "websockets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/26/41/435cb36d36fc5142428292b876d0553d35af95e1582ecb7d8bcb64039d18/web3-7.14.1.tar.gz", hash = "sha256:856dc8517f362aefa75fdc298d975894055565dc866f21279f27fe060b7fb2c3", size = 2208998, upload-time = "2026-02-03T22:56:41.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/d1/862bbf48867685de1a563de20a9bad2b8c5c5678b3f08adc0e06797783f5/web3-7.14.1-py3-none-any.whl", hash = 
"sha256:bec367ba44261f874662aed9b5e138aa7bb907700a30a7580b2264534e88ce12", size = 1371268, upload-time = "2026-02-03T22:56:36.577Z" }, -] - -[[package]] -name = "websockets" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, - { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, -] - -[[package]] -name = "yarl" -version = "1.23.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "multidict" }, - { name = "propcache" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, - { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, - { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, - { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, - { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, - { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, - { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, - { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, - { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, - { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, - { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, - { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, - { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, - { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, - { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, - { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, - { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, - { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, - { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, - { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, - { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, - { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, - { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, - { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, - { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, - { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, - { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, - { 
url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, - { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, - { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, - { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, - { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, - { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, - { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, - { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, - { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, - { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, - { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, - { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, - { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, - { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, - { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, - 
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, - { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, - { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, - { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, - { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, - { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, - { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, - { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, - { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, - { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, - { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, - { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, - { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, - { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, - { url 
= "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, - { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, - { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, - { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, - { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, - { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, - { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, - { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, - { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, - { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, - { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, - { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, - { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, - { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, - { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, - 
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, - { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, - { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, -] From 508495745f001830703149123e8c0d12f271f875 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 7 Mar 2026 11:01:47 +0000 Subject: [PATCH 180/279] Update astral-sh/setup-uv action to v7 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9b4fa8f7..c00138c0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,5 +11,5 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 - - uses: astral-sh/setup-uv@v6 + - uses: astral-sh/setup-uv@v7 - run: uv run --python 3.14 --extra test pytest From e9d72215f21c825814506fd55a4c9121416f0bfe Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: 
Sat, 7 Mar 2026 11:12:35 +0000 Subject: [PATCH 181/279] update minimum Python version in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e78945cf..f9b599db 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ rocketwatch/ ## Prerequisites -- Python 3.14+ +- Python 3.12+ - MongoDB 8.x - Ethereum execution and consensus layer RPC endpoints - Discord bot token From c1c943178ef391a82fa02b525e49ee1e13fa5d01 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 11:20:20 +0000 Subject: [PATCH 182/279] fix dev_time --- rocketwatch/plugins/random/random.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 6c449682..35ed5544 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -112,15 +112,13 @@ async def dev_time(self, interaction: Interaction): e.add_field(name="Beacon Time", value=f"Day {b[0]}, {b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) - e.add_field(name="Time for most of the Dev Team", value=dev_time.strftime(time_format), inline=False) - - joe_time = datetime.now(tz=pytz.timezone("America/New_York")) - e.add_field(name="Joe's Time", value=joe_time.strftime(time_format), inline=False) + e.add_field(name="Most of the core team", value=dev_time.strftime(time_format), inline=False) fornax_time = datetime.now(tz=pytz.timezone("America/Sao_Paulo")) - e.add_field(name="Fornax's Time", value=fornax_time.strftime(time_format), inline=False) + e.add_field(name="Fornax", value=fornax_time.strftime(time_format), inline=False) + e.add_field(name="Mav", value="Who even knows", inline=False) - await interaction.followup.send(embed=e) + await interaction.response.send_message(embed=e) @command() async def sea_creatures(self, interaction: Interaction, address: str | None = None): From 
d793f1b26502c584f62a4e22eed83241f16bd093 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 11:24:46 +0000 Subject: [PATCH 183/279] remove ancient Redstone deployment check --- rocketwatch/plugins/random/random.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 35ed5544..b1594547 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -151,17 +151,10 @@ async def sea_creatures(self, interaction: Interaction, address: str | None = No e.add_field(name=f"{sea_creature}:", value=f"holds over {holding_value} ETH worth of assets", inline=False) await interaction.followup.send(embed=e) - return @command() async def smoothie(self, interaction: Interaction): """Show smoothing pool information""" - try: - await rp.get_address_by_name("rocketSmoothingPool") - except Exception as err: - log.exception(err) - await interaction.followup.send("redstone not deployed yet", ephemeral=True) - return await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed(title="Smoothing Pool") From c7cb37cb40792ccca46638a3abf97ff1bb255625 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 11:54:56 +0000 Subject: [PATCH 184/279] add plugin extension info to README --- README.md | 36 +++++++++++++++++++++++++++++++++++- rocketwatch/utils/event.py | 11 ++--------- 2 files changed, 37 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index f9b599db..26b31d93 100644 --- a/README.md +++ b/README.md @@ -119,7 +119,41 @@ async def setup(bot): await bot.add_cog(MyPlugin(bot)) ``` -Plugins that track on-chain events extend `EventPlugin` from `utils/event.py`. Plugins can be selectively loaded via the `modules.include` / `modules.exclude` config fields. 
+Plugins that track events extend `EventPlugin` from [`utils/event.py`](rocketwatch/utils/event.py) and implement the `_get_new_events()` method, which is called periodically to check for new events. They may also override `get_past_events()` to support querying historical events for a given block range: + +```python +from utils.event import Event, EventPlugin +from utils.embeds import Embed + +class MyEventPlugin(EventPlugin): + async def _get_new_events(self) -> list[Event]: + events = [] + # query contracts, APIs, etc. + embed = Embed(title="My Event") + events.append(Event( + embed=embed, + topic="my_topic", + event_name="my_event", + unique_id="some_unique_id", + block_number=block_number, + )) + return events +``` + +Plugins that provide a rotating status embed (displayed by the bot when idle) extend `StatusPlugin` from [`utils/status.py`](rocketwatch/utils/status.py) and implement the `get_status()` method: + +```python +from utils.status import StatusPlugin +from utils.embeds import Embed + +class MyStatusPlugin(StatusPlugin): + async def get_status(self) -> Embed: + embed = Embed(title="My Status") + embed.add_field(name="Info", value="...") + return embed +``` + +Plugins can be selectively loaded via the `modules.include` / `modules.exclude` config fields. 
## CI/CD diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 2b4434b9..6b0f2683 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -33,21 +33,14 @@ def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot self.rate_limit = rate_limit self.lookback_distance: int = cfg.events.lookback_distance - self.last_served_block: int | None = None - self._pending_block: int | None = None + self.last_served_block: int = cfg.events.genesis - 1 + self._pending_block: int = self.last_served_block self._last_run = datetime.now() - rate_limit - async def _ensure_genesis_block(self): - if self.last_served_block is None: - block = await w3.eth.get_block(cfg.events.genesis) - self.last_served_block = block.number - 1 - self._pending_block = self.last_served_block - def start_tracking(self, block: BlockNumber) -> None: self.last_served_block = block - 1 async def get_new_events(self) -> list[Event]: - await self._ensure_genesis_block() now = datetime.now() if (now - self._last_run) < self.rate_limit: return [] From 62de1b6c67ec2e1fa76d204cec14d3f142227efd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 19:59:33 +0000 Subject: [PATCH 185/279] add scam detection tests --- tests/conftest.py | 12 + tests/message_samples.json | 928 +++++++++++++++++++++++++++++++++++++ tests/test_detect_scam.py | 156 +++++++ 3 files changed, 1096 insertions(+) create mode 100644 tests/message_samples.json create mode 100644 tests/test_detect_scam.py diff --git a/tests/conftest.py b/tests/conftest.py index 1950bf11..cfa36c97 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,5 +14,17 @@ _shared_w3_stub.bacon = MagicMock() sys.modules["utils.shared_w3"] = _shared_w3_stub +# Stub out utils.embeds which triggers CachedEns/web3 initialization at import time. +# Provide a minimal Embed class (discord.Embed subclass) for code that needs it. 
+import discord + +_embeds_stub = ModuleType("utils.embeds") +_embeds_stub.Embed = discord.Embed +_embeds_stub.resolve_ens = MagicMock() +_embeds_stub.el_explorer_url = MagicMock() +_embeds_stub.prepare_args = MagicMock() +_embeds_stub.assemble = MagicMock() +sys.modules["utils.embeds"] = _embeds_stub + # With the lazy proxy in utils.config, cfg is importable without loading a file. # No stubbing needed — tests that need a real Config can set cfg._instance directly. diff --git a/tests/message_samples.json b/tests/message_samples.json new file mode 100644 index 00000000..df24b03d --- /dev/null +++ b/tests/message_samples.json @@ -0,0 +1,928 @@ +{ + "messages": { + "safe": [ + { + "content": "hey <@186737813943746560> i can't seem to recall...does your mollyguard script provide reboot rescheduling during pruning?" + }, + { + "content": "https://twitter.com/northrocklp/status/1565681596585295873?s=21&t=-v_C_k4RFgwtwQEfBtgQBw", + "embeds": [ + { + "title": null, + "description": "LFG" + } + ] + }, + { + "content": "<@368446312800190464> I set your AFK: AFK" + }, + { + "content": "but its not working ... `rocketpool_validator | DBG 2023-01-23 18:52:45.879+00:00 Could not obtain genesis information from beacon node node=lighthouse.rescuenode.com:80 node_index=0 node_roles=AGBSD error_name=RestCommunicationError error_msg=\"Communication failed while sending/receiving request, http error [HttpReadError]: Could not read response headers\"`" + }, + { + "content": "I'm sorry <@215234895092383744>, I can't do that for you. Please summarize what you intend to do with the test eth, and a yellow or orange or Viennese user will help you." + }, + { + "content": "Do the investment DAOs get fully subscribed pretty quickly? 
Can't tell if this is late stage top signal or early shiny new bull run idea hah" + }, + { + "content": "<@576556756616871947>", + "embeds": [ + { + "title": "ETH APR for LEB8 vs 16 ETH minipool", + "description": "`ETH_apr = solo_stake_apr * (NO_ETH + Protocol_ETH*commission)/(NO_ETH + NO_RPL_value_in_ETH)`\n- Minipool16s at 15% commission get 104.55% of solo stake apr\n- Minipool16s at 20% commission get 109.10% of solo stake apr\n- LEB8s at 14% commission get 109.23% of solo stake apr\n\nThis is strictly ETH rewards divided by total investment, assuming minimum RPL investment (1.6 ETH worth for minipool16, 2.4 ETH worth for LEB8).\n\nRPL yield and RPL appreciation/depreciation not accounted for. If you're bullish or even neutral on RPL, this is a clear win.\n\nIf you're thinking about this from a migration standpoint and already hold RPL, note that it'll look even better. In the extreme where you \"want to hold enough RPL anyhow\", you can remove the RPL term entirely in the denominator (as the RPL investment in that case isn't for the ETH commission). For our three scenarios, the numbers in that case are 115%, 120%, and 142% respectively. \n\n*Last Edited by <@109422960682496000> *" + } + ] + }, + { + "content": "https://docs.rocketpool.net/guides/node/create-validator#whitelisting-an-address-to-stake-on-behalf", + "embeds": [ + { + "title": "Rocket Pool Guides & Documentation", + "description": "Rocket Pool Guides & Documentation - Decentralised Ethereum Liquid Staking Protocol" + } + ] + }, + { + "content": "https://www.validatorqueue.com/", + "embeds": [ + { + "title": "Validator Queue", + "description": "A dashboard showing the Ethereum validator enter and exit queue and estimated wait times." 
+ } + ] + }, + { + "content": "Yeah shit like this has been brutal\n\nhttps://www.rescue.org/press-release/irc-and-map-urgent-call-international-action-some-gaza-survive-little-3-minimum-daily", + "embeds": [ + { + "title": "IRC and MAP: Urgent call for international action as some in Gaza s...", + "description": "Amidst Israel’s military invasion of Rafah that threatens further deterioration to the Water, Sanitation and Hygiene (WASH) conditions in southern Gaza, and based on recent trips to Gaza, the International Rescue Committee (IRC) and Medical Aid for Palestinians (MAP) are alarmed that:" + } + ] + }, + { + "content": "https://blockworks.co/news/binance-us-coinbase-curve-in-bidding-war-for-blockfi-credit-card-customers/", + "embeds": [ + { + "title": "Binance US, Coinbase, Curve in Bidding War for BlockFi Credit Card ...", + "description": "A bidding war has reportedly erupted between two centralized exchanges and a fintech player, all seeking to acquire BlockFi's card assets." + } + ] + }, + { + "content": "Yeah I got bored of your drone quickly tbh" + }, + { + "content": "It keeps trading after level unlocks" + }, + { + "content": "its just doing something different" + }, + { + "content": "I could have a physical backup node but i am scared of the goverment scanning the network and install a trojaner on my node and steal funds (idk if possible) or get the node slashed" + }, + { + "content": "oh shit we have to give the money upfront?" + }, + { + "content": "You mean you checked the payload?" + }, + { + "content": "finally catch up to ADA" + }, + { + "content": "a much stronger case can be made if rocketpool team works on behalf of all node operators, rather than individual node operators.\nThe node operators here are probably the most eth-aligned group of people. 
Without us, there is no rETH, and no restaking with rETH" + }, + { + "content": "https://docs.rocketpool.net/guides/node/cli-intro.html#exit" + }, + { + "content": "Beats the heck out of what I was making as an engineer" + }, + { + "content": "i'll be putting some $$ into this ICO this friday - 2 people i know irl are behind the project https://metadao.fi/projects/solomon/fundraise" + }, + { + "content": "according to this https://rocket-pool.readthedocs.io/en/latest/smart-node/node-setup.html im at the point where i need to request RPL and I hear there are none in the faucet, then after that I would register my node, which might fill in the gaps in the config. I am not sure." + }, + { + "content": "i unbanned you <@851524243861536819>" + }, + { + "content": "available to everyone.\ncheck out the rocketpool section here: https://kb.beaconcha.in/beaconcha.in-explorer/mobile-app-less-than-greater-than-beacon-node", + "embeds": [ + { + "title": "Mobile App <> Node Monitoring", + "description": "A step by step tutorial on how to monitor your staking device & beaconnode on the beaconcha.in mobile app." + } + ] + }, + { + "content": "someone mentioned that there were slashings today?" + }, + { + "content": "<@178971072169902087> have 69eth?" + }, + { + "content": "if they know what they should be looking for .. yes." 
+ }, + { + "content": "_Primary_\n**[1 rETH = 1.055207 ETH](https://stake.rocketpool.net)**\n**[1 wstETH = 1.105694 ETH](https://stake.lido.fi/wrap)**\n**[1 cbETH = 1.022743 ETH](https://www.coinbase.com/cbeth/whitepaper)**\n_Secondary ([1Inch](https://app.1inch.io/#/r/0xB0De8cB8Dcc8c5382c4b7F3E978b491140B2bC55))_\n**[1 rETH = 1.067503 ETH](https://app.1inch.io/#/1/classic/limit-order/0xae78736Cd615f374D3085123A210448E74Fc6393/WETH)** (1.165% premium)\n**[1 wstETH = 1.103085 ETH](https://app.1inch.io/#/1/classic/limit-order/WETH/0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0)** (0.235% discount)\n**[1 cbETH = 0.998861 ETH](https://app.1inch.io/#/1/classic/limit-order/WETH/0xbe9895146f7af43049ca1c1ae358b0541ea49704)** (2.335% discount)\n_[bot](https://github.com/xrchz/discord) by ramana.eth (0x65FE…092c)_" + }, + { + "content": "<@!359845746058592266> did a bunch of tokenomics explaining" + }, + { + "content": "more sellers than buyers <@120342047109545984>" + }, + { + "content": "<@708771937983397898> so how are the skimming rewards handled ... I also run a solo stake and I get the rewards every 4 days or so, those rewards go to the fee distributor for the minipool now?" + }, + { + "content": "https://app.uniswap.org/#/swap?inputCurrency=0xb4efd85c19999d84251304bda99e90b92300bd93" + }, + { + "content": "Looks like we have some lobbying to do......\nhttps://ethereumorgwebsitedev01-stakingepic.gtsb.io/en/staking/pools/" + }, + { + "content": "PSA for RP Heroglyph miner `0xff6C422c6e9A53200798A771f25b72B96d4eCa64`\nYou have to use this method to adjust your graffiti . You, unfortunately, had a malformed tag. `#69,BERA@xxx-NN v1.13.0` <:NotLikeThis:814602648224923700>" + }, + { + "content": "Well, anyways, after resolving both my own and <@902166175641907210>'s ETH1 peercount problem, I'm almost certain the reason (after taking care of our port forwarding, etc) was our clock being out of sync. The geth docs talk about this, that's where I found the solution. 
So to help any poor future soul running into the same problem... Here comes:" + }, + { + "content": "https://twitter.com/search?q=%24rETH and this", + "embeds": [ + { + "title": "$rETH - Twitter Search", + "description": "The latest Tweets on $rETH. Read what people are saying and join the conversation." + } + ] + }, + { + "content": "https://imgflip.com/i/8fne4k", + "embeds": [ + { + "title": "Being/working as xxxxxx is not stressful at all", + "description": null + } + ] + }, + { + "content": "Hey <@&1138708013239246948>, **Rocket\\_Pool** just posted a new Tweet!\n", + "embeds": [ + { + "title": null, + "description": "https://t.co/cqGhAkGNU6" + } + ] + }, + { + "content": "Hey <@&1138708013239246948>, **Rocket\\_Pool** just posted a new Tweet!\n", + "embeds": [ + { + "title": null, + "description": "Keep up with developments in the Rocket Pool ecosystem - the first biweekly protocol update for 2025 is available now on Medium:\nhttps://t.co/4oerCknh3b" + } + ] + }, + { + "content": "Hey <@&1138708013239246948>, **Rocket\\_Pool** just posted a new Tweet!\n", + "embeds": [ + { + "title": null, + "description": "https://t.co/g5ATgzTM9q" + } + ] + }, + { + "content": "<@209114180186275840> I am very curious on Nodeset's take once you finish your analysis. My guess is that allowing to lend RPL in combination with the pressure to have it productive by incentivizing low collateral minipools would be a game changer for RP growth" + }, + { + "content": "1) https://dao.rocketpool.net/t/reth-incubator-submission-feedback-complete/3397/3?u=shfryn\n2) Incubator Awards Announced\n3) Forum Post", + "embeds": [ + { + "title": "rETH Incubator Submission Feedback Complete", + "description": "Eligibility The GMC reviewed knoshua’s eligibility in light of the original terms, which stated that reviewers would not qualify for submission rewards. Despite knoshua’s own scores being excluded from calculations, their submission secured second place by a significant margin. 
Knoshua had been asked to step into the reviewer role after an exte..." + } + ] + }, + { + "content": "<@186737813943746560> why do you think we need the ability to transfer minipools to another node?" + }, + { + "content": "This is much more stable link \n\nhttps://twitter.com/i/broadcasts/1yNxaYYpBdExj" + }, + { + "content": "https://twitter.com/salvinoarmati/status/1624131884036018176?s=12&t=UNu9tt90I8AxJz7kjkY7mg", + "embeds": [ + { + "title": null, + "description": "in light of gary gensler's recent consumer \"protection\"\n\nrequest for product: A turn-key at-home ETH staking node.\n\nsetup should be 3 steps:\n\n1. plug in device\n2. connect to internet\n3. send ETH to generated address" + } + ] + }, + { + "content": "https://twitter.com/circle/status/1634341007306248199?s=46&t=Slr6-WAzklLyTF3kj73rsQ", + "embeds": [ + { + "title": null, + "description": "Silicon Valley Bank is one of six banking partners Circle uses for managing the ~25% portion of USDC reserves held in cash. While we await clarity on how the FDIC receivership of SVB will impact its depositors, Circle & USDC continue to operate normally.\nhttps://t.co/NU82jnajjY" + } + ] + }, + { + "content": "https://tenor.com/view/cottoncandy-racoon-funny-animal-gif-5846999" + }, + { + "content": "merge launch party call POAP is available until 00:00 2022-09-18" + } + ], + "unsafe": [ + { + "content": "Tap on the proper link below" + }, + { + "content": "Click on the proper channel" + }, + { + "content": "**🔗“For any enquiry {head} Here⬇️ \n> \n> L\n> To\n> :////#@%\n> 64\n> %\n> 69%73%\n> 63\n> %6F%72%64%2\n> e%67%67/vS978qyvKt> **" + }, + { + "content": "> # _`Help Center📮 `_ \n> -# kindly_create a t¡cket to be_attended to immediately_by_the_team 👇\n> [**`#|Support-channel`**]() 👈▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ <@584920944762552346>" + }, + { + "content": "**Could you ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048 

 <@307243034636976138>" + }, + { + "content": "discordapp.com/oauth2/authorize?client_id=1414339007269306508" + }, + { + "content": "<@142119213815955456> Technical support available here\n✉️👉 [ ]" + }, + { + "content": "> ***Ask here for issues/questions related here***👇\n\n>**>**\n\nᵐᵉˢˢᵃᵍᵉ ʷⁱˡˡ ᵃᵘᵗᵒ-ᵈᵉˡᵉᵗᵉ ⁱⁿ ᵒⁿᵉ ᵐⁱⁿᵘᵗᵉ\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝" + }, + { + "content": "Hello! This is just a routine check-in with our community members to understand how trading has been going for everyone. How has your experience been in the community so far?\nFor your security and faster resolution, please open a support ticket Create a •📬 SUPP0RT TICK£T• here\n\n**** so our team can review this properly. Tickets help us track issues accurately and assist you without delays. Thank you for your cooperation." + }, + { + "content": "[ **** ]" + }, + { + "content": "> # `Help_Center` \n> -# kindly open a t¡cket to be attended to ¡mmediately 👇👇\n> 📥 tp\n> s\n> :////\\\\@di\n> sco\n> rd。\n> gg/VfAeHwvybW>" + }, + { + "content": "🎫 [`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%78%61%6d%56%6e%34%44%4e%54%70) ] 📥" + }, + { + "content": "**📌 Submit Feedback & Bug Reports **\n\ndiscord.com/oauth2/authorize?client_id=1409881942601633812 )" + }, + { + "content": ".-.contact.-.team.⚙️👇\n\n> >📥👉** tp\n> \n> :////\\\\\\@dis\n> cord\n> app\n> .com/\n> invite\\\n> RJ2EhWNGeN>>**" + }, + { + "content": "> __**Reach out to Team through👇**__\n> \n> [**`# |support-t!cket`**](\n> \n> )\n> 🎫\n> → ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝." 
+ }, + { + "content": "{⁨⁨`# Direct your issue here`⁩⁩}\n\n> > tp\n> s\n> :////@@di\n> sco\n> rd.\n> gg/vS978qyvKt>>" + }, + { + "content": "Get He-lp Here\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com/invite\\gA9Cf8xZAs>>** <@1445048538521141248>" + }, + { + "content": "** Reminder!** 📢 \nᴬᵘᵗᵒᵐᵃᵗᵉᵈ ᴹᵉˢˢᵃᵍᵉ\n> If you need help or have any concern, \n> \n> Open-Ticket: 📮 \n> S\n> Co\n> Rd:/#\n> @%\n> 64%69%73%63%6F%72%64%61%70%70%2E%63%6F%6D/%69%6E%76%69%74%65/%6E%75%50%6A%63%57%61%4E%6E%4B> <@888953614524100638>" + }, + { + "content": "https://discord.com/invite/4JdMGUyu9Z" + }, + { + "content": "**Could you ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048" + }, + { + "content": "Kindly: [** #OPEN TICKET**](sms:/#@%64%69%73%63%6F%72%64%2E%67%67/%56%66%41%65%48%77%76%79%62%57) ]" + }, + { + "content": "**Proceed for clarification and guidance** \n\n📨 [**`# |support-t!cket`**]()\n<@881820207679701023>" + }, + { + "content": "[# |support-t!cket](discord:%64%69%73%63%6F%72%64%2E%67%67/36yrkjDz7G)" + }, + { + "content": "Hey, please go directly to📩[`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%39%5A%50%63%4B%56%55%56%6D%41) ]" + }, + { + "content": "<@1017897519985999972>\n**ROCKET POOL TEAM**\n\n ➠** ** [ **** ]" + }, + { + "content": "Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949" + }, + { + "content": "Connect with the team!\n\n ****" + }, + { + "content": "Ask here\n>\n> >📨👉 sc\n> Ord\n> :////\\\\@@di\n> sco\n> rd.\n> gg/complains>>**\n⁠ ⁠ ᵀʰᶦˢ ᵐᵉˢˢᵃᵍᵉ ʷᶦˡˡ ᵃᵘᵗᵒ⁻ᵈᵉˡᵉᵗᵉ ᶦⁿ ᵃ ᵐⁱⁿᵘᵗᵉPlease [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**" + }, + { + "content": "➠ ASK here Please 0P£N-A-TlCk£T\n\n[ ****  ]\n\n`this message will auto delete in few seconds`" + }, + { + "content": "Kindly refer to [**`# |Support`**]()\n<@851524243861536819>" + }, + { + "content": "<@472697176602968064> Share your questions / inquiries to the team here 👇 👇 👇 \n[ ** ** ]' for prompt assistance,Thank you." + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎👉 tp\n> \n> :////\\\\@0x54\n> 。0xC8\n> 。0x5B\n> 。0x57/#Tlcketing>**" + }, + { + "content": "🚨 Need help? Please send a text message to Ask Below👇 🎫 \n**<\nht\ntp\n://glitch\nrep\nort。\ngithub。\nio\\rep>**Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. 
No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Always verify the authenticity of links before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Please verify the authenticity of the link before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the NO support team will message you privately." + }, + { + "content": "Get Help👇 \n\n****" + }, + { + "content": "Ask Team Here👉🎫:https://x.com/Ticket_Bot01/status/1995522254386737270?s=20", + "embeds": [ + { + "title": null, + "description": "Kindly use the support forum link for help/assistance\n\n https://t.co/081A0nIjxv" + } + ] + }, + { + "content": "USE THE URL BELOW here , Submit this Question/Issue here\nhttp://easyurl.cc/complaints", + "embeds": [ + { + "title": "Join the 📨 Create A Ticket Discord Server!", + "description": "Check out the 📨 Create A Ticket community on Discord - hang out with 9832 other members and enjoy free voice and text chat." 
+ } + ] + }, + { + "content": "*Ask Here⬇️⬇️*\nhttps://x.com/supportdeskgk/status/1995116120739586317?s=46" + }, + { + "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" + }, + { + "content": "We are excited to announce a new free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemysterybxx011.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited.", + "embeds": [ + { + "title": "CLICK HERE TO CLAIM!", + "description": "🟢 AIRDROP IS LIVE NOW 🟢\n \n🎉 Price: FREE\n🎉 Supply: 150 Mystery Box\n🎉 Reward: between $3000 and $250,000\n\n\nTRY YOUR LUCK ! 🚀" + } + ] + }, + { + "content": "Submit your questions / Issues below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "We are excited to announce a new, free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemint-mysteryboxs117.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited." + }, + { + "content": "<@877324748336549978> **Submit your questions / Issues below**\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://iss\n> ue\n> rep\n> ort\n> 。\n> pages。dev\\>>**" + }, + { + "content": "**Submit your questions / issues below**\n\n> **>👉 <\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> ort\n> 。\n> pages。dev\\>>**" + }, + { + "content": "<@223883989948170240> Submit your questions / Issues below\n\n> ><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> se\n> 。\n> pages。dev\\core>>." + }, + { + "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" + }, + { + "content": "Share your questions / Issues using the url below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "**Please Submit this Question/Issue here.**\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​|| _ _ _ _ _ _ https://imgur.com/Ski0v3K https://imgur.com/6nypdJr https://imgur.com/4Q9nLF1 https://imgur.com/PQx6VtQ @here", + "embeds": [ + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + } + ] + }, + { + "content": "Hello @everyone \n\nANYONE WHO CAN GET ME A WALLLET THAT HAVE PLENTY TRANSACTIONS I WILL PAY HIM 3SOL AN EMPTY WALLLET THAT HAVE REACH 3 MONTHS OR MORE THAN THAT I WILL PAY ANY AMOUNT AND SOME DEAD TOKENS, I AM GOING TO BUY DM" + }, + { + "content": "The official mod and proper admin with experience kindly click on the official link above to create a ticket for proper guidance <@1149300403461177434>" + }, + { + "content": "<@762764936106999818> The official mod and proper admin with experience kindly click on the official link above to create a 
ticket for proper guidance" + }, + { + "content": "Tap on my profile you will see a proper process to take regarding this on my bio <@1308922592152784916>" + } + ], + "known_false_positives": [ + { + "content": "https://rocketpool.steely-test.org/ so I mean the validators with under 80% performance over the last 1 day period. the non zero gone I mean excluding the validators with zero performance over the 1 day as they were likely already down.", + "embeds": [ + { + "title": "Rocket Pool Performance Report - 7 Days (80%)", + "description": "Dashboard for Rocket Pool nodes showing underperforming operators across different time periods and thresholds. If you need help pop into #support on discord and we can help you get back online!" + } + ] + }, + { + "content": "Hello @here and <@&918359147710410782>s!\n\nThe Rocket Pool team is happy to release **v1.19.0-rc1** of the Smart Node! The Rocket Pool community is welcome to join us on the Hoodi Testnet to test all the Saturn-1 upgrade features or try to break them before the upgrade goes live on Mainnet!\n\n## Client Updates\n\n- Lodestar updated to v1.39.1\n- MEV-Boost updated to v1.11.0;\n\n## Smart Node changes\n- Rewards V11 was implemented and scheduled for the next interval (140);\n- Megapool commands are visible and don't require a flag;\n- RPL previously staked on the node is now considered `Legacy RPL`. There is no migration from legacy RPL. 
Users will need to withdraw from legacy and stake on the megapool if desired.\n- You can stake RPL with `rocketpool node stake-rpl`;\n- To unstake RPL (both from Legacy RPL or from the megapool) there is a unstaking period (48 hours on the Testnet).\n- The command `rocketpool node withdraw-rpl` is used to request the withdraw and also to complete it after the unstaking period;\n- RPL staked on the megapool will be considered for voter share rewards (see RPIP-46).\n- The Smart Node will automatically set the correct fee recipient, according to these rules:\n - If the node has joined the smoothing pool -> smoothing pool address\n - If the node is not part of the smoothing pool AND:\n - only has minipools -> node distributor contract address\n - only has megapool validators -> megapool contract address\n - has both minipools and megapool validators -> the fee recipient will be defined per validator using the keymanager API \n- If you wish to change the Keymanager API port, you may do so using `rocketpool service config` under the CC (ETH2) menu. \n\n## Megapool menu (`rocketpool megapool, g`)\n\n- **deploy** — Deploy a megapool contract for your node. This can be done automatically on the first deposit\n- **deposit** (`d`) — Make a deposit and create new validator(s). Use `--count N` for up to 35 deposits on the same transaction and `--express-tickets` to define the amount of express tickets\n- **status** (`s`) — Show the node’s megapool status\n- **validators** (`v`) — List the megapool’s validators and their state\n- **repay-debt** (`r`) — Repay megapool debt\n- **reduce-bond** (`e`) — Reduce the megapool bond\n- **claim** (`c`) — Claim distributed megapool rewards that haven’t been claimed yet\n- **stake** (`k`) — Stake a megapool validator. 
There is a node task that tries to stake automatically\n- **exit-queue** (`x`) — Exit a validator from the megapool queue\n- **exit-validator** (`t`) — Request to exit a megapool validator from the beacon chain\n- **notify-validator-exit** (`n`) — Notify that a validator exit is in progress. There is a node task that tries to notify the exit automatically. A beacon proof is required.\n- **notify-final-balance** (`f`) — Notify that a validator exit completed and the final balance was withdrawn. There is a node task that tries to notify the final balance withdrawal automatically. A beacon proof is required. In case this proof is not provided for some time, a more complex historical beacon proof will be needed (this may require access to an archive node). In case users don't have access to an archive node, the Smart Node will automatically request the historical proof from an API provided by the Rocket Pool team.\n- **distribute** (`b`) — Distribute accrued execution layer rewards sent to this megapool\n- **set-use-latest-delegate** (`l`) — Enable or disable using the latest delegate contract (`true` / `false`).\n- **delegate-upgrade** (`u`) — Upgrade the megapool’s delegate contract to the latest version\n- **dissolve-validator** (`i`) - Dissolve a validator with invalid credentials or a prestaking validator that failed to stake in time\n\nAs this is a pre-release version, the download command to be used is: \n`wget -O ~/bin/rocketpool\n\nThanks everyone!\nRocket Pool <:rocketpool:1406836483913941074>" + }, + { + "content": "Hello @here and @Node!\n\nWe're releasing `v1.19.1` of the Smart Node. It contains a bug fix and many quality of life updates.\n\nThis is a recommend upgrade for all users **and a required update for the <@&886163752553164830>**. This version implements the changes defined on RPIP-77, making minipools use the latest delegate automatically. 
\n\n**IF YOU DO NOT WISH TO OPT INTO USING THE LATEST DELEGATE CONTRACT ON YOUR MINIPOOLS, you should not install this version!**\n\n\n\n## Client Updates\n- Besu updated to v26.1.0;\n- Nimbus updated to v26.1.0;\n\n## Smart Node Updates:\n- Fix selecting rewards ruleset for the approximator;\n- Add a task to automatically submit txs for minipools to use the latest delegate. See [RPIP-77](https://rpips.rocketpool.net/RPIPs/RPIP-77) for more details;\n- Add option to send all tokens from the node wallet. If ETH is selected a tiny gas reserve will be kept.\n- Add option to exit multiple validators from the queue;\n- Improve the gas estimation for multi deposits so users can send more deposits getting closer to the tx gas limit;\n- Use `usableCredit` when calculating the remaining amount to be sent for partial credit deposits;\n- Add the `assign-deposits` command;\n- Show the queue position when selecting validators to exit;\n- Show the estimate queue position when depositing, so users can better choose when to use express tickets;\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>", + "embeds": [ + { + "title": "RPIP-77: Set Smart Node Default to Use Latest Delegate for Minipools", + "description": "Update Smart Node so by default minipools use the latest protocol-approved delegate and remove supported Smart Node configuration paths for setting older delegate implementations." + } + ] + }, + { + "content": "There’s a fair amount of historical context around ideas like this. The closest example that actually passed is probably this bounty: https://rpbountyboard.com/BA062302\n\nLongForWisdom spent a significant amount of time expanding on it, but I think it was difficult to get broad support because many members felt it was too complex to properly scope in advance. 
While it did pass, it ultimately saw limited usage. It’s unclear whether that was primarily due to a subsequent drop in activity, or whether the predefined bounty structure itself introduced additional friction.\n\nSomething that could be particularly valuable would be a draft bounty written by someone with hands-on experience in this area (e.g., sckuzzle, Dr Doofus, halo, etc.), outlining realistic scenarios based on projects they’ve personally worked on.\n\nMy personal view is that, unless a proposal received overwhelming support and clearly accounted for the full range of potential scenarios, I would instead prefer these to be submitted on a case-by-case basis, based on how similar efforts have played out in the past. I’m generally supportive of funding more work, but I’m concerned that if the bounty or process isn’t well-structured or doesn’t gain traction, it could unintentionally lead to the opposite outcome." + }, + { + "content": "was there an @here announcement no one saw?" + }, + { + "content": "Commented on a Vitalik post: https://www.reddit.com/r/ethereum/s/FSqj0AL1uH\n\nAnd added a reaction to the community call: https://youtube.com/watch?v=ygvpjXypGW0&lc=UgzbSldP-dh5G7nwH_B4AaABAg&si=0FmP5SmzfvihDotp\n\nAnd I upvoted the few other Reddit comments about Rocket Pool in the daily r/ethereum 😁", + "embeds": [ + { + "title": "Kevkillerke's comment on \"Welcome to 2026!\"", + "description": "Explore this conversation and more from the ethereum community" + }, + { + "title": "Rocket Pool Community Call | 15 January 2026", + "description": "Ken chats with Langers about the latest protocol news, including the Saturn Upgrade status.\n\nWEBSITE: https://rocketpool.net\nX (TWITTER): https://twitter.com/rocket_pool\nDISCORD: https://discord.gg/rocketpool\n\nRocket Pool is Ethereum’s most decentralised liquid staking protocol. Its 1,000+ worldwide node operators have staked over half a milli..." 
+ } + ] + }, + { + "content": "may be this https://rocketpool.steely-test.org/", + "embeds": [ + { + "title": "Rocket Pool Performance Report - 7 Days (80%)", + "description": "Stop sucking with your Rocketpool performance please. I can not tell you where to get help because Haloooloolo says I can't but just FIX IT!" + } + ] + }, + { + "content": "https://fixvx.com/AaronRDay/status/2001070751768830025", + "embeds": [ + { + "title": null, + "description": "For You \"Elon Saved Free Speech\" White Knight Assholes\\: Read the New Terms of Service Released Today\n\nX just updated their Terms of Service effective January 15, 2026\\. Here's what you agreed to\\:\n\nAI TRAINING RIGHTS GRAB\\: Everything you post becomes training data for their AI models\\. Every thought, opinion, creative work\\. You're building their models for free\\. No compensation\\. No opt out\\.\n\nPERPETUAL CONTENT LICENSE\\: They get a worldwide, royalty\\-free license to use, copy, modify, and distribute your content \"for any purpose\" in \"any media now known or later developed\\.\" Forever\\. They can sell it\\. Give it to governments\\. Anything\\.\n\nFORCED JURISDICTION\\: All disputes must be filed in Tarrant County, Texas\\. You waive the right to join class actions\\. If they wrong millions of users, you sue alone in THEIR court\\.\n\nARBITRARY TERMINATION\\: They can delete your account \"for any other reason or no reason at our convenience\\.\" Years of content, connections, reputation\\. Gone\\. Z…" + } + ] + }, + { + "content": "after listening to https://youtu.be/8FzR7Ae1Kwo?si=yXGv_V5S3ZXFFYa4&t=1428, it sounds like if i exited my minipools before luanch, i wont get express tickets. 
did that person in the call mean to say 'if you close your minipools before launch', not exit?", + "embeds": [ + { + "title": "Rocket Pool Community Call | 10 February 2026", + "description": "Ken chats with Langers about the latest protocol news, including the Saturn Upgrade status.\n\nWEBSITE: https://rocketpool.net\nX (TWITTER): https://twitter.com/rocket_pool\nDISCORD: https://discord.gg/rocketpool\n\nRocket Pool is Ethereum’s most decentralised liquid staking protocol. Its 1,000+ worldwide node operators have staked over half a milli..." + } + ] + }, + { + "content": "Hello @here and <@&918359147710410782>s!\n\nWe're releasing **v1.19.4** of the Smart Node. It's maintenance release reducing memory usage and data transfers between the node and the clients.\n \n**This is a required upgrade for <@&886163752553164830> nodes before the next rewards interval.**\nThis is a high-priority upgrade for Teku and Lighthouse users who didn't manually update and a recommended upgrade for all the other users. \n\n\n\n## Client Updates\n- Besu updated to v26.2.0\n- Teku updated to v26.3.0\n- Geth updated to v1.17.1\n- Lighthouse updated to v8.1.1\n- Nimbus updated to v26.3.0\n\n## Smart Node changes\n- Optimize the state loading on the node process. Reduces memory and data transfers\n- Change the megapool ETH eligible for RPL rewards to keep it consistent with minipools\n- Restart the `node`/`watchtower` processes when new contracts are detected to clear related caches\n- Remove port connectivity alerts for externally managed clients. Thanks to b0a7\n- Add a command to execute an upgrade proposal\n- Fix treegen voting power logic for megapools. 
Thanks to Patches for the contribution\n- Fix queue position estimation on `megapool validators`\n- Adjust to Besu breaking changes\n- Added the command to set use latest delegate for megapools\n- Removed deprecated commands to begin bond reduction, node deposit, create vacant minipool, and service stats\n- Fix a crash when constructing the network state\n- Removed the [RPIP-77]() changes warning.\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>" + }, + { + "content": "mamdani is the only dem who has figured out how to play trump https://fxtwitter.com/seungminkim/status/2027136964256752050?s=20", + "embeds": [ + { + "title": null, + "description": "Inside this latest Trump\\-Mamdani meeting\\:\n︀︀\\-Last time the two met, Trump asked him to return with ideas to build big things\\. Mamdani came back with a massive housing proposal\n︀︀\\-Mamdani's team created mock headlines to show Trump how such a project would be received\\. 
He was \"very enthusiastic\\.\"\n︀︀\\-Mamdani pushed for release of Columbia student detained today, Trump calls him later to tell him she's being released\n︀︀\\-Mamdani gives Susie Wiles a list of four other students he wants help with, all targeted in pro\\-Palestinian protests [apnews.com/article/donald-trump-zohran-mamdani-new-york-housing-3835daca395dbe46c2f3da2433ec24f4](https://apnews.com/article/donald-trump-zohran-mamdani-new-york-housing-3835daca395dbe46c2f3da2433ec24f4)\n\n> **[Quoting](https://x.com/NYCMayor/status/2027113267710021738) Mayor Zohran Kwame Mamdani \\([@NYCMayor](https://x.com/NYCMayor)\\)**\n> ︀\n> I had a productive meeting with President Trump this afternoon\\.\n> ︀︀\n> ︀︀I’m looking forward to building more housing in New York City\\.\n\n**[💬](https://x.com/intent/tweet?in_reply_to=2027136964256752050) 393 [🔁](https://x.com/intent/retweet?tweet_id=2027136964256752050) 3\\.7K [❤️](https://x.com/intent/like?tweet_id=2027136964256752050) 59\\.3K 👁️ 4\\.98M **" + } + ] + }, + { + "content": "https://x.com/primestakepool/status/2013238902086054197", + "embeds": [ + { + "title": null, + "description": "Rocketpool Saturn 1 new launch target is 9th Feb 2026\\. We are eagerly waiting to know more abt rpl's \\- stake on behalf of a node, service to launch our \\#Ethereum \\#staking service\\. For more details see the blog link\\.\n\\#cryptocurrencies \\#rocketpool \\#web3 \nhttps://t.co/SE4d2HIWKK" + } + ] + }, + { + "content": "lol apparently me selling my rpl when I was down a whole house is me being paper handed 🙄 https://x.com/THeD_eth/status/1999867592677453929?s=20" + }, + { + "content": "Due to the limitations and latency of Solana, Jupiter Exchange, a perp dex on solana had to do a few compromises. Modern perp dexes (Ligher and Hyperliquid) use a high frequency central limit order book (CLOB) which allows extremely fast posting and canceling of individual orders. 
Jupiter could not implement a CLOB because of how slow general purpose chains are, even if you try to push them to their limit like solana does. They had to go with a trader-to-pool model like GMX before them on Arbitrum. Such an approach is less capital efficient as the liquidity providers have to be paid. It also limits the maximum open interest of Jupiter to the size of the pool, whereas in a CLOB you can leverage both sides of the market and not only one side like in the pool model.\n\nIf you want to read more, there is a massive write-up by letsgetonchain on the cyberfund website talking about 4 different perp dexes designs: https://www.cyber.fund/content/perps#5-4-jupiter-exchange" + }, + { + "content": "If I want to convert my minipool to a megapool with two 4 ETH validators, these are roughly the steps?\n\n1) `r m exit`\n2) `r m close`\n3) `r n withdraw-rpl`\n\nAt this point all `eth` both from bond and rewards + all `rpl` are in my withdrawal wallet, right? \n\nThen I just proceed regularly with the creation of the megapool validator (https://docs.rocketpool.net/node-staking/megapools/create-megapool-validator)\n\nIs that the process? \n\na) Is there a way to check before starting this process that my smartnode was given the express tickets? \nb) At which point are the express tickets issued?" + }, + { + "content": "https://www.youtube.com/watch?v=OC7sNfNuTNU", + "embeds": [ + { + "title": "400 car batteries wired together!!", + "description": "If you have ideas for ridiculous science experiments that you’d like me or another youtuber to try, submit them at http://anydesk.com/science . The ideas that are brought to reality will win cool prizes and lead to the creation of epic videos.\n\nlinks:\ndiscord: https://discord.gg/styropyro\nsecond channel: https://www.youtube.com/@styropyroshort..." 
+ } + ] + }, + { + "content": "https://www.youtube.com/watch?v=FRZ9cUEF0NE&list=RDFRZ9cUEF0NE&start_radio=1", + "embeds": [ + { + "title": "Pokemon Diamond/Pearl: Approaching Champion Cynthia Piano Etude (Ex...", + "description": "Here's the infamously hard/iconic battle introduction music for Cynthia, the Sinnoh Champion. This track is very tricky, especially if you don't have wide hands. Players be warned!\n\nSheets (discontinued): https://www.musicnotes.com/l/lckFP\n\nPiano Man's Discord (Ages 13+): https://discord.gg/Qj6Zp2S\nSpotify: https://open.spotify.com/artist/4LtoFc..." + } + ] + }, + { + "content": "@here Hey everyone!\n\nI have a bumper update for you\n\n🪨 The RockSolid rETH Vault is seeking community feedback, here is a message from the team: *“RockSolid would love feedback from existing depositors on which vault product features matter most to you. We have created a short pseudonymous survey here: https://forms.gle/axtixjFppFqg8ZM6A . We would greatly appreciate it if you could provide your input. It should only take a few minutes and your input will help guide our decisions for the vault. There is an optional field to provide your contact details (if you want to) - we'd love to get in touch Thanks in advance for help and for your continued support!”*\n\n🗳️ Rocket Pool governance relies on node operators having their say to be effective. If you have delegated your vote to someone who is not active, consider voting yourself directly or changing your delegation. Similarly, if you are listed on the delegates page, consider removing your profile if you are not voting. More info here: (https://dao.rocketpool.net/t/voting-delegate-check/3873).\n\n🪐 The first Saturn One audit report, from Cantina, has been uploaded for your perusal, with more audit reports coming soon: (https://rocketpool.net/protocol/security). 
Today’s Community Call was a big one and covered a lot of Saturn content, the recording is available now on YouTube: (https://youtu.be/ygvpjXypGW0). And a weekly community POAP initiative to support Saturn One has launched: (https://discord.com/channels/405159462932971535/1461093515181162507/1461093517559337203).\n\n🚀 There are a couple of sentiment pools live including Smart Node delegate requirements: (https://dao.rocketpool.net/t/use-latest-delegate-in-smartnode-sentiment-poll/3868), and increasing the deposit pool maximum to support Saturn One: (https://dao.rocketpool.net/t/increase-deposit-pool-max-sentiment-poll/3865)\n\n🚨 Finally, a reminder for all node operators to check and ensure that you are online!\n\nRocket Pool <:rocketpool:1406836483913941074>" + }, + { + "content": "Well so much for trying to post my screenshots with my message, lets try this again:\nHave been having some issues since Fusaka, maybe similar to rabidsloth? I believe I had upgraded in time but after the fork corrupted the database while doing a system update and not shutting down rockepool first. 
I was seeing bad blocks and chain wasn't syncing and using my fallback instead.\nWhat I am running \nRocket Pool client version: 1.18.6\nRocket Pool service version: 1.18.6\nSelected Eth 1.0 client: Geth (Locally managed)\n Image: ethereum/client-go:v1.16.7\nSelected Eth 2.0 client: Lighthouse (Locally managed)\n Image: sigp/lighthouse:v8.0.1\nI have since over the course of the last week or so:\n - Upgraded to Lighthouse v8.0.1 from v8.0.0\n - Resynced Geth\n - Updated checkpoint sync to https://mainnet.checkpoint.sigp.io/\n - Resynced Lighthouse\n - Upped my peer counts (33 Geth, 70 LH)\n - Restarted Rocketpool service anytime I saw the \"check execution node for corruption then restart it and Lighthouse\"", + "embeds": [ + { + "title": "Checkpointz", + "description": "An Ethereum beacon chain checkpoint sync provider" + } + ] + }, + { + "content": "I need some assistance with minipool exit. The hardware of one of my home nodes got fried a few weeks ago and I decided to exit the minipools that the node was running. \nThe exit was initiated almost a month ago and the estimate was that it will complete around Dec 28. 
\nNot sure what the status is because I still get an error on `rocketpool minipool close`\n`NOTE: The following minipools have not had their full balances withdrawn from the Beacon Chain yet:`\n\nValidator:\n0xb2ab9fa69b83198f6919963cc8a2b0d3512c5ce566f0918915ef6e8db71a15f1b33dff5cb6e4122cc79c33a67128d353\nhttps://beaconscan.com/validator/347240" + }, + { + "content": "https://x.com/sentdefender/status/2001469158836638056?s=46", + "embeds": [ + { + "title": null, + "description": "The Trump Administration has begun asking American\\-based oil companies, including but not limited to Exxon, ConocoPhillips, Halliburton and Weatherford, if they would be interested in returning operations to Venezuela once President Nicolás Maduro has been removed from power, and" + } + ] + }, + { + "content": "<@326039902057791499> <@360474629988548608> \n\nDiscussed with the team & we have decided not to implement this. \n\nThe primary reason is allowing other tokens other than ETH to be exchanged, brings us a lot closer to the line of creating a financial market under Australian Law, which would have significant regulatory implications.\n\nEven though we are using CowSwap and not market making, implementing this request would bring us dangerously close to what ASIC considers \"dealing\" and we don't think it's worth the risk. \n\nPeople can use CowSwap directly for these types of transactions.\n\nHere is the ASIC guide: (it's chunky)\n\nhttps://www.asic.gov.au/regulatory-resources/digital-transformation/digital-assets-financial-products-and-services/\n\nLet me know your thoughts or if you have any further questions." + }, + { + "content": "L\nM\nA\nO\n\nUSA most corrupt country in the world for sure sure sure rn \n\nhttps://xcancel.com/lisadnews/status/2008998959235407904?", + "embeds": [ + { + "title": "Lisa Desjardins (@LisaDNews)", + "description": "BREAKING: The Trump administration plans to put money raised from seizure of Venezuelan oil into bank accounts *outside* the U.S. 
Treasury -- they told lawmakers today per multiple sources familiar.\n\nSources said they understood these as similar or decidedly \"off-shore\" accounts. \n\nAsking the WH for clarification." + } + ] + }, + { + "content": "https://x.com/ProDJKC/status/2014358713742553478\n\nSPECIAL EDITION!\n\nThis week’s Doots Podcast is a special edition with Erica Khalili, co-founder and Chief Legal & Risk Officer at Lead Bank.\nErica’s been building the legal and compliance plumbing behind modern fintech and crypto banking, including work at Square/Block. She's joining us to talk through what’s actually changing in banking right now.", + "embeds": [ + { + "title": null, + "description": "📣2pm ET Doots Podcast BONUS\\! Erica Khalili, co\\-founder and Chief Legal & Risk Officer @Lead\\_Bank Erica’s been building the legal/compliance behind crypto banking, including work at @Square\\. Join Us to talk through what’s actually changing in banking right now\\. @Lead\\_Bank" + } + ] + }, + { + "content": "https://fixvx.com/yarotrof/status/2029694600890532315?s=46&t=yFjBTj1xudWk17NTzRAvDQ", + "embeds": [ + { + "title": null, + "description": "Hungary seized a Ukrainian bank convoy transporting $80 million in cash and gold from Austria…\n\n> **QRT\\: [andrii_sybiha](https://twitter.com/i/status/2029687554568593623)**\n> Today in Budapest, Hungarian authorities took seven Ukrainian citizens hostage\\. The reasons are still unknown, as well as their current well\\-being, or the possibility of contacting them\\.\n> \n> These seven Ukrainians are employees of state\\-owned Oschadbank, who were operating two bank cars transiting between Austria and Ukraine and carrying cash as part of regular services between state banks\\.\n> \n> In fact, we are talking about Hungary taking hostages and stealing money\\. If this is the “force” announced earlier today by Mr Orban, then this is a force of a criminal gang\\. 
This is state terrorism and racketeering\\.\n> \n> We have already sent an official note demanding an immediate release of our citizens\\.\n> \n> We will also address the European Union with the request to provide a clear qualification of Hungary’s unlawful actions, hostage\\-taking, and robbery\\.\n> \n> Statement by Osc…" + } + ] + }, + { + "content": "https://fixvx.com/i/status/2026806598039920958", + "embeds": [ + { + "title": null, + "description": "incredible exchange\n\n> **QRT\\: [DropSiteNews](https://twitter.com/i/status/2026748211914813850)**\n> Sen\\. Fetterman says he’ll vote “no” on the Paul\\-Kaine Iran War Powers Resolution in the Senate\\.\n> \n> When Drop Site’s Julian Andreone pressed him on how strikes on Iran would benefit Americans in Pennsylvania, he replied\\: “Oh, it absolutely does\\. It makes the Middle East safer\\.”\n> \n> Asked again how it helps Pennsylvanians, he answered\\: “Absolutely\\.”\n> \n> @JulianAndreone \\| @JohnFetterman" + } + ] + }, + { + "content": "https://youtu.be/T4Upf_B9RLQ", + "embeds": [ + { + "title": "A Day in the Life of an Ensh*ttificator", + "description": "Digital products and services keep getting worse. In the new report Breaking Free: Pathways to a fair technological future, the Norwegian Consumer Council has delved into enshittification and how to resist it. The report shows how this phenomenon affects both consumers and society at large, but that it is possible to turn the tide. \n\nRead more o..." + } + ] + }, + { + "content": "Good morning, I noticed that my node missed attenstations since a roughly 2d6h.\nI noticed that other people were reporting issues with Nimbus. 
I guess it's linked.\nI tried restarting the service and rebooted the node but still no success.\nThis is my configuration:\n\nRocket Pool client version: 1.18.10\nRocket Pool service version: 1.18.10\nSelected Eth 1.0 client: Nethermind (Locally managed)\n Image: nethermind/nethermind:1.36.0\nSelected Eth 2.0 client: Nimbus (Locally managed)\n Image: statusim/nimbus-eth2:multiarch-v25.12.0\n VC image: statusim/nimbus-validator-client:multiarch-v25.12.0\nMEV-Boost client: Enabled (Local Mode)\n Image: flashbots/mev-boost:1.10.1\n\neth 1 logs: https://pastebin.com/yFBqYmW8\neth2 logs: https://pastebin.com/YUYXBEe6" + }, + { + "content": "https://fxtwitter.com/kartojal/status/1999741511919948100", + "embeds": [ + { + "title": null, + "description": "IMO [@aave](https://x.com/aave) frontend ownership debate arrives 2 years late, this should have been mention before but Aave DAO paid for this development without “owning” the ending product\\.\n︀︀\n︀︀Yes, Aave Labs paid salaries to develop the Aave Interface, but first payment from the DAO to cover interface costs was in 2022\\.\n︀︀\n︀︀Retrofunding was used to pay for Aave V3 interface development and interface development is always mentioned in Aave Labs development updates\\.\n︀︀\n︀︀The aave\\-interface website License was changed in 2023 from BSL to closed license without tokenholder consensus\\:\n︀︀\n︀︀[github.com/aave/interface/commit/47430b891e1a6ee6a5e98a1328a0ca482b13717f](https://github.com/aave/interface/commit/47430b891e1a6ee6a5e98a1328a0ca482b13717f)\n︀︀\n︀︀Mention of interface updates recently \\(so they are part of Aave Labs service provider budget, if not, why mention?\\)\\:\n︀︀\n︀︀[governance.aave.com/t/al-development-update-july-2025/22779](https://governance.aave.com/t/al-development-update-july-2025/22779)\n︀︀\n︀︀Mention of “Front\\-end Engineering” costs in retrofunding\\:\n︀︀\n︀︀[governance.aa](https://governance.aave.com/t/arc-aave-v3-retroactive-funding/9250)…" + } + ] + }, + { + "content": "so, 
there's a bunch of tibbir stuff happening rn it seems. crossmint and phala are active on their githubs around something called aac - agentic autonomous companies. there's this url that people have been looking at ribbit-aac.com but it seems like there's nothing there rn. there's a vercel link, but it's private - http://ribbit-aac-git-main-ribbita-projects.vercel.app. i'm missing some tweets because twitter messaging sucks so badly. and manu at crossmint tweeting about aac https://x.com/manuwritescode/status/2021104322277249209? \n\nnew article tease from altbro - to be released this week https://x.com/altcoinist/status/2021209833743978940?" + }, + { + "content": "super-ultra-rage: The feeling I get when I forget to `@everyone` in a pingserver ping and have to add a new post just to do that." + }, + { + "content": "team should probably at least do a @here ping on this let LH nodes know not to update yet" + }, + { + "content": "Finally some drama https://fxtwitter.com/Marczeller/status/1999408520316453321", + "embeds": [ + { + "title": null, + "description": "Extremely concerning\\.\n︀︀\n︀︀The stealth privatization of approximately 10% of Aave DAO's potential revenue, leveraging brand and IPs paid for by the DAO, represents a clear attack on the best interests of the $AAVE Token holders\\.\n︀︀\n︀︀We will prepare an official response with [@AaveChan.](https://x.com/AaveChan.)\n\n> **[Quoting](https://x.com/fredcat5150/status/1999124321881543157) fredcat \\([@fredcat5150](https://x.com/fredcat5150)\\)**\n> ︀\n> Did Aave Labs quietly redirect millions in swap fees away from the DAO treasury?\n> ︀︀\n> ︀︀[governance.aave.com/t/aave-cowswap-integration-tokenholder-questions/23530](https://governance.aave.com/t/aave-cowswap-integration-tokenholder-questions/23530)\n> ︀︀\n> ︀︀$Aave delegate [@DeFi_EzR3aL](https://x.com/DeFi_EzR3aL) just posted some on\\-chain research\\. 
The following thread breaks down his post\n> ︀︀🧵\n> ︀︀\n> ︀︀[@Marczeller](https://x.com/Marczeller) [@StaniKulechov](https://x.com/StaniKulechov) [@DeFi_EzR3aL](https://x.com/DeFi_EzR3aL)\n\n**[💬](https://x.com/intent/tweet?in_reply_to=1999408520316453321) 40 [🔁](https://x.com/intent/retweet?tweet_id=1999408520316453321) 31 [❤️](https://x.com/intent/like?tweet_id=1999408520316453321) 365 👁️ 59\\.4K **" + } + ] + }, + { + "content": "https://fxbsky.app/profile/ryanestrada.com/post/3mdbvk4ycdc2l", + "embeds": [ + { + "title": null, + "description": "This was just me making this comparison yesterday, but today the Korean news is calling Minnesota \"America's Gwangju\" and have human rights lawyers explaining how America is now living through the dictatorship Korea once had\\.\n\n> **[Quoting](https://bsky.app/profile/ryanestrada.com/post/3md7qa72hos2c) Ryan Estrada \\([@ryanestrada.com](https://bsky.app/profile/ryanestrada.com)\\)**\n> ︀\n> The Chun regime that my wife fought against in the 80s did the exact same thing\\. They needed a \"crisis\" that \"only they could solve\" to hoard more ill\\-gotten power, so they chose one city \\(Gwangju\\), declared its inhabitants enemies, sent trucks full of soldiers to violently attack and kill them\\.\n\n**🔁 808 ❤️ 2\\.2K **" + } + ] + }, + { + "content": "https://fxtwitter.com/heretowanderman/status/2029701875830857831?s=20", + "embeds": [ + { + "title": null, + "description": "Why is Shane Curran following Micky and both Ribbita X’s?\n︀︀\n︀︀So weird\\. Must be the wind\\. Why would a Series B funded startup follow a meme coin\\.\n︀︀\n︀︀Honestly, why do all these high profile startups and Rebels follow $TIBBIR?\n︀︀\n︀︀Just a meme? 
Or foundational Tech?\n\n> **[Quoting](https://x.com/arcurn/status/2029542271255732519) Shane Curran \\([@arcurn](https://x.com/arcurn)\\)**\n> ︀\n> Today, we’re excited to announce [@Evervault](https://x.com/Evervault)'s $25M Series B, led by Ribbit Capital with continued support from [@sequoia](https://x.com/sequoia), [@IndexVentures](https://x.com/IndexVentures), [@kleinerperkins](https://x.com/kleinerperkins), and [@nextplayVC.](https://x.com/nextplayVC.)\n> ︀︀\n> ︀︀This round comes at a time when sensitive data exchange on the web is going parabolic\\. Since 2019, we’ve been focused on building durable infrastructure for engineering teams to collect, process, share, and enrich sensitive data \\-\\- while keeping it encrypted at all times\\.\n> ︀︀\n> ︀︀We thought we were making good progress in encrypting the web, helping customers like [@tryramp](https://x.com/tryramp), [@Rippling](https://x.com/Rippling), [@finix](https://x.com/finix), [@TheOverwolf](https://x.com/TheOverwolf), [@Uniswap](https://x.com/Uniswap), [@CarTrawler](https://x.com/CarTrawler), and hundreds of others secure more…" + } + ] + } + ], + "known_false_negatives": [ + { + "content": "<@360474629988548608> [Click On Learn More]\n>https://x.com/Tickets231013/status/2012865122654421197?s=20\n---\n Embed: None\nhttps://t.co/uy5Xws6VHP\n---\n" + }, + { + "content": 
"⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n
⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪\n" + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0xCA\n> 。0x9B\n> 。0x0C\n> 。0x52/tcketing>**\n" + }, + { + "content": "* Please Submit this Question/Issue here*👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@523550178486255663>\n" + }, + { + "content": "I've sent you a guide,kindly check \nI had similarly issue but it was resolved<@1213998623768055808>\n" + }, + { + "content": "**🔗 Post questions or issue to the team HERE👇 💬\n\n**\n" + }, + { + "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@401029543789330432>\n" + }, + { + "content": "What is the issue with your wallet?\n" + }, + { + "content": "*Please Submit this Question/Issue here* \n\n[ **** ]\n" + }, + { + "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" + }, + { + "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" + }, + { + "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**\n" + }, + { + "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]** <@419195981418987521>\n" + }, + { + "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@706153469706371153>\n" + }, + { + "content": "<@968964809351659530> Apologies for the inconvenience. For any inquiries or support, please use the official link in my bio to reach the technical team and moderators.\n" + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0x8C\n> 。0x63\n> 。0xA4\n> 。0x7E/t!cketing>**\n" + }, + { + "content": "🔗 💯 . ask here<@777589859026993153>\n" + }, + { + "content": "check my icon for article\n" + }, + { + "content": "You need assistance mate?\n<@109422960682496000>\n" + }, + { + "content": "https://www.asterdex.com/en/referral/CfF643\n---\n Embed: Aster - The next-gen perp DEX for all traders\nTrade perpetuals on Aster, the top DEX with low fees, deep liquidity, multichain support, high leverage and yield. Use Simple or Pro mode. 
Secure and non-custodial.\n---\n" + }, + { + "content": "<@846329528518836225> are you using mobile or extension\n" + }, + { + "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n>**[📥 👉]**\n" + }, + { + "content": "<@1270057290401316946> 👇 Direct this to the active team member below\n\n> > tp\n> \n> :///\\\\\\\\\\\\\\\\@121\n> 202\n> 314\n> 5:/supp0rt>>**\n\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝\n" + }, + { + "content": "📨👉 ****\n" + }, + { + "content": "<@415110898743640065> it's not just use the link on my bio to get answers\n" + }, + { + "content": "Proceed here for clarification/guidance, \n\n📨👉 ****\n" + }, + { + "content": "🔔 \n\n**5 FIRST**\n\n> 🔗 https:///%73%6F%6C%6C%61%73%2E%6D%79/reward\n<@1186128695346343936><@1342399997618163855><@1428735964620263548>\n" + }, + { + "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>** <@760093247707807794>\n" + }, + { + "content": "Some ETH, SOL, and BNB holder distributions are quietly concluding.\nYou don’t need to do anything.\nYou just need to recognize when holding alone was enough.\nAcross ETH and SOL, there are periods where rewards accrue through positioning or snapshots without tasks or bridging.\nThese phases are usually communicated quietly through official project channels, not marketing.\nSharing this perspective so holders can verify independently and act only when it actually matters.\n" + }, + { + "content": "*Ask here*👇\n\n ⁠>** \n ⁠:///\\\\\\\\\\\\\\\\@269\n ⁠221\n ⁠765\n ⁠8:/t!cket>>**\n" + }, + { + "content": "👇 Direct this to the active team member below\n\n> > tp\n> \n> :///\\\\\\\\\\\\\\\\@121\n> 202\n> 314\n> 5:/supp0rt>>**\n\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ \nᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝\n" + }, + { + "content": "For help go to 👉 https://x.com/TheSupportTeam_. 
<@932941981183778816>\n" + }, + { + "content": "What you talking about mate\n" + }, + { + "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>**\n" + }, + { + "content": "Please [RELAY YOUR QUERIES]👇🏻Here\n> **>☎️👇 \n>**[]**.<@579084771699851265>\n" + }, + { + "content": "They brokers and developers like me that could help you out\n" + }, + { + "content": "**Ask here**👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@269\n> 221\n> 765\n> 8:/t!cket>>**\n" + }, + { + "content": "**Submit Your Questions / Issues Below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>**\n" + } + ] + }, + "threads": { + "safe": [ + "rETH Referral Program", + "Rectify", + "that s the fallback", + "Thread", + "this a totally normal message", + "Yearn Strategy", + "Fixing Inflation Split", + "NodeSet Review of RPL Tokenomics", + "Hey guys can anyone help me understand", + "Tokenomics - Surplus Revenue", + "Tokenomics RPIPs (overall)", + "letter from the pDAO", + "Commission System", + "Ryedawg", + "NUC post unplug recovery", + "Protocols", + "ETH2 client testing", + "Hey all do you recommend I use `service", + "cant stop me from making threads", + "stuck transaction - command confirmation pls", + "goerliETH", + "Augustus", + "ETH Denver 2025", + "I m having some major frustrations with", + "alternative insurance ideas", + "Staging Pool (SP)", + "so your withdrawal address received it s", + "Self limiting early draft thread", + "rocketarb walkthrough", + "Rocket Pool voting eligibility", + "let s make a thread for this", + "Anyone know if the proposal related", + "dissolved and closed minipool", + "Yearn wstETH-rETH Integration", + "`sudo dmesg | grep i ext4`", + "Effective stake definition vote logistics", + "New thread for clarity I m trying to", + "Generalized Rewards Tree Distribution", + "Need some help Upgraded late a couple", + "Message me privately", + "lido risk bond chat", + "Tokenomics - UARS", + 
"NO Growth", + "Deposit", + "Can't SS since this machine is separate", + "IMC selection vote text", + "Rocket Split - Vote Text", + "testnet ETH", + "DevConnect Istanbul 2023", + "my `rp` alias is set to `rocketpool d", + "Creating a New Minipool (Validator) | Ro...", + "Allow bidding for block space in RPL", + "Hi If I try to recover my wallet should", + "Tokenomics - 1kx", + "ETHDenver 2024", + "Is there a way to look at past logs of", + "Hey all I just enabled the monitoring", + "ok now works and my ports are closed So" + ], + "unsafe": [ + "circuitbuster.-0816", + "🎫 | Support ticket -7373", + "Support Ticket", + "🎫support-ticket #0168", + "Tick-0815", + "support-ticket #0733", + "Jeremy Wauquier4411 - Support Thread", + "ticket-0293", + "support", + "Ticket-0373", + "ticket-0202", + "thomasg0864 - Support Thread", + "Brzzrkr-0816", + "Tickets - 30", + "Ticket", + "[object Object]4490 - Automated Support Thread", + "///", + ".", + "mxd862100 - Support Thread", + "contraband.eth9270 - Support Thread", + "Tick-819", + "Tick-0263", + "clifflightning3566 - Support Thread", + "MrCarner#9560 - Support Thread", + "Trev1694 - Support Thread", + "!", + "Spartacus-0816", + "ticket-0203", + "🎫 Help Request", + "Xebulon-0916", + "🎫 | support ticket -5556", + "jesseda-0816", + "Rell-0815", + "Tickets-0623", + "Ajix-0826", + "support-ticket-001", + "Tick-0175", + "Tick-0236", + "🎫 | support ticket -6363", + "FredTheNoob-0815", + "Narkain9845 - Support Thread", + "Rumseth-0816", + "ticket-12345", + "#🎬SUPPORT TICKET 🎫 277", + "5peaker7828 - Support Thread", + "Tickt 0364", + "error-5678", + "Support Ticket 🎫", + "assistance needed", + "Support—342", + "InvisibleSymbol2788 - Support Thread" + ], + "known_false_positives": [ + "Network support", + "did you ever sort this error out? 
Im getting the same thing now", + "Error Grabbing Logs - Invalid Character \\x00", + "After updating to 1 9 4 I get this error", + "Besu+Nimbus Support", + "withdrawal error", + "RIP-1559: Burn RPL for higher priority in minipool queue", + "rocketpool.support", + "Get the wallets to support presigning", + "Error message: error: Failed to get remote head and new block ranges: EndpointError(FarBehind)", + "Reduce Express ticket RPIP", + "Error after updating smartnode stack", + "Smartnode Support for Allnodes Users", + "RP native mode error 127", + "Gas estimation error on deposit", + "Rpc error", + "Connection error", + "Team-supported troll thread to troll the other troll thread", + "```ERROR 09 14|200728 162 Dangling trie", + "Could Not Estimate Gas Limit Error", + "Error 126 running `node status`" + ] + } +} \ No newline at end of file diff --git a/tests/test_detect_scam.py b/tests/test_detect_scam.py new file mode 100644 index 00000000..50fa4409 --- /dev/null +++ b/tests/test_detect_scam.py @@ -0,0 +1,156 @@ +import json +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +import regex as re + +from utils.config import Config, cfg + + +def _get_test_cfg(): + from utils.config import ( + ConsensusLayerConfig, + DiscordConfig, + DiscordOwner, + DmWarningConfig, + EventsConfig, + ExecutionLayerConfig, + ExecutionLayerEndpoint, + MongoDBConfig, + RocketPoolConfig, + RocketPoolSupport, + ) + return Config( + discord=DiscordConfig( + secret="test", + owner=DiscordOwner(user_id=1, server_id=2), + channels={"default": 100, "report_scams": 200}, + ), + execution_layer=ExecutionLayerConfig( + explorer="https://etherscan.io", + endpoint=ExecutionLayerEndpoint(current="http://localhost:8545", mainnet="http://localhost:8545"), + etherscan_secret="test", + ), + consensus_layer=ConsensusLayerConfig( + explorer="https://beaconcha.in", + endpoint="http://localhost:5052", + beaconcha_secret="test", + ), + 
mongodb=MongoDBConfig(uri="mongodb://localhost:27017"), + rocketpool=RocketPoolConfig( + manual_addresses={"rocketStorage": "0x1234"}, + dao_multisigs=["0xabcd"], + support=RocketPoolSupport(user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5), + dm_warning=DmWarningConfig(channels=[100]), + ), + events=EventsConfig(lookback_distance=100, genesis=0, block_batch_size=50), + ) + + +def _load_test_cases(): + path = Path(__file__).parent / "message_samples.json" + with open(path) as f: + return json.load(f) + + +TEST_CASES = _load_test_cases() + + +def _make_embed(data: dict) -> MagicMock: + embed = MagicMock() + embed.title = data.get("title") + embed.description = data.get("description") + return embed + + +def _make_message(case: dict) -> MagicMock: + msg = MagicMock() + msg.content = case["content"] + msg.embeds = [_make_embed(e) for e in case.get("embeds", [])] + msg.author.guild_permissions.mention_everyone = False + return msg + + +def _make_detector(): + cfg._instance = _get_test_cfg() + bot = MagicMock() + bot.tree = MagicMock() + with patch.object(bot.tree, "add_command"): + from plugins.detect_scam.detect_scam import DetectScam + return DetectScam(bot) + + +@pytest.fixture(scope="module") +def detector(): + return _make_detector() + + +def _check_message(detector, case: dict) -> list[str]: + msg = _make_message(case) + checks = [ + detector._ticket_system, + detector._markdown_link_trick, + detector._paperhands, + detector._discord_invite, + detector._tap_on_this, + detector._mention_everyone, + ] + return [r for check in checks if (r := check(msg))] + + +def _case_id(case): + return case["content"][:100] + + +THREAD_KEYWORDS = ("support", "tick", "assistance", "error", "\U0001f3ab", "\U0001f39f\ufe0f") +THREAD_NAMES = (".", "!", "///") +THREAD_PATTERN = re.compile(r"(-|\u2013|\u2014)\d{3,}") + + +def _check_thread(name: str) -> bool: + lower = name.strip().lower() + return ( + any(kw in lower for kw in THREAD_KEYWORDS) + or 
bool(THREAD_PATTERN.search(name)) + or lower in THREAD_NAMES + ) + + +class TestMessageDetection: + @pytest.mark.parametrize("case", TEST_CASES["messages"]["unsafe"], ids=_case_id) + def test_unsafe_message_detected(self, detector, case): + reasons = _check_message(detector, case) + assert reasons, f"Unsafe message not detected: {case['content'][:100]!r}" + + @pytest.mark.parametrize("case", TEST_CASES["messages"]["safe"], ids=_case_id) + def test_safe_message_not_flagged(self, detector, case): + reasons = _check_message(detector, case) + assert not reasons, f"Safe message falsely flagged: {reasons}" + + @pytest.mark.parametrize("case", TEST_CASES["messages"]["known_false_positives"], ids=_case_id) + @pytest.mark.xfail(reason="known false positive", strict=True) + def test_known_false_positive(self, detector, case): + reasons = _check_message(detector, case) + assert not reasons, f"Falsely flagged: {reasons}" + + @pytest.mark.parametrize("case", TEST_CASES["messages"]["known_false_negatives"], ids=_case_id) + @pytest.mark.xfail(reason="known false negative", strict=True) + def test_known_false_negative(self, detector, case): + reasons = _check_message(detector, case) + assert reasons, f"Scam not detected: {case['content'][:100]!r}" + + +class TestThreadDetection: + @pytest.mark.parametrize("name", TEST_CASES["threads"]["unsafe"]) + def test_unsafe_thread_detected(self, name): + assert _check_thread(name), f"Unsafe thread name not detected: {name!r}" + + @pytest.mark.parametrize("name", TEST_CASES["threads"]["safe"]) + def test_safe_thread_not_flagged(self, name): + assert not _check_thread(name), f"Safe thread name falsely flagged: {name!r}" + + @pytest.mark.parametrize("name", TEST_CASES["threads"]["known_false_positives"]) + @pytest.mark.xfail(reason="known false positive", strict=True) + def test_known_false_positive(self, name): + assert not _check_thread(name), f"Falsely flagged: {name!r}" From 9bfb5490212a8e16a9a21f9a14a8d8b16ead42cb Mon Sep 17 00:00:00 2001 
From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 20:49:36 +0000 Subject: [PATCH 186/279] improve message scam detection --- .../scam_detection.py} | 177 ++++-- tests/message_samples.json | 547 +++++++++--------- ..._detect_scam.py => test_scam_detection.py} | 10 +- 3 files changed, 404 insertions(+), 330 deletions(-) rename rocketwatch/plugins/{detect_scam/detect_scam.py => scam_detection/scam_detection.py} (79%) rename tests/{test_detect_scam.py => test_scam_detection.py} (95%) diff --git a/rocketwatch/plugins/detect_scam/detect_scam.py b/rocketwatch/plugins/scam_detection/scam_detection.py similarity index 79% rename from rocketwatch/plugins/detect_scam/detect_scam.py rename to rocketwatch/plugins/scam_detection/scam_detection.py index 0b55acd8..3e194334 100644 --- a/rocketwatch/plugins/detect_scam/detect_scam.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -35,7 +35,7 @@ from utils.config import cfg from utils.embeds import Embed -log = logging.getLogger("rocketwatch.detect_scam") +log = logging.getLogger("rocketwatch.scam_detection") class DetectScam(Cog): @@ -120,6 +120,25 @@ def __init__(self, bot: RocketWatch): self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") self.invite_pattern = re.compile( r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + # Detects URLs broken across lines (with optional blockquote "> " prefixes) to evade filters + _brk = r"(?:[\s>\u2060\u200b\ufeff]*\n[\s>\u2060\u200b\ufeff]*)" # newline with optional blockquote/zero-width chars + _ws = r"[\s>]*" + self.obfuscated_url_pattern = re.compile( + rf"<{_ws}ht{_brk}tp|" # tp + rf"<{_ws}ma{_ws}i{_brk}l{_ws}t{_ws}o|" # i\n> L\n> To (mailto) + rf" S\n> Co\n> R (discord:) + rf"di{_brk}sco{_brk}rd(?!\.(?:com|gg|py|js|net|org))", # di\nsco\nrd (not discord.com etc) + re.IGNORECASE + ) + # Detects fullwidth/homoglyph dots used to disguise domains + self.homoglyph_url_pattern = 
re.compile( + r"https?://[^\s]*[\uff61\u3002\uff0e]", # 。 。 . + ) + # Extracts username from X/Twitter URL variants + _x_domains = r"(?:x|twitter|fxtwitter|fixvx|xcancel|vxtwitter)\.com" + self.x_url_pattern = re.compile( + rf"https?://(?:www\.)?{_x_domains}/(\w+)", re.IGNORECASE + ) self.message_report_menu = ContextMenu( name="Report Message", @@ -305,16 +324,15 @@ async def manual_message_report(self, interaction: Interaction, message: Message await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"warning_id": warning_msg.id}}) await interaction.followup.send(content="Thanks for reporting!") - def _markdown_link_trick(self, message: Message) -> str | None: - txt = self._get_message_content(message) - for m in self.markdown_link_pattern.findall(txt): - if "." in m[0] and m[0] != m[1]: - return "Markdown link with possible domain in visible portion that does not match the actual domain" - return None - def _discord_invite(self, message: Message) -> str | None: - txt = self._get_message_content(message) - if match := self.invite_pattern.search(txt): + # Only check message content, not embeds (legit videos/links have discord invites in embeds) + if not message.content: + return None + content = message.content + content = parse.unquote(content) + content = anyascii(content) + content = content.lower() + if match := self.invite_pattern.search(content): link = match.group(0) trusted_domains = [ "youtu.be", "youtube.com", "tenor.com", "giphy.com", @@ -331,26 +349,34 @@ def _tap_on_this(self, message: Message) -> str | None: ) return "Tap on deez nuts nerd" if self.__txt_contains(txt, keywords) else None + def _obfuscated_url(self, message: Message) -> str | None: + if not message.content: + return None + # Line-broken protocol/scheme + if self.obfuscated_url_pattern.search(message.content): + return "Message contains an obfuscated URL" + # Fullwidth/homoglyph dots in domain + if self.homoglyph_url_pattern.search(message.content): + return 
"Message contains an obfuscated URL" + # Heavily percent-encoded domain + if re.search(r"https?://[^\s]*(?:%[0-9a-fA-F]{2}){5}", message.content): + return "Message contains an obfuscated URL" + # Markdown link where visible text looks like a different domain than the actual URL + content = parse.unquote(message.content) + content = anyascii(content).lower() + for m in self.markdown_link_pattern.findall(content): + if "." in m[0] and m[0].rstrip(".") != m[1].rstrip("."): + return "Message contains an obfuscated URL" + return None + def _ticket_system(self, message: Message) -> str | None: - # message contains one of the relevant keyword combinations and a link txt = self._get_message_content(message) if not self.basic_url_pattern.search(txt): return None - keywords = ( - [ - ("support", "open", "create", "raise", "raisse"), - "ticket" - ], - [ - ("contact", "reach out", "report", [("talk", "speak"), ("to", "with")], "ask"), - ("admin", "mod", "administrator", "moderator") - ], - ("support team", "supp0rt", "🎫", "🎟️", "m0d", "tlcket"), - [ - ("get", "ask", "seek", "request", "contact"), - ("help", "assistance", "service", "support") - ], + # High-confidence scam indicators (don't need URL trust check) + strong_keywords = ( + ("support team", "supp0rt", "🎫", ":ticket:", "🎟️", ":tickets:", "m0d", "tlcket"), [ ("relay"), ("query", "question", "inquiry") @@ -358,10 +384,53 @@ def _ticket_system(self, message: Message) -> str | None: [ ("instant", "live"), "chat" + ], + [ + ("submit"), + ("question", "issue", "query") ] ) + if self.__txt_contains(txt, strong_keywords): + return "There is no ticket system in this server." 
+ + # Short directive messages with a URL ("ask here", "get help here") + content_only = txt.split("---")[0].strip() # exclude embeds + if len(content_only) < 120 and self.basic_url_pattern.search(txt): + directives = ("ask here", "get help", "help here", "click here", "go here") + if any(d in content_only for d in directives): + return "There is no ticket system in this server." + + # Weaker keywords: only check short messages (long technical discussions cause false positives) + content_txt = self._get_message_content(message) + content_only_txt = content_txt.split("---")[0] # strip embed text + if len(content_only_txt) > 500: + return None + trusted_url_domains = ( + "youtu.be", "youtube.com", "twitter.com", "x.com", "fxtwitter.com", + "fixvx.com", "fxbsky.app", "reddit.com", "github.com", "etherscan.io", + "beaconcha.in", "rocketpool.net", "docs.rocketpool.net", "rocketpool.support", + "xcancel.com", "steely-test.org", "validatorqueue.com", "checkpointz", + "discord.com", "forms.gle", "google.com", + ) + content_urls = list(self.basic_url_pattern.finditer(content_only_txt)) + if not content_urls or all( + any(domain in m.group(0) for domain in trusted_url_domains) + for m in content_urls + ): + return None - return "There is no ticket system in this server." if self.__txt_contains(txt, keywords) else None + weak_keywords = ( + [ + ("support", "open", "create", "raise", "raisse"), + "ticket" + ], + [ + ("contact", "reach out", "report", [("talk", "speak"), ("to", "with")], "ask"), + ("admin", "mod", "administrator", "moderator", "team") + ], + ) + + return "There is no ticket system in this server." 
if self.__txt_contains(content_only_txt, weak_keywords) else None @staticmethod def __txt_contains(txt: str, kw: list | tuple | str) -> bool: @@ -374,26 +443,48 @@ def __txt_contains(txt: str, kw: list | tuple | str) -> bool: return all(map(lambda w: DetectScam.__txt_contains(txt, w), kw)) return False - def _paperhands(self, message: Message) -> str | None: - # message contains the word "paperhand" and a link + def _suspicious_link(self, message: Message) -> str | None: txt = self._get_message_content(message) if "http" not in txt: return None + hosting_domains = ("pages.dev", "web.app", "vercel.app") + if any(d in txt for d in hosting_domains) and re.search( + r"\b(?:mint|opensea|airdrop|claim|reward|free)\b", txt + ): + return "The linked website is most likely a wallet drainer" + return None - reason = "The linked website is most likely a wallet drainer" - if any(x in txt for x in ["paperhand", "paper hand", "paperhold", "pages.dev", "web.app"]): - return reason - - if any(x in txt for x in ["mint", "opensea"]) and any(x in txt for x in ["vercel.app"]): - return reason - + def _suspicious_x_account(self, message: Message) -> str | None: + if not message.content: + return None + suspicious_keywords = ("support", "ticket", "helpdesk", "assist") + for m in self.x_url_pattern.finditer(message.content): + username = m.group(1).lower() + if any(kw in username for kw in suspicious_keywords): + return "Link to suspicious X account" return None - # contains @here or @everyone but doesn't actually have the permission to do so - def _mention_everyone(self, message: Message) -> str | None: + def _bio_redirect(self, message: Message) -> str | None: + if not message.content or len(message.content) > 300: + return None txt = self._get_message_content(message) - if ("@here" in txt or "@everyone" in txt) and not message.author.guild_permissions.mention_everyone: - return "Mentioned @here or @everyone without permission" + if any(kw in txt for kw in ("my bio", "my icon", "my 
profile", "my pfp")): + return "Redirecting users to a malicious profile link" + return None + + def _spam_wall(self, message: Message) -> str | None: + if not message.content or len(message.content) < 100: + return None + content = message.content + # Spoiler wall: many spoiler tags with minimal visible content + if content.count("||") >= 20: + stripped = re.sub(r"\|\||[\s\u200b_]|https?://\S+", "", content).strip() + if len(stripped) < 10: + return "Spoiler wall spam" + # Invisible character wall: mostly blank/invisible characters + visible = re.sub(r"[\s\u2800\u200b\u200c\u200d\u2060\ufeff\U000e0000-\U000e007f]", "", content) + if len(visible) < 10 and len(content) > 200: + return "Invisible character spam" return None async def _reaction_spam(self, reaction: Reaction, user: User) -> str | None: @@ -452,12 +543,14 @@ async def on_message(self, message: Message) -> None: return checks = [ + self._obfuscated_url, self._ticket_system, - self._markdown_link_trick, - self._paperhands, + self._suspicious_x_account, + self._suspicious_link, self._discord_invite, self._tap_on_this, - self._mention_everyone, + self._bio_redirect, + self._spam_wall, ] for check in checks: if reason := check(message): diff --git a/tests/message_samples.json b/tests/message_samples.json index df24b03d..da63db98 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -237,203 +237,7 @@ }, { "content": "merge launch party call POAP is available until 00:00 2022-09-18" - } - ], - "unsafe": [ - { - "content": "Tap on the proper link below" - }, - { - "content": "Click on the proper channel" - }, - { - "content": "**🔗“For any enquiry {head} Here⬇️ \n> \n> L\n> To\n> :////#@%\n> 64\n> %\n> 69%73%\n> 63\n> %6F%72%64%2\n> e%67%67/vS978qyvKt> **" - }, - { - "content": "> # _`Help Center📮 `_ \n> -# kindly_create a t¡cket to be_attended to immediately_by_the_team 👇\n> [**`#|Support-channel`**]() 👈▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ <@584920944762552346>" - }, - { - "content": "**Could you 
ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048 

 <@307243034636976138>" - }, - { - "content": "discordapp.com/oauth2/authorize?client_id=1414339007269306508" - }, - { - "content": "<@142119213815955456> Technical support available here\n✉️👉 [ ]" - }, - { - "content": "> ***Ask here for issues/questions related here***👇\n\n>**>**\n\nᵐᵉˢˢᵃᵍᵉ ʷⁱˡˡ ᵃᵘᵗᵒ-ᵈᵉˡᵉᵗᵉ ⁱⁿ ᵒⁿᵉ ᵐⁱⁿᵘᵗᵉ\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝" - }, - { - "content": "Hello! This is just a routine check-in with our community members to understand how trading has been going for everyone. How has your experience been in the community so far?\nFor your security and faster resolution, please open a support ticket Create a •📬 SUPP0RT TICK£T• here\n\n**** so our team can review this properly. Tickets help us track issues accurately and assist you without delays. Thank you for your cooperation." - }, - { - "content": "[ **** ]" - }, - { - "content": "> # `Help_Center` \n> -# kindly open a t¡cket to be attended to ¡mmediately 👇👇\n> 📥 tp\n> s\n> :////\\\\@di\n> sco\n> rd。\n> gg/VfAeHwvybW>" - }, - { - "content": "🎫 [`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%78%61%6d%56%6e%34%44%4e%54%70) ] 📥" - }, - { - "content": "**📌 Submit Feedback & Bug Reports **\n\ndiscord.com/oauth2/authorize?client_id=1409881942601633812 )" - }, - { - "content": ".-.contact.-.team.⚙️👇\n\n> >📥👉** tp\n> \n> :////\\\\\\@dis\n> cord\n> app\n> .com/\n> invite\\\n> RJ2EhWNGeN>>**" - }, - { - "content": "> __**Reach out to Team through👇**__\n> \n> [**`# |support-t!cket`**](\n> \n> )\n> 🎫\n> → ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝." 
- }, - { - "content": "{⁨⁨`# Direct your issue here`⁩⁩}\n\n> > tp\n> s\n> :////@@di\n> sco\n> rd.\n> gg/vS978qyvKt>>" - }, - { - "content": "Get He-lp Here\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com/invite\\gA9Cf8xZAs>>** <@1445048538521141248>" - }, - { - "content": "** Reminder!** 📢 \nᴬᵘᵗᵒᵐᵃᵗᵉᵈ ᴹᵉˢˢᵃᵍᵉ\n> If you need help or have any concern, \n> \n> Open-Ticket: 📮 \n> S\n> Co\n> Rd:/#\n> @%\n> 64%69%73%63%6F%72%64%61%70%70%2E%63%6F%6D/%69%6E%76%69%74%65/%6E%75%50%6A%63%57%61%4E%6E%4B> <@888953614524100638>" - }, - { - "content": "https://discord.com/invite/4JdMGUyu9Z" - }, - { - "content": "**Could you ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048" - }, - { - "content": "Kindly: [** #OPEN TICKET**](sms:/#@%64%69%73%63%6F%72%64%2E%67%67/%56%66%41%65%48%77%76%79%62%57) ]" - }, - { - "content": "**Proceed for clarification and guidance** \n\n📨 [**`# |support-t!cket`**]()\n<@881820207679701023>" - }, - { - "content": "[# |support-t!cket](discord:%64%69%73%63%6F%72%64%2E%67%67/36yrkjDz7G)" - }, - { - "content": "Hey, please go directly to📩[`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%39%5A%50%63%4B%56%55%56%6D%41) ]" - }, - { - "content": "<@1017897519985999972>\n**ROCKET POOL TEAM**\n\n ➠** ** [ **** ]" - }, - { - "content": "Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949" - }, - { - "content": "Connect with the team!\n\n ****" - }, - { - "content": "Ask here\n>\n> >📨👉 sc\n> Ord\n> :////\\\\@@di\n> sco\n> rd.\n> gg/complains>>**\n⁠ ⁠ ᵀʰᶦˢ ᵐᵉˢˢᵃᵍᵉ ʷᶦˡˡ ᵃᵘᵗᵒ⁻ᵈᵉˡᵉᵗᵉ ᶦⁿ ᵃ ᵐⁱⁿᵘᵗᵉPlease [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**" - }, - { - "content": "➠ ASK here Please 0P£N-A-TlCk£T\n\n[ ****  ]\n\n`this message will auto delete in few seconds`" - }, - { - "content": "Kindly refer to [**`# |Support`**]()\n<@851524243861536819>" - }, - { - "content": "<@472697176602968064> Share your questions / inquiries to the team here 👇 👇 👇 \n[ ** ** ]' for prompt assistance,Thank you." - }, - { - "content": "📨 * Please Submit this Question/Issue here*\n> **>☎👉 tp\n> \n> :////\\\\@0x54\n> 。0xC8\n> 。0x5B\n> 。0x57/#Tlcketing>**" - }, - { - "content": "🚨 Need help? Please send a text message to Ask Below👇 🎫 \n**<\nht\ntp\n://glitch\nrep\nort。\ngithub。\nio\\rep>**Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. 
No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Always verify the authenticity of links before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Please verify the authenticity of the link before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the NO support team will message you privately." - }, - { - "content": "Get Help👇 \n\n****" - }, - { - "content": "Ask Team Here👉🎫:https://x.com/Ticket_Bot01/status/1995522254386737270?s=20", - "embeds": [ - { - "title": null, - "description": "Kindly use the support forum link for help/assistance\n\n https://t.co/081A0nIjxv" - } - ] - }, - { - "content": "USE THE URL BELOW here , Submit this Question/Issue here\nhttp://easyurl.cc/complaints", - "embeds": [ - { - "title": "Join the 📨 Create A Ticket Discord Server!", - "description": "Check out the 📨 Create A Ticket community on Discord - hang out with 9832 other members and enjoy free voice and text chat." 
- } - ] - }, - { - "content": "*Ask Here⬇️⬇️*\nhttps://x.com/supportdeskgk/status/1995116120739586317?s=46" - }, - { - "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" }, - { - "content": "We are excited to announce a new free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemysterybxx011.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited.", - "embeds": [ - { - "title": "CLICK HERE TO CLAIM!", - "description": "🟢 AIRDROP IS LIVE NOW 🟢\n \n🎉 Price: FREE\n🎉 Supply: 150 Mystery Box\n🎉 Reward: between $3000 and $250,000\n\n\nTRY YOUR LUCK ! 🚀" - } - ] - }, - { - "content": "Submit your questions / Issues below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" - }, - { - "content": "We are excited to announce a new, free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemint-mysteryboxs117.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited." - }, - { - "content": "<@877324748336549978> **Submit your questions / Issues below**\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://iss\n> ue\n> rep\n> ort\n> 。\n> pages。dev\\>>**" - }, - { - "content": "**Submit your questions / issues below**\n\n> **>👉 <\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> ort\n> 。\n> pages。dev\\>>**" - }, - { - "content": "<@223883989948170240> Submit your questions / Issues below\n\n> ><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> se\n> 。\n> pages。dev\\core>>." - }, - { - "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" - }, - { - "content": "Share your questions / Issues using the url below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" - }, - { - "content": "**Please Submit this Question/Issue here.**\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" - }, - { - "content": "||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​|| _ _ _ _ _ _ https://imgur.com/Ski0v3K https://imgur.com/6nypdJr https://imgur.com/4Q9nLF1 https://imgur.com/PQx6VtQ @here", - "embeds": [ - { - "title": null, - "description": null - }, - { - "title": null, - "description": null - }, - { - "title": null, - "description": null - }, - { - "title": null, - "description": null - } - ] - }, - { - "content": "Hello @everyone \n\nANYONE WHO CAN GET ME A WALLLET THAT HAVE PLENTY TRANSACTIONS I WILL PAY HIM 3SOL AN EMPTY WALLLET THAT HAVE REACH 3 MONTHS OR MORE THAN THAT I WILL PAY ANY AMOUNT AND SOME DEAD TOKENS, I AM GOING TO BUY DM" - }, - { - "content": "The official mod and proper admin with experience kindly click on the official link above to create a ticket for proper guidance <@1149300403461177434>" - }, - { - "content": "<@762764936106999818> The official mod and proper admin with experience kindly click on the official link above to create a 
ticket for proper guidance" - }, - { - "content": "Tap on my profile you will see a proper process to take regarding this on my bio <@1308922592152784916>" - } - ], - "known_false_positives": [ { "content": "https://rocketpool.steely-test.org/ so I mean the validators with under 80% performance over the last 1 day period. the non zero gone I mean excluding the validators with zero performance over the 1 day as they were likely already down.", "embeds": [ @@ -443,24 +247,9 @@ } ] }, - { - "content": "Hello @here and <@&918359147710410782>s!\n\nThe Rocket Pool team is happy to release **v1.19.0-rc1** of the Smart Node! The Rocket Pool community is welcome to join us on the Hoodi Testnet to test all the Saturn-1 upgrade features or try to break them before the upgrade goes live on Mainnet!\n\n## Client Updates\n\n- Lodestar updated to v1.39.1\n- MEV-Boost updated to v1.11.0;\n\n## Smart Node changes\n- Rewards V11 was implemented and scheduled for the next interval (140);\n- Megapool commands are visible and don't require a flag;\n- RPL previously staked on the node is now considered `Legacy RPL`. There is no migration from legacy RPL. 
Users will need to withdraw from legacy and stake on the megapool if desired.\n- You can stake RPL with `rocketpool node stake-rpl`;\n- To unstake RPL (both from Legacy RPL or from the megapool) there is a unstaking period (48 hours on the Testnet).\n- The command `rocketpool node withdraw-rpl` is used to request the withdraw and also to complete it after the unstaking period;\n- RPL staked on the megapool will be considered for voter share rewards (see RPIP-46).\n- The Smart Node will automatically set the correct fee recipient, according to these rules:\n - If the node has joined the smoothing pool -> smoothing pool address\n - If the node is not part of the smoothing pool AND:\n - only has minipools -> node distributor contract address\n - only has megapool validators -> megapool contract address\n - has both minipools and megapool validators -> the fee recipient will be defined per validator using the keymanager API \n- If you wish to change the Keymanager API port, you may do so using `rocketpool service config` under the CC (ETH2) menu. \n\n## Megapool menu (`rocketpool megapool, g`)\n\n- **deploy** — Deploy a megapool contract for your node. This can be done automatically on the first deposit\n- **deposit** (`d`) — Make a deposit and create new validator(s). Use `--count N` for up to 35 deposits on the same transaction and `--express-tickets` to define the amount of express tickets\n- **status** (`s`) — Show the node’s megapool status\n- **validators** (`v`) — List the megapool’s validators and their state\n- **repay-debt** (`r`) — Repay megapool debt\n- **reduce-bond** (`e`) — Reduce the megapool bond\n- **claim** (`c`) — Claim distributed megapool rewards that haven’t been claimed yet\n- **stake** (`k`) — Stake a megapool validator. 
There is a node task that tries to stake automatically\n- **exit-queue** (`x`) — Exit a validator from the megapool queue\n- **exit-validator** (`t`) — Request to exit a megapool validator from the beacon chain\n- **notify-validator-exit** (`n`) — Notify that a validator exit is in progress. There is a node task that tries to notify the exit automatically. A beacon proof is required.\n- **notify-final-balance** (`f`) — Notify that a validator exit completed and the final balance was withdrawn. There is a node task that tries to notify the final balance withdrawal automatically. A beacon proof is required. In case this proof is not provided for some time, a more complex historical beacon proof will be needed (this may require access to an archive node). In case users don't have access to an archive node, the Smart Node will automatically request the historical proof from an API provided by the Rocket Pool team.\n- **distribute** (`b`) — Distribute accrued execution layer rewards sent to this megapool\n- **set-use-latest-delegate** (`l`) — Enable or disable using the latest delegate contract (`true` / `false`).\n- **delegate-upgrade** (`u`) — Upgrade the megapool’s delegate contract to the latest version\n- **dissolve-validator** (`i`) - Dissolve a validator with invalid credentials or a prestaking validator that failed to stake in time\n\nAs this is a pre-release version, the download command to be used is: \n`wget -O ~/bin/rocketpool\n\nThanks everyone!\nRocket Pool <:rocketpool:1406836483913941074>" - }, - { - "content": "Hello @here and @Node!\n\nWe're releasing `v1.19.1` of the Smart Node. It contains a bug fix and many quality of life updates.\n\nThis is a recommend upgrade for all users **and a required update for the <@&886163752553164830>**. This version implements the changes defined on RPIP-77, making minipools use the latest delegate automatically. 
\n\n**IF YOU DO NOT WISH TO OPT INTO USING THE LATEST DELEGATE CONTRACT ON YOUR MINIPOOLS, you should not install this version!**\n\n\n\n## Client Updates\n- Besu updated to v26.1.0;\n- Nimbus updated to v26.1.0;\n\n## Smart Node Updates:\n- Fix selecting rewards ruleset for the approximator;\n- Add a task to automatically submit txs for minipools to use the latest delegate. See [RPIP-77](https://rpips.rocketpool.net/RPIPs/RPIP-77) for more details;\n- Add option to send all tokens from the node wallet. If ETH is selected a tiny gas reserve will be kept.\n- Add option to exit multiple validators from the queue;\n- Improve the gas estimation for multi deposits so users can send more deposits getting closer to the tx gas limit;\n- Use `usableCredit` when calculating the remaining amount to be sent for partial credit deposits;\n- Add the `assign-deposits` command;\n- Show the queue position when selecting validators to exit;\n- Show the estimate queue position when depositing, so users can better choose when to use express tickets;\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>", - "embeds": [ - { - "title": "RPIP-77: Set Smart Node Default to Use Latest Delegate for Minipools", - "description": "Update Smart Node so by default minipools use the latest protocol-approved delegate and remove supported Smart Node configuration paths for setting older delegate implementations." - } - ] - }, { "content": "There’s a fair amount of historical context around ideas like this. The closest example that actually passed is probably this bounty: https://rpbountyboard.com/BA062302\n\nLongForWisdom spent a significant amount of time expanding on it, but I think it was difficult to get broad support because many members felt it was too complex to properly scope in advance. 
While it did pass, it ultimately saw limited usage. It’s unclear whether that was primarily due to a subsequent drop in activity, or whether the predefined bounty structure itself introduced additional friction.\n\nSomething that could be particularly valuable would be a draft bounty written by someone with hands-on experience in this area (e.g., sckuzzle, Dr Doofus, halo, etc.), outlining realistic scenarios based on projects they’ve personally worked on.\n\nMy personal view is that, unless a proposal received overwhelming support and clearly accounted for the full range of potential scenarios, I would instead prefer these to be submitted on a case-by-case basis, based on how similar efforts have played out in the past. I’m generally supportive of funding more work, but I’m concerned that if the bounty or process isn’t well-structured or doesn’t gain traction, it could unintentionally lead to the opposite outcome." }, - { - "content": "was there an @here announcement no one saw?" - }, { "content": "Commented on a Vitalik post: https://www.reddit.com/r/ethereum/s/FSqj0AL1uH\n\nAnd added a reaction to the community call: https://youtube.com/watch?v=ygvpjXypGW0&lc=UgzbSldP-dh5G7nwH_B4AaABAg&si=0FmP5SmzfvihDotp\n\nAnd I upvoted the few other Reddit comments about Rocket Pool in the daily r/ethereum 😁", "embeds": [ @@ -501,9 +290,6 @@ } ] }, - { - "content": "Hello @here and <@&918359147710410782>s!\n\nWe're releasing **v1.19.4** of the Smart Node. It's maintenance release reducing memory usage and data transfers between the node and the clients.\n \n**This is a required upgrade for <@&886163752553164830> nodes before the next rewards interval.**\nThis is a high-priority upgrade for Teku and Lighthouse users who didn't manually update and a recommended upgrade for all the other users. 
\n\n\n\n## Client Updates\n- Besu updated to v26.2.0\n- Teku updated to v26.3.0\n- Geth updated to v1.17.1\n- Lighthouse updated to v8.1.1\n- Nimbus updated to v26.3.0\n\n## Smart Node changes\n- Optimize the state loading on the node process. Reduces memory and data transfers\n- Change the megapool ETH eligible for RPL rewards to keep it consistent with minipools\n- Restart the `node`/`watchtower` processes when new contracts are detected to clear related caches\n- Remove port connectivity alerts for externally managed clients. Thanks to b0a7\n- Add a command to execute an upgrade proposal\n- Fix treegen voting power logic for megapools. Thanks to Patches for the contribution\n- Fix queue position estimation on `megapool validators`\n- Adjust to Besu breaking changes\n- Added the command to set use latest delegate for megapools\n- Removed deprecated commands to begin bond reduction, node deposit, create vacant minipool, and service stats\n- Fix a crash when constructing the network state\n- Removed the [RPIP-77]() changes warning.\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>" - }, { "content": "mamdani is the only dem who has figured out how to play trump https://fxtwitter.com/seungminkim/status/2027136964256752050?s=20", "embeds": [ @@ -522,12 +308,6 @@ } ] }, - { - "content": "lol apparently me selling my rpl when I was down a whole house is me being paper handed 🙄 https://x.com/THeD_eth/status/1999867592677453929?s=20" - }, - { - "content": "Due to the limitations and latency of Solana, Jupiter Exchange, a perp dex on solana had to do a few compromises. Modern perp dexes (Ligher and Hyperliquid) use a high frequency central limit order book (CLOB) which allows extremely fast posting and canceling of individual orders. 
Jupiter could not implement a CLOB because of how slow general purpose chains are, even if you try to push them to their limit like solana does. They had to go with a trader-to-pool model like GMX before them on Arbitrum. Such an approach is less capital efficient as the liquidity providers have to be paid. It also limits the maximum open interest of Jupiter to the size of the pool, whereas in a CLOB you can leverage both sides of the market and not only one side like in the pool model.\n\nIf you want to read more, there is a massive write-up by letsgetonchain on the cyberfund website talking about 4 different perp dexes designs: https://www.cyber.fund/content/perps#5-4-jupiter-exchange" - }, { "content": "If I want to convert my minipool to a megapool with two 4 ETH validators, these are roughly the steps?\n\n1) `r m exit`\n2) `r m close`\n3) `r n withdraw-rpl`\n\nAt this point all `eth` both from bond and rewards + all `rpl` are in my withdrawal wallet, right? \n\nThen I just proceed regularly with the creation of the megapool validator (https://docs.rocketpool.net/node-staking/megapools/create-megapool-validator)\n\nIs that the process? \n\na) Is there a way to check before starting this process that my smartnode was given the express tickets? \nb) At which point are the express tickets issued?" }, @@ -549,9 +329,6 @@ } ] }, - { - "content": "@here Hey everyone!\n\nI have a bumper update for you\n\n🪨 The RockSolid rETH Vault is seeking community feedback, here is a message from the team: *“RockSolid would love feedback from existing depositors on which vault product features matter most to you. We have created a short pseudonymous survey here: https://forms.gle/axtixjFppFqg8ZM6A . We would greatly appreciate it if you could provide your input. It should only take a few minutes and your input will help guide our decisions for the vault. 
There is an optional field to provide your contact details (if you want to) - we'd love to get in touch Thanks in advance for help and for your continued support!”*\n\n🗳️ Rocket Pool governance relies on node operators having their say to be effective. If you have delegated your vote to someone who is not active, consider voting yourself directly or changing your delegation. Similarly, if you are listed on the delegates page, consider removing your profile if you are not voting. More info here: (https://dao.rocketpool.net/t/voting-delegate-check/3873).\n\n🪐 The first Saturn One audit report, from Cantina, has been uploaded for your perusal, with more audit reports coming soon: (https://rocketpool.net/protocol/security). Today’s Community Call was a big one and covered a lot of Saturn content, the recording is available now on YouTube: (https://youtu.be/ygvpjXypGW0). And a weekly community POAP initiative to support Saturn One has launched: (https://discord.com/channels/405159462932971535/1461093515181162507/1461093517559337203).\n\n🚀 There are a couple of sentiment pools live including Smart Node delegate requirements: (https://dao.rocketpool.net/t/use-latest-delegate-in-smartnode-sentiment-poll/3868), and increasing the deposit pool maximum to support Saturn One: (https://dao.rocketpool.net/t/increase-deposit-pool-max-sentiment-poll/3865)\n\n🚨 Finally, a reminder for all node operators to check and ensure that you are online!\n\nRocket Pool <:rocketpool:1406836483913941074>" - }, { "content": "Well so much for trying to post my screenshots with my message, lets try this again:\nHave been having some issues since Fusaka, maybe similar to rabidsloth? I believe I had upgraded in time but after the fork corrupted the database while doing a system update and not shutting down rockepool first. 
I was seeing bad blocks and chain wasn't syncing and using my fallback instead.\nWhat I am running \nRocket Pool client version: 1.18.6\nRocket Pool service version: 1.18.6\nSelected Eth 1.0 client: Geth (Locally managed)\n Image: ethereum/client-go:v1.16.7\nSelected Eth 2.0 client: Lighthouse (Locally managed)\n Image: sigp/lighthouse:v8.0.1\nI have since over the course of the last week or so:\n - Upgraded to Lighthouse v8.0.1 from v8.0.0\n - Resynced Geth\n - Updated checkpoint sync to https://mainnet.checkpoint.sigp.io/\n - Resynced Lighthouse\n - Upped my peer counts (33 Geth, 70 LH)\n - Restarted Rocketpool service anytime I saw the \"check execution node for corruption then restart it and Lighthouse\"", "embeds": [ @@ -621,9 +398,6 @@ } ] }, - { - "content": "Good morning, I noticed that my node missed attenstations since a roughly 2d6h.\nI noticed that other people were reporting issues with Nimbus. I guess it's linked.\nI tried restarting the service and rebooted the node but still no success.\nThis is my configuration:\n\nRocket Pool client version: 1.18.10\nRocket Pool service version: 1.18.10\nSelected Eth 1.0 client: Nethermind (Locally managed)\n Image: nethermind/nethermind:1.36.0\nSelected Eth 2.0 client: Nimbus (Locally managed)\n Image: statusim/nimbus-eth2:multiarch-v25.12.0\n VC image: statusim/nimbus-validator-client:multiarch-v25.12.0\nMEV-Boost client: Enabled (Local Mode)\n Image: flashbots/mev-boost:1.10.1\n\neth 1 logs: https://pastebin.com/yFBqYmW8\neth2 logs: https://pastebin.com/YUYXBEe6" - }, { "content": "https://fxtwitter.com/kartojal/status/1999741511919948100", "embeds": [ @@ -633,15 +407,9 @@ } ] }, - { - "content": "so, there's a bunch of tibbir stuff happening rn it seems. crossmint and phala are active on their githubs around something called aac - agentic autonomous companies. there's this url that people have been looking at ribbit-aac.com but it seems like there's nothing there rn. 
there's a vercel link, but it's private - http://ribbit-aac-git-main-ribbita-projects.vercel.app. i'm missing some tweets because twitter messaging sucks so badly. and manu at crossmint tweeting about aac https://x.com/manuwritescode/status/2021104322277249209? \n\nnew article tease from altbro - to be released this week https://x.com/altcoinist/status/2021209833743978940?" - }, { "content": "super-ultra-rage: The feeling I get when I forget to `@everyone` in a pingserver ping and have to add a new post just to do that." }, - { - "content": "team should probably at least do a @here ping on this let LH nodes know not to update yet" - }, { "content": "Finally some drama https://fxtwitter.com/Marczeller/status/1999408520316453321", "embeds": [ @@ -668,71 +436,230 @@ "description": "Why is Shane Curran following Micky and both Ribbita X’s?\n︀︀\n︀︀So weird\\. Must be the wind\\. Why would a Series B funded startup follow a meme coin\\.\n︀︀\n︀︀Honestly, why do all these high profile startups and Rebels follow $TIBBIR?\n︀︀\n︀︀Just a meme? Or foundational Tech?\n\n> **[Quoting](https://x.com/arcurn/status/2029542271255732519) Shane Curran \\([@arcurn](https://x.com/arcurn)\\)**\n> ︀\n> Today, we’re excited to announce [@Evervault](https://x.com/Evervault)'s $25M Series B, led by Ribbit Capital with continued support from [@sequoia](https://x.com/sequoia), [@IndexVentures](https://x.com/IndexVentures), [@kleinerperkins](https://x.com/kleinerperkins), and [@nextplayVC.](https://x.com/nextplayVC.)\n> ︀︀\n> ︀︀This round comes at a time when sensitive data exchange on the web is going parabolic\\. 
Since 2019, we’ve been focused on building durable infrastructure for engineering teams to collect, process, share, and enrich sensitive data \\-\\- while keeping it encrypted at all times\\.\n> ︀︀\n> ︀︀We thought we were making good progress in encrypting the web, helping customers like [@tryramp](https://x.com/tryramp), [@Rippling](https://x.com/Rippling), [@finix](https://x.com/finix), [@TheOverwolf](https://x.com/TheOverwolf), [@Uniswap](https://x.com/Uniswap), [@CarTrawler](https://x.com/CarTrawler), and hundreds of others secure more…" } ] - } - ], - "known_false_negatives": [ + }, { - "content": "<@360474629988548608> [Click On Learn More]\n>https://x.com/Tickets231013/status/2012865122654421197?s=20\n---\n Embed: None\nhttps://t.co/uy5Xws6VHP\n---\n" + "content": "Hello @here and <@&918359147710410782>s!\n\nThe Rocket Pool team is happy to release **v1.19.0-rc1** of the Smart Node! The Rocket Pool community is welcome to join us on the Hoodi Testnet to test all the Saturn-1 upgrade features or try to break them before the upgrade goes live on Mainnet!\n\n## Client Updates\n\n- Lodestar updated to v1.39.1\n- MEV-Boost updated to v1.11.0;\n\n## Smart Node changes\n- Rewards V11 was implemented and scheduled for the next interval (140);\n- Megapool commands are visible and don't require a flag;\n- RPL previously staked on the node is now considered `Legacy RPL`. There is no migration from legacy RPL. 
Users will need to withdraw from legacy and stake on the megapool if desired.\n- You can stake RPL with `rocketpool node stake-rpl`;\n- To unstake RPL (both from Legacy RPL or from the megapool) there is a unstaking period (48 hours on the Testnet).\n- The command `rocketpool node withdraw-rpl` is used to request the withdraw and also to complete it after the unstaking period;\n- RPL staked on the megapool will be considered for voter share rewards (see RPIP-46).\n- The Smart Node will automatically set the correct fee recipient, according to these rules:\n - If the node has joined the smoothing pool -> smoothing pool address\n - If the node is not part of the smoothing pool AND:\n - only has minipools -> node distributor contract address\n - only has megapool validators -> megapool contract address\n - has both minipools and megapool validators -> the fee recipient will be defined per validator using the keymanager API \n- If you wish to change the Keymanager API port, you may do so using `rocketpool service config` under the CC (ETH2) menu. \n\n## Megapool menu (`rocketpool megapool, g`)\n\n- **deploy** — Deploy a megapool contract for your node. This can be done automatically on the first deposit\n- **deposit** (`d`) — Make a deposit and create new validator(s). Use `--count N` for up to 35 deposits on the same transaction and `--express-tickets` to define the amount of express tickets\n- **status** (`s`) — Show the node’s megapool status\n- **validators** (`v`) — List the megapool’s validators and their state\n- **repay-debt** (`r`) — Repay megapool debt\n- **reduce-bond** (`e`) — Reduce the megapool bond\n- **claim** (`c`) — Claim distributed megapool rewards that haven’t been claimed yet\n- **stake** (`k`) — Stake a megapool validator. 
There is a node task that tries to stake automatically\n- **exit-queue** (`x`) — Exit a validator from the megapool queue\n- **exit-validator** (`t`) — Request to exit a megapool validator from the beacon chain\n- **notify-validator-exit** (`n`) — Notify that a validator exit is in progress. There is a node task that tries to notify the exit automatically. A beacon proof is required.\n- **notify-final-balance** (`f`) — Notify that a validator exit completed and the final balance was withdrawn. There is a node task that tries to notify the final balance withdrawal automatically. A beacon proof is required. In case this proof is not provided for some time, a more complex historical beacon proof will be needed (this may require access to an archive node). In case users don't have access to an archive node, the Smart Node will automatically request the historical proof from an API provided by the Rocket Pool team.\n- **distribute** (`b`) — Distribute accrued execution layer rewards sent to this megapool\n- **set-use-latest-delegate** (`l`) — Enable or disable using the latest delegate contract (`true` / `false`).\n- **delegate-upgrade** (`u`) — Upgrade the megapool’s delegate contract to the latest version\n- **dissolve-validator** (`i`) - Dissolve a validator with invalid credentials or a prestaking validator that failed to stake in time\n\nAs this is a pre-release version, the download command to be used is: \n`wget -O ~/bin/rocketpool\n\nThanks everyone!\nRocket Pool <:rocketpool:1406836483913941074>" }, { - "content": 
"⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n
⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪\n" + "content": "Hello @here and @Node!\n\nWe're releasing `v1.19.1` of the Smart Node. It contains a bug fix and many quality of life updates.\n\nThis is a recommend upgrade for all users **and a required update for the <@&886163752553164830>**. This version implements the changes defined on RPIP-77, making minipools use the latest delegate automatically. \n\n**IF YOU DO NOT WISH TO OPT INTO USING THE LATEST DELEGATE CONTRACT ON YOUR MINIPOOLS, you should not install this version!**\n\n\n\n## Client Updates\n- Besu updated to v26.1.0;\n- Nimbus updated to v26.1.0;\n\n## Smart Node Updates:\n- Fix selecting rewards ruleset for the approximator;\n- Add a task to automatically submit txs for minipools to use the latest delegate. See [RPIP-77](https://rpips.rocketpool.net/RPIPs/RPIP-77) for more details;\n- Add option to send all tokens from the node wallet. 
If ETH is selected a tiny gas reserve will be kept.\n- Add option to exit multiple validators from the queue;\n- Improve the gas estimation for multi deposits so users can send more deposits getting closer to the tx gas limit;\n- Use `usableCredit` when calculating the remaining amount to be sent for partial credit deposits;\n- Add the `assign-deposits` command;\n- Show the queue position when selecting validators to exit;\n- Show the estimate queue position when depositing, so users can better choose when to use express tickets;\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>", + "embeds": [ + { + "title": "RPIP-77: Set Smart Node Default to Use Latest Delegate for Minipools", + "description": "Update Smart Node so by default minipools use the latest protocol-approved delegate and remove supported Smart Node configuration paths for setting older delegate implementations." + } + ] }, { - "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0xCA\n> 。0x9B\n> 。0x0C\n> 。0x52/tcketing>**\n" + "content": "Hello @here and <@&918359147710410782>s!\n\nWe're releasing **v1.19.4** of the Smart Node. It's maintenance release reducing memory usage and data transfers between the node and the clients.\n \n**This is a required upgrade for <@&886163752553164830> nodes before the next rewards interval.**\nThis is a high-priority upgrade for Teku and Lighthouse users who didn't manually update and a recommended upgrade for all the other users. \n\n\n\n## Client Updates\n- Besu updated to v26.2.0\n- Teku updated to v26.3.0\n- Geth updated to v1.17.1\n- Lighthouse updated to v8.1.1\n- Nimbus updated to v26.3.0\n\n## Smart Node changes\n- Optimize the state loading on the node process. 
Reduces memory and data transfers\n- Change the megapool ETH eligible for RPL rewards to keep it consistent with minipools\n- Restart the `node`/`watchtower` processes when new contracts are detected to clear related caches\n- Remove port connectivity alerts for externally managed clients. Thanks to b0a7\n- Add a command to execute an upgrade proposal\n- Fix treegen voting power logic for megapools. Thanks to Patches for the contribution\n- Fix queue position estimation on `megapool validators`\n- Adjust to Besu breaking changes\n- Added the command to set use latest delegate for megapools\n- Removed deprecated commands to begin bond reduction, node deposit, create vacant minipool, and service stats\n- Fix a crash when constructing the network state\n- Removed the [RPIP-77]() changes warning.\n\nTo install it, please follow our Smart Node upgrade guide here: \n\nThanks everyone!\n\nREMINDER: To opt into the Node Operator role to receive these announcements, react with 👍 to the post linked below:\n\n\nRocket Pool <:rocketpool:1406836483913941074>" }, { - "content": "* Please Submit this Question/Issue here*👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@523550178486255663>\n" + "content": "was there an @here announcement no one saw?" }, { - "content": "I've sent you a guide,kindly check \nI had similarly issue but it was resolved<@1213998623768055808>\n" + "content": "@here Hey everyone!\n\nI have a bumper update for you\n\n🪨 The RockSolid rETH Vault is seeking community feedback, here is a message from the team: *“RockSolid would love feedback from existing depositors on which vault product features matter most to you. We have created a short pseudonymous survey here: https://forms.gle/axtixjFppFqg8ZM6A . We would greatly appreciate it if you could provide your input. It should only take a few minutes and your input will help guide our decisions for the vault. 
There is an optional field to provide your contact details (if you want to) - we'd love to get in touch Thanks in advance for help and for your continued support!”*\n\n🗳️ Rocket Pool governance relies on node operators having their say to be effective. If you have delegated your vote to someone who is not active, consider voting yourself directly or changing your delegation. Similarly, if you are listed on the delegates page, consider removing your profile if you are not voting. More info here: (https://dao.rocketpool.net/t/voting-delegate-check/3873).\n\n🪐 The first Saturn One audit report, from Cantina, has been uploaded for your perusal, with more audit reports coming soon: (https://rocketpool.net/protocol/security). Today’s Community Call was a big one and covered a lot of Saturn content, the recording is available now on YouTube: (https://youtu.be/ygvpjXypGW0). And a weekly community POAP initiative to support Saturn One has launched: (https://discord.com/channels/405159462932971535/1461093515181162507/1461093517559337203).\n\n🚀 There are a couple of sentiment pools live including Smart Node delegate requirements: (https://dao.rocketpool.net/t/use-latest-delegate-in-smartnode-sentiment-poll/3868), and increasing the deposit pool maximum to support Saturn One: (https://dao.rocketpool.net/t/increase-deposit-pool-max-sentiment-poll/3865)\n\n🚨 Finally, a reminder for all node operators to check and ensure that you are online!\n\nRocket Pool <:rocketpool:1406836483913941074>" }, { - "content": "**🔗 Post questions or issue to the team HERE👇 💬\n\n**\n" + "content": "team should probably at least do a @here ping on this let LH nodes know not to update yet" }, { - "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@401029543789330432>\n" + "content": "lol apparently me selling my rpl when I was down a whole house is me being paper handed 🙄 https://x.com/THeD_eth/status/1999867592677453929?s=20" }, { - "content": 
"What is the issue with your wallet?\n" + "content": "Due to the limitations and latency of Solana, Jupiter Exchange, a perp dex on solana had to do a few compromises. Modern perp dexes (Ligher and Hyperliquid) use a high frequency central limit order book (CLOB) which allows extremely fast posting and canceling of individual orders. Jupiter could not implement a CLOB because of how slow general purpose chains are, even if you try to push them to their limit like solana does. They had to go with a trader-to-pool model like GMX before them on Arbitrum. Such an approach is less capital efficient as the liquidity providers have to be paid. It also limits the maximum open interest of Jupiter to the size of the pool, whereas in a CLOB you can leverage both sides of the market and not only one side like in the pool model.\n\nIf you want to read more, there is a massive write-up by letsgetonchain on the cyberfund website talking about 4 different perp dexes designs: https://www.cyber.fund/content/perps#5-4-jupiter-exchange" }, { - "content": "*Please Submit this Question/Issue here* \n\n[ **** ]\n" + "content": "Good morning, I noticed that my node missed attenstations since a roughly 2d6h.\nI noticed that other people were reporting issues with Nimbus. I guess it's linked.\nI tried restarting the service and rebooted the node but still no success.\nThis is my configuration:\n\nRocket Pool client version: 1.18.10\nRocket Pool service version: 1.18.10\nSelected Eth 1.0 client: Nethermind (Locally managed)\n Image: nethermind/nethermind:1.36.0\nSelected Eth 2.0 client: Nimbus (Locally managed)\n Image: statusim/nimbus-eth2:multiarch-v25.12.0\n VC image: statusim/nimbus-validator-client:multiarch-v25.12.0\nMEV-Boost client: Enabled (Local Mode)\n Image: flashbots/mev-boost:1.10.1\n\neth 1 logs: https://pastebin.com/yFBqYmW8\neth2 logs: https://pastebin.com/YUYXBEe6" }, { - "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" + "content": "so, there's a bunch of tibbir stuff happening rn it seems. crossmint and phala are active on their githubs around something called aac - agentic autonomous companies. there's this url that people have been looking at ribbit-aac.com but it seems like there's nothing there rn. there's a vercel link, but it's private - http://ribbit-aac-git-main-ribbita-projects.vercel.app. i'm missing some tweets because twitter messaging sucks so badly. and manu at crossmint tweeting about aac https://x.com/manuwritescode/status/2021104322277249209? \n\nnew article tease from altbro - to be released this week https://x.com/altcoinist/status/2021209833743978940?" + } + ], + "unsafe": [ + { + "content": "Tap on the proper link below" }, { - "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" + "content": "Click on the proper channel" }, { - "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**\n" + "content": "**🔗“For any enquiry {head} Here⬇️ \n> \n> L\n> To\n> :////#@%\n> 64\n> %\n> 69%73%\n> 63\n> %6F%72%64%2\n> e%67%67/vS978qyvKt> **" }, { - "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]** <@419195981418987521>\n" + "content": "> # _`Help Center📮 `_ \n> -# kindly_create a t¡cket to be_attended to immediately_by_the_team 👇\n> [**`#|Support-channel`**]() 👈▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ <@584920944762552346>" }, { - "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@706153469706371153>\n" + "content": "**Could you ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048 

 <@307243034636976138>" }, { - "content": "<@968964809351659530> Apologies for the inconvenience. For any inquiries or support, please use the official link in my bio to reach the technical team and moderators.\n" + "content": "discordapp.com/oauth2/authorize?client_id=1414339007269306508" }, { - "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0x8C\n> 。0x63\n> 。0xA4\n> 。0x7E/t!cketing>**\n" + "content": "<@142119213815955456> Technical support available here\n✉️👉 [ ]" }, { - "content": "🔗 💯 . ask here<@777589859026993153>\n" + "content": "> ***Ask here for issues/questions related here***👇\n\n>**>**\n\nᵐᵉˢˢᵃᵍᵉ ʷⁱˡˡ ᵃᵘᵗᵒ-ᵈᵉˡᵉᵗᵉ ⁱⁿ ᵒⁿᵉ ᵐⁱⁿᵘᵗᵉ\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝" }, { - "content": "check my icon for article\n" + "content": "Hello! This is just a routine check-in with our community members to understand how trading has been going for everyone. How has your experience been in the community so far?\nFor your security and faster resolution, please open a support ticket Create a •📬 SUPP0RT TICK£T• here\n\n**** so our team can review this properly. Tickets help us track issues accurately and assist you without delays. Thank you for your cooperation." }, { - "content": "You need assistance mate?\n<@109422960682496000>\n" + "content": "[ **** ]" }, { - "content": "https://www.asterdex.com/en/referral/CfF643\n---\n Embed: Aster - The next-gen perp DEX for all traders\nTrade perpetuals on Aster, the top DEX with low fees, deep liquidity, multichain support, high leverage and yield. Use Simple or Pro mode. 
Secure and non-custodial.\n---\n" + "content": "> # `Help_Center` \n> -# kindly open a t¡cket to be attended to ¡mmediately 👇👇\n> 📥 tp\n> s\n> :////\\\\@di\n> sco\n> rd。\n> gg/VfAeHwvybW>" }, { - "content": "<@846329528518836225> are you using mobile or extension\n" + "content": "🎫 [`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%78%61%6d%56%6e%34%44%4e%54%70) ] 📥" + }, + { + "content": "**📌 Submit Feedback & Bug Reports **\n\ndiscord.com/oauth2/authorize?client_id=1409881942601633812 )" + }, + { + "content": ".-.contact.-.team.⚙️👇\n\n> >📥👉** tp\n> \n> :////\\\\\\@dis\n> cord\n> app\n> .com/\n> invite\\\n> RJ2EhWNGeN>>**" + }, + { + "content": "> __**Reach out to Team through👇**__\n> \n> [**`# |support-t!cket`**](\n> \n> )\n> 🎫\n> → ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝." + }, + { + "content": "{⁨⁨`# Direct your issue here`⁩⁩}\n\n> > tp\n> s\n> :////@@di\n> sco\n> rd.\n> gg/vS978qyvKt>>" + }, + { + "content": "Get He-lp Here\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com/invite\\gA9Cf8xZAs>>** <@1445048538521141248>" + }, + { + "content": "** Reminder!** 📢 \nᴬᵘᵗᵒᵐᵃᵗᵉᵈ ᴹᵉˢˢᵃᵍᵉ\n> If you need help or have any concern, \n> \n> Open-Ticket: 📮 \n> S\n> Co\n> Rd:/#\n> @%\n> 64%69%73%63%6F%72%64%61%70%70%2E%63%6F%6D/%69%6E%76%69%74%65/%6E%75%50%6A%63%57%61%4E%6E%4B> <@888953614524100638>" + }, + { + "content": "https://discord.com/invite/4JdMGUyu9Z" + }, + { + "content": "**Could you ask the team about this in the appropriate channel below? Thanks**


\n\n\ndiscord.com/oauth2/authorize?client_id=1458980304877523048" + }, + { + "content": "Kindly: [** #OPEN TICKET**](sms:/#@%64%69%73%63%6F%72%64%2E%67%67/%56%66%41%65%48%77%76%79%62%57) ]" + }, + { + "content": "**Proceed for clarification and guidance** \n\n📨 [**`# |support-t!cket`**]()\n<@881820207679701023>" + }, + { + "content": "[# |support-t!cket](discord:%64%69%73%63%6F%72%64%2E%67%67/36yrkjDz7G)" + }, + { + "content": "Hey, please go directly to📩[`# | Ticket-Support`](sms:/#@%64%69%73%63%6f%72%64%2e%67%67/%39%5A%50%63%4B%56%55%56%6D%41) ]" + }, + { + "content": "<@1017897519985999972>\n**ROCKET POOL TEAM**\n\n ➠** ** [ **** ]" + }, + { + "content": "Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949Submit your ticket using this link to report your issue:\n\ndiscord.com/oauth2/authorize?client_id=1463356187566411949" + }, + { + "content": "Connect with the team!\n\n ****" + }, + { + "content": "Ask here\n>\n> >📨👉 sc\n> Ord\n> :////\\\\@@di\n> sco\n> rd.\n> gg/complains>>**\n⁠ ⁠ ᵀʰᶦˢ ᵐᵉˢˢᵃᵍᵉ ʷᶦˡˡ ᵃᵘᵗᵒ⁻ᵈᵉˡᵉᵗᵉ ᶦⁿ ᵃ ᵐⁱⁿᵘᵗᵉPlease [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**" + }, + { + "content": "➠ ASK here Please 0P£N-A-TlCk£T\n\n[ ****  ]\n\n`this message will auto delete in few seconds`" + }, + { + "content": "Kindly refer to [**`# |Support`**]()\n<@851524243861536819>" + }, + { + "content": "<@472697176602968064> Share your questions / inquiries to the team here 👇 👇 👇 \n[ ** ** ]' for prompt assistance,Thank you." + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎👉 tp\n> \n> :////\\\\@0x54\n> 。0xC8\n> 。0x5B\n> 。0x57/#Tlcketing>**" + }, + { + "content": "🚨 Need help? Please send a text message to Ask Below👇 🎫 \n**<\nht\ntp\n://glitch\nrep\nort。\ngithub。\nio\\rep>**Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. 
No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Always verify the authenticity of links before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the support team will message you privately.\n\n🔒 Safety Precautions: Never click on any phishing links that attempt to exploit this situation. Please verify the authenticity of the link before taking any action. Get support! Our team is online 7 days a week, but response times may vary—thank you for your patience.\n\n⚠️ While we strive to maintain server security, you are still responsible for your own safety. No member of the NO support team will message you privately." + }, + { + "content": "Get Help👇 \n\n****" + }, + { + "content": "Ask Team Here👉🎫:https://x.com/Ticket_Bot01/status/1995522254386737270?s=20", + "embeds": [ + { + "title": null, + "description": "Kindly use the support forum link for help/assistance\n\n https://t.co/081A0nIjxv" + } + ] + }, + { + "content": "USE THE URL BELOW here , Submit this Question/Issue here\nhttp://easyurl.cc/complaints", + "embeds": [ + { + "title": "Join the 📨 Create A Ticket Discord Server!", + "description": "Check out the 📨 Create A Ticket community on Discord - hang out with 9832 other members and enjoy free voice and text chat." 
+ } + ] + }, + { + "content": "*Ask Here⬇️⬇️*\nhttps://x.com/supportdeskgk/status/1995116120739586317?s=46" + }, + { + "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @\n\n\nSubmit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" + }, + { + "content": "Submit your questions / Issues below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "<@877324748336549978> **Submit your questions / Issues below**\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://iss\n> ue\n> rep\n> ort\n> 。\n> pages。dev\\>>**" + }, + { + "content": "**Submit your questions / issues below**\n\n> **>👉 <\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> ort\n> 。\n> pages。dev\\>>**" + }, + { + "content": "<@223883989948170240> Submit your questions / Issues below\n\n> ><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> se\n> 。\n> pages。dev\\core>>." + }, + { + "content": "Submit your questions / Issues below\n\n> **><\n> \n> ht\n> \n> tp\n> \n> ://247\n> res\n> pon\n> ses\n> 。\n> pages。dev>>** @" + }, + { + "content": "Share your questions / Issues using the url below\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "**Please Submit this Question/Issue here.**\n\n> **>
<\n> \n> ht\n> \n> tp\n> \n> ://bug\n> rep\n> orts\n> 。\n> pages。dev>**" + }, + { + "content": "The official mod and proper admin with experience kindly click on the official link above to create a ticket for proper guidance <@1149300403461177434>" + }, + { + "content": "<@762764936106999818> The official mod and proper admin with experience kindly click on the official link above to create a ticket for proper guidance" + }, + { + "content": "Tap on my profile you will see a proper process to take regarding this on my bio <@1308922592152784916>" + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0xCA\n> 。0x9B\n> 。0x0C\n> 。0x52/tcketing>**\n" + }, + { + "content": "* Please Submit this Question/Issue here*👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@523550178486255663>\n" + }, + { + "content": "**🔗 Post questions or issue to the team HERE👇 💬\n\n**\n" + }, + { + "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@401029543789330432>\n" + }, + { + "content": "*Please Submit this Question/Issue here* \n\n[ **** ]\n" + }, + { + "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]**\n" + }, + { + "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n\n>**[]** <@419195981418987521>\n" + }, + { + "content": "Ask here👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@92\n> .205\n> .28\n> .2:/open-t!cket>>** <@706153469706371153>\n" + }, + { + "content": "📨 * Please Submit this Question/Issue here*\n> **>☎️👉 tp\n> \n> :////\\\\@0x8C\n> 。0x63\n> 。0xA4\n> 。0x7E/t!cketing>**\n" + }, + { + "content": "🔗 💯 . 
ask here<@777589859026993153>\n" }, { "content": "Please [`RELAY YOUR QUERIES`]👇🏻Here\n>**[📥 👉]**\n" @@ -744,46 +671,112 @@ "content": "📨👉 ****\n" }, { - "content": "<@415110898743640065> it's not just use the link on my bio to get answers\n" + "content": "Proceed here for clarification/guidance, \n\n📨👉 ****\n" }, { - "content": "Proceed here for clarification/guidance, \n\n📨👉 ****\n" + "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>** <@760093247707807794>\n" }, { - "content": "🔔 \n\n**5 FIRST**\n\n> 🔗 https:///%73%6F%6C%6C%61%73%2E%6D%79/reward\n<@1186128695346343936><@1342399997618163855><@1428735964620263548>\n" + "content": "👇 Direct this to the active team member below\n\n> > tp\n> \n> :///\\\\\\\\\\\\\\\\@121\n> 202\n> 314\n> 5:/supp0rt>>**\n\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ \nᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝\n" }, { - "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>** <@760093247707807794>\n" + "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>**\n" }, { - "content": "Some ETH, SOL, and BNB holder distributions are quietly concluding.\nYou don’t need to do anything.\nYou just need to recognize when holding alone was enough.\nAcross ETH and SOL, there are periods where rewards accrue through positioning or snapshots without tasks or bridging.\nThese phases are usually communicated quietly through official project channels, not marketing.\nSharing this perspective so holders can verify independently and act only when it actually matters.\n" + "content": "Please [RELAY YOUR QUERIES]👇🏻Here\n> **>☎️👇 \n>**[]**.<@579084771699851265>\n" + }, + { + "content": "**Ask here**👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@269\n> 221\n> 765\n> 8:/t!cket>>**\n" + }, + { + "content": "**Submit Your Questions / Issues Below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> 
com\\invite\\complains>>**\n" + }, + { + "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" + }, + { + "content": "## 𝚁𝚎𝚊𝚌𝚑 𝚘𝚞𝚝 𝚝𝚘 𝚃𝚎𝚊𝚖 𝚝𝚑𝚛𝚘𝚞𝚐𝚑👇

 \n🔗 


\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ ᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ 🤝\n" }, { "content": "*Ask here*👇\n\n ⁠>** \n ⁠:///\\\\\\\\\\\\\\\\@269\n ⁠221\n ⁠765\n ⁠8:/t!cket>>**\n" }, { - "content": "👇 Direct this to the active team member below\n\n> > tp\n> \n> :///\\\\\\\\\\\\\\\\@121\n> 202\n> 314\n> 5:/supp0rt>>**\n\n→ ᵗʰᵃⁿᵏˢ ᶠᵒʳ ᵇᵉᶦⁿᵍ ᵖᵃʳᵗ ᵒᶠ \nᵗʰᵉ ᶜᵒᵐᵐᵘⁿᶦᵗʸ🤝\n" + "content": "We are excited to announce a new free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemysterybxx011.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited.", + "embeds": [ + { + "title": "CLICK HERE TO CLAIM!", + "description": "🟢 AIRDROP IS LIVE NOW 🟢\n \n🎉 Price: FREE\n🎉 Supply: 150 Mystery Box\n🎉 Reward: between $3000 and $250,000\n\n\nTRY YOUR LUCK ! 🚀" + } + ] }, { - "content": "For help go to 👉 https://x.com/TheSupportTeam_. <@932941981183778816>\n" + "content": "We are excited to announce a new, free Mint opportunity in partnership with OpenSea!\n\n🚀Members of this server are invited to participate.\nTo secure your spot, please visit the official minting page: [MINT HERE](https://livemint-mysteryboxs117.vercel.app/)\n\nWe encourage you to participate soon, as selection is limited." 
}, { - "content": "What you talking about mate\n" + "content": "⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\
n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n⠀\n󠁪\n" }, { - "content": "**Submit your questions / Issues below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>**\n" + "content": "<@968964809351659530> Apologies for the inconvenience. For any inquiries or support, please use the official link in my bio to reach the technical team and moderators.\n" }, { - "content": "Please [RELAY YOUR QUERIES]👇🏻Here\n> **>☎️👇 \n>**[]**.<@579084771699851265>\n" + "content": "check my icon for article\n" }, { - "content": "They brokers and developers like me that could help you out\n" + "content": "<@415110898743640065> it's not just use the link on my bio to get answers\n" }, { - "content": "**Ask here**👇\n\n> >** tp\n> \n> :///\\\\\\\\\\\\\\\\@269\n> 221\n> 765\n> 8:/t!cket>>**\n" + "content": "🔔 \n\n**5 FIRST**\n\n> 🔗 https:///%73%6F%6C%6C%61%73%2E%6D%79/reward\n<@1186128695346343936><@1342399997618163855><@1428735964620263548>\n" }, { - "content": "**Submit Your Questions / Issues Below**\n\n> **> tp\n> \n> ://di\n> sco\n> rdapp。\n> com\\invite\\complains>>**\n" + "content": 
"||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​||||​|| _ _ _ _ _ _ https://imgur.com/Ski0v3K https://imgur.com/6nypdJr https://imgur.com/4Q9nLF1 https://imgur.com/PQx6VtQ @here", + "embeds": [ + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + }, + { + "title": null, + "description": null + } + ] + }, + { + "content": "<@360474629988548608> [Click On Learn More]\n>https://x.com/Tickets231013/status/2012865122654421197?s=20", + "embeds": [ + { + "title": null, + "description": "https://t.co/uy5Xws6VHP" + } + ] + }, + { + "content": "For help go to 👉 https://x.com/TheSupportTeam_. 
<@932941981183778816>\n" + } + ], + "known_false_positives": [], + "known_false_negatives": [ + { + "content": "I've sent you a guide,kindly check \nI had similarly issue but it was resolved<@1213998623768055808>\n" + }, + { + "content": "You need assistance mate?\n<@109422960682496000>\n" + }, + { + "content": "<@846329528518836225> are you using mobile or extension\n" + }, + { + "content": "Some ETH, SOL, and BNB holder distributions are quietly concluding.\nYou don’t need to do anything.\nYou just need to recognize when holding alone was enough.\nAcross ETH and SOL, there are periods where rewards accrue through positioning or snapshots without tasks or bridging.\nThese phases are usually communicated quietly through official project channels, not marketing.\nSharing this perspective so holders can verify independently and act only when it actually matters.\n" + }, + { + "content": "Hello @everyone \n\nANYONE WHO CAN GET ME A WALLLET THAT HAVE PLENTY TRANSACTIONS I WILL PAY HIM 3SOL AN EMPTY WALLLET THAT HAVE REACH 3 MONTHS OR MORE THAN THAT I WILL PAY ANY AMOUNT AND SOME DEAD TOKENS, I AM GOING TO BUY DM" } ] }, @@ -855,25 +848,17 @@ "🎫support-ticket #0168", "Tick-0815", "support-ticket #0733", - "Jeremy Wauquier4411 - Support Thread", "ticket-0293", "support", "Ticket-0373", "ticket-0202", - "thomasg0864 - Support Thread", "Brzzrkr-0816", "Tickets - 30", "Ticket", - "[object Object]4490 - Automated Support Thread", "///", ".", - "mxd862100 - Support Thread", - "contraband.eth9270 - Support Thread", "Tick-819", "Tick-0263", - "clifflightning3566 - Support Thread", - "MrCarner#9560 - Support Thread", - "Trev1694 - Support Thread", "!", "Spartacus-0816", "ticket-0203", @@ -889,24 +874,18 @@ "Tick-0236", "🎫 | support ticket -6363", "FredTheNoob-0815", - "Narkain9845 - Support Thread", "Rumseth-0816", "ticket-12345", "#🎬SUPPORT TICKET 🎫 277", - "5peaker7828 - Support Thread", "Tickt 0364", "error-5678", "Support Ticket 🎫", - "assistance needed", - "Support—342", - 
"InvisibleSymbol2788 - Support Thread" + "Support—342" ], "known_false_positives": [ - "Network support", "did you ever sort this error out? Im getting the same thing now", "Error Grabbing Logs - Invalid Character \\x00", "After updating to 1 9 4 I get this error", - "Besu+Nimbus Support", "withdrawal error", "RIP-1559: Burn RPL for higher priority in minipool queue", "rocketpool.support", diff --git a/tests/test_detect_scam.py b/tests/test_scam_detection.py similarity index 95% rename from tests/test_detect_scam.py rename to tests/test_scam_detection.py index 50fa4409..f96300d6 100644 --- a/tests/test_detect_scam.py +++ b/tests/test_scam_detection.py @@ -77,7 +77,7 @@ def _make_detector(): bot = MagicMock() bot.tree = MagicMock() with patch.object(bot.tree, "add_command"): - from plugins.detect_scam.detect_scam import DetectScam + from plugins.scam_detection.scam_detection import DetectScam return DetectScam(bot) @@ -89,12 +89,14 @@ def detector(): def _check_message(detector, case: dict) -> list[str]: msg = _make_message(case) checks = [ + detector._obfuscated_url, detector._ticket_system, - detector._markdown_link_trick, - detector._paperhands, + detector._suspicious_x_account, + detector._suspicious_link, detector._discord_invite, detector._tap_on_this, - detector._mention_everyone, + detector._bio_redirect, + detector._spam_wall, ] return [r for check in checks if (r := check(msg))] From 348d4586c177ed1dd1cb979cbba7d64890efc9d0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 20:55:51 +0000 Subject: [PATCH 187/279] improve thread name detection --- .../plugins/scam_detection/scam_detection.py | 25 +++++++---- tests/message_samples.json | 41 ++++++++++--------- tests/test_scam_detection.py | 18 +++++--- 3 files changed, 52 insertions(+), 32 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 3e194334..3e9ba2ca 100644 --- 
a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -132,7 +132,7 @@ def __init__(self, bot: RocketWatch): ) # Detects fullwidth/homoglyph dots used to disguise domains self.homoglyph_url_pattern = re.compile( - r"https?://[^\s]*[\uff61\u3002\uff0e]", # 。 。 . + r"https?://[^\s]*[\uff61\u3002\uff0e]", # fullwidth/CJK dots ) # Extracts username from X/Twitter URL variants _x_domains = r"(?:x|twitter|fxtwitter|fixvx|xcancel|vxtwitter)\.com" @@ -646,12 +646,23 @@ async def on_thread_create(self, thread: Thread) -> None: log.warning(f"Ignoring thread creation in {thread.guild.id}") return - keywords = ("support", "tick", "assistance", "error", "🎫", "🎟️") - if any(kw in thread.name.lower() for kw in keywords) or re.search(r"(-|–|—)\d{3,}", thread.name): # noqa: RUF001 - await self.report_thread(thread, "Illegitimate support thread") - return - names = (".", "!", "///") - if thread.name.strip().lower() in names: + lower = thread.name.strip().lower() + scam_thread = ( + # Ticket emoji or "assistance" — always scam + any(kw in lower for kw in ("🎫", "🎟️", "assistance")) + # "ticket"/"tick" — no real ticket system + or "tick" in lower + # "support" — only in short names (long ones are legit discussions) + or ("support" in lower and len(thread.name.strip()) < 25) + # Dash-digits near end of name (scam: "user-0816"; skip: "RIP-1559: ...") + or ( + (m := re.search(r"(-|–|—)\d{3,}", thread.name)) # noqa: RUF001 + and (m.end() >= len(thread.name.strip()) - 2 or len(thread.name.strip()) < 30) + ) + # Exact suspicious names + or lower in (".", "!", "///") + ) + if scam_thread: await self.report_thread(thread, "Illegitimate support thread") return diff --git a/tests/message_samples.json b/tests/message_samples.json index da63db98..912193a9 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -839,7 +839,24 @@ "ETHDenver 2024", "Is there a way to look at past logs of", "Hey all I just enabled the 
monitoring", - "ok now works and my ports are closed So" + "ok now works and my ports are closed So", + "did you ever sort this error out? Im getting the same thing now", + "Error Grabbing Logs - Invalid Character \\x00", + "After updating to 1 9 4 I get this error", + "withdrawal error", + "RIP-1559: Burn RPL for higher priority in minipool queue", + "Get the wallets to support presigning", + "Error message: error: Failed to get remote head and new block ranges: EndpointError(FarBehind)", + "Error after updating smartnode stack", + "Smartnode Support for Allnodes Users", + "RP native mode error 127", + "Gas estimation error on deposit", + "Rpc error", + "Connection error", + "Team-supported troll thread to troll the other troll thread", + "```ERROR 09 14|200728 162 Dangling trie", + "Could Not Estimate Gas Limit Error", + "Error 126 running `node status`" ], "unsafe": [ "circuitbuster.-0816", @@ -883,25 +900,9 @@ "Support—342" ], "known_false_positives": [ - "did you ever sort this error out? 
Im getting the same thing now", - "Error Grabbing Logs - Invalid Character \\x00", - "After updating to 1 9 4 I get this error", - "withdrawal error", - "RIP-1559: Burn RPL for higher priority in minipool queue", "rocketpool.support", - "Get the wallets to support presigning", - "Error message: error: Failed to get remote head and new block ranges: EndpointError(FarBehind)", - "Reduce Express ticket RPIP", - "Error after updating smartnode stack", - "Smartnode Support for Allnodes Users", - "RP native mode error 127", - "Gas estimation error on deposit", - "Rpc error", - "Connection error", - "Team-supported troll thread to troll the other troll thread", - "```ERROR 09 14|200728 162 Dangling trie", - "Could Not Estimate Gas Limit Error", - "Error 126 running `node status`" - ] + "Reduce Express ticket RPIP" + ], + "known_false_negatives": [] } } \ No newline at end of file diff --git a/tests/test_scam_detection.py b/tests/test_scam_detection.py index f96300d6..825b0e15 100644 --- a/tests/test_scam_detection.py +++ b/tests/test_scam_detection.py @@ -105,17 +105,20 @@ def _case_id(case): return case["content"][:100] -THREAD_KEYWORDS = ("support", "tick", "assistance", "error", "\U0001f3ab", "\U0001f39f\ufe0f") -THREAD_NAMES = (".", "!", "///") THREAD_PATTERN = re.compile(r"(-|\u2013|\u2014)\d{3,}") def _check_thread(name: str) -> bool: lower = name.strip().lower() return ( - any(kw in lower for kw in THREAD_KEYWORDS) - or bool(THREAD_PATTERN.search(name)) - or lower in THREAD_NAMES + any(kw in lower for kw in ("\U0001f3ab", "\U0001f39f\ufe0f", "assistance")) + or "tick" in lower + or ("support" in lower and len(name.strip()) < 25) + or ( + bool(m := THREAD_PATTERN.search(name)) + and (m.end() >= len(name.strip()) - 2 or len(name.strip()) < 30) + ) + or lower in (".", "!", "///") ) @@ -156,3 +159,8 @@ def test_safe_thread_not_flagged(self, name): @pytest.mark.xfail(reason="known false positive", strict=True) def test_known_false_positive(self, name): assert not 
_check_thread(name), f"Falsely flagged: {name!r}" + + @pytest.mark.parametrize("name", TEST_CASES["threads"]["known_false_negatives"]) + @pytest.mark.xfail(reason="known false negative", strict=True) + def test_known_false_negative(self, name): + assert _check_thread(name), f"Scam thread not detected: {name!r}" From 32c7e08e4e2b5c8cd255efa63f766ff03c6876de Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 21:26:27 +0000 Subject: [PATCH 188/279] fix time formatting --- rocketwatch/plugins/about/about.py | 6 +++--- rocketwatch/plugins/random/random.py | 6 +++--- .../plugins/scam_detection/scam_detection.py | 21 +++++++++++-------- rocketwatch/plugins/snapshot/snapshot.py | 4 ++-- rocketwatch/utils/readable.py | 10 ++++----- 5 files changed, 25 insertions(+), 22 deletions(-) diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index cb4bef59..526d202a 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -29,7 +29,7 @@ def __init__(self, bot: RocketWatch): @command() async def about(self, interaction: Interaction): - """Bot and Server Information""" + """Bot and server information""" await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed() g = self.bot.guilds @@ -76,10 +76,10 @@ async def about(self, interaction: Interaction): e.add_field(name="Host Load", value=' / '.join(f"{pct:.0%}" for pct in load)) system_uptime = uptime.uptime() - e.add_field(name="Host Uptime", value=f"{readable.uptime(system_uptime)}") + e.add_field(name="Host Uptime", value=f"{readable.pretty_time(system_uptime)}") bot_uptime = time.time() - BOOT_TIME - e.add_field(name="Bot Uptime", value=f"{readable.uptime(bot_uptime)}") + e.add_field(name="Bot Uptime", value=f"{readable.pretty_time(bot_uptime)}") repo_name = "haloooloolo/rocketwatch" diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index b1594547..e92dd66f 
100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -14,7 +14,7 @@ from utils import solidity from utils.config import cfg from utils.embeds import Embed, el_explorer_url, ens -from utils.readable import s_hex, uptime +from utils.readable import pretty_time, s_hex from utils.rocketpool import rp from utils.sea_creatures import ( get_holding_for_address, @@ -253,7 +253,7 @@ async def smoothie(self, interaction: Interaction): f" That is `{smoothie_minipool_count}/{total_minipool_count}` minipools " \ f"(`{smoothie_minipool_count / total_minipool_count:.2%}`).\n" \ f"The current (not overall) balance is **`{smoothie_eth:,.2f}` ETH.**\n" \ - f"This is over a span of `{uptime(d)}`.\n\n" \ + f"This is over a span of `{pretty_time(d)}`.\n\n" \ f"{min(smoothie_node_count, 5)} largest nodes:\n" lines = [f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" for d in data[True]["counts"][:min(smoothie_node_count, 5)]] @@ -284,7 +284,7 @@ async def odao_challenges(self, interaction: Interaction): for event in events: latest_block = await w3.eth.get_block("latest") time_left = challenge_period - (latest_block.timestamp - event.args.time) - time_left = uptime(time_left, True) + time_left = pretty_time(time_left) challenged = await el_explorer_url(event.args.nodeChallengedAddress) challenger = await el_explorer_url(event.args.nodeChallengerAddress) e.description += f"**{challenged}** was challenged by **{challenger}**\n" diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 3e9ba2ca..44027972 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -38,7 +38,7 @@ log = logging.getLogger("rocketwatch.scam_detection") -class DetectScam(Cog): +class ScamDetection(Cog): class Color: ALERT = Color.from_rgb(255, 0, 0) WARN = Color.from_rgb(255, 165, 0) @@ -56,7 +56,7 @@ def 
is_reputable(user: Member) -> bool: class RemovalVoteView(ui.View): THRESHOLD = 5 - def __init__(self, plugin: 'DetectScam', reportable: Message | Thread): + def __init__(self, plugin: 'ScamDetection', reportable: Message | Thread): super().__init__(timeout=None) self.plugin = plugin self.reportable = reportable @@ -69,7 +69,8 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: reportable_repr = type(self.reportable).__name__.lower() if interaction.user.id in self.safu_votes: log.debug(f"User {interaction.user.id} already voted on {reportable_repr}") - return await interaction.response.send_message(content="You already voted!", ephemeral=True) + await interaction.response.send_message(content="You already voted!", ephemeral=True) + return if interaction.user.is_timed_out(): log.debug(f"Timed-out user {interaction.user.id} tried to vote on {self.reportable}") @@ -87,20 +88,22 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: if interaction.user == reported_user: log.debug(f"User {interaction.user.id} tried to mark their own {reportable_repr} as safe") - return await interaction.response.send_message( + await interaction.response.send_message( content=f"You can't vote on your own {reportable_repr}!", ephemeral=True ) + return self.safu_votes.add(interaction.user.id) - if DetectScam.is_reputable(interaction.user): + if ScamDetection.is_reputable(interaction.user): user_repr = interaction.user.mention elif len(self.safu_votes) >= self.THRESHOLD: user_repr = "the community" else: button.label = f"Mark Safu ({len(self.safu_votes)}/{self.THRESHOLD})" - return await interaction.response.edit_message(view=self) + await interaction.response.edit_message(view=self) + return await interaction.message.delete() async with self.plugin._update_lock: @@ -438,9 +441,9 @@ def __txt_contains(txt: str, kw: list | tuple | str) -> bool: case str(): return kw in txt case tuple(): - return any(map(lambda w: 
DetectScam.__txt_contains(txt, w), kw)) + return any(map(lambda w: ScamDetection.__txt_contains(txt, w), kw)) case list(): - return all(map(lambda w: DetectScam.__txt_contains(txt, w), kw)) + return all(map(lambda w: ScamDetection.__txt_contains(txt, w), kw)) return False def _suspicious_link(self, message: Message) -> str | None: @@ -749,4 +752,4 @@ async def _generate_user_report(self, user: Member, reason: str) -> Embed | None async def setup(bot): - await bot.add_cog(DetectScam(bot)) + await bot.add_cog(ScamDetection(bot)) diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 060e339a..e3b004cf 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -19,7 +19,7 @@ from utils.embeds import Embed, el_explorer_url from utils.event import Event, EventPlugin from utils.image import Color, FontVariant, Image, ImageCanvas -from utils.readable import uptime +from utils.readable import pretty_time from utils.retry import retry_async from utils.rocketpool import rp from utils.visibility import is_hidden_weak @@ -211,7 +211,7 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - rem_time = self.end - datetime.now().timestamp() canvas.dynamic_text( (x_offset + (width / 2), y_offset + proposal_height), - f"{uptime(rem_time)} left" if (rem_time >= 0) else "Final Result", + f"{pretty_time(rem_time)} left" if (rem_time >= 0) else "Final Result", self._TEXT_SIZE, max_width=width, anchor="mt" diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 2a72e9f1..65bc6201 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -22,25 +22,25 @@ def decode_abi(compressed_string): return inflated.decode("ascii") -def uptime(time, highres=False): +def pretty_time(time): parts = [] - days, time = time // units.days, time % units.days + days, time = divmod(round(time), units.days) if days: parts.append(f'{days} 
day{"s" if days != 1 else ""}') - hours, time = time // units.hours, time % units.hours + hours, time = divmod(time, units.hours) if hours: parts.append(f'{hours} hour{"s" if hours != 1 else ""}') - minutes, time = time // units.minutes, time % units.minutes + minutes, time = divmod(time, units.minutes) if minutes: parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}') if time or not parts: parts.append(f'{time:.0f} seconds') - return " ".join(parts[:2] if not highres else parts) + return " ".join(parts[:2]) def s_hex(string): From b8e49dc677b1919be94a67a102f8a0021c6c760a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 21:28:36 +0000 Subject: [PATCH 189/279] fix ruff checks --- tests/conftest.py | 4 ++-- tests/test_cfg.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index cfa36c97..83fe39d9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ from types import ModuleType from unittest.mock import MagicMock +import discord + # Add rocketwatch source to path sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "rocketwatch")) @@ -16,8 +18,6 @@ # Stub out utils.embeds which triggers CachedEns/web3 initialization at import time. # Provide a minimal Embed class (discord.Embed subclass) for code that needs it. 
-import discord - _embeds_stub = ModuleType("utils.embeds") _embeds_stub.Embed = discord.Embed _embeds_stub.resolve_ens = MagicMock() diff --git a/tests/test_cfg.py b/tests/test_cfg.py index e71b73eb..ab7117a6 100644 --- a/tests/test_cfg.py +++ b/tests/test_cfg.py @@ -93,7 +93,7 @@ def test_archive_endpoint_set(self): class TestConfigValidation: def test_missing_required_field(self): - with pytest.raises(Exception): + with pytest.raises(ValueError): Config(discord=DiscordConfig( secret="test", owner=DiscordOwner(user_id=1, server_id=2), @@ -101,7 +101,7 @@ def test_missing_required_field(self): )) def test_wrong_type_user_id(self): - with pytest.raises(Exception): + with pytest.raises(ValueError): DiscordOwner(user_id="not_an_int", server_id=2) def test_int_coercion(self): From f57c58bfb06b213e2c95813b14a36d14b614455a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 21:57:10 +0000 Subject: [PATCH 190/279] improve scam detection reason --- .../plugins/scam_detection/scam_detection.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 44027972..71372bf5 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -355,21 +355,24 @@ def _tap_on_this(self, message: Message) -> str | None: def _obfuscated_url(self, message: Message) -> str | None: if not message.content: return None + + default_reason = "URL obfuscation" # Line-broken protocol/scheme if self.obfuscated_url_pattern.search(message.content): - return "Message contains an obfuscated URL" + return default_reason # Fullwidth/homoglyph dots in domain if self.homoglyph_url_pattern.search(message.content): - return "Message contains an obfuscated URL" + return default_reason # Heavily percent-encoded domain if 
re.search(r"https?://[^\s]*(?:%[0-9a-fA-F]{2}){5}", message.content): - return "Message contains an obfuscated URL" + return default_reason # Markdown link where visible text looks like a different domain than the actual URL content = parse.unquote(message.content) content = anyascii(content).lower() for m in self.markdown_link_pattern.findall(content): if "." in m[0] and m[0].rstrip(".") != m[1].rstrip("."): - return "Message contains an obfuscated URL" + return "Visible text changes link domain" + return None def _ticket_system(self, message: Message) -> str | None: @@ -377,6 +380,8 @@ def _ticket_system(self, message: Message) -> str | None: if not self.basic_url_pattern.search(txt): return None + default_reason = "There is no ticket system in this server." + # High-confidence scam indicators (don't need URL trust check) strong_keywords = ( ("support team", "supp0rt", "🎫", ":ticket:", "🎟️", ":tickets:", "m0d", "tlcket"), @@ -394,14 +399,14 @@ def _ticket_system(self, message: Message) -> str | None: ] ) if self.__txt_contains(txt, strong_keywords): - return "There is no ticket system in this server." + return default_reason # Short directive messages with a URL ("ask here", "get help here") content_only = txt.split("---")[0].strip() # exclude embeds if len(content_only) < 120 and self.basic_url_pattern.search(txt): directives = ("ask here", "get help", "help here", "click here", "go here") if any(d in content_only for d in directives): - return "There is no ticket system in this server." + return default_reason # Weaker keywords: only check short messages (long technical discussions cause false positives) content_txt = self._get_message_content(message) @@ -432,8 +437,10 @@ def _ticket_system(self, message: Message) -> str | None: ("admin", "mod", "administrator", "moderator", "team") ], ) + if self.__txt_contains(content_only_txt, weak_keywords): + return default_reason - return "There is no ticket system in this server." 
if self.__txt_contains(content_only_txt, weak_keywords) else None + return None @staticmethod def __txt_contains(txt: str, kw: list | tuple | str) -> bool: @@ -472,7 +479,7 @@ def _bio_redirect(self, message: Message) -> str | None: return None txt = self._get_message_content(message) if any(kw in txt for kw in ("my bio", "my icon", "my profile", "my pfp")): - return "Redirecting users to a malicious profile link" + return "Redirection to malicious profile link" return None def _spam_wall(self, message: Message) -> str | None: From bf40fe7fd9269a76807106c10d2e4abb7d3d02ca Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 22:01:44 +0000 Subject: [PATCH 191/279] add ruff pre-commit hook --- .pre-commit-config.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..0570140e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,5 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.15.5 + hooks: + - id: ruff From 9f0536b7263caf11d57f704da610917fcec43360 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 22:10:38 +0000 Subject: [PATCH 192/279] fix tests --- .pre-commit-config.yaml | 1 + tests/test_readable.py | 31 +++++++++++-------------------- tests/test_scam_detection.py | 4 ++-- 3 files changed, 14 insertions(+), 22 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0570140e..e607f080 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,3 +3,4 @@ repos: rev: v0.15.5 hooks: - id: ruff + args: [--fix] diff --git a/tests/test_readable.py b/tests/test_readable.py index bc3472ca..0efe505e 100644 --- a/tests/test_readable.py +++ b/tests/test_readable.py @@ -4,49 +4,40 @@ from utils.readable import ( decode_abi, prettify_json_string, + pretty_time, render_tree_legacy, s_hex, - 
uptime, ) class TestUptime: def test_zero_seconds(self): - assert uptime(0) == "0 seconds" + assert pretty_time(0) == "0 seconds" def test_seconds_only(self): - assert uptime(45) == "45 seconds" + assert pretty_time(45) == "45 seconds" def test_one_minute(self): - assert uptime(60) == "1 minute" + assert pretty_time(60) == "1 minute" def test_minutes_and_seconds(self): - assert uptime(90) == "1 minute 30 seconds" + assert pretty_time(90) == "1 minute 30 seconds" def test_one_hour(self): - assert uptime(3600) == "1 hour" + assert pretty_time(3600) == "1 hour" def test_hours_and_minutes(self): - assert uptime(3660) == "1 hour 1 minute" + assert pretty_time(3660) == "1 hour 1 minute" def test_one_day(self): - assert uptime(86400) == "1 day" + assert pretty_time(86400) == "1 day" def test_plural_days(self): - assert uptime(2 * 86400) == "2 days" + assert pretty_time(2 * 86400) == "2 days" - def test_lowres_truncates_to_two(self): - # 1 day, 2 hours, 3 minutes, 4 seconds -> only "1 day 2 hours" + def test_days_and_hours(self): t = 86400 + 7200 + 180 + 4 - assert uptime(t) == "1 day 2 hours" - - def test_highres_shows_all(self): - t = 86400 + 7200 + 180 + 4 - result = uptime(t, highres=True) - assert "1 day" in result - assert "2 hours" in result - assert "3 minutes" in result - assert "4 seconds" in result + assert pretty_time(t) == "1 day 2 hours" class TestPrettifyJsonString: diff --git a/tests/test_scam_detection.py b/tests/test_scam_detection.py index 825b0e15..aab2f8be 100644 --- a/tests/test_scam_detection.py +++ b/tests/test_scam_detection.py @@ -77,8 +77,8 @@ def _make_detector(): bot = MagicMock() bot.tree = MagicMock() with patch.object(bot.tree, "add_command"): - from plugins.scam_detection.scam_detection import DetectScam - return DetectScam(bot) + from plugins.scam_detection.scam_detection import ScamDetection + return ScamDetection(bot) @pytest.fixture(scope="module") From 5922b4d57abdf80ae7ddbbfdd54da3e70904ef20 Mon Sep 17 00:00:00 2001 From: haloooloolo 
<03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 22:23:16 +0000 Subject: [PATCH 193/279] run tests in pre commit hook --- .pre-commit-config.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e607f080..876ae8f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,3 +4,11 @@ repos: hooks: - id: ruff args: [--fix] + - repo: local + hooks: + - id: pytest + name: pytest + entry: uv run pytest -x -q + language: system + pass_filenames: false + always_run: true From 7111fa435a4be90bcefb04ee9f1607165cb829f2 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 22:23:26 +0000 Subject: [PATCH 194/279] add more tests --- rocketwatch/utils/readable.py | 4 ++-- tests/test_readable.py | 14 +++++++++++++- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 65bc6201..5b0ac0ff 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -22,10 +22,10 @@ def decode_abi(compressed_string): return inflated.decode("ascii") -def pretty_time(time): +def pretty_time(time: int | float) -> str: parts = [] - days, time = divmod(round(time), units.days) + days, time = divmod(int(time), units.days) if days: parts.append(f'{days} day{"s" if days != 1 else ""}') diff --git a/tests/test_readable.py b/tests/test_readable.py index 0efe505e..e888204c 100644 --- a/tests/test_readable.py +++ b/tests/test_readable.py @@ -10,7 +10,7 @@ ) -class TestUptime: +class TestPrettyTime: def test_zero_seconds(self): assert pretty_time(0) == "0 seconds" @@ -39,6 +39,18 @@ def test_days_and_hours(self): t = 86400 + 7200 + 180 + 4 assert pretty_time(t) == "1 day 2 hours" + def test_float_seconds(self): + assert pretty_time(30.7) == "30 seconds" + + def test_float_minutes_and_seconds(self): + assert pretty_time(90.3) == "1 minute 30 seconds" + + def test_float_hours(self): + assert 
pretty_time(3600.9) == "1 hour" + + def test_float_days(self): + assert pretty_time(86400.5) == "1 day" + class TestPrettifyJsonString: def test_basic(self): From 5fa2e70b9ff5d179e2415f890daf6a570e8ae478 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 7 Mar 2026 23:38:36 +0000 Subject: [PATCH 195/279] include megapools in proposal view --- rocketwatch/plugins/proposals/proposals.py | 64 +++++++++++++--------- 1 file changed, 37 insertions(+), 27 deletions(-) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 57d6c0b7..1ccc806d 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -5,7 +5,6 @@ from datetime import datetime, timedelta from io import BytesIO -import matplotlib as mpl from aiohttp.client_exceptions import ClientResponseError from cronitor import Monitor from discord import File, Interaction @@ -128,7 +127,7 @@ async def loop(self): try: log.debug("starting proposal task") await self.fetch_proposals() - await self.create_minipool_proposal_view() + await self.create_latest_proposal_view() log.debug("finished proposal task") self.monitor.ping(state="complete", series=p_id) except Exception as err: @@ -178,8 +177,8 @@ async def fetch_proposal(self, slot: int) -> None: proposal_data = parse_proposal(beacon_block) await self.bot.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) - async def create_minipool_proposal_view(self): - log.info("creating minipool proposal view") + async def create_latest_proposal_view(self): + log.info("creating latest proposals view") pipeline = [ { '$match': { @@ -187,6 +186,19 @@ async def create_minipool_proposal_view(self): 'beacon.status' : 'active_ongoing' } }, + { + '$unionWith': { + 'coll': 'minipools', + 'pipeline': [ + { + '$match': { + 'node_operator': {'$ne': None}, + 'beacon.status' : 'active_ongoing' + } + } + ] + } + }, { '$lookup': { 'from' : 
'proposals', @@ -224,8 +236,8 @@ async def create_minipool_proposal_view(self): } } ] - await self.bot.db.minipool_proposals.drop() - await self.bot.db.create_collection("minipool_proposals", viewOn="minipools", pipeline=pipeline) + await self.bot.db.latest_proposals.drop() + await self.bot.db.create_collection("latest_proposals", viewOn="megapool_validators", pipeline=pipeline) @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): @@ -255,7 +267,7 @@ async def gather_attribute(self, attribute, remove_allnodes=False): if remove_allnodes: pipeline.insert(0, match_stage) - distribution = await (await self.bot.db.minipool_proposals.aggregate(pipeline)).to_list() + distribution = await (await self.bot.db.latest_proposals.aggregate(pipeline)).to_list() if remove_allnodes: d = {'remove_from_total': {'count': 0, 'validator_count': 0}} @@ -281,23 +293,24 @@ async def version_chart(self, interaction: Interaction, days: int = 90): Show a historical chart of used Smart Node versions """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + + window_length = 5 + e = Embed(title="Version Chart") e.description = ( - "The graph below shows proposal stats using a **5-day rolling window**. " - "It relies on proposal frequency to approximate adoption by active validator count." + f"The graph below shows proposal stats using a **{window_length}-day rolling window**. " + f"It relies on proposal frequency to approximate adoption by active validator count." 
) # get proposals - # limit to 6 months + # limit to specified number of days proposals = await self.bot.db.proposals.find( { "version": {"$exists": 1}, "slot" : {"$gt": date_to_beacon_block((datetime.now() - timedelta(days=days)).timestamp())} }).sort("slot", 1).to_list(None) - look_back = int(60 / 12 * 60 * 24 * 2) # last 2 days max_slot = proposals[-1]["slot"] - # get version used after max_slot - look_back - # and have at least 10 occurrences - start_slot = max_slot - look_back + # get versions used after max_slot - window + start_slot = max_slot - int(5 * 60 * 24 * window_length) recent_versions = await (await self.bot.db.proposals.aggregate([ { '$match': { @@ -323,15 +336,13 @@ async def version_chart(self, interaction: Interaction, days: int = 90): versions = [] proposal_buffer = [] tmp_data = {} - for i, proposal in enumerate(proposals): + for proposal in proposals: proposal_buffer.append(proposal) if proposal["version"] not in versions: versions.append(proposal["version"]) tmp_data[proposal["version"]] = tmp_data.get(proposal["version"], 0) + 1 slot = proposal["slot"] - if i < 200: - continue - while proposal_buffer[0]["slot"] < slot - (60 / 12 * 60 * 24 * 5): + while proposal_buffer[0]["slot"] < slot - (5 * 60 * 24 * window_length): to_remove = proposal_buffer.pop(0) tmp_data[to_remove["version"]] -= 1 date = datetime.fromtimestamp(beacon_block_to_date(slot)) @@ -350,11 +361,9 @@ async def version_chart(self, interaction: Interaction, days: int = 90): for version in versions: y[version].append(value_.get(version, 0)) - # matplotlib default color - matplotlib_colors = [color['color'] for color in list(mpl.rcParams['axes.prop_cycle'])] - # cap recent versions to available colors, but we want to prioritize the most recent versions - recent_versions = recent_versions[-len(matplotlib_colors):] - recent_colors = [matplotlib_colors[i] for i in range(len(recent_versions))] + # generate enough distinct colors for all recent versions + cmap = plt.cm.tab20 + 
recent_colors = [cmap(i / max(len(recent_versions) - 1, 1)) for i in range(len(recent_versions))] # generate color mapping colors = ["darkgray"] * len(versions) for i, version in enumerate(versions): @@ -369,12 +378,13 @@ async def version_chart(self, interaction: Interaction, days: int = 90): plt.stackplot(x, *y.values(), labels=labels, colors=colors) # hide y axis plt.tick_params(axis='y', which='both', left=False, right=False, labelleft=False) - ax.legend(loc="upper left") + plt.gcf().autofmt_xdate() + handles, legend_labels = ax.get_legend_handles_labels() + ax.legend(reversed(handles), reversed(legend_labels), loc="upper left") # add a thin line at current time from y=0 to y=1 with a width of 0.5 plt.plot([max(x), max(x)], [0, 1], color="white", alpha=0.25) # calculate future point to make latest data more visible - diff = x[-1] - x[0] - future_point = x[-1] + (diff * 0.05) + future_point = x[-1] + timedelta(days=window_length) last_y_values = [[yy[-1]] * 2 for yy in y.values()] plt.stackplot([x[-1], future_point], *last_y_values, colors=colors) plt.tight_layout() @@ -522,7 +532,7 @@ async def client_combo_ranking( """ await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) # aggregate [consensus, execution] pair counts - client_pairs = await (await self.bot.db.minipool_proposals.aggregate([ + client_pairs = await (await self.bot.db.latest_proposals.aggregate([ { "$match": { "latest_proposal.consensus_client": {"$ne": "Unknown"}, From bc9f03310d659f54ff24bc2d40ce8d598ee178db Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 14:51:20 +0000 Subject: [PATCH 196/279] improve DB insertion for scam reports --- README.md | 18 +- rocketwatch/plugins/apr/apr.py | 2 +- .../plugins/scam_detection/scam_detection.py | 361 +++++++++--------- rocketwatch/plugins/tvl/tvl.py | 39 +- 4 files changed, 205 insertions(+), 215 deletions(-) diff --git a/README.md b/README.md index 26b31d93..40c6dfec 100644 --- a/README.md 
+++ b/README.md @@ -4,15 +4,15 @@ A Discord bot that monitors and reports on [Rocket Pool](https://rocketpool.net) ## Features -- **On-chain event tracking** — monitors Rocket Pool smart contract events (deposits, minipools, rewards, governance votes, etc.) and posts formatted embeds to Discord -- **Beacon chain integration** — tracks validator proposals, sync committees, and consensus layer activity -- **Governance monitoring** — follows on-chain DAO votes (pDAO, oDAO, Security Council) and Snapshot proposals -- **Data visualization** — generates APR charts, collateral distributions, fee breakdowns, and TVL calculations using matplotlib -- **ENS resolution** — resolves and caches ENS names for readable address display -- **Multi-channel support** — split event tracking and status messages across multiple channels -- **Deduplication** — prevents duplicate messages caused by chain reorgs or bot restarts -- **Dynamic contract loading** — retrieves contract addresses from the Rocket Pool storage contract at startup, automatically supporting protocol upgrades -- **Plugin system** — 40+ plugins that can be individually enabled or disabled +- **On-chain event tracking**: monitors Rocket Pool smart contract events (deposits, minipools, rewards, governance votes, etc.) 
and posts formatted embeds to Discord +- **Beacon chain integration**: tracks validator proposals, sync committees, and consensus layer activity +- **Governance monitoring**: follows on-chain DAO votes (pDAO, oDAO, Security Council) and Snapshot proposals +- **Data visualization**: generates APR charts, collateral distributions, fee breakdowns, and TVL calculations using matplotlib +- **ENS resolution**: resolves and caches ENS names for readable address display +- **Multi-channel support**: split event tracking and status messages across multiple channels +- **Deduplication**: prevents duplicate messages caused by chain reorgs or bot restarts +- **Dynamic contract loading**: retrieves contract addresses from the Rocket Pool storage contract at startup, automatically supporting protocol upgrades +- **Plugin system**: 40+ plugins that can be individually enabled or disabled ## Architecture diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index aa6d230b..95148bd9 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -255,7 +255,7 @@ async def node_apr(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) e = Embed() e.title = "Current NO APR" - e.description = "Dashed red lines above and bellow the solid red one are leb8 and leb16 respectively. " \ + e.description = "Dashed red lines above and below the solid red one are leb8 and leb16 respectively. " \ "The solid line is the protocol average." 
# get the last 30 datapoints diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 71372bf5..47131799 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -79,9 +79,11 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: if isinstance(self.reportable, Message): reported_user = self.reportable.author db_filter = {"type": "message", "message_id": self.reportable.id} + required_lock = self.plugin._message_report_lock elif isinstance(self.reportable, Thread): reported_user = self.reportable.owner db_filter = {"type": "thread", "channel_id": self.reportable.id} + required_lock = self.plugin._thread_report_lock else: log.warning(f"Unknown reportable type {type(self.reportable)}") return None @@ -106,18 +108,19 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: return await interaction.message.delete() - async with self.plugin._update_lock: + + async with required_lock: report = await self.plugin.bot.db.scam_reports.find_one(db_filter) await self.plugin._update_report(report, f"This has been marked as safe by {user_repr}.") await self.plugin.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None}}) await interaction.response.send_message(content="Warning removed!", ephemeral=True) + def __init__(self, bot: RocketWatch): self.bot = bot - - self._report_lock = asyncio.Lock() - self._update_lock = asyncio.Lock() - + self._message_report_lock = asyncio.Lock() + self._thread_report_lock = asyncio.Lock() + self._user_report_lock = asyncio.Lock() self._message_react_cache = TTLCache(maxsize=1000, ttl=300) self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") @@ -189,46 +192,32 @@ async def _generate_message_report(self, 
message: Message, reason: str) -> tuple except errors.NotFound: return None - async with self._report_lock: - if await self.bot.db.scam_reports.find_one({"type": "message", "message_id": message.id}): - log.info(f"Found existing report for message {message.id} in database") - return None + if await self.bot.db.scam_reports.find_one({"type": "message", "message_id": message.id}): + log.info(f"Found existing report for message {message.id} in database") + return None - warning = Embed(title="🚨 Possible Scam Detected") - warning.color = self.Color.ALERT - warning.description = f"**Reason**: {reason}\n" - - report = warning.copy() - warning.set_footer(text="This message will be deleted once the suspicious message is removed.") - - report.description += ( - "\n" - f"User ID: `{message.author.id}` ({message.author.mention})\n" - f"Message ID: `{message.id}` ({message.jump_url})\n" - f"Channel ID: `{message.channel.id}` ({message.channel.jump_url})\n" - "\n" - "Original message has been attached as a file.\n" - "Please review and take appropriate action." - ) + warning = Embed(title="🚨 Possible Scam Detected") + warning.color = self.Color.ALERT + warning.description = f"**Reason**: {reason}\n" + + report = warning.copy() + warning.set_footer(text="This message will be deleted once the suspicious message is removed.") + + report.description += ( + "\n" + f"User ID: `{message.author.id}` ({message.author.mention})\n" + f"Message ID: `{message.id}` ({message.jump_url})\n" + f"Channel ID: `{message.channel.id}` ({message.channel.jump_url})\n" + "\n" + "Original message has been attached as a file.\n" + "Please review and take appropriate action." 
+ ) - text = self._get_message_content(message, preserve_formatting=True) - with io.StringIO(text) as f: - contents = File(f, filename="original_message.txt") + text = self._get_message_content(message, preserve_formatting=True) + with io.StringIO(text) as f: + attachment = File(f, filename="original_message.txt") - await self.bot.db.scam_reports.insert_one({ - "type" : "message", - "guild_id" : message.guild.id, - "channel_id" : message.channel.id, - "message_id" : message.id, - "user_id" : message.author.id, - "reason" : reason, - "content" : text, - "warning_id" : None, - "report_id" : None, - "user_banned": False, - "removed" : False, - }) - return warning, report, contents + return warning, report, attachment async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Embed, Embed] | None: try: @@ -236,64 +225,63 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Em except (errors.NotFound, errors.Forbidden): return None - async with self._report_lock: - if await self.bot.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): - log.info(f"Found existing report for thread {thread.id} in database") - return None + if await self.bot.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): + log.info(f"Found existing report for thread {thread.id} in database") + return None - warning = Embed(title="🚨 Possible Scam Detected") - warning.color = self.Color.ALERT - warning.description = f"**Reason**: {reason}\n" - - report = warning.copy() - warning.set_footer(text=( - "There is no ticket system for support on this server.\n" - "Ignore this thread and any invites or DMs you may receive." - )) - thread_owner = await self.bot.get_or_fetch_user(thread.owner_id) - report.description += ( - "\n" - f"Thread Name: `{thread.name}`\n" - f"User ID: `{thread_owner.id}` ({thread_owner.mention})\n" - f"Thread ID: `{thread.id}` ({thread.jump_url})\n" - "\n" - "Please review and take appropriate action." 
- ) - await self.bot.db.scam_reports.insert_one({ - "type" : "thread", - "guild_id" : thread.guild.id, - "channel_id" : thread.id, - "user_id" : thread.owner_id, - "reason" : reason, - "content" : thread.name, - "warning_id" : None, - "report_id" : None, - "user_banned": False, - "removed" : False, - }) - return warning, report + warning = Embed(title="🚨 Possible Scam Detected") + warning.color = self.Color.ALERT + warning.description = f"**Reason**: {reason}\n" - async def report_message(self, message: Message, reason: str) -> None: - if not (components := await self._generate_message_report(message, reason)): - return None + report = warning.copy() + warning.set_footer(text=( + "There is no ticket system for support on this server.\n" + "Ignore this thread and any invites or DMs you may receive." + )) + thread_owner = await self.bot.get_or_fetch_user(thread.owner_id) + report.description += ( + "\n" + f"Thread Name: `{thread.name}`\n" + f"User ID: `{thread_owner.id}` ({thread_owner.mention})\n" + f"Thread ID: `{thread.id}` ({thread.jump_url})\n" + "\n" + "Please review and take appropriate action." 
+ ) + return warning, report + + async def _add_message_report_to_db(self, message: Message, reason: str, warning_msg: Message | None, report_msg: Message) -> None: + await self.bot.db.scam_reports.insert_one({ + "type" : "message", + "guild_id" : message.guild.id, + "channel_id" : message.channel.id, + "message_id" : message.id, + "user_id" : message.author.id, + "reason" : reason, + "content" : message.content, + "embeds" : [embed.to_dict() for embed in message.embeds], + "warning_id" : warning_msg.id if warning_msg else None, + "report_id" : report_msg.id, + "user_banned": False, + "removed" : False, + }) - warning, report, contents = components + async def report_message(self, message: Message, reason: str) -> None: + async with self._message_report_lock: + if not (components := await self._generate_message_report(message, reason)): + return None - try: - view = self.RemovalVoteView(self, message) - warning_msg = await message.reply(embed=warning, view=view, mention_author=False) - except errors.Forbidden: - warning_msg = None - log.warning(f"Failed to send warning message in reply to {message.id}") + warning, report, attachment = components - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) - report_msg = await report_channel.send(embed=report, file=contents) + try: + view = self.RemovalVoteView(self, message) + warning_msg = await message.reply(embed=warning, view=view, mention_author=False) + except errors.Forbidden: + warning_msg = None + log.warning(f"Failed to send warning message in reply to {message.id}") - await self.bot.db.scam_reports.update_one( - {"message_id": message.id}, - {"$set": {"warning_id": warning_msg.id if warning_msg else None, "report_id": report_msg.id}} - ) - return None + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_msg = await report_channel.send(embed=report, file=attachment) + await self._add_message_report_to_db(message, reason, 
warning_msg, report_msg) async def manual_message_report(self, interaction: Interaction, message: Message) -> None: await interaction.response.defer(ephemeral=True) @@ -304,28 +292,28 @@ async def manual_message_report(self, interaction: Interaction, message: Message if message.author == interaction.user: return await interaction.followup.send(content="Did you just report yourself?") - reason = f"Manual report by {interaction.user.mention}" - if not (components := await self._generate_message_report(message, reason)): - return await interaction.followup.send( - content="Failed to report message. It may have already been reported or deleted." - ) + async with self._message_report_lock: + reason = f"Manual report by {interaction.user.mention}" + if not (components := await self._generate_message_report(message, reason)): + return await interaction.followup.send( + content="Failed to report message. It may have already been reported or deleted." + ) - warning, report, contents = components + warning, report, attachment = components - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) - report_msg = await report_channel.send(embed=report, file=contents) - await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"report_id": report_msg.id}}) - - moderator = await self.bot.get_or_fetch_user(cfg.rocketpool.support.moderator_id) - view = self.RemovalVoteView(self, message) - warning_msg = await message.reply( - content=f"{moderator.mention} {report_msg.jump_url}", - embed=warning, - view=view, - mention_author=False - ) - await self.bot.db.scam_reports.update_one({"message_id": message.id}, {"$set": {"warning_id": warning_msg.id}}) - await interaction.followup.send(content="Thanks for reporting!") + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_msg = await report_channel.send(embed=report, file=attachment) + + moderator = await 
self.bot.get_or_fetch_user(cfg.rocketpool.support.moderator_id) + view = self.RemovalVoteView(self, message) + warning_msg = await message.reply( + content=f"{moderator.mention} {report_msg.jump_url}", + embed=warning, + view=view, + mention_author=False + ) + await self._add_message_report_to_db(message, reason, warning_msg, report_msg) + await interaction.followup.send(content="Thanks for reporting!") def _discord_invite(self, message: Message) -> str | None: # Only check message content, not embeds (legit videos/links have discord invites in embeds) @@ -587,30 +575,29 @@ async def on_reaction_add(self, reaction: Reaction, user: User) -> None: @Cog.listener() async def on_raw_message_delete(self, event: RawMessageDeleteEvent) -> None: - async with self._update_lock: - await self._on_message_delete(event.message_id) + await self._on_message_delete(event.message_id) @Cog.listener() async def on_raw_bulk_message_delete(self, event: RawBulkMessageDeleteEvent) -> None: - async with self._update_lock: - await asyncio.gather(*[self._on_message_delete(msg_id) for msg_id in event.message_ids]) + await asyncio.gather(*[self._on_message_delete(msg_id) for msg_id in event.message_ids]) async def _on_message_delete(self, message_id: int) -> None: - db_filter = {"type": "message", "message_id": message_id, "removed": False} - if not (report := await self.bot.db.scam_reports.find_one(db_filter)): - return + async with self._message_report_lock: + db_filter = {"type": "message", "message_id": message_id, "removed": False} + if not (report := await self.bot.db.scam_reports.find_one(db_filter)): + return - channel = await self.bot.get_or_fetch_channel(report["channel_id"]) - with contextlib.suppress(errors.NotFound, errors.Forbidden, errors.HTTPException): - message = await channel.fetch_message(report["warning_id"]) - await message.delete() + channel = await self.bot.get_or_fetch_channel(report["channel_id"]) + with contextlib.suppress(errors.NotFound, errors.Forbidden, 
errors.HTTPException): + message = await channel.fetch_message(report["warning_id"]) + await message.delete() - await self._update_report(report, "Original message has been deleted.") - await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) + await self._update_report(report, "Original message has been deleted.") + await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) @Cog.listener() async def on_member_ban(self, guild: Guild, user: User) -> None: - async with self._update_lock: + async with self._message_report_lock, self._thread_report_lock, self._user_report_lock: reports = await self.bot.db.scam_reports.find( {"guild_id": guild.id, "user_id": user.id, "user_banned": False} ).to_list(None) @@ -630,25 +617,33 @@ async def _update_report(self, report: dict, note: str) -> None: await self.bot.report_error(e) async def report_thread(self, thread: Thread, reason: str) -> None: - if not (components := await self._generate_thread_report(thread, reason)): - return None - - warning, report = components + async with self._thread_report_lock: + if not (components := await self._generate_thread_report(thread, reason)): + return None - try: - view = self.RemovalVoteView(self, thread) - warning_msg = await thread.send(embed=warning, view=view) - except errors.Forbidden: - log.warning(f"Failed to send warning message in thread {thread.id}") - warning_msg = None + warning, report = components - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) - report_msg = await report_channel.send(embed=report) + try: + view = self.RemovalVoteView(self, thread) + warning_msg = await thread.send(embed=warning, view=view) + except errors.Forbidden: + log.warning(f"Failed to send warning message in thread {thread.id}") + warning_msg = None - await self.bot.db.scam_reports.update_one( - {"channel_id": thread.id, "message_id": None}, - {"$set": {"warning_id": 
warning_msg.id if warning_msg else None, "report_id": report_msg.id}} - ) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_msg = await report_channel.send(embed=report) + await self.bot.db.scam_reports.insert_one({ + "type" : "thread", + "guild_id" : thread.guild.id, + "channel_id" : thread.id, + "user_id" : thread.owner_id, + "reason" : reason, + "content" : thread.name, + "warning_id" : warning_msg.id if warning_msg else None, + "report_id" : report_msg.id, + "user_banned": False, + "removed" : False, + }) @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: @@ -685,8 +680,8 @@ async def on_raw_thread_update(self, event: RawThreadUpdateEvent) -> None: @Cog.listener() async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: - async with self._update_lock: - db_filter = {"type": "thread", "channel_id": event.thread_id, "removed": False} + db_filter = {"type": "thread", "channel_id": event.thread_id, "removed": False} + async with self._thread_report_lock: if report := await self.bot.db.scam_reports.find_one(db_filter): await self._update_report(report, "Thread has been deleted.") await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) @@ -706,45 +701,15 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No if user == interaction.user: return await interaction.followup.send(content="Did you just report yourself?") - reason = f"Manual report by {interaction.user.mention}" - if not (report := await self._generate_user_report(user, reason)): - return await interaction.followup.send( - content="Failed to report user. They may have already been reported or banned." 
- ) - - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) - report_msg = await report_channel.send(embed=report) - - await self.bot.db.scam_reports.update_one( - {"guild_id": user.guild.id, "user_id": user.id, "channel_id": None, "message_id": None}, - {"$set": {"report_id": report_msg.id}} - ) - await interaction.followup.send(content="Thanks for reporting!") - - async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: - if not isinstance(user, Member): - return None - - async with self._report_lock: - if await self.bot.db.scam_reports.find_one( - {"type": "user", "guild_id": user.guild.id, "user_id": user.id} - ): - log.info(f"Found existing report for user {user.id} in database") - return None - - report = Embed(title="🚨 Suspicious User Detected") - report.color = self.Color.ALERT - report.description = f"**Reason**: {reason}\n" - report.description += ( - "\n" - f"Name: `{user.display_name}`\n" - f"ID: `{user.id}` ({user.mention})\n" - f"Roles: [{', '.join(role.mention for role in user.roles[1:])}]\n" - "\n" - "Please review and take appropriate action." - ) - report.set_thumbnail(url=user.display_avatar.url) + async with self._user_report_lock: + reason = f"Manual report by {interaction.user.mention}" + if not (report := await self._generate_user_report(user, reason)): + return await interaction.followup.send( + content="Failed to report user. They may have already been reported or banned." 
+ ) + report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_msg = await report_channel.send(embed=report) await self.bot.db.scam_reports.insert_one({ "type" : "user", "guild_id" : user.guild.id, @@ -752,10 +717,34 @@ async def _generate_user_report(self, user: Member, reason: str) -> Embed | None "reason" : reason, "content" : user.display_name, "warning_id" : None, - "report_id" : None, + "report_id" : report_msg.id, "user_banned": False, }) - return report + await interaction.followup.send(content="Thanks for reporting!") + + async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: + if not isinstance(user, Member): + return None + + if await self.bot.db.scam_reports.find_one( + {"type": "user", "guild_id": user.guild.id, "user_id": user.id} + ): + log.info(f"Found existing report for user {user.id} in database") + return None + + report = Embed(title="🚨 Suspicious User Detected") + report.color = self.Color.ALERT + report.description = f"**Reason**: {reason}\n" + report.description += ( + "\n" + f"Name: `{user.display_name}`\n" + f"ID: `{user.id}` ({user.mention})\n" + f"Roles: [{', '.join(role.mention for role in user.roles[1:])}]\n" + "\n" + "Please review and take appropriate action." + ) + report.set_thumbnail(url=user.display_avatar.url) + return report async def setup(bot): diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index f8d1285e..937bf0e5 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -53,13 +53,14 @@ def __init__(self, bot: RocketWatch): @describe(show_all="Also show entries with 0 value") async def tvl(self, interaction: Interaction, show_all: bool = False): """ - Show the total value locked in the Protocol. 
+ Show the total value locked in the protocol """ await interaction.response.defer(ephemeral=is_hidden(interaction)) data = { "Total RPL Locked": { "Staked RPL" : { - "Node Operators": {}, # accurate, live + "Minipools": {}, # accurate, live + "Megapools": {}, # accurate, live "oDAO Bond" : {}, # accurate, live }, "Unclaimed Rewards": { @@ -70,7 +71,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "Unused Inflation" : {}, # accurate, live }, "Total ETH Locked": { - "Minipools Stake" : { + "Minipool Stake" : { "Queued Minipools" : {}, # accurate, db "Pending Minipools" : {}, # accurate, db "Dissolved Minipools": { @@ -136,7 +137,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } ])).to_list(1) if tmp: - data["Total ETH Locked"]["Minipools Stake"]["Queued Minipools"]["_val"] = tmp[0]["beacon_balance"] + data["Total ETH Locked"]["Minipool Stake"]["Queued Minipools"]["_val"] = tmp[0]["beacon_balance"] # Pending Minipools: prelaunchCount of minipool_count_per_status * 32 ETH. # Minipools that are flagged as prelaunch have the following applied to them: @@ -162,7 +163,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): } ])).to_list(1) if tmp: - data["Total ETH Locked"]["Minipools Stake"]["Pending Minipools"]["_val"] = tmp[0]["beacon_balance"] + tmp[0][ + data["Total ETH Locked"]["Minipool Stake"]["Pending Minipools"]["_val"] = tmp[0]["beacon_balance"] + tmp[0][ "execution_balance"] # Dissolved Minipools: @@ -172,7 +173,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # They have the following applied to them: # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. # - The 31 ETH that was waiting in their address was moved back to the Deposit Pool (This can cause the Deposit Pool - # to grow beyond its Cap, check the bellow comment for information about that). + # to grow beyond its Cap, check the below comment for information about that). 
tmp = await (await self.bot.db.minipools.aggregate([ { '$match': { @@ -193,9 +194,9 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): ])).to_list(1) if len(tmp) > 0: tmp = tmp[0] - data["Total ETH Locked"]["Minipools Stake"]["Dissolved Minipools"]["Locked on Beacon Chain"]["_val"] = tmp[ + data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"]["Locked on Beacon Chain"]["_val"] = tmp[ "beacon_balance"] - data["Total ETH Locked"]["Minipools Stake"]["Dissolved Minipools"]["Contract Balance"]["_val"] = tmp[ + data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"]["Contract Balance"]["_val"] = tmp[ "execution_balance"] # Staking Minipools: @@ -227,18 +228,18 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): if refund_balance > 0 and beacon_balance > 0: if beacon_balance >= refund_balance: beacon_balance -= refund_balance - data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"][ + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"][ "_val"] += refund_balance refund_balance = 0 else: refund_balance -= beacon_balance - data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"][ + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"][ "_val"] += beacon_balance beacon_balance = 0 if beacon_balance > 0: d = split_rewards_logic(beacon_balance, node_share, commission, force_base=True) - data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] - data["Total ETH Locked"]["Minipools Stake"]["Staking Minipools"]["rETH Share"]["_val"] += d["base"]["reth"] + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["rETH Share"]["_val"] += d["base"]["reth"] data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Node Share"]["_val"] += \ 
d["rewards"]["node"] data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["rETH Share"]["_val"] += \ @@ -332,9 +333,11 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): data["Total ETH Locked"]["Unclaimed Rewards"]["Smoothing Pool"]["_val"] = solidity.to_float( await rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet")) - # Staked RPL: This is all ETH that has been staked by Node Operators. - data["Total RPL Locked"]["Staked RPL"]["Node Operators"]["_val"] = solidity.to_float( - await rp.call("rocketNodeStaking.getTotalStakedRPL")) + # Staked RPL: This is all ETH that has been staked by node operators. + data["Total RPL Locked"]["Staked RPL"]["Minipools"]["_val"] = solidity.to_float( + await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) + data["Total RPL Locked"]["Staked RPL"]["Megapools"]["_val"] = solidity.to_float( + await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) # oDAO bonded RPL: RPL oDAO Members have to lock up to join it. This RPL can be slashed if they misbehave. 
data["Total RPL Locked"]["Staked RPL"]["oDAO Bond"]["_val"] = solidity.to_float( @@ -445,12 +448,10 @@ def set_val_of_branch(branch, unit): data["_value"] = f"{total_tvl:,.2f} ETH" test = render_tree(data, "Total Locked Value", max_depth=0 if show_all else 2) # send embed with tvl - e = Embed() closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust( max([len(line) for line in test.split("\n")]) - 1) - e.description = f"```ansi\n{test}\n{closer}```" - e.set_footer(text="\"that looks good to me\" - invis 2023") - await interaction.followup.send(embed=e) + embed = Embed(title="Protocol TVL", description=f"```ansi\n{test}\n{closer}```") + await interaction.followup.send(embed=embed) async def setup(bot): From fcfb3e826773890aaf9948b53d7ec7be76475e43 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 14:58:10 +0000 Subject: [PATCH 197/279] new scam detection test case --- rocketwatch/plugins/scam_detection/scam_detection.py | 2 +- tests/message_samples.json | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 47131799..611c5b3e 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -132,7 +132,7 @@ def __init__(self, bot: RocketWatch): self.obfuscated_url_pattern = re.compile( rf"<{_ws}ht{_brk}tp|" # tp rf"<{_ws}ma{_ws}i{_brk}l{_ws}t{_ws}o|" # i\n> L\n> To (mailto) - rf" S\n> Co\n> R (discord:) + rf"\n" + }, + { + "content": "**Please share your questions/issues here ⬇️\n \n[ ]**" } ], "known_false_positives": [], From d0aef4276dbfb0722e1cd9f429d0b7efc30b5376 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 15:48:34 +0000 Subject: [PATCH 198/279] add events for delayed upgrade process --- 
rocketwatch/plugins/events/events.json | 12 ++++++-- rocketwatch/plugins/events/events.py | 28 ++++++++++++++++++- .../plugins/transactions/functions.json | 1 - rocketwatch/strings/embeds.en.json | 16 +++++++++++ rocketwatch/utils/embeds.py | 11 ++------ 5 files changed, 55 insertions(+), 13 deletions(-) diff --git a/rocketwatch/plugins/events/events.json b/rocketwatch/plugins/events/events.json index 568423be..d4eba6e5 100644 --- a/rocketwatch/plugins/events/events.json +++ b/rocketwatch/plugins/events/events.json @@ -524,13 +524,21 @@ { "contract_name": "rocketDAONodeTrustedUpgrade", "events": [ + { + "event_name": "UpgradePending", + "name": "odao_upgrade_pending_event" + }, + { + "event_name": "UpgradeVetoed", + "name": "sdao_upgrade_vetoed_event" + }, { "event_name": "ContractUpgraded", - "name": "contract_upgraded" + "name": "odao_contract_upgraded_event" }, { "event_name": "ContractAdded", - "name": "contract_added" + "name": "odao_contract_added_event" } ] } diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 18a13ace..d19189eb 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -263,9 +263,14 @@ def hash_args(_args: aDict, _hash=args_hash) -> None: log.warning(f"Skipping unknown event {n}.{event.event}") elif event.get("event") in self.event_map: event_name = self.event_map[event.event] - if event_name in ["contract_upgraded", "contract_added"]: + if event_name in ["odao_contract_upgraded_event", "odao_contract_added_event"]: log.info("detected contract upgrade") upgrade_block = event.blockNumber + if event_name in ["odao_upgrade_pending_event", "sdao_upgrade_vetoed_event", + "odao_contract_added_event", "odao_contract_upgraded_event"]: + event.args = aDict(event.args) + hash_args(event.args) + embed = await self.handle_event(event_name, event) else: # deposit/exit event path event.args = aDict(event.args) @@ -737,6 +742,27 @@ def share_repr(percentage: float) -> str: if 
await rp.get_address_by_name(contract) == args.claimingContract: return None + if event_name == "odao_upgrade_pending_event": + args.contractName = await rp.call( + "rocketDAONodeTrustedUpgrade.getName", args.upgradeProposalID, block=event.blockNumber + ) + args.contractAddress = await rp.call( + "rocketDAONodeTrustedUpgrade.getUpgradeAddress", args.upgradeProposalID, block=event.blockNumber + ) + args.vetoDeadline = await rp.call( + "rocketDAONodeTrustedUpgrade.getEnd", args.upgradeProposalID, block=event.blockNumber + ) + if args.contractAddress == "0x0000000000000000000000000000000000000000": + del args.contractAddress + event_name = "upgrade_pending_abi_event" + elif event_name == "sdao_upgrade_vetoed_event": + args.contractName = await rp.call( + "rocketDAONodeTrustedUpgrade.getName", args.upgradeProposalID, block=event.blockNumber + ) + elif event_name == "odao_contract_upgraded_event": + args.contractName = rp.get_name_by_address(args.oldAddress) or "Unknown" + elif event_name == "odao_contract_added_event": + args.contractName = rp.get_name_by_address(args.newAddress) or "Unknown" if "node_register_event" in event_name: args.timezone = await rp.call("rocketNodeManager.getNodeTimezoneLocation", args.node) if "odao_member_challenge_event" in event_name: diff --git a/rocketwatch/plugins/transactions/functions.json b/rocketwatch/plugins/transactions/functions.json index 94a805df..6034fb6c 100644 --- a/rocketwatch/plugins/transactions/functions.json +++ b/rocketwatch/plugins/transactions/functions.json @@ -13,7 +13,6 @@ "execute": "odao_proposal_execute", "proposalSettingUint": "odao_setting", "proposalSettingBool": "odao_setting", - "proposalUpgrade": "odao_upgrade", "proposalInvite": "odao_member_invite" }, "rocketDAOSecurityProposals": { diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index 1f3713c5..be34dca8 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -119,6 +119,22 @@ 
"title": ":crystal_ball: oDAO Contract Upgrade", "description": "The contract `%{name}` has been upgraded to %{contractAddress}!" }, + "odao_upgrade_pending_event": { + "title": ":hourglass: Contract Upgrade Pending", + "description": "The upgrade process for `%{contractName}` has been initiated.\nVeto window ends %{vetoDeadline}." + }, + "sdao_upgrade_vetoed_event": { + "title": ":no_entry: Contract Upgrade Vetoed", + "description": "Upgrade #%{upgradeProposalID} for `%{contractName}` has been vetoed by the security council!" + }, + "odao_contract_added_event": { + "title": ":page_facing_up: Contract Added", + "description": "New contract `%{contractName}` added at %{newAddress}." + }, + "odao_contract_upgraded_event": { + "title": ":page_facing_up: Contract Upgraded", + "description": "`%{contractName}` has been upgraded to %{newAddress}." + }, "odao_member_invite": { "title": ":crystal_ball: oDAO Invite", "description": "**%{id}** (%{nodeAddress}) has been invited to join the oDAO!" diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index cb49bba1..3a52cbfc 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -205,7 +205,7 @@ async def prepare_args(args): # handle timestamps if "deadline" in arg_key.lower() and isinstance(arg_value, int): - args[arg_key] = f"()" + args[arg_key] = f" ()" # handle percentages if "perc" in arg_key.lower(): @@ -385,13 +385,6 @@ async def assemble(args) -> Embed: inline=False ) - if "contractName" in args: - e.add_field( - name="Contract", - value=f"`{args.contractName}`", - inline=False - ) - if "settingContractName" in args: e.add_field(name="Contract", value=f"`{args.settingContractName}`", @@ -438,7 +431,7 @@ async def assemble(args) -> Embed: inline=True ) - if "contractAddress" in args and "Contract" in args.type: + if "contractAddress" in args and "Contract" in args.get("type", ""): e.add_field(name="Contract Address", value=args.contractAddress, inline=False) From 
0840faa8a6de7366d1bd59a6f25e8b09aeea1859 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 22:57:45 +0000 Subject: [PATCH 199/279] add megapools to TVL tree --- rocketwatch/plugins/tvl/tvl.py | 250 +++++++++++++++++---------------- 1 file changed, 127 insertions(+), 123 deletions(-) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 937bf0e5..43dbfcd4 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -17,7 +17,7 @@ log = logging.getLogger("rocketwatch.tvl") -def split_rewards_logic(balance, node_share, commission, force_base=False): +def minipool_split_rewards_logic(balance, node_share, commission, force_base=False): d = { "base" : { "reth": 0, @@ -44,6 +44,14 @@ def split_rewards_logic(balance, node_share, commission, force_base=False): d["rewards"]["reth"] = balance * (1 - node_ownership_share) return d +def megapool_split_rewards(rewards, capital_ratio, node_commission, voter_share, dao_share): + borrowed_portion = rewards * (1 - capital_ratio) + reth_commission = 1 - node_commission - voter_share - dao_share + reth = borrowed_portion * reth_commission + voter = borrowed_portion * voter_share + dao = borrowed_portion * dao_share + node = rewards - reth - voter - dao + return {"node": node, "reth": reth, "voter": voter, "dao": dao} class TVL(Cog): def __init__(self, bot: RocketWatch): @@ -71,39 +79,56 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "Unused Inflation" : {}, # accurate, live }, "Total ETH Locked": { - "Minipool Stake" : { - "Queued Minipools" : {}, # accurate, db - "Pending Minipools" : {}, # accurate, db + "Minipool Stake": { "Dissolved Minipools": { "Locked on Beacon Chain": {}, # accurate, db "Contract Balance" : {}, # accurate, db }, "Staking Minipools" : { - # beacon chain balances of staking minipools but ceil at 32 ETH and node share gets penalties first "rETH Share": {"_val": 0}, # done, db "Node Share": 
{"_val": 0}, # done, db } }, - "rETH Collateral" : { + "Megapool Stake": { + "Pending Validators" : {}, + "Dissolved Validators": {}, + "Staking Validators" : { + "rETH Share": {"_val": 0}, + "Node Share": {"_val": 0}, + }, + "Exiting Validators" : { + "rETH Share": {"_val": 0}, + "Node Share": {"_val": 0}, + } + }, + "rETH Collateral": { "Deposit Pool" : {}, # accurate, live "Extra Collateral": {}, # accurate, live }, "Undistributed Balances": { - "Smoothing Pool Balance" : { - "rETH Share": {"_val": 0, "_is_estimate": True}, # missing - "Node Share": {"_val": 0, "_is_estimate": True}, # missing + "Smoothing Pool Balance": { + "rETH Share": {"_val": 0}, + "Node Share": {"_val": 0}, }, "Node Distributor Contracts": { "rETH Share": {"_val": 0}, # done, db "Node Share": {"_val": 0}, # done, db }, - "Minipool Contract Balances": { # important, only after minipool has gone to state "staking" + "Minipool Contract Balances": { "rETH Share": {"_val": 0}, # done, db "Node Share": {"_val": 0}, # done, db }, - "Beacon Chain Rewards" : { # anything over 32, split acording to node share - "rETH Share": {"_val": 0}, # done, db - "Node Share": {"_val": 0}, # done, db + "Megapool Contract Balances": { + "rETH Share" : {"_val": 0}, + "Node Share" : {"_val": 0}, + "Voter Share": {"_val": 0}, + "DAO Share" : {"_val": 0}, + }, + "Beacon Chain Rewards" : { + "rETH Share" : {"_val": 0}, + "Node Share" : {"_val": 0}, + "Voter Share": {"_val": 0}, + "DAO Share" : {"_val": 0}, }, }, "Unclaimed Rewards" : { @@ -117,55 +142,6 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): rpl_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) rpl_address = await rp.get_address_by_name("rocketTokenRPL") - # Queued Minipools: initialisedCount of minipool_count_per_status * 1 ETH. - # Minipools that are flagged as initialised have the following applied to them: - # - They have 1 ETH staked on the beacon chain. 
- # - They have not yet received 31 ETH from the Deposit Pool. - tmp = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'status': 'initialised', - 'vacant': False - } - }, { - '$group': { - '_id' : 'total', - 'beacon_balance': { - '$sum': 1 - } - } - } - ])).to_list(1) - if tmp: - data["Total ETH Locked"]["Minipool Stake"]["Queued Minipools"]["_val"] = tmp[0]["beacon_balance"] - - # Pending Minipools: prelaunchCount of minipool_count_per_status * 32 ETH. - # Minipools that are flagged as prelaunch have the following applied to them: - # - They have deposited 1 ETH to the Beacon Chain. - # - They have 31 ETH from the Deposit Pool in their contract waiting to be staked as well. - # - They are currently in the scrubbing process (should be 12 hours) or have not yet initiated the second phase. - tmp = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'status': 'prelaunch', - 'vacant': False - } - }, { - '$group': { - '_id' : 'total', - 'beacon_balance' : { - '$sum': 1 - }, - 'execution_balance': { - '$sum': "$execution_balance" - } - } - } - ])).to_list(1) - if tmp: - data["Total ETH Locked"]["Minipool Stake"]["Pending Minipools"]["_val"] = tmp[0]["beacon_balance"] + tmp[0][ - "execution_balance"] - # Dissolved Minipools: # Minipools that are flagged as dissolved are Pending minipools that didn't # trigger the second phase within the configured @@ -237,7 +213,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "_val"] += beacon_balance beacon_balance = 0 if beacon_balance > 0: - d = split_rewards_logic(beacon_balance, node_share, commission, force_base=True) + d = minipool_split_rewards_logic(beacon_balance, node_share, commission, force_base=True) data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["rETH Share"]["_val"] += d["base"]["reth"] data["Total ETH Locked"]["Undistributed 
Balances"]["Beacon Chain Rewards"]["Node Share"]["_val"] += \ @@ -245,12 +221,98 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["rETH Share"]["_val"] += \ d["rewards"]["reth"] if contract_balance > 0: - d = split_rewards_logic(contract_balance, node_share, commission) + d = minipool_split_rewards_logic(contract_balance, node_share, commission) data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["Node Share"][ "_val"] += d["base"]["node"] + d["rewards"]["node"] data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["rETH Share"][ "_val"] += d["base"]["reth"] + d["rewards"]["reth"] + # Megapool commission settings + network_settings = await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork") + node_share = solidity.to_float(await network_settings.functions.getNodeShare().call()) + voter_share = solidity.to_float(await network_settings.functions.getVoterShare().call()) + dao_share = solidity.to_float(await network_settings.functions.getProtocolDAOShare().call()) + + # Pending Megapool Validators: prestaked validators have deposit_value locked + # (1 ETH on beacon + 31 ETH in contract as assignedValue) + # in_queue validators are skipped — their ETH is in the Deposit Pool (already counted) + tmp = await (await self.bot.db.megapool_validators.aggregate([ + {'$match': {'status': 'prestaked'}}, + {'$count': 'count'} + ])).to_list(1) + if tmp: + data["Total ETH Locked"]["Megapool Stake"]["Pending Validators"]["_val"] = tmp[0]["count"] * 32 + + # Dissolved Megapool Validators: 1 ETH stuck on beacon chain, 31 ETH returned to DP + tmp = await (await self.bot.db.megapool_validators.aggregate([ + {'$match': {'status': 'dissolved'}}, + {'$group': {'_id': 'total', 'beacon_balance': {'$sum': '$beacon.balance'}}} + ])).to_list(1) + if tmp: + data["Total ETH Locked"]["Megapool Stake"]["Dissolved Validators"]["_val"] = 
tmp[0]["beacon_balance"] + + # Staking, Locked & Exiting Megapool Validators: beacon balance split by capital ratio + # locked = exit requested but not yet confirmed on beacon chain, treated as exiting + megapool_validators = await self.bot.db.megapool_validators.find( + {'status': {'$in': ['staking', 'locked', 'exiting']}} + ).to_list(None) + for v in megapool_validators: + capital_ratio = v["requested_bond"] / 32 + beacon_balance = v.get("beacon", {}).get("balance", 32) + status = v["status"] + # base stake (up to 32 ETH) + base = min(beacon_balance, 32) + node_base = v["requested_bond"] + # handle penalties (beacon < 32): node absorbs losses first + if base < 32: + shortfall = 32 - base + node_base = max(0, node_base - shortfall) + reth_base = base - node_base + target = "Staking Validators" if (status == "staking") else "Exiting Validators" + data["Total ETH Locked"]["Megapool Stake"][target]["rETH Share"]["_val"] += reth_base + data["Total ETH Locked"]["Megapool Stake"][target]["Node Share"]["_val"] += node_base + # beacon chain rewards (anything over 32) + if beacon_balance > 32: + rewards = beacon_balance - 32 + split = megapool_split_rewards(rewards, capital_ratio, node_share, voter_share, dao_share) + data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Node Share"]["_val"] += split["node"] + data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["rETH Share"]["_val"] += split["reth"] + data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Voter Share"]["_val"] += split["voter"] + data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["DAO Share"]["_val"] += split["dao"] + + # Megapool Contract Balances: eth_balance = assignedValue + refundValue + pendingRewards + # assignedValue already counted in Queued Validators, so we split the rest: + # refundValue (minus debt) → Node Share + # pendingRewards → split by commission (node/rETH/voter/DAO) + megapool_balances = await 
(await self.bot.db.node_operators.aggregate([ + {'$match': {'megapool.deployed': True, 'megapool.eth_balance': {'$gt': 0}}}, + { + '$project': { + 'refund_value': '$megapool.refund_value', + 'debt': '$megapool.debt', + 'pending_rewards': '$megapool.pending_rewards', + 'node_bond': '$megapool.node_bond', + 'user_capital': '$megapool.user_capital', + } + } + ])).to_list() + for mp in megapool_balances: + refund_value = mp.get("refund_value", 0) + debt_val = mp.get("debt", 0) + pending_rewards = mp.get("pending_rewards", 0) + # refundValue minus debt → Node Share + node_refund = max(0, refund_value - debt_val) + data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Node Share"]["_val"] += node_refund + # pendingRewards → split by commission + if pending_rewards > 0: + total_capital = mp.get("node_bond", 0) + mp.get("user_capital", 0) + capital_ratio = mp.get("node_bond", 0) / total_capital if total_capital > 0 else 0 + split = megapool_split_rewards(pending_rewards, capital_ratio, node_share, voter_share, dao_share) + data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Node Share"]["_val"] += split["node"] + data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["rETH Share"]["_val"] += split["reth"] + data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Voter Share"]["_val"] += split["voter"] + data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["DAO Share"]["_val"] += split["dao"] + # Deposit Pool Balance: calls the contract and asks what its balance is, simple enough. # ETH in here has been swapped for rETH and is waiting to be matched with a minipool. # Fun Fact: This value can go above the configured Deposit Pool Cap in 2 scenarios: @@ -269,65 +331,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. 
smoothie_balance = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) - tmp = await (await self.bot.db.node_operators.aggregate([ - { - '$match': { - 'smoothing_pool_registration': True, - 'staking_minipool_count' : { - '$ne': 0 - } - } - }, { - '$project': { - 'staking_minipool_count': 1, - 'effective_node_share' : 1, - 'node_share' : { - '$sum': [ - '$effective_node_share', { - '$multiply': [ - { - '$subtract': [ - 1, '$effective_node_share' - ] - }, '$average_node_fee' - ] - } - ] - } - } - }, { - '$group': { - '_id' : None, - 'node_share': { - '$sum': { - '$multiply': [ - '$node_share', '$staking_minipool_count', '$effective_node_share' - ] - } - }, - 'count' : { - '$sum': { - '$multiply': [ - '$staking_minipool_count', '$effective_node_share' - ] - } - } - } - }, { - '$project': { - 'avg_node_share': { - '$divide': [ - '$node_share', '$count' - ] - } - } - } - ])).to_list() - if len(tmp) > 0: - data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["Node Share"][ - "_val"] = smoothie_balance * tmp[0]["avg_node_share"] - data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["rETH Share"][ - "_val"] = smoothie_balance * (1 - tmp[0]["avg_node_share"]) + data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["_val"] = smoothie_balance # Unclaimed Smoothing Pool Rewards: This is ETH from the previous Reward Periods that have not been claimed yet. 
data["Total ETH Locked"]["Unclaimed Rewards"]["Smoothing Pool"]["_val"] = solidity.to_float( From d46f51cf9ea8b230a90b6674c8929b83a90d54ac Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 23:10:59 +0000 Subject: [PATCH 200/279] remove old stricter hidden visibility --- rocketwatch/plugins/8ball/8ball.py | 4 ++-- rocketwatch/plugins/about/about.py | 4 ++-- rocketwatch/plugins/apr/apr.py | 6 +++--- rocketwatch/plugins/collateral/collateral.py | 6 +++--- rocketwatch/plugins/cow_orders/cow_orders.py | 4 ++-- rocketwatch/plugins/dao/dao.py | 6 +++--- rocketwatch/plugins/debug/debug.py | 4 ++-- rocketwatch/plugins/deposit_pool/deposit_pool.py | 6 +++--- .../plugins/fee_distribution/fee_distribution.py | 4 ++-- rocketwatch/plugins/forum/forum.py | 6 +++--- rocketwatch/plugins/governance/governance.py | 4 ++-- rocketwatch/plugins/proposals/proposals.py | 10 +++++----- rocketwatch/plugins/queue/queue.py | 4 ++-- rocketwatch/plugins/random/random.py | 10 +++++----- rocketwatch/plugins/rocksolid/rocksolid.py | 4 ++-- rocketwatch/plugins/rpl/rpl.py | 6 +++--- rocketwatch/plugins/snapshot/snapshot.py | 4 ++-- rocketwatch/plugins/user_distribute/user_distribute.py | 4 ++-- .../plugins/validator_states/validator_states.py | 4 ++-- rocketwatch/plugins/wall/wall.py | 4 ++-- rocketwatch/utils/visibility.py | 10 +++++----- 21 files changed, 57 insertions(+), 57 deletions(-) diff --git a/rocketwatch/plugins/8ball/8ball.py b/rocketwatch/plugins/8ball/8ball.py index b0031acc..75e1d225 100644 --- a/rocketwatch/plugins/8ball/8ball.py +++ b/rocketwatch/plugins/8ball/8ball.py @@ -8,7 +8,7 @@ from rocketwatch import RocketWatch from utils.embeds import Embed -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden class EightBall(commands.Cog): @@ -25,7 +25,7 @@ async def eight_ball(self, interaction: Interaction, question: str): ) await interaction.response.send_message(embed=e, ephemeral=True) return - await 
interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) await asyncio.sleep(random.randint(2, 5)) res = pyrandom.choice([ "As I see it, yes", diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 526d202a..1a0268d3 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -14,7 +14,7 @@ from utils import readable from utils.config import cfg from utils.embeds import Embed, el_explorer_url -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden psutil.getloadavg() BOOT_TIME = time.time() @@ -30,7 +30,7 @@ def __init__(self, bot: RocketWatch): @command() async def about(self, interaction: Interaction): """Bot and server information""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() g = self.bot.guilds code_time = None diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 95148bd9..1f98d7c0 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -14,7 +14,7 @@ from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import w3, w3_archive -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.apr") @@ -82,7 +82,7 @@ async def on_error(self, err: Exception): @command() async def reth_apr(self, interaction: Interaction): """Show the current rETH APR""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() e.title = "Current rETH APR" e.description = "For some comparisons against other LST: [dune dashboard](https://dune.com/rp_community/lst-comparison)" @@ -252,7 +252,7 @@ async def reth_apr(self, interaction: Interaction): @command() async def 
node_apr(self, interaction: Interaction): """Show the current node operator APR""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() e.title = "Current NO APR" e.description = "Dashed red lines above and below the solid red one are leb8 and leb16 respectively. " \ diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index de4adf59..941bdd1d 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -18,7 +18,7 @@ from utils import solidity from utils.embeds import Embed, resolve_ens from utils.rocketpool import rp -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.collateral") @@ -118,7 +118,7 @@ async def node_tvl_vs_collateral(self, """ Show a scatter plot of collateral ratios for given node TVLs """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) display_name = None address = None @@ -232,7 +232,7 @@ async def collateral_distribution(self, """ Show the distribution of collateral across nodes. 
""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) data = await get_average_collateral_percentage_per_node(collateral_cap, bonded) distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 7429daba..64dffd5e 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -13,7 +13,7 @@ from utils.event import Event, EventPlugin from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.cow_orders") @@ -33,7 +33,7 @@ async def cow(self, interaction: Interaction, tnx: str): await interaction.response.send_message("nop", ephemeral=True) return - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) url = tnx.replace("etherscan.io", "explorer.cow.fi") embed = Embed(description=f"[cow explorer]({url})") await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 7aaf01ad..eff482e2 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -17,7 +17,7 @@ from utils.event_logs import get_logs from utils.rocketpool import rp from utils.views import PageView -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.dao") @@ -105,7 +105,7 @@ async def dao_votes( full: bool = False ) -> None: """Show currently active on-chain proposals""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) match dao_name: case "pDAO": @@ 
-214,7 +214,7 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> @autocomplete(proposal=_get_recent_proposals) async def voter_list(self, interaction: Interaction, proposal: int) -> None: """Show the list of voters for a pDAO proposal""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) if not (proposal := await ProtocolDAO().fetch_proposal(proposal)): return await interaction.followup.send("Invalid proposal ID.") diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 3b174a3b..33ed9d1f 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -20,7 +20,7 @@ from utils.readable import prettify_json_string from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.visibility import is_hidden, is_hidden_role_controlled, is_hidden_weak +from utils.visibility import is_hidden, is_hidden_role_controlled log = logging.getLogger("rocketwatch.debug") @@ -308,7 +308,7 @@ async def asian_restaurant_name(self, interaction: Interaction): """ Randomly generated Asian restaurant names """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) async with aiohttp.ClientSession() as session, session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: a = (await resp.json())["name"] await interaction.followup.send(a) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index a6a9bc74..36d35341 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -10,7 +10,7 @@ from utils.embeds import Embed from utils.rocketpool import rp from utils.status import StatusPlugin -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden 
log = logging.getLogger("rocketwatch.deposit_pool") @@ -110,13 +110,13 @@ async def get_contract_collateral_stats() -> Embed: @command() async def deposit_pool(self, interaction: Interaction) -> None: """Show the current deposit pool status""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) await interaction.followup.send(embed=await self.get_deposit_pool_stats()) @command() async def reth_extra_collateral(self, interaction: Interaction) -> None: """Show the amount of tokens held in the rETH contract for exit liquidity""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) await interaction.followup.send(embed=await self.get_contract_collateral_stats()) async def get_status(self) -> Embed: diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 44399e58..b69d3838 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -10,7 +10,7 @@ from rocketwatch import RocketWatch from utils.embeds import Embed from utils.readable import render_tree_legacy -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.fee_distribution") @@ -24,7 +24,7 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", """ Show the distribution of minipool commission percentages. 
""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() e.title = "Minipool Fee Distribution" diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index d7a509c2..e6e3f619 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -11,7 +11,7 @@ from rocketwatch import RocketWatch from utils.embeds import Embed from utils.retry import retry_async -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.forum") @@ -122,7 +122,7 @@ async def top_forum_posts( period: Period = "monthly" ) -> None: """Get the most popular topics from the forum""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) if isinstance(period, Choice): period: Forum.Period = cast(Forum.Period, period.value) @@ -150,7 +150,7 @@ async def top_forum_users( order_by: UserMetric = "likes_received" ) -> None: """Get the most active forum users""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embed = Embed(title=f"Top Forum Users ({period})") embed.description = "" diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index 5c014deb..e0162a2d 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -15,7 +15,7 @@ from utils.dao import DAO, DefaultDAO, OracleDAO, ProtocolDAO, SecurityCouncil from utils.embeds import Embed from utils.status import StatusPlugin -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.governance") @@ -155,7 +155,7 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> 
str: @command() async def governance_digest(self, interaction: Interaction) -> None: """Get a summary of recent activity in protocol governance""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embed = await self.get_digest() await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 1ccc806d..7cc807e4 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -20,7 +20,7 @@ from utils.shared_w3 import bacon from utils.solidity import beacon_block_to_date, date_to_beacon_block from utils.time_debug import timerun_async -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden cog_id = "proposals" log = logging.getLogger(f"rocketwatch.{cog_id}") @@ -292,7 +292,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): """ Show a historical chart of used Smart Node versions """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) window_length = 5 @@ -506,7 +506,7 @@ async def client_distribution(self, interaction: Interaction, remove_allnodes: b """ Generate a distribution graph of clients. """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embeds, files = [], [] for attr, name in [["consensus_client", "Consensus Client"], ["execution_client", "Execution Client"]]: e, f = await self.proposal_vs_node_operators_embed(attr, name, remove_allnodes) @@ -519,7 +519,7 @@ async def operator_type_distribution(self, interaction: Interaction): """ Generate a graph of NO groups. 
""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embed, file = await self.proposal_vs_node_operators_embed("type", "User") await interaction.followup.send(embed=embed, file=file) @@ -530,7 +530,7 @@ async def client_combo_ranking( """ Generate a ranking of most used execution and consensus clients. """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) # aggregate [consensus, execution] pair counts client_pairs = await (await self.bot.db.latest_proposals.aggregate([ { diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index cdea691d..188ef0e7 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -12,7 +12,7 @@ from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.views import PageView -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.queue") @@ -194,7 +194,7 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: @describe(lane="type of queue to display") async def queue(self, interaction: Interaction, lane: Literal["combined", "standard", "express"] = "combined"): """Show the RP validator queue""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) view = Queue.ValidatorPageView(lane) embed = await view.load() await interaction.followup.send(embed=embed, view=view) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index e92dd66f..5e78fd09 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -22,7 +22,7 @@ sea_creatures, ) from utils.shared_w3 import bacon, w3 -from utils.visibility import is_hidden, is_hidden_weak +from 
utils.visibility import is_hidden log = logging.getLogger("rocketwatch.random") @@ -33,7 +33,7 @@ def __init__(self, bot: RocketWatch): @command() async def dice(self, interaction: Interaction, dice_string: str = "1d6"): - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) result = dice.roll(dice_string) e = Embed() e.title = f"🎲 {dice_string}" @@ -48,7 +48,7 @@ async def dice(self, interaction: Interaction, dice_string: str = "1d6"): @command() async def burn_reason(self, interaction: Interaction): """Show the largest sources of burned ETH""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) url = "https://ultrasound.money/api/fees/grouped-analysis-1" # get data from url using aiohttp async with aiohttp.ClientSession() as session, session.get(url) as resp: @@ -155,7 +155,7 @@ async def sea_creatures(self, interaction: Interaction, address: str | None = No @command() async def smoothie(self, interaction: Interaction): """Show smoothing pool information""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed(title="Smoothing Pool") smoothie_eth = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) @@ -263,7 +263,7 @@ async def smoothie(self, interaction: Interaction): @command() async def odao_challenges(self, interaction: Interaction): """Shows the current oDAO challenges""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) c = await rp.get_contract_by_name("rocketDAONodeTrustedActions") # get challenges made events = list(c.events["ActionChallengeMade"].get_logs( diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py 
b/rocketwatch/plugins/rocksolid/rocksolid.py index 4266d382..e203c236 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -16,7 +16,7 @@ from utils.event_logs import get_logs from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden cog_id = "rocksolid" log = logging.getLogger(f"rocketwatch.{cog_id}") @@ -67,7 +67,7 @@ async def rocksolid(self, interaction: Interaction): """ Summary of RockSolid rETH vault stats. """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) current_block = await w3.eth.get_block_number() now = await block_to_ts(current_block) diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 5e49cca6..4e15b03d 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -10,7 +10,7 @@ from utils import solidity from utils.embeds import Embed from utils.rocketpool import rp -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.rpl") @@ -24,7 +24,7 @@ async def staked_rpl(self, interaction: Interaction): """ Show the amount of RPL staked """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) rpl_supply = solidity.to_float(await rp.call("rocketTokenRPL.totalSupply")) legacy_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) @@ -87,7 +87,7 @@ async def withdrawable_rpl(self, interaction: Interaction): """ Show the available liquidity at different RPL/ETH prices """ - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) data = await (await self.bot.db.node_operators.aggregate([ { diff 
--git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index e3b004cf..ae6c7c75 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -22,7 +22,7 @@ from utils.readable import pretty_time from utils.retry import retry_async from utils.rocketpool import rp -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.snapshot") @@ -577,7 +577,7 @@ async def _get_new_events(self) -> list[Event]: @command() async def snapshot_votes(self, interaction: Interaction): """Show currently active Snapshot proposals""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embed = Embed(title="Snapshot Proposals") embed.set_author(name="🔗 Data from snapshot.org", url="https://vote.rocketpool.net") diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 2491fbee..46a5ee59 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -14,7 +14,7 @@ from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.user_distribute") @@ -154,7 +154,7 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: @command() async def user_distribute_status(self, interaction: Interaction): """Show user distribute summary for minipools""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) eligible, pending, distributable = await self._fetch_minipools() diff --git a/rocketwatch/plugins/validator_states/validator_states.py 
b/rocketwatch/plugins/validator_states/validator_states.py index ef2162d5..9d4c38d3 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -8,7 +8,7 @@ from utils.embeds import Embed, el_explorer_url from utils.readable import render_tree_legacy from utils.shared_w3 import w3 -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.validator_states") @@ -114,7 +114,7 @@ def __init__(self, bot: RocketWatch): @command() async def validator_states(self, interaction: Interaction): """Show validator counts by beacon chain and contract status""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) minipools = await self.bot.db.minipools.find( {"beacon.status": {"$exists": True}}, diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 12ba2162..aeb69cb0 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -44,7 +44,7 @@ ) from utils.rocketpool import rp from utils.time_debug import timerun, timerun_async -from utils.visibility import is_hidden_weak +from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.wall") @@ -260,7 +260,7 @@ async def wall( sources: Literal["All", "CEX", "DEX"] = "All" ) -> None: """Show the current RPL market depth across exchanges""" - await interaction.response.defer(ephemeral=is_hidden_weak(interaction)) + await interaction.response.defer(ephemeral=is_hidden(interaction)) embed = Embed(title="RPL Market Depth") async def on_fail() -> None: diff --git a/rocketwatch/utils/visibility.py b/rocketwatch/utils/visibility.py index 1056f7b6..2e3e6595 100644 --- a/rocketwatch/utils/visibility.py +++ b/rocketwatch/utils/visibility.py @@ -4,11 +4,11 @@ def is_hidden(interaction: Interaction): - return all(w not in interaction.channel.name 
for w in ["random", "rocket-watch"]) - - -def is_hidden_weak(interaction: Interaction): - return all(w not in interaction.channel.name for w in ["random", "rocket-watch", "trading"]) + channel_name: str = interaction.channel.name + for allowed_channel in ["random", "rocket-watch", "trading"]: + if allowed_channel in channel_name: + return False + return False def is_hidden_role_controlled(interaction: Interaction): From b83715d20155dba7203f51969bf6c9595314b074 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 8 Mar 2026 23:15:51 +0000 Subject: [PATCH 201/279] collapse unclaimed ETH rewards --- rocketwatch/plugins/tvl/tvl.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 43dbfcd4..646a080c 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -131,9 +131,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "DAO Share" : {"_val": 0}, }, }, - "Unclaimed Rewards" : { - "Smoothing Pool": {}, # accurate, live - } + "Unclaimed Rewards" : {}, # accurate, live }, } # note: _value in each dict will store the final string that gets rendered in the render @@ -334,7 +332,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["_val"] = smoothie_balance # Unclaimed Smoothing Pool Rewards: This is ETH from the previous Reward Periods that have not been claimed yet. - data["Total ETH Locked"]["Unclaimed Rewards"]["Smoothing Pool"]["_val"] = solidity.to_float( + data["Total ETH Locked"]["Unclaimed Rewards"]["_val"] = solidity.to_float( await rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet")) # Staked RPL: This is all ETH that has been staked by node operators. 
From f0c454d12d2142af9f594fbc19d3b575a3ce01ec Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 10 Mar 2026 00:21:03 +0000 Subject: [PATCH 202/279] fix /call --- rocketwatch/plugins/call/call.py | 192 +++++++++++++++++++++++++++ rocketwatch/plugins/debug/debug.py | 187 +------------------------- rocketwatch/plugins/random/random.py | 102 +++++++++++++- 3 files changed, 293 insertions(+), 188 deletions(-) create mode 100644 rocketwatch/plugins/call/call.py diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py new file mode 100644 index 00000000..42141a1a --- /dev/null +++ b/rocketwatch/plugins/call/call.py @@ -0,0 +1,192 @@ +import contextlib +import io +import json +import logging + +import humanize +from discord import File, Interaction, TextStyle +from discord.app_commands import Choice, command, describe +from discord.ext.commands import Cog +from discord.ui import Modal, TextInput + +from rocketwatch import RocketWatch +from utils import solidity +from utils.rocketpool import rp +from utils.shared_w3 import w3 +from utils.visibility import is_hidden_role_controlled + +log = logging.getLogger("rocketwatch.call") + + +class CallModal(Modal): + def __init__(self, cog, function, block, address, raw_output, abi_inputs): + func_name = function.rsplit(".", 1)[1] if "." 
in function else function + super().__init__(title=func_name[:45]) + self.cog = cog + self.function = function + self.block = block + self.address = address + self.raw_output = raw_output + self.abi_inputs = abi_inputs + self.param_inputs = [] + for inp in abi_inputs: + text_input = TextInput(label=f"{inp['name']} ({inp['type']})"[:45], required=True) + self.add_item(text_input) + self.param_inputs.append(text_input) + + async def on_submit(self, interaction): + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + args = [] + errors = [] + for text_input, inp in zip(self.param_inputs, self.abi_inputs, strict=True): + val = text_input.value + with contextlib.suppress(json.JSONDecodeError, ValueError): + val = json.loads(val) + error = self._validate(val, inp["type"]) + if error: + errors.append(f"`{inp['name']}`: {error}") + else: + args.append(val) + if errors: + await interaction.followup.send(content="Validation failed:\n" + "\n".join(errors)) + return + await self.cog._execute_call(interaction, self.function, args, self.block, self.address, self.raw_output) + + @staticmethod + def _validate(value, abi_type): + if abi_type == "bool": + if not isinstance(value, bool): + return f"expected bool, got `{value!r}`" + elif abi_type == "address": + if not isinstance(value, str) or not w3.is_address(value): + return f"expected address, got `{value!r}`" + elif abi_type == "string": + if not isinstance(value, str): + return f"expected string, got `{value!r}`" + elif abi_type.startswith("uint") or abi_type.startswith("int"): + if not isinstance(value, int) or isinstance(value, bool): + return f"expected integer, got `{value!r}`" + elif abi_type.startswith("bytes"): + if isinstance(value, str): + if not value.startswith("0x"): + return f"expected hex bytes, got `{value!r}`" + elif not isinstance(value, (bytes, list)): + return f"expected bytes, got `{value!r}`" + return None + + +class CallJsonModal(Modal, title="Function Arguments"): + 
json_input = TextInput( + label="Arguments (JSON array)", + style=TextStyle.paragraph, + placeholder='[1, "0x..."]', + required=True, + ) + + def __init__(self, cog, function, block, address, raw_output): + super().__init__() + self.cog = cog + self.function = function + self.block = block + self.address = address + self.raw_output = raw_output + + async def on_submit(self, interaction): + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + try: + args = json.loads(self.json_input.value) + if not isinstance(args, list): + args = [args] + except json.JSONDecodeError: + await interaction.followup.send(content=f"Invalid JSON: ```{self.json_input.value}```") + return + await self.cog._execute_call(interaction, self.function, args, self.block, self.address, self.raw_output) + + +class Call(Cog): + def __init__(self, bot: RocketWatch): + self.bot = bot + self.function_names = [] + + @Cog.listener() + async def on_ready(self): + if self.function_names: + return + + for contract in rp.addresses.copy(): + try: + for function in (await rp.get_contract_by_name(contract)).functions: + func_str = function.abi_element_identifier + self.function_names.append(f"{contract}.{func_str}") + except Exception: + log.exception(f"Could not get function list for {contract}") + + @command() + @describe(block="call against block state") + async def call( + self, + interaction: Interaction, + function: str, + block: str = "latest", + address: str | None = None, + raw_output: bool = False + ): + """Call function of contract""" + if block.isnumeric(): + block = int(block) + + # Look up ABI inputs for the function + abi_inputs = [] + try: + contract_name, func_id = function.rsplit(".", 1) + contract = await rp.get_contract_by_name(contract_name) + for entry in contract.abi: + if entry.get("type") == "function": + entry_id = f"{entry['name']}({','.join(inp['type'] for inp in entry.get('inputs', []))})" + if entry_id == func_id: + abi_inputs = entry.get("inputs", 
[]) + break + except Exception: + pass + + if abi_inputs: + if len(abi_inputs) <= 5: + modal = CallModal(self, function, block, address, raw_output, abi_inputs) + else: + modal = CallJsonModal(self, function, block, address, raw_output) + await interaction.response.send_modal(modal) + else: + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + await self._execute_call(interaction, function, [], block, address, raw_output) + + async def _execute_call(self, interaction, function, args, block, address, raw_output): + try: + v = await rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) + except Exception as err: + await interaction.followup.send(content=f"Exception: ```{err!r}```") + return + try: + g = await rp.estimate_gas_for_call(function, *args, block=block) + except Exception as err: + g = "N/A" + if isinstance(err, ValueError) and err.args and isinstance(err.args[0], dict) and err.args[0].get("code") == -32000: + g += f" ({err.args[0]['message']})" + + if isinstance(v, int) and abs(v) >= 10 ** 12 and not raw_output: + v = solidity.to_float(v) + g = humanize.intcomma(g) + text = f"`block: {block}`\n`gas estimate: {g}`\n`{function}({', '.join([repr(a) for a in args])}): " + if len(text + str(v)) > 2000: + text += "too long, attached as file`" + await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) + else: + text += f"{v!s}`" + await interaction.followup.send(content=text) + + @call.autocomplete("function") + async def match_function_name(self, interaction: Interaction, current: str) -> list[Choice[str]]: + return [Choice(name=name, value=name) for name in self.function_names if current.lower() in name.lower()][:25] + + +async def setup(bot): + await bot.add_cog(Call(bot)) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 33ed9d1f..fb2b4d18 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py 
@@ -1,26 +1,18 @@ import io -import json import logging import random import time from datetime import UTC -import aiohttp -import humanize -from colorama import Fore, Style from discord import File, Interaction -from discord.app_commands import Choice, command, describe, guilds +from discord.app_commands import command, guilds from discord.ext.commands import Cog, is_owner from rocketwatch import RocketWatch -from utils import solidity -from utils.block_time import block_to_ts, ts_to_block from utils.config import cfg -from utils.embeds import Embed, el_explorer_url -from utils.readable import prettify_json_string +from utils.embeds import Embed from utils.rocketpool import rp from utils.shared_w3 import w3 -from utils.visibility import is_hidden, is_hidden_role_controlled log = logging.getLogger("rocketwatch.debug") @@ -28,25 +20,6 @@ class Debug(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.contract_names = [] - self.function_names = [] - - # --------- LISTENERS --------- # - - @Cog.listener() - async def on_ready(self): - if self.function_names: - return - - for contract in rp.addresses.copy(): - try: - for function in (await rp.get_contract_by_name(contract)).functions: - self.function_names.append(f"{contract}.{function}") - self.contract_names.append(contract) - except Exception: - log.exception(f"Could not get function list for {contract}") - - # --------- PRIVATE OWNER COMMANDS --------- # @command() @guilds(cfg.discord.owner.server_id) @@ -124,22 +97,6 @@ async def edit_embed(self, interaction: Interaction, message_url: str, new_descr await msg.edit(embed=embed) await interaction.followup.send(content="Done") - @command() - @guilds(cfg.discord.owner.server_id) - @is_owner() - async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str | None = None): - """ - Decode transaction calldata - """ - await interaction.response.defer(ephemeral=True) - tnx = await w3.eth.get_transaction(tnx_hash) - if contract_name: - 
contract = await rp.get_contract_by_name(contract_name) - else: - contract = await rp.get_contract_by_address(tnx.to) - data = contract.decode_function_input(tnx.input) - await interaction.followup.send(content=f"```Input:\n{data}```") - @command() @guilds(cfg.discord.owner.server_id) @is_owner() @@ -287,146 +244,6 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): await interaction.followup.send(embed=event.embed) await interaction.followup.send(content="Done") - # --------- PUBLIC COMMANDS --------- # - - @command() - async def color_test(self, interaction: Interaction): - """ - Simple test to check ansi color support - """ - await interaction.response.defer(ephemeral=is_hidden(interaction)) - payload = "```ansi" - for fg_name, fg in Fore.__dict__.items(): - if fg_name.endswith("_EX"): - continue - payload += f"\n{fg}Hello World" - payload += f"{Style.RESET_ALL}```" - await interaction.followup.send(content=payload) - - @command() - async def asian_restaurant_name(self, interaction: Interaction): - """ - Randomly generated Asian restaurant names - """ - await interaction.response.defer(ephemeral=is_hidden(interaction)) - async with aiohttp.ClientSession() as session, session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: - a = (await resp.json())["name"] - await interaction.followup.send(a) - - @command() - async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): - """ - Get a block using its timestamp. Useful for contracts that track block time instead of block number. 
- """ - await interaction.response.defer(ephemeral=is_hidden(interaction)) - - block = await ts_to_block(timestamp) - found_ts = await block_to_ts(block) - - if found_ts == timestamp: - text = ( - f"Found perfect match for timestamp {timestamp}:\n" - f"Block: {block}" - ) - else: - text = ( - f"Found close match for timestamp {timestamp}:\n" - f"Timestamp: {found_ts}\n" - f"Block: {block}" - ) - - await interaction.followup.send(content=f"```{text}```") - - @command() - async def get_abi_of_contract(self, interaction: Interaction, contract: str): - """Retrieve the latest ABI for a contract""" - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) - try: - abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) - file = File(io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") - await interaction.followup.send(file=file) - except Exception as err: - await interaction.followup.send(content=f"```Exception: {err!r}```") - - @command() - async def get_address_of_contract(self, interaction: Interaction, contract: str): - """Retrieve the latest address for a contract""" - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) - try: - address = cfg.rocketpool.manual_addresses.get(contract) - if not address: - address = await rp.uncached_get_address_by_name(contract) - await interaction.followup.send(content=await el_explorer_url(address)) - except Exception as err: - await interaction.followup.send(content=f"Exception: ```{err!r}```") - if "No address found for" in repr(err): - # private response as a tip - m = ( - "It may be that you are requesting the address of a contract that does not" - " get deployed (e.g. `rocketBase`), is deployed multiple times" - " (e.g. `rocketNodeDistributor`)," - " or is not yet deployed on the current chain.\n" - "... 
or you messed up the name" - ) - await interaction.followup.send(content=m) - - @command() - @describe( - json_args="json formatted arguments. example: `[1, \"World\"]`", - block="call against block state" - ) - async def call( - self, - interaction: Interaction, - function: str, - json_args: str = "[]", - block: str = "latest", - address: str | None = None, - raw_output: bool = False - ): - """Call Function of Contract""" - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) - # convert block to int if number - if block.isnumeric(): - block = int(block) - try: - args = json.loads(json_args) - if not isinstance(args, list): - args = [args] - v = await rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) - except Exception as err: - await interaction.followup.send(content=f"Exception: ```{err!r}```") - return - try: - g = await rp.estimate_gas_for_call(function, *args, block=block) - except Exception as err: - g = "N/A" - if isinstance(err, ValueError) and err.args and "code" in err.args and err.args[0]["code"] == -32000: - g += f" ({err.args[0]['message']})" - - if isinstance(v, int) and abs(v) >= 10 ** 12 and not raw_output: - v = solidity.to_float(v) - g = humanize.intcomma(g) - text = f"`block: {block}`\n`gas estimate: {g}`\n`{function}({', '.join([repr(a) for a in args])}): " - if len(text + str(v)) > 2000: - text += "too long, attached as file`" - await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) - else: - text += f"{v!s}`" - await interaction.followup.send(content=text) - - # --------- OTHERS --------- # - - @get_address_of_contract.autocomplete("contract") - @get_abi_of_contract.autocomplete("contract") - @decode_tnx.autocomplete("contract_name") - async def match_contract_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: - return [Choice(name=name, value=name) for name in self.contract_names if current.lower() in 
name.lower()][:25] - - @call.autocomplete("function") - async def match_function_name(self, interaction: Interaction, current: str) -> list[Choice[str]]: - return [Choice(name=name, value=name) for name in self.function_names if current.lower() in name.lower()][:25] - async def setup(bot): await bot.add_cog(Debug(bot)) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 5e78fd09..6b3ed5cb 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -7,14 +7,15 @@ import humanize import pytz from discord import File, Interaction -from discord.app_commands import command +from discord.app_commands import Choice, command from discord.ext import commands from rocketwatch import RocketWatch from utils import solidity +from utils.block_time import block_to_ts, ts_to_block from utils.config import cfg from utils.embeds import Embed, el_explorer_url, ens -from utils.readable import pretty_time, s_hex +from utils.readable import prettify_json_string, pretty_time, s_hex from utils.rocketpool import rp from utils.sea_creatures import ( get_holding_for_address, @@ -22,7 +23,7 @@ sea_creatures, ) from utils.shared_w3 import bacon, w3 -from utils.visibility import is_hidden +from utils.visibility import is_hidden, is_hidden_role_controlled log = logging.getLogger("rocketwatch.random") @@ -30,6 +31,12 @@ class Random(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot + self.contract_names = [] + + @commands.Cog.listener() + async def on_ready(self): + if not self.contract_names: + self.contract_names = list(rp.addresses) @command() async def dice(self, interaction: Interaction, dice_string: str = "1d6"): @@ -291,6 +298,95 @@ async def odao_challenges(self, interaction: Interaction): e.description += f"Time Left: **{time_left}**\n\n" await interaction.followup.send(embed=e) + @command() + async def asian_restaurant_name(self, interaction: Interaction): + """ + Randomly generated Asian 
restaurant names + """ + await interaction.response.defer(ephemeral=is_hidden(interaction)) + async with aiohttp.ClientSession() as session, session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: + a = (await resp.json())["name"] + await interaction.followup.send(a) + + @command() + async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): + """ + Get a block using its timestamp. Useful for contracts that track block time instead of block number. + """ + await interaction.response.defer(ephemeral=is_hidden(interaction)) + + block = await ts_to_block(timestamp) + found_ts = await block_to_ts(block) + + if found_ts == timestamp: + text = ( + f"Found perfect match for timestamp {timestamp}:\n" + f"Block: {block}" + ) + else: + text = ( + f"Found close match for timestamp {timestamp}:\n" + f"Timestamp: {found_ts}\n" + f"Block: {block}" + ) + + await interaction.followup.send(content=f"```{text}```") + + @command() + async def get_abi_of_contract(self, interaction: Interaction, contract: str): + """Retrieve the latest ABI for a contract""" + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + try: + abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) + file = File(io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") + await interaction.followup.send(file=file) + except Exception as err: + await interaction.followup.send(content=f"```Exception: {err!r}```") + + @command() + async def get_address_of_contract(self, interaction: Interaction, contract: str): + """Retrieve the latest address for a contract""" + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + try: + address = cfg.rocketpool.manual_addresses.get(contract) + if not address: + address = await rp.uncached_get_address_by_name(contract) + await interaction.followup.send(content=await el_explorer_url(address)) + except Exception as err: + await 
interaction.followup.send(content=f"Exception: ```{err!r}```") + if "No address found for" in repr(err): + # private response as a tip + m = ( + "It may be that you are requesting the address of a contract that does not" + " get deployed (e.g. `rocketBase`), is deployed multiple times" + " (e.g. `rocketNodeDistributor`)," + " or is not yet deployed on the current chain.\n" + "... or you messed up the name" + ) + await interaction.followup.send(content=m) + + @command() + async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str | None = None): + """ + Decode transaction calldata + """ + await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + tnx = await w3.eth.get_transaction(tnx_hash) + if contract_name: + contract = await rp.get_contract_by_name(contract_name) + else: + contract = await rp.get_contract_by_address(tnx.to) + data = contract.decode_function_input(tnx.input) + await interaction.followup.send(content=f"```Input:\n{data}```") + + # --------- AUTOCOMPLETE --------- # + + @get_address_of_contract.autocomplete("contract") + @get_abi_of_contract.autocomplete("contract") + @decode_tnx.autocomplete("contract_name") + async def match_contract_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: + return [Choice(name=name, value=name) for name in self.contract_names if current.lower() in name.lower()][:25] + async def setup(self): await self.add_cog(Random(self)) From 6783c09e0db656bca9f14c785760f3247a1db521 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 10 Mar 2026 00:28:45 +0000 Subject: [PATCH 203/279] fix /call output format --- rocketwatch/plugins/call/call.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 42141a1a..87107a83 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -175,7 +175,8 @@ async def 
_execute_call(self, interaction, function, args, block, address, raw_o if isinstance(v, int) and abs(v) >= 10 ** 12 and not raw_output: v = solidity.to_float(v) g = humanize.intcomma(g) - text = f"`block: {block}`\n`gas estimate: {g}`\n`{function}({', '.join([repr(a) for a in args])}): " + func_name = function.split("(")[0] + text = f"`block: {block}`\n`gas estimate: {g}`\n`{func_name}({', '.join([repr(a) for a in args])}): " if len(text + str(v)) > 2000: text += "too long, attached as file`" await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) From f2ef6ec5df470ba65347bc634ff9ae45f0a73f35 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 10 Mar 2026 00:43:02 +0000 Subject: [PATCH 204/279] filter state-modifying functions --- rocketwatch/plugins/call/call.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 87107a83..2078f487 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -115,9 +115,11 @@ async def on_ready(self): for contract in rp.addresses.copy(): try: - for function in (await rp.get_contract_by_name(contract)).functions: - func_str = function.abi_element_identifier - self.function_names.append(f"{contract}.{func_str}") + c = await rp.get_contract_by_name(contract) + for entry in c.abi: + if entry.get("type") == "function" and entry.get("stateMutability") in ("view", "pure"): + func_id = f"{entry['name']}({','.join(inp['type'] for inp in entry.get('inputs', []))})" + self.function_names.append(f"{contract}.{func_id}") except Exception: log.exception(f"Could not get function list for {contract}") @@ -131,7 +133,7 @@ async def call( address: str | None = None, raw_output: bool = False ): - """Call function of contract""" + """Manually call a function on a protocol contract""" if block.isnumeric(): block = int(block) From 
3842462b5ed6d6d963945ef33d9721585ca84c66 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Tue, 10 Mar 2026 00:46:48 +0000 Subject: [PATCH 205/279] remove CallJsonModal --- rocketwatch/plugins/call/call.py | 35 ++------------------------------ 1 file changed, 2 insertions(+), 33 deletions(-) diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 2078f487..43474cd0 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -4,7 +4,7 @@ import logging import humanize -from discord import File, Interaction, TextStyle +from discord import File, Interaction from discord.app_commands import Choice, command, describe from discord.ext.commands import Cog from discord.ui import Modal, TextInput @@ -75,34 +75,6 @@ def _validate(value, abi_type): return None -class CallJsonModal(Modal, title="Function Arguments"): - json_input = TextInput( - label="Arguments (JSON array)", - style=TextStyle.paragraph, - placeholder='[1, "0x..."]', - required=True, - ) - - def __init__(self, cog, function, block, address, raw_output): - super().__init__() - self.cog = cog - self.function = function - self.block = block - self.address = address - self.raw_output = raw_output - - async def on_submit(self, interaction): - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) - try: - args = json.loads(self.json_input.value) - if not isinstance(args, list): - args = [args] - except json.JSONDecodeError: - await interaction.followup.send(content=f"Invalid JSON: ```{self.json_input.value}```") - return - await self.cog._execute_call(interaction, self.function, args, self.block, self.address, self.raw_output) - - class Call(Cog): def __init__(self, bot: RocketWatch): self.bot = bot @@ -152,10 +124,7 @@ async def call( pass if abi_inputs: - if len(abi_inputs) <= 5: - modal = CallModal(self, function, block, address, raw_output, abi_inputs) - else: - modal = CallJsonModal(self, 
function, block, address, raw_output) + modal = CallModal(self, function, block, address, raw_output, abi_inputs) await interaction.response.send_modal(modal) else: await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) From c5c0a9f0940678db563ae2faa5b1c79a860b182b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 12:23:50 +0000 Subject: [PATCH 206/279] fix command sync --- .pre-commit-config.yaml | 1 + rocketwatch/rocketwatch.py | 61 ++++++++-------- rocketwatch/utils/command_tree.py | 112 +++++++++++++++++++++--------- rocketwatch/utils/config.py | 2 +- tests/test_cfg.py | 28 +++++--- 5 files changed, 131 insertions(+), 73 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 876ae8f7..2ca87691 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,6 +4,7 @@ repos: hooks: - id: ruff args: [--fix] + - id: ruff-format - repo: local hooks: - id: pytest diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index c5b89867..b8305f0f 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -10,10 +10,8 @@ Interaction, Thread, User, - app_commands, ) from discord.abc import GuildChannel, PrivateChannel -from discord.ext import commands from discord.ext.commands import Bot from pymongo import AsyncMongoClient @@ -54,20 +52,21 @@ def should_load_plugin(_plugin: str) -> bool: log.debug(f"Plugin {_plugin} implicitly included") return True - for path in Path("plugins").glob('**/*.py'): + for path in Path("plugins").glob("**/*.py"): plugin_name = path.stem if not should_load_plugin(plugin_name): log.warning(f"Skipping plugin {plugin_name}") continue - log.info(f"Loading plugin \"{plugin_name}\"") + log.info(f'Loading plugin "{plugin_name}"') try: extension_name = f"plugins.{plugin_name}.{plugin_name}" await self.load_extension(extension_name) - except Exception: - log.exception(f"Failed to load plugin \"{plugin_name}\"") + 
except Exception as e: + log.exception(f'Failed to load plugin "{plugin_name}"') + await self.report_error(e) - log.info('Finished loading plugins') + log.info("Finished loading plugins") async def setup_hook(self) -> None: await rp.async_init() @@ -88,40 +87,28 @@ async def on_ready(self): log.info(f"Logged in as {self.user.name} ({self.user.id})") commands_enabled = cfg.modules.enable_commands if not commands_enabled: - log.info("Commands disabled, clearing tree...") + log.info("Commands disabled, clearing local tree...") self.clear_commands() if commands_enabled is None: - log.info("Command sync behavior unspecified, skipping") + log.info("Sync behavior unspecified, skipping") return await self.sync_commands() - async def on_app_command_error(self, interaction: Interaction, error: Exception) -> None: - cmd_name = interaction.command.name if interaction.command else "unknown" - log.error(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed") - if isinstance(error, commands.errors.MaxConcurrencyReached): - msg = "Someone else is already using this command. Please try again later." - elif isinstance(error, app_commands.errors.CommandOnCooldown): - msg = f"Slow down! You are using this command too fast. Please try again in {error.retry_after:.0f} seconds." - else: - msg = "An unexpected error occurred and has been reported to the developer. Please try again later." 
- - try: - await self.report_error(error, interaction) - await interaction.followup.send(content=msg, ephemeral=True) - except Exception: - log.exception("Failed to alert user") - async def get_or_fetch_guild(self, guild_id: int) -> Guild: return self.get_guild(guild_id) or await self.fetch_guild(guild_id) - async def get_or_fetch_channel(self, channel_id: int) -> GuildChannel | PrivateChannel | Thread: + async def get_or_fetch_channel( + self, channel_id: int + ) -> GuildChannel | PrivateChannel | Thread: return self.get_channel(channel_id) or await self.fetch_channel(channel_id) async def get_or_fetch_user(self, user_id: int) -> User: return self.get_user(user_id) or await self.fetch_user(user_id) - async def report_error(self, exception: Exception, interaction: Interaction | None = None, *args) -> None: + async def report_error( + self, exception: Exception, interaction: Interaction | None = None, *args + ) -> None: err_description = f"`{repr(exception)[:150]}`" if args: @@ -129,23 +116,35 @@ async def report_error(self, exception: Exception, interaction: Interaction | No err_description += f"\n```{args_fmt}```" if interaction: - cmd_name = interaction.command.name if interaction.command else "unknown" + if interaction.command: + cmd_name = interaction.command.name + else: + cmd_name = getattr(interaction, "data", {}).get("name", "unknown") + cmd_options = ( + interaction.namespace.__dict__ + if interaction.namespace + else (interaction.data.get("options", []) if interaction.data else []) + ) err_description += ( f"\n```" f"command = {cmd_name}\n" - f"command.params = {getattr(interaction.command, 'parameters', '')}\n" + f"command.params = {cmd_options}\n" f"channel = {interaction.channel}\n" f"user = {interaction.user}" f"```" ) error = getattr(exception, "original", exception) - err_trace = "".join(traceback.format_exception(type(error), error, error.__traceback__)) + err_trace = "".join( + traceback.format_exception(type(error), error, error.__traceback__) + 
) log.error(err_trace) try: channel = await self.get_or_fetch_channel(cfg.discord.channels["errors"]) file = File(io.StringIO(err_trace), "exception.txt") - await retry_async(tries=5, delay=5)(channel.send)(err_description, file=file) + await retry_async(tries=5, delay=5)(channel.send)( + err_description, file=file + ) except Exception: log.exception("Failed to send message. Max retries reached.") diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index 2d45f631..a0989273 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -3,6 +3,14 @@ from discord import Interaction from discord.app_commands import AppCommandError, CommandTree +from discord.app_commands.errors import ( + BotMissingPermissions, + CheckFailure, + CommandOnCooldown, + MissingPermissions, + NoPrivateMessage, + TransformerError, +) log = logging.getLogger("rocketwatch.command_tree") @@ -12,27 +20,35 @@ async def _call(self, interaction: Interaction) -> None: cmd_name = interaction.command.name if interaction.command else "unknown" timestamp = datetime.utcnow() - log.info(f"/{cmd_name} triggered by {interaction.user} in #{interaction.channel.name} ({interaction.guild})") + log.info( + f"/{cmd_name} triggered by {interaction.user} in #{interaction.channel.name} ({interaction.guild})" + ) try: - await self.client.db.command_metrics.insert_one({ - '_id': interaction.id, - 'command': cmd_name, - 'options': interaction.data.get("options", []) if interaction.data else [], - 'user': { - 'id': interaction.user.id, - 'name': interaction.user.name, - }, - 'guild': { - 'id': interaction.guild.id, - 'name': interaction.guild.name, - } if interaction.guild else None, - 'channel': { - 'id': interaction.channel.id, - 'name': interaction.channel.name, - }, - 'timestamp': timestamp, - 'status': 'pending' - }) + await self.client.db.command_metrics.insert_one( + { + "_id": interaction.id, + "command": cmd_name, + "options": 
interaction.data.get("options", []) + if interaction.data + else [], + "user": { + "id": interaction.user.id, + "name": interaction.user.name, + }, + "guild": { + "id": interaction.guild.id, + "name": interaction.guild.name, + } + if interaction.guild + else None, + "channel": { + "id": interaction.channel.id, + "name": interaction.channel.name, + }, + "timestamp": timestamp, + "status": "pending", + } + ) except Exception as e: log.error(f"Failed to insert command into database: {e}") await self.client.report_error(e) @@ -40,15 +56,19 @@ async def _call(self, interaction: Interaction) -> None: try: await super()._call(interaction) except Exception as error: - log.info(f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed") + log.info( + f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed" + ) try: await self.client.db.command_metrics.update_one( - {'_id': interaction.id}, - {'$set': { - 'status': 'error', - 'took': (datetime.utcnow() - timestamp).total_seconds(), - 'error': str(error) - }} + {"_id": interaction.id}, + { + "$set": { + "status": "error", + "took": (datetime.utcnow() - timestamp).total_seconds(), + "error": str(error), + } + }, ) except Exception as e: log.exception("Failed to update command status to error") @@ -61,15 +81,43 @@ async def _call(self, interaction: Interaction) -> None: ) try: await self.client.db.command_metrics.update_one( - {'_id': interaction.id}, - {'$set': { - 'status': 'completed', - 'took': (datetime.utcnow() - timestamp).total_seconds() - }} + {"_id": interaction.id}, + { + "$set": { + "status": "completed", + "took": (datetime.utcnow() - timestamp).total_seconds(), + } + }, ) except Exception as e: log.error(f"Failed to update command status to completed: {e}") await self.client.report_error(e) async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: + cmd_name = interaction.command.name if 
interaction.command else "unknown" + log.error( + f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed" + ) + + if isinstance(error, CommandOnCooldown): + msg = f"Slow down! You are using this command too fast. Please try again in {error.retry_after:.0f} seconds." + elif isinstance(error, MissingPermissions): + msg = f"You don't have the required permissions to use this command. Missing: {', '.join(error.missing_permissions)}" + elif isinstance(error, BotMissingPermissions): + msg = f"I'm missing the required permissions to run this command. Missing: {', '.join(error.missing_permissions)}" + elif isinstance(error, NoPrivateMessage): + msg = "This command can only be used in a server, not in DMs." + elif isinstance(error, CheckFailure): + msg = "You don't meet the requirements to use this command." + elif isinstance(error, TransformerError): + msg = f"Failed to process the value for `{error.value}`. Please check your input and try again." + else: + msg = "An unexpected error occurred and has been reported to the developer. Please try again later." 
+ + try: + await self.client.report_error(error, interaction) + await interaction.followup.send(content=msg, ephemeral=True) + except Exception: + log.exception("Failed to alert user") + await self.client.on_app_command_error(interaction, error) diff --git a/rocketwatch/utils/config.py b/rocketwatch/utils/config.py index 7c297776..fa3f76b1 100644 --- a/rocketwatch/utils/config.py +++ b/rocketwatch/utils/config.py @@ -59,7 +59,7 @@ class RocketPoolConfig(BaseModel): class ModulesConfig(BaseModel): include: list[str] = [] exclude: list[str] = [] - enable_commands: bool = True + enable_commands: bool | None = None class StatusMessageConfig(BaseModel): diff --git a/tests/test_cfg.py b/tests/test_cfg.py index ab7117a6..be4d6a56 100644 --- a/tests/test_cfg.py +++ b/tests/test_cfg.py @@ -31,7 +31,9 @@ def _minimal_config(**overrides) -> Config: ), "execution_layer": ExecutionLayerConfig( explorer="https://etherscan.io", - endpoint=ExecutionLayerEndpoint(current="http://localhost:8545", mainnet="http://localhost:8545"), + endpoint=ExecutionLayerEndpoint( + current="http://localhost:8545", mainnet="http://localhost:8545" + ), etherscan_secret="test", ), "consensus_layer": ConsensusLayerConfig( @@ -43,7 +45,9 @@ def _minimal_config(**overrides) -> Config: "rocketpool": RocketPoolConfig( manual_addresses={"rocketStorage": "0x1234"}, dao_multisigs=["0xabcd"], - support=RocketPoolSupport(user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5), + support=RocketPoolSupport( + user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5 + ), dm_warning=DmWarningConfig(channels=[100]), ), "events": EventsConfig(lookback_distance=100, genesis=0, block_batch_size=50), @@ -63,7 +67,7 @@ def test_defaults(self): assert cfg.modules == ModulesConfig() assert cfg.modules.include == [] assert cfg.modules.exclude == [] - assert cfg.modules.enable_commands is True + assert cfg.modules.enable_commands is None assert cfg.other == OtherConfig() assert 
cfg.other.secrets.wakatime == "" assert cfg.rocketpool.chain == "mainnet" @@ -94,11 +98,13 @@ def test_archive_endpoint_set(self): class TestConfigValidation: def test_missing_required_field(self): with pytest.raises(ValueError): - Config(discord=DiscordConfig( - secret="test", - owner=DiscordOwner(user_id=1, server_id=2), - channels={}, - )) + Config( + discord=DiscordConfig( + secret="test", + owner=DiscordOwner(user_id=1, server_id=2), + channels={}, + ) + ) def test_wrong_type_user_id(self): with pytest.raises(ValueError): @@ -141,7 +147,11 @@ def test_partial_override(self): class TestSampleConfig: def test_sample_config_validates(self): - sample_path = Path(__file__).resolve().parent.parent / "rocketwatch" / "config.toml.sample" + sample_path = ( + Path(__file__).resolve().parent.parent + / "rocketwatch" + / "config.toml.sample" + ) with open(sample_path, "rb") as f: data = tomllib.load(f) cfg = Config(**data) From bdcae7d01f809d07e4aafc7b6d73ce258e9be92a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 12:40:03 +0000 Subject: [PATCH 207/279] apply ruff formatting --- rocketwatch/__main__.py | 4 +- rocketwatch/plugins/8ball/8ball.py | 48 +- rocketwatch/plugins/about/about.py | 49 +- rocketwatch/plugins/activity/activity.py | 2 +- rocketwatch/plugins/apr/apr.py | 436 +++++++------ .../plugins/beacon_events/beacon_events.py | 133 ++-- rocketwatch/plugins/call/call.py | 66 +- .../plugins/chat_summary/chat_summary.py | 74 ++- rocketwatch/plugins/collateral/collateral.py | 144 +++-- .../plugins/commissions/commissions.py | 10 +- rocketwatch/plugins/cow_orders/cow_orders.py | 85 ++- rocketwatch/plugins/dao/dao.py | 133 +++- .../plugins/db_upkeep_task/db_upkeep_task.py | 560 +++++++++++++---- rocketwatch/plugins/debug/debug.py | 101 +-- .../delegate_contracts/delegate_contracts.py | 34 +- .../plugins/deposit_pool/deposit_pool.py | 79 ++- rocketwatch/plugins/event_core/event_core.py | 139 +++-- 
rocketwatch/plugins/events/events.py | 582 +++++++++++++----- .../fee_distribution/fee_distribution.py | 50 +- rocketwatch/plugins/forum/forum.py | 52 +- rocketwatch/plugins/governance/governance.py | 38 +- rocketwatch/plugins/lottery/lottery.py | 115 ++-- rocketwatch/plugins/metrics/metrics.py | 151 +++-- rocketwatch/plugins/milestones/milestones.py | 42 +- .../minipool_distribution.py | 52 +- .../pinned_messages/pinned_messages.py | 60 +- rocketwatch/plugins/proposals/proposals.py | 397 +++++++----- rocketwatch/plugins/queue/queue.py | 76 ++- rocketwatch/plugins/random/random.py | 283 +++++---- rocketwatch/plugins/releases/releases.py | 8 +- rocketwatch/plugins/reloader/reloader.py | 26 +- rocketwatch/plugins/rewards/rewards.py | 97 ++- rocketwatch/plugins/rocksolid/rocksolid.py | 46 +- rocketwatch/plugins/rpips/rpips.py | 34 +- rocketwatch/plugins/rpl/rpl.py | 106 ++-- .../plugins/scam_detection/scam_detection.py | 395 ++++++++---- .../plugins/scam_warning/scam_warning.py | 24 +- rocketwatch/plugins/snapshot/snapshot.py | 253 +++++--- .../plugins/support_utils/support_utils.py | 217 ++++--- .../plugins/transactions/transactions.py | 160 +++-- rocketwatch/plugins/tvl/tvl.py | 502 +++++++++------ .../user_distribute/user_distribute.py | 74 ++- .../validator_states/validator_states.py | 59 +- rocketwatch/plugins/wall/wall.py | 156 +++-- rocketwatch/strings.py | 8 +- rocketwatch/utils/block_time.py | 4 +- rocketwatch/utils/dao.py | 183 ++++-- rocketwatch/utils/embeds.py | 314 ++++++---- rocketwatch/utils/etherscan.py | 28 +- rocketwatch/utils/event.py | 10 +- rocketwatch/utils/event_logs.py | 4 +- rocketwatch/utils/image.py | 52 +- rocketwatch/utils/liquidity.py | 252 +++++--- rocketwatch/utils/readable.py | 70 ++- rocketwatch/utils/retry.py | 22 +- rocketwatch/utils/rocketpool.py | 92 ++- rocketwatch/utils/sea_creatures.py | 67 +- rocketwatch/utils/shared_w3.py | 11 +- rocketwatch/utils/solidity.py | 18 +- rocketwatch/utils/views.py | 12 +- 
tests/test_scam_detection.py | 18 +- 61 files changed, 4839 insertions(+), 2478 deletions(-) diff --git a/rocketwatch/__main__.py b/rocketwatch/__main__.py index b1c1d9f6..7d0e04c4 100644 --- a/rocketwatch/__main__.py +++ b/rocketwatch/__main__.py @@ -5,7 +5,9 @@ from rocketwatch import RocketWatch from utils.config import cfg -logging.basicConfig(format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s") +logging.basicConfig( + format="%(levelname)5s %(asctime)s [%(name)s] %(filename)s:%(lineno)d|%(funcName)s(): %(message)s" +) logging.getLogger().setLevel("INFO") logging.getLogger("rocketwatch").setLevel(cfg.log_level) diff --git a/rocketwatch/plugins/8ball/8ball.py b/rocketwatch/plugins/8ball/8ball.py index 75e1d225..a826f9a9 100644 --- a/rocketwatch/plugins/8ball/8ball.py +++ b/rocketwatch/plugins/8ball/8ball.py @@ -27,29 +27,31 @@ async def eight_ball(self, interaction: Interaction, question: str): return await interaction.response.defer(ephemeral=is_hidden(interaction)) await asyncio.sleep(random.randint(2, 5)) - res = pyrandom.choice([ - "As I see it, yes", - "It is certain", - "It is decidedly so", - "Most likely", - "Outlook good", - "Signs point to yes", - "Without a doubt", - "Yes", - "Yes - definitely", - "You may rely on it", - "Don't count on it", - "My reply is no", - "My sources say no", - "Outlook not so good", - "Very doubtful", - "Chances aren't good", - "Unlikely", - "Not likely", - "No", - "Absolutely not" - ]) - e.description = f"> \"{question}\"\n - `{interaction.user.display_name}`\n\nThe Magic 8 Ball says: `{res}`" + res = pyrandom.choice( + [ + "As I see it, yes", + "It is certain", + "It is decidedly so", + "Most likely", + "Outlook good", + "Signs point to yes", + "Without a doubt", + "Yes", + "Yes - definitely", + "You may rely on it", + "Don't count on it", + "My reply is no", + "My sources say no", + "Outlook not so good", + "Very doubtful", + "Chances aren't good", + "Unlikely", + "Not 
likely", + "No", + "Absolutely not", + ] + ) + e.description = f'> "{question}"\n - `{interaction.user.display_name}`\n\nThe Magic 8 Ball says: `{res}`' await interaction.followup.send(embed=e) diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 1a0268d3..95fcbd8d 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -41,10 +41,7 @@ async def about(self, interaction: Interaction): aiohttp.ClientSession() as session, session.get( "https://wakatime.com/api/v1/users/current/all_time_since_today", - params={ - "project": "rocketwatch", - "api_key": api_key - } + params={"project": "rocketwatch", "api_key": api_key}, ) as resp, ): code_time = (await resp.json())["data"]["text"] @@ -52,16 +49,22 @@ async def about(self, interaction: Interaction): await self.bot.report_error(err) if code_time: - e.add_field(name="Project Statistics", - value=f"An estimate of {code_time} has been spent developing this bot!", - inline=False) - - e.add_field(name="Bot Statistics", - value=f"{len(g)} guilds joined and " - f"{humanize.intcomma(sum(guild.member_count for guild in g))} members reached!", - inline=False) - - address = await el_explorer_url(cfg.rocketpool.manual_addresses["rocketStorage"]) + e.add_field( + name="Project Statistics", + value=f"An estimate of {code_time} has been spent developing this bot!", + inline=False, + ) + + e.add_field( + name="Bot Statistics", + value=f"{len(g)} guilds joined and " + f"{humanize.intcomma(sum(guild.member_count for guild in g))} members reached!", + inline=False, + ) + + address = await el_explorer_url( + cfg.rocketpool.manual_addresses["rocketStorage"] + ) e.add_field(name="Storage Contract", value=address) e.add_field(name="Chain", value=cfg.rocketpool.chain.capitalize()) @@ -69,11 +72,16 @@ async def about(self, interaction: Interaction): e.add_field(name="Plugins loaded", value=str(len(self.bot.cogs))) e.add_field(name="Host CPU", 
value=f"{psutil.cpu_percent():.2f}%") - e.add_field(name="Host Memory", value=f"{psutil.virtual_memory().percent}% used") - e.add_field(name="Bot Memory", value=f"{humanize.naturalsize(self.process.memory_info().rss)} used") + e.add_field( + name="Host Memory", value=f"{psutil.virtual_memory().percent}% used" + ) + e.add_field( + name="Bot Memory", + value=f"{humanize.naturalsize(self.process.memory_info().rss)} used", + ) load = [x / psutil.cpu_count() for x in psutil.getloadavg()] - e.add_field(name="Host Load", value=' / '.join(f"{pct:.0%}" for pct in load)) + e.add_field(name="Host Load", value=" / ".join(f"{pct:.0%}" for pct in load)) system_uptime = uptime.uptime() e.add_field(name="Host Uptime", value=f"{readable.pretty_time(system_uptime)}") @@ -85,7 +93,12 @@ async def about(self, interaction: Interaction): # show credits try: - async with aiohttp.ClientSession() as session, session.get(f"https://api.github.com/repos/{repo_name}/contributors") as resp: + async with ( + aiohttp.ClientSession() as session, + session.get( + f"https://api.github.com/repos/{repo_name}/contributors" + ) as resp, + ): contributors_data = await resp.json() contributors = [ f"[{c['login']}]({c['html_url']}) ({c['contributions']})" diff --git a/rocketwatch/plugins/activity/activity.py b/rocketwatch/plugins/activity/activity.py index 3d0e7c38..3fca0a15 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -34,7 +34,7 @@ async def task(self): await self.bot.change_presence( activity=Activity( type=ActivityType.watching, - name=f"{validator_count:,} active validators" + name=f"{validator_count:,} active validators", ) ) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 1f98d7c0..c45df25a 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -52,23 +52,38 @@ async def task(self): cursor_block = (await w3_archive.eth.get_block("latest"))["number"] while True: # get address of 
rocketNetworkBalances contract at cursor block - address = await rp.uncached_get_address_by_name("rocketNetworkBalances", block=cursor_block) - balance_block = await rp.call("rocketNetworkBalances.getBalancesBlock", block=cursor_block, address=address) + address = await rp.uncached_get_address_by_name( + "rocketNetworkBalances", block=cursor_block + ) + balance_block = await rp.call( + "rocketNetworkBalances.getBalancesBlock", + block=cursor_block, + address=address, + ) if balance_block == latest_db_block: break block_time = (await w3.eth.get_block(balance_block))["timestamp"] # abort if the blocktime is older than 120 days if block_time < (datetime.now().timestamp() - 120 * 24 * 60 * 60): break - reth_ratio = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate", block=cursor_block)) + reth_ratio = solidity.to_float( + await rp.call("rocketTokenRETH.getExchangeRate", block=cursor_block) + ) effectiveness = solidity.to_float( - await rp.call("rocketNetworkBalances.getETHUtilizationRate", block=cursor_block, address=address)) - await self.bot.db.reth_apr.insert_one({ - "block" : balance_block, - "time" : block_time, - "value" : reth_ratio, - "effectiveness": effectiveness - }) + await rp.call( + "rocketNetworkBalances.getETHUtilizationRate", + block=cursor_block, + address=address, + ) + ) + await self.bot.db.reth_apr.insert_one( + { + "block": balance_block, + "time": block_time, + "value": reth_ratio, + "effectiveness": effectiveness, + } + ) cursor_block = balance_block - 1 @task.before_loop @@ -88,84 +103,69 @@ async def reth_apr(self, interaction: Interaction): e.description = "For some comparisons against other LST: [dune dashboard](https://dune.com/rp_community/lst-comparison)" # get the last 30 datapoints - datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) + datapoints = ( + await self.bot.db.reth_apr.find() + .sort("block", -1) + .limit(180 + 38) + .to_list(None) + ) if len(datapoints) == 0: 
e.description = "No data available yet." return await interaction.followup.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'beacon.status' : 'active_ongoing', - 'node_fee' : { - '$ne': None + tmp = await ( + await self.bot.db.minipools.aggregate( + [ + { + "$match": { + "beacon.status": "active_ongoing", + "node_fee": {"$ne": None}, + "node_deposit_balance": {"$ne": None}, + } }, - 'node_deposit_balance': { - '$ne': None - } - } - }, { - '$project': { - 'fee' : '$node_fee', - 'share': { - '$multiply': [ - { - '$subtract': [ - 1, { - '$divide': [ - '$node_deposit_balance', 32 + { + "$project": { + "fee": "$node_fee", + "share": { + "$multiply": [ + { + "$subtract": [ + 1, + {"$divide": ["$node_deposit_balance", 32]}, ] - } + }, + 100, ] - }, 100 - ] - } - } - }, { - '$group': { - '_id' : None, - 'pre_numerator': { - '$sum': '$fee' - }, - 'numerator' : { - '$sum': { - '$multiply': [ - '$fee', '$share' - ] + }, } }, - 'denominator' : { - '$sum': '$share' - }, - 'count' : { - '$sum': 1 - } - } - }, { - '$project': { - 'average' : { - '$divide': [ - '$numerator', '$denominator' - ] - }, - 'reference_average': { - '$divide': [ - '$pre_numerator', '$count' - ] + { + "$group": { + "_id": None, + "pre_numerator": {"$sum": "$fee"}, + "numerator": {"$sum": {"$multiply": ["$fee", "$share"]}}, + "denominator": {"$sum": "$share"}, + "count": {"$sum": 1}, + } }, - 'used_pETH_share' : { - '$divide': [ - { - '$divide': [ - '$denominator', '$count' + { + "$project": { + "average": {"$divide": ["$numerator", "$denominator"]}, + "reference_average": { + "$divide": ["$pre_numerator", "$count"] + }, + "used_pETH_share": { + "$divide": [ + {"$divide": ["$denominator", "$count"]}, + 100, ] - }, 100 - ] - } - } - } - ])).to_list(length=1) + }, + } + }, + ] + ) + ).to_list(length=1) node_fee = tmp[0]["average"] if len(tmp) > 0 else 20 peth_share = tmp[0]["used_pETH_share"] if 
len(tmp) > 0 else 0.75 @@ -191,26 +191,61 @@ async def reth_apr(self, interaction: Interaction): # calculate the 7 day average if i > 8: y_7d.append(to_apr(datapoints[i - 9], datapoints[i])) - y_7d_virtual.append(to_apr(datapoints[i - 9], datapoints[i], effective=False)) - y_7d_claim = get_duration(datapoints[i - 9], datapoints[i]) / (60 * 60 * 24) + y_7d_virtual.append( + to_apr(datapoints[i - 9], datapoints[i], effective=False) + ) + y_7d_claim = get_duration(datapoints[i - 9], datapoints[i]) / ( + 60 * 60 * 24 + ) else: # if we dont have enough data, we dont show it y_7d.append(None) y_7d_virtual.append(None) - e.add_field(name=f"{y_7d_claim:.1f} Day Average rETH APR", - value=f"{y_7d[-1]:.2%}") - e.add_field(name=f"{y_7d_claim:.1f} Day Average rETH APR (without Effectiveness Drag, Virtual)", - value=f"{y_7d_virtual[-1]:.2%}", inline=False) + e.add_field( + name=f"{y_7d_claim:.1f} Day Average rETH APR", value=f"{y_7d[-1]:.2%}" + ) + e.add_field( + name=f"{y_7d_claim:.1f} Day Average rETH APR (without Effectiveness Drag, Virtual)", + value=f"{y_7d_virtual[-1]:.2%}", + inline=False, + ) fig = plt.figure() ax1 = plt.gca() ax2 = plt.twinx() - ax2.plot(x, y, marker="+", linestyle="", label="Period Average", alpha=0.6, color="orange") + ax2.plot( + x, + y, + marker="+", + linestyle="", + label="Period Average", + alpha=0.6, + color="orange", + ) # ax2.plot(x, y_virtual, marker="x", linestyle="", label="Period Average (Virtual)", alpha=0.4) # ax2.plot(x, y_node_operators, marker="+", linestyle="", label="Node Operator APR", alpha=0.4) - ax2.plot(x, y_7d, linestyle="-", label=f"{y_7d_claim:.1f} Day Average", color="orange") - ax2.plot(x, y_7d_virtual, linestyle="-", label=f"{y_7d_claim:.1f} Day Average (Virtual)", color="green") - ax1.plot(x, y_effectiveness, linestyle="--", label="Effectiveness", alpha=0.7, color="royalblue") + ax2.plot( + x, + y_7d, + linestyle="-", + label=f"{y_7d_claim:.1f} Day Average", + color="orange", + ) + ax2.plot( + x, + y_7d_virtual, + 
linestyle="-", + label=f"{y_7d_claim:.1f} Day Average (Virtual)", + color="green", + ) + ax1.plot( + x, + y_effectiveness, + linestyle="--", + label="Effectiveness", + alpha=0.7, + color="royalblue", + ) plt.title("Observed rETH APR values") plt.xlabel("Date") @@ -230,7 +265,7 @@ async def reth_apr(self, interaction: Interaction): img = BytesIO() fig.tight_layout() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() plt.close() @@ -240,13 +275,15 @@ async def reth_apr(self, interaction: Interaction): e.set_image(url="attachment://reth_apr.png") - e.add_field(name="Current Average Effective Commission", - value=f"{node_fee:.2%} (Observed pETH Share: {peth_share:.2%})", - inline=False) + e.add_field( + name="Current Average Effective Commission", + value=f"{node_fee:.2%} (Observed pETH Share: {peth_share:.2%})", + inline=False, + ) - e.add_field(name="Effectiveness", - value=f"{y_effectiveness[-1]:.2%}", - inline=False) + e.add_field( + name="Effectiveness", value=f"{y_effectiveness[-1]:.2%}", inline=False + ) await interaction.followup.send(embed=e, file=File(img, "reth_apr.png")) @command() @@ -255,88 +292,75 @@ async def node_apr(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() e.title = "Current NO APR" - e.description = "Dashed red lines above and below the solid red one are leb8 and leb16 respectively. " \ - "The solid line is the protocol average." + e.description = ( + "Dashed red lines above and below the solid red one are leb8 and leb16 respectively. " + "The solid line is the protocol average." + ) # get the last 30 datapoints - datapoints = await self.bot.db.reth_apr.find().sort("block", -1).limit(180 + 38).to_list(None) + datapoints = ( + await self.bot.db.reth_apr.find() + .sort("block", -1) + .limit(180 + 38) + .to_list(None) + ) if len(datapoints) == 0: e.description = "No data available yet." 
return await interaction.followup.send(embed=e) # get average meta.NodeFee from db, weighted by meta.NodeOperatorShare - tmp = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'beacon.status' : 'active_ongoing', - 'node_fee' : { - '$ne': None + tmp = await ( + await self.bot.db.minipools.aggregate( + [ + { + "$match": { + "beacon.status": "active_ongoing", + "node_fee": {"$ne": None}, + "node_deposit_balance": {"$ne": None}, + } }, - 'node_deposit_balance': { - '$ne': None - } - } - }, { - '$project': { - 'fee' : '$node_fee', - 'share': { - '$multiply': [ - { - '$subtract': [ - 1, { - '$divide': [ - '$node_deposit_balance', 32 + { + "$project": { + "fee": "$node_fee", + "share": { + "$multiply": [ + { + "$subtract": [ + 1, + {"$divide": ["$node_deposit_balance", 32]}, ] - } + }, + 100, ] - }, 100 - ] - } - } - }, { - '$group': { - '_id' : None, - 'pre_numerator': { - '$sum': '$fee' - }, - 'numerator' : { - '$sum': { - '$multiply': [ - '$fee', '$share' - ] + }, } }, - 'denominator' : { - '$sum': '$share' - }, - 'count' : { - '$sum': 1 - } - } - }, { - '$project': { - 'average' : { - '$divide': [ - '$numerator', '$denominator' - ] - }, - 'reference_average': { - '$divide': [ - '$pre_numerator', '$count' - ] + { + "$group": { + "_id": None, + "pre_numerator": {"$sum": "$fee"}, + "numerator": {"$sum": {"$multiply": ["$fee", "$share"]}}, + "denominator": {"$sum": "$share"}, + "count": {"$sum": 1}, + } }, - 'used_pETH_share' : { - '$divide': [ - { - '$divide': [ - '$denominator', '$count' + { + "$project": { + "average": {"$divide": ["$numerator", "$denominator"]}, + "reference_average": { + "$divide": ["$pre_numerator", "$count"] + }, + "used_pETH_share": { + "$divide": [ + {"$divide": ["$denominator", "$count"]}, + 100, ] - }, 100 - ] - } - } - } - ])).to_list(length=1) + }, + } + }, + ] + ) + ).to_list(length=1) node_fee = tmp[0]["average"] if len(tmp) > 0 else 0.2 peth_share = tmp[0]["used_pETH_share"] if len(tmp) > 0 else 0.75 @@ -358,16 +382,32 @@ 
async def node_apr(self, interaction: Interaction): # calculate the 7 day average if i > 8: y_7d.append(to_apr(datapoints[i - 9], datapoints[i])) - y_7d_virtual.append(to_apr(datapoints[i - 9], datapoints[i], effective=False)) + y_7d_virtual.append( + to_apr(datapoints[i - 9], datapoints[i], effective=False) + ) bare_apr = y_7d_virtual[-1] / Decimal(1 - node_fee) y_7d_solo.append(bare_apr) peth_share_leb8 = 0.75 - y_7d_node_operators_leb8_14.append(bare_apr * Decimal(1 + (0.14 * peth_share_leb8 / (1 - peth_share_leb8)))) + y_7d_node_operators_leb8_14.append( + bare_apr + * Decimal(1 + (0.14 * peth_share_leb8 / (1 - peth_share_leb8))) + ) peth_share_leb16 = 0.5 - y_7d_node_operators_leb16_05.append(bare_apr * Decimal(1 + (0.05 * peth_share_leb16 / (1 - peth_share_leb16)))) - y_7d_node_operators_leb16_14.append(bare_apr * Decimal(1 + (0.14 * peth_share_leb16 / (1 - peth_share_leb16)))) - y_7d_node_operators_leb16_20.append(bare_apr * Decimal(1 + (0.20 * peth_share_leb16 / (1 - peth_share_leb16)))) - y_7d_claim = get_duration(datapoints[i - 9], datapoints[i]) / (60 * 60 * 24) + y_7d_node_operators_leb16_05.append( + bare_apr + * Decimal(1 + (0.05 * peth_share_leb16 / (1 - peth_share_leb16))) + ) + y_7d_node_operators_leb16_14.append( + bare_apr + * Decimal(1 + (0.14 * peth_share_leb16 / (1 - peth_share_leb16))) + ) + y_7d_node_operators_leb16_20.append( + bare_apr + * Decimal(1 + (0.20 * peth_share_leb16 / (1 - peth_share_leb16))) + ) + y_7d_claim = get_duration(datapoints[i - 9], datapoints[i]) / ( + 60 * 60 * 24 + ) else: # if we dont have enough data, we dont show it y_7d_solo.append(None) @@ -375,25 +415,53 @@ async def node_apr(self, interaction: Interaction): y_7d_node_operators_leb16_05.append(None) y_7d_node_operators_leb16_14.append(None) y_7d_node_operators_leb16_20.append(None) - e.add_field(name=f"{y_7d_claim:.1f} Day Average Node Operator APR:", - value=f"**leb8:** `{y_7d_node_operators_leb8_14[-1]:.2%}`\n" - f"**leb16 5%:** 
`{y_7d_node_operators_leb16_05[-1]:.2%}` | " - f"**leb16 14%:** `{y_7d_node_operators_leb16_14[-1]:.2%}` | " - f"**leb16 20%:** `{y_7d_node_operators_leb16_20[-1]:.2%}`", inline=False) + e.add_field( + name=f"{y_7d_claim:.1f} Day Average Node Operator APR:", + value=f"**leb8:** `{y_7d_node_operators_leb8_14[-1]:.2%}`\n" + f"**leb16 5%:** `{y_7d_node_operators_leb16_05[-1]:.2%}` | " + f"**leb16 14%:** `{y_7d_node_operators_leb16_14[-1]:.2%}` | " + f"**leb16 20%:** `{y_7d_node_operators_leb16_20[-1]:.2%}`", + inline=False, + ) fig = plt.figure() ax1 = plt.gca() # solo apr - ax1.plot(x, y_7d_node_operators_leb8_14, linestyle="-.", - label=f"{y_7d_claim:.1f} Day Average (leb8 14%)", color="red", alpha=0.5) + ax1.plot( + x, + y_7d_node_operators_leb8_14, + linestyle="-.", + label=f"{y_7d_claim:.1f} Day Average (leb8 14%)", + color="red", + alpha=0.5, + ) # use area to show region between leb16 20% and leb16 5%. use a spare dotted fill to show the region between - ax1.fill_between(x, y_7d_node_operators_leb16_20, y_7d_node_operators_leb16_05, alpha=0.2, - color="red", label=f"{y_7d_claim:.1f} Day Average (leb16 5-20%)") + ax1.fill_between( + x, + y_7d_node_operators_leb16_20, + y_7d_node_operators_leb16_05, + alpha=0.2, + color="red", + label=f"{y_7d_claim:.1f} Day Average (leb16 5-20%)", + ) # plot the leb16 14% line - ax1.plot(x, y_7d_node_operators_leb16_14, linestyle="--", - label=f"{y_7d_claim:.1f} Day Average (leb16 14%)", color="red", alpha=0.5) - ax1.plot(x, y_7d_solo, linestyle=":", label=f"{y_7d_claim:.1f} Day Average (solo)", color="black", alpha=0.5) + ax1.plot( + x, + y_7d_node_operators_leb16_14, + linestyle="--", + label=f"{y_7d_claim:.1f} Day Average (leb16 14%)", + color="red", + alpha=0.5, + ) + ax1.plot( + x, + y_7d_solo, + linestyle=":", + label=f"{y_7d_claim:.1f} Day Average (solo)", + color="black", + alpha=0.5, + ) plt.title("Observed NO APR values") plt.grid(True) @@ -408,7 +476,7 @@ async def node_apr(self, interaction: Interaction): img = 
BytesIO() fig.tight_layout() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() plt.close() @@ -416,9 +484,11 @@ async def node_apr(self, interaction: Interaction): # reset the x axis formatter plt.gca().xaxis.set_major_formatter(old_formatter) - e.add_field(name="Current Average Effective Commission:", - value=f"{node_fee:.2%} (Observed pETH Share: {peth_share:.2%})", - inline=False) + e.add_field( + name="Current Average Effective Commission:", + value=f"{node_fee:.2%} (Observed pETH Share: {peth_share:.2%})", + inline=False, + ) e.set_image(url="attachment://no_apr.png") diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 3b5f2ce8..09b3b00d 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -30,14 +30,24 @@ async def _get_new_events(self) -> list[Event]: from_block = self.last_served_block + 1 - self.lookback_distance return await self.get_past_events(from_block, self._pending_block) - async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: - from_slot = max(0, date_to_beacon_block((await w3.eth.get_block(from_block - 1)).timestamp) + 1) + async def get_past_events( + self, from_block: BlockNumber, to_block: BlockNumber + ) -> list[Event]: + from_slot = max( + 0, + date_to_beacon_block((await w3.eth.get_block(from_block - 1)).timestamp) + + 1, + ) to_slot = date_to_beacon_block((await w3.eth.get_block(to_block)).timestamp) - log.info(f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]") + log.info( + f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]" + ) events: list[Event] = [] for slot_number in range(from_slot, to_slot - 1): - events.extend(await self._get_events_for_slot(slot_number, check_finality=False)) + events.extend( + await self._get_events_for_slot(slot_number, 
check_finality=False) + ) # quite expensive and only really makes sense to check toward the head of the chain events.extend(await self._get_events_for_slot(to_slot, check_finality=True)) @@ -45,7 +55,9 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) log.debug("Finished checking beacon chain events") return events - async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) -> list[Event]: + async def _get_events_for_slot( + self, slot_number: int, *, check_finality: bool + ) -> list[Event]: try: log.debug(f"Checking slot {slot_number}") beacon_block = (await bacon.get_block(str(slot_number)))["data"]["message"] @@ -60,7 +72,9 @@ async def _get_events_for_slot(self, slot_number: int, *, check_finality: bool) if proposal_event := await self._get_proposal(beacon_block): events.append(proposal_event) - if check_finality and (finality_delay_event := await self._check_finality(beacon_block)): + if check_finality and ( + finality_delay_event := await self._check_finality(beacon_block) + ): events.append(finality_delay_event) return events @@ -73,24 +87,34 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: for slash in beacon_block["body"]["attester_slashings"]: att_1 = set(slash["attestation_1"]["attesting_indices"]) att_2 = set(slash["attestation_2"]["attesting_indices"]) - slashings.extend({ - "slashing_type": "Attestation", - "validator" : index, - "slasher" : beacon_block["proposer_index"], - "timestamp" : timestamp - } for index in att_1.intersection(att_2)) - - slashings.extend({ - "slashing_type": "Proposal", - "validator" : slash["signed_header_1"]["message"]["proposer_index"], - "slasher" : beacon_block["proposer_index"], - "timestamp" : timestamp - } for slash in beacon_block["body"]["proposer_slashings"]) + slashings.extend( + { + "slashing_type": "Attestation", + "validator": index, + "slasher": beacon_block["proposer_index"], + "timestamp": timestamp, + } + for index in 
att_1.intersection(att_2) + ) + + slashings.extend( + { + "slashing_type": "Proposal", + "validator": slash["signed_header_1"]["message"]["proposer_index"], + "slasher": beacon_block["proposer_index"], + "timestamp": timestamp, + } + for slash in beacon_block["body"]["proposer_slashings"] + ) events = [] for slash in slashings: - minipool = await self.bot.db.minipools.find_one({"validator_index": int(slash["validator"])}) - megapool = await self.bot.db.megapool_validators.find_one({"validator_index": int(slash["validator"])}) + minipool = await self.bot.db.minipools.find_one( + {"validator_index": int(slash["validator"])} + ) + megapool = await self.bot.db.megapool_validators.find_one( + {"validator_index": int(slash["validator"])} + ) if not (minipool or megapool): log.info(f"Skipping slashing of unknown validator {slash['validator']}") continue @@ -108,13 +132,15 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: args = await prepare_args(aDict(slash)) if embed := await assemble(args): - events.append(Event( - topic="beacon_events", - embed=embed, - event_name=slash["event_name"], - unique_id=unique_id, - block_number=await ts_to_block(timestamp), - )) + events.append( + Event( + topic="beacon_events", + embed=embed, + event_name=slash["event_name"], + unique_id=unique_id, + block_number=await ts_to_block(timestamp), + ) + ) return events @@ -129,8 +155,12 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: return None validator_index = int(beacon_block["proposer_index"]) - minipool = await self.bot.db.minipools.find_one({"validator_index": validator_index}) - megapool = await self.bot.db.megapool_validators.find_one({"validator_index": validator_index}) + minipool = await self.bot.db.minipools.find_one( + {"validator_index": validator_index} + ) + megapool = await self.bot.db.megapool_validators.find_one( + {"validator_index": validator_index} + ) if not (minipool or megapool): # not proposed by RP validator return None @@ 
-142,7 +172,10 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: # fetch from beaconcha.in because beacon node is unaware of MEV bribes endpoint = f"https://beaconcha.in/api/v1/execution/block/{block_number}" - async with aiohttp.ClientSession() as session, session.get(endpoint, headers={"apikey": api_key}) as resp: + async with ( + aiohttp.ClientSession() as session, + session.get(endpoint, headers={"apikey": api_key}) as resp, + ): response_body = await resp.json() log.debug(f"{response_body = }") @@ -166,10 +199,12 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: "validator": await cl_explorer_url(validator_index), "slot": int(beacon_block["slot"]), "reward_amount": block_reward_eth, - "timestamp": timestamp + "timestamp": timestamp, } - if eth_utils.is_same_address(fee_recipient, await rp.get_address_by_name("rocketSmoothingPool")): + if eth_utils.is_same_address( + fee_recipient, await rp.get_address_by_name("rocketSmoothingPool") + ): args["event_name"] = "mev_proposal_smoothie_event" args["smoothie_amount"] = await w3.eth.get_balance( w3.to_checksum_address(fee_recipient), block_identifier=block_number @@ -186,7 +221,7 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: embed=embed, event_name=args["event_name"], unique_id=f"mev_proposal:{block_number}:{timestamp}", - block_number=block_number + block_number=block_number, ) async def _check_finality(self, beacon_block: dict) -> Event | None: @@ -197,31 +232,37 @@ async def _check_finality(self, beacon_block: dict) -> Event | None: try: # calculate finality delay finality_checkpoint = await bacon.get_finality_checkpoint(str(slot_number)) - last_finalized_epoch = int(finality_checkpoint["data"]["finalized"]["epoch"]) + last_finalized_epoch = int( + finality_checkpoint["data"]["finalized"]["epoch"] + ) finality_delay = epoch_number - last_finalized_epoch except aiohttp.ClientResponseError: log.exception("Failed to get finality checkpoints") return None 
# latest finality delay from db - delay_entry = await self.bot.db.finality_checkpoints.find_one({"epoch": epoch_number - 1}) + delay_entry = await self.bot.db.finality_checkpoints.find_one( + {"epoch": epoch_number - 1} + ) prev_finality_delay = delay_entry["finality_delay"] if delay_entry else 0 await self.bot.db.finality_checkpoints.update_one( {"epoch": epoch_number}, {"$set": {"finality_delay": finality_delay}}, - upsert=True + upsert=True, ) # if finality delay recovers, notify if finality_delay < self.finality_delay_threshold <= prev_finality_delay: - log.info(f"Finality delay recovered from {prev_finality_delay} to {finality_delay}") + log.info( + f"Finality delay recovered from {prev_finality_delay} to {finality_delay}" + ) event_name = "finality_delay_recover_event" args = { "event_name": event_name, "finality_delay": finality_delay, "timestamp": timestamp, - "epoch": epoch_number + "epoch": epoch_number, } args = await prepare_args(aDict(args)) if not (embed := await assemble(args)): @@ -232,18 +273,20 @@ async def _check_finality(self, beacon_block: dict) -> Event | None: embed=embed, event_name=event_name, unique_id=f"finality_delay_recover:{epoch_number}", - block_number=await ts_to_block(timestamp) + block_number=await ts_to_block(timestamp), ) return event - if finality_delay >= max(prev_finality_delay + 1, self.finality_delay_threshold): + if finality_delay >= max( + prev_finality_delay + 1, self.finality_delay_threshold + ): log.warning(f"Finality increased to {finality_delay} epochs") event_name = "finality_delay_event" args = { - "event_name" : event_name, + "event_name": event_name, "finality_delay": finality_delay, - "timestamp" : timestamp, - "epoch" : epoch_number + "timestamp": timestamp, + "epoch": epoch_number, } args = await prepare_args(aDict(args)) if not (embed := await assemble(args)): @@ -254,7 +297,7 @@ async def _check_finality(self, beacon_block: dict) -> Event | None: embed=embed, event_name=event_name, 
unique_id=f"{epoch_number}:finality_delay", - block_number=await ts_to_block(timestamp) + block_number=await ts_to_block(timestamp), ) return None diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 43474cd0..59ef433a 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -30,12 +30,16 @@ def __init__(self, cog, function, block, address, raw_output, abi_inputs): self.abi_inputs = abi_inputs self.param_inputs = [] for inp in abi_inputs: - text_input = TextInput(label=f"{inp['name']} ({inp['type']})"[:45], required=True) + text_input = TextInput( + label=f"{inp['name']} ({inp['type']})"[:45], required=True + ) self.add_item(text_input) self.param_inputs.append(text_input) async def on_submit(self, interaction): - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + await interaction.response.defer( + ephemeral=is_hidden_role_controlled(interaction) + ) args = [] errors = [] for text_input, inp in zip(self.param_inputs, self.abi_inputs, strict=True): @@ -48,9 +52,13 @@ async def on_submit(self, interaction): else: args.append(val) if errors: - await interaction.followup.send(content="Validation failed:\n" + "\n".join(errors)) + await interaction.followup.send( + content="Validation failed:\n" + "\n".join(errors) + ) return - await self.cog._execute_call(interaction, self.function, args, self.block, self.address, self.raw_output) + await self.cog._execute_call( + interaction, self.function, args, self.block, self.address, self.raw_output + ) @staticmethod def _validate(value, abi_type): @@ -89,7 +97,9 @@ async def on_ready(self): try: c = await rp.get_contract_by_name(contract) for entry in c.abi: - if entry.get("type") == "function" and entry.get("stateMutability") in ("view", "pure"): + if entry.get("type") == "function" and entry.get( + "stateMutability" + ) in ("view", "pure"): func_id = f"{entry['name']}({','.join(inp['type'] for inp in entry.get('inputs', []))})" 
self.function_names.append(f"{contract}.{func_id}") except Exception: @@ -103,7 +113,7 @@ async def call( function: str, block: str = "latest", address: str | None = None, - raw_output: bool = False + raw_output: bool = False, ): """Manually call a function on a protocol contract""" if block.isnumeric(): @@ -127,12 +137,23 @@ async def call( modal = CallModal(self, function, block, address, raw_output, abi_inputs) await interaction.response.send_modal(modal) else: - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) - await self._execute_call(interaction, function, [], block, address, raw_output) - - async def _execute_call(self, interaction, function, args, block, address, raw_output): + await interaction.response.defer( + ephemeral=is_hidden_role_controlled(interaction) + ) + await self._execute_call( + interaction, function, [], block, address, raw_output + ) + + async def _execute_call( + self, interaction, function, args, block, address, raw_output + ): try: - v = await rp.call(function, *args, block=block, address=w3.to_checksum_address(address) if address else None) + v = await rp.call( + function, + *args, + block=block, + address=w3.to_checksum_address(address) if address else None, + ) except Exception as err: await interaction.followup.send(content=f"Exception: ```{err!r}```") return @@ -140,24 +161,37 @@ async def _execute_call(self, interaction, function, args, block, address, raw_o g = await rp.estimate_gas_for_call(function, *args, block=block) except Exception as err: g = "N/A" - if isinstance(err, ValueError) and err.args and "code" in err.args and err.args[0]["code"] == -32000: + if ( + isinstance(err, ValueError) + and err.args + and "code" in err.args + and err.args[0]["code"] == -32000 + ): g += f" ({err.args[0]['message']})" - if isinstance(v, int) and abs(v) >= 10 ** 12 and not raw_output: + if isinstance(v, int) and abs(v) >= 10**12 and not raw_output: v = solidity.to_float(v) g = humanize.intcomma(g) 
func_name = function.split("(")[0] text = f"`block: {block}`\n`gas estimate: {g}`\n`{func_name}({', '.join([repr(a) for a in args])}): " if len(text + str(v)) > 2000: text += "too long, attached as file`" - await interaction.followup.send(text, file=File(io.StringIO(str(v)), "exception.txt")) + await interaction.followup.send( + text, file=File(io.StringIO(str(v)), "exception.txt") + ) else: text += f"{v!s}`" await interaction.followup.send(content=text) @call.autocomplete("function") - async def match_function_name(self, interaction: Interaction, current: str) -> list[Choice[str]]: - return [Choice(name=name, value=name) for name in self.function_names if current.lower() in name.lower()][:25] + async def match_function_name( + self, interaction: Interaction, current: str + ) -> list[Choice[str]]: + return [ + Choice(name=name, value=name) + for name in self.function_names + if current.lower() in name.lower() + ][:25] async def setup(bot): diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py index b7659ad4..0d2a33f7 100644 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ b/rocketwatch/plugins/chat_summary/chat_summary.py @@ -40,12 +40,16 @@ def message_to_text(cls, message, index): if message.embeds: metadata.append(f"{len(message.embeds)} embeds") # replies and make sure the reference is not deleted - if message.reference and not isinstance(message.reference.resolved, - DeletedReferencedMessage) and message.reference.resolved: + if ( + message.reference + and not isinstance(message.reference.resolved, DeletedReferencedMessage) + and message.reference.resolved + ): # show name of referenced message author # and the first 10 characters of the referenced message metadata.append( - f"reply to \"{message.reference.resolved.content[:32]}…\" from {message.reference.resolved.author.name}") + f'reply to "{message.reference.resolved.content[:32]}…" from {message.reference.resolved.author.name}' + ) if 
metadata: text += f" <{', '.join(metadata)}>\n" # replace all <@[0-9]+> with the name of the user @@ -59,17 +63,26 @@ def message_to_text(cls, message, index): @is_owner() async def summarize_chat(self, interaction: Interaction): await interaction.response.defer(ephemeral=True) - last_ts = await self.bot.db["last_summary"].find_one({"channel_id": interaction.channel.id}) + last_ts = await self.bot.db["last_summary"].find_one( + {"channel_id": interaction.channel.id} + ) # ratelimit - if last_ts and (datetime.now(UTC) - last_ts["timestamp"].replace(tzinfo=pytz.utc)) < timedelta(hours=6): - await interaction.followup.send("You can only summarize once every 6 hours.", ephemeral=True) + if last_ts and ( + datetime.now(UTC) - last_ts["timestamp"].replace(tzinfo=pytz.utc) + ) < timedelta(hours=6): + await interaction.followup.send( + "You can only summarize once every 6 hours.", ephemeral=True + ) return if interaction.channel.id not in [405163713063288832]: await interaction.followup.send("You can't summarize here.", ephemeral=True) return msg = await interaction.channel.send("Summarizing chat…") - last_ts = last_ts["timestamp"].replace( - tzinfo=pytz.utc) if last_ts and "timestamp" in last_ts else datetime.now(UTC) - timedelta(days=365) + last_ts = ( + last_ts["timestamp"].replace(tzinfo=pytz.utc) + if last_ts and "timestamp" in last_ts + else datetime.now(UTC) - timedelta(days=365) + ) prompt = ( "Task Description:\n" "I need a summary of the entire chat log. This summary should be presented in the form of a bullet list.\n\n" @@ -93,13 +106,17 @@ async def summarize_chat(self, interaction: Interaction): "----------------\n\n" "Please begin the task now." 
) - response, prompt, msgs = await self.prompt_model(interaction.channel, prompt, last_ts) + response, prompt, msgs = await self.prompt_model( + interaction.channel, prompt, last_ts + ) if not response: await msg.delete() await interaction.followup.send(content="Not enough messages to summarize.") return es = [Embed()] - es[0].title = f"Chat Summarization of {msgs} messages since {last_ts.strftime('%Y-%m-%d %H:%M')}" + es[ + 0 + ].title = f"Chat Summarization of {msgs} messages since {last_ts.strftime('%Y-%m-%d %H:%M')}" res = response.content[-1].text # split content in multiple embeds if it is too long. limit for description is 4096 while len(res): @@ -125,11 +142,15 @@ async def summarize_chat(self, interaction: Interaction): f"Request cost: ${token_usage / 1000000 * 3:.2f}" f" | Tokens: {response.usage.input_tokens + response.usage.output_tokens}" " | /donate if you like this command" - )) + ) + ) # attach the prompt as a file f = BytesIO(prompt.encode("utf-8")) f.name = "prompt._log" - f = File(f, filename=f"prompt_log_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}._log") + f = File( + f, + filename=f"prompt_log_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}._log", + ) # send message in the channel await interaction.followup.send("done", ephemeral=True) await msg.edit(embeds=es, attachments=[f]) @@ -137,26 +158,39 @@ async def summarize_chat(self, interaction: Interaction): await self.bot.db["last_summary"].update_one( {"channel_id": interaction.channel.id}, {"$set": {"timestamp": datetime.now(UTC)}}, - upsert=True + upsert=True, ) # a function that generates the prompt for the model by taking an array of messages, a prefix and a suffix def generate_prompt(self, messages, prefix, suffix): messages.sort(key=lambda x: x.created_at) - prompt = "\n".join([self.message_to_text(message, i) for i, message in enumerate(messages)]).replace("\n\n", "\n") + prompt = "\n".join( + [self.message_to_text(message, i) for i, message in enumerate(messages)] + ).replace("\n\n", 
"\n") return f"{prefix}\n\n{prompt}\n\n{suffix}" async def prompt_model( - self, channel: TextChannel, prompt: str, cut_off_ts: int + self, channel: TextChannel, prompt: str, cut_off_ts: int ) -> tuple[anthropic.types.Message, str, int]: - messages = [message async for message in channel.history(limit=4096) if message.content != ""] - messages = [message for message in messages if message.author.id != self.bot.user.id] + messages = [ + message + async for message in channel.history(limit=4096) + if message.content != "" + ] + messages = [ + message for message in messages if message.author.id != self.bot.user.id + ] messages = [message for message in messages if message.created_at > cut_off_ts] if len(messages) < 320: return None, None, None prefix = "The following is a chat log. Everything prefixed with `>` is a quote." - log.info(f"Prompt len: {len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt)))}") - while len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt))) > 100000 - 4096: + log.info( + f"Prompt len: {len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt)))}" + ) + while ( + len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt))) + > 100000 - 4096 + ): # remove the oldest message messages.pop(0) prompt = self.generate_prompt(messages, prefix, prompt) @@ -164,7 +198,7 @@ async def prompt_model( response = await self.client.messages.create( model="claude-3-sonnet-20240229", # Update this to the desired model max_tokens=4096, - messages=[{"role": "user", "content": prompt}] + messages=[{"role": "user", "content": prompt}], ) # find all {message:index} in response["choices"][0]["message"]["content"] log.debug(response.content[-1].text) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 941bdd1d..dfabcdb8 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -27,7 +27,7 
@@ def get_percentiles(percentiles, counts): for p in percentiles: - yield p, np.percentile(counts, p, method='nearest') + yield p, np.percentile(counts, p, method="nearest") async def collateral_distribution_raw(interaction: Interaction, distribution): @@ -35,8 +35,7 @@ async def collateral_distribution_raw(interaction: Interaction, distribution): e.title = "Collateral Distribution" description = "```\n" for collateral, nodes in distribution: - description += f"{collateral:>5}%: " \ - f"{nodes:>4} {p.plural('node', nodes)}\n" + description += f"{collateral:>5}%: {nodes:>4} {p.plural('node', nodes)}\n" description += "```" e.description = description await interaction.followup.send(embed=e) @@ -49,26 +48,35 @@ async def get_node_minipools_and_collateral() -> dict[ChecksumAddress, dict[str, nodes = await rp.call("rocketNodeManager.getNodeAddresses", 0, 10_000) for node_batch in as_chunks(nodes, 500): - eb16s += await rp.multicall([ - minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 16 * 10**18) for node in node_batch - ]) - eb8s += await rp.multicall([ - minipool_manager.functions.getNodeStakingMinipoolCountBySize(node, 8 * 10**18) for node in node_batch - ]) - rpl_stakes += await rp.multicall([ - node_staking.functions.getNodeStakedRPL(node) for node in node_batch - ]) + eb16s += await rp.multicall( + [ + minipool_manager.functions.getNodeStakingMinipoolCountBySize( + node, 16 * 10**18 + ) + for node in node_batch + ] + ) + eb8s += await rp.multicall( + [ + minipool_manager.functions.getNodeStakingMinipoolCountBySize( + node, 8 * 10**18 + ) + for node in node_batch + ] + ) + rpl_stakes += await rp.multicall( + [node_staking.functions.getNodeStakedRPL(node) for node in node_batch] + ) return { - nodes[i]: { - "eb8s" : eb8s[i], - "eb16s" : eb16s[i], - "rplStaked": rpl_stakes[i] - } for i in range(len(nodes)) + nodes[i]: {"eb8s": eb8s[i], "eb16s": eb16s[i], "rplStaked": rpl_stakes[i]} + for i in range(len(nodes)) } -async def 
get_average_collateral_percentage_per_node(collateral_cap: int | None, bonded: bool): +async def get_average_collateral_percentage_per_node( + collateral_cap: int | None, bonded: bool +): # get stakes for each node stakes = list((await get_node_minipools_and_collateral()).values()) # get the current rpl price @@ -77,7 +85,9 @@ async def get_average_collateral_percentage_per_node(collateral_cap: int | None, node_collaterals = [] for node in stakes: # get the minipool eth value - minipool_value = int(node["eb16s"]) * 16 + int(node["eb8s"]) * (8 if bonded else 24) + minipool_value = int(node["eb16s"]) * 16 + int(node["eb8s"]) * ( + 8 if bonded else 24 + ) if not minipool_value: continue # rpl stake value @@ -92,7 +102,9 @@ async def get_average_collateral_percentage_per_node(collateral_cap: int | None, effective_bound = max(perc for rpl, perc in node_collaterals) possible_step_sizes = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100] - step_size = possible_step_sizes[np.argmin([abs(effective_bound / 30 - s) for s in possible_step_sizes])] + step_size = possible_step_sizes[ + np.argmin([abs(effective_bound / 30 - s) for s in possible_step_sizes]) + ] result = {} for rpl_stake, percentage in node_collaterals: @@ -109,12 +121,16 @@ def __init__(self, bot: RocketWatch): self.bot = bot @command() - @describe(node_address="Node Address or ENS to highlight", - bonded="Calculate collateral as a percent of bonded eth instead of borrowed") - async def node_tvl_vs_collateral(self, - interaction: Interaction, - node_address: str | None = None, - bonded: bool = False): + @describe( + node_address="Node Address or ENS to highlight", + bonded="Calculate collateral as a percent of bonded eth instead of borrowed", + ) + async def node_tvl_vs_collateral( + self, + interaction: Interaction, + node_address: str | None = None, + bonded: bool = False, + ): """ Show a scatter plot of collateral ratios for given node TVLs """ @@ -173,12 +189,12 @@ def node_minipools(node): # Add a legend for the 
color-coding on the scatter plot formatToInt = "{x:.0f}" cb = plt.colorbar(mappable=paths, ax=ax, format=formatToInt) - cb.set_label('Minipools') + cb.set_label("Minipools") cb.set_ticks([1, 10, 100, max_minipools]) # Add a legend for the color-coding on the hex distribution cb = plt.colorbar(mappable=polys, ax=ax2, format=formatToInt) - cb.set_label('Nodes') + cb.set_label("Nodes") cb.set_ticks([1, 10, 100, max_nodes - 1]) # Add labels and units @@ -196,11 +212,13 @@ def node_minipools(node): # Print a vline and hline through the requested node try: target_node = data[address] - ax.plot(node_tvl(target_node), node_collateral(target_node), 'ro') - ax2.plot(node_tvl(target_node), node_collateral(target_node), 'ro') + ax.plot(node_tvl(target_node), node_collateral(target_node), "ro") + ax2.plot(node_tvl(target_node), node_collateral(target_node), "ro") e.description = f"Showing location of {display_name}" except KeyError: - await interaction.followup.send(f"{display_name} not found in data set - it must have at least one minipool") + await interaction.followup.send( + f"{display_name} not found in data set - it must have at least one minipool" + ) return # Add horizontal lines showing the 10-15% range made optimal by RPIP-30 @@ -210,7 +228,7 @@ def node_minipools(node): fig.tight_layout() img = BytesIO() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() plt.close() @@ -222,21 +240,32 @@ def node_minipools(node): img.close() @command() - @describe(raw="Show Raw Distribution Data", - bonded="Calculate collateral as percent of bonded eth instead of borrowed") - async def collateral_distribution(self, - interaction: Interaction, - raw: bool = False, - collateral_cap: int = 15, - bonded: bool = False): + @describe( + raw="Show Raw Distribution Data", + bonded="Calculate collateral as percent of bonded eth instead of borrowed", + ) + async def collateral_distribution( + self, + interaction: Interaction, + raw: bool = False, + 
collateral_cap: int = 15, + bonded: bool = False, + ): """ Show the distribution of collateral across nodes. """ await interaction.response.defer(ephemeral=is_hidden(interaction)) data = await get_average_collateral_percentage_per_node(collateral_cap, bonded) - distribution = [(collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])] - counts = functools.reduce(operator.iadd, ([collateral] * num_nodes for collateral, num_nodes in distribution), []) + distribution = [ + (collateral, len(nodes)) + for collateral, nodes in sorted(data.items(), key=lambda x: x[0]) + ] + counts = functools.reduce( + operator.iadd, + ([collateral] * num_nodes for collateral, num_nodes in distribution), + [], + ) # If the raw data were requested, print them and exit early if raw: @@ -251,28 +280,37 @@ async def collateral_distribution(self, bars = dict(distribution) x_keys = [str(x) for x in bars] - rects = ax.bar(x_keys, bars.values(), color=str(e.color), align='edge') + rects = ax.bar(x_keys, bars.values(), color=str(e.color), align="edge") ax.bar_label(rects) - ax.set_xticklabels(x_keys, rotation='vertical') - ax.set_xlabel(f"Collateral Percent of { 'Bonded' if bonded else 'Borrowed'} Eth") + ax.set_xticklabels(x_keys, rotation="vertical") + ax.set_xlabel(f"Collateral Percent of {'Bonded' if bonded else 'Borrowed'} Eth") ax.set_ylim(top=(ax.get_ylim()[1] * 1.1)) ax.yaxis.set_visible(False) - ax.get_xaxis().set_major_formatter(FuncFormatter( - lambda n, _: f"{x_keys[n] if n < len(x_keys) else 0}{'+' if n == len(x_keys)-1 else ''}%") + ax.get_xaxis().set_major_formatter( + FuncFormatter( + lambda n, _: ( + f"{x_keys[n] if n < len(x_keys) else 0}{'+' if n == len(x_keys) - 1 else ''}%" + ) + ) ) - bars = {collateral: sum(nodes) for collateral, nodes in sorted(data.items(), key=lambda x: x[0])} + bars = { + collateral: sum(nodes) + for collateral, nodes in sorted(data.items(), key=lambda x: x[0]) + } line = ax2.plot(x_keys, [bars.get(float(x), 0) for x in 
x_keys]) ax2.set_ylim(top=(ax2.get_ylim()[1] * 1.1)) - ax2.tick_params(axis='y', colors=line[0].get_color()) - ax2.get_yaxis().set_major_formatter(FuncFormatter(lambda y, _: f"{int(y / 10 ** 3)}k")) + ax2.tick_params(axis="y", colors=line[0].get_color()) + ax2.get_yaxis().set_major_formatter( + FuncFormatter(lambda y, _: f"{int(y / 10**3)}k") + ) fig.tight_layout() ax.legend(rects, ["Node Operators"], loc="upper left") ax2.legend(line, ["Effective Staked RPL"], loc="upper right") - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() @@ -281,8 +319,10 @@ async def collateral_distribution(self, e.title = "Average Collateral Distribution" e.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") - percentile_strings = [f"{x[0]}th percentile: {int(x[1])}% collateral" for x in - get_percentiles([50, 75, 90, 99], counts)] + percentile_strings = [ + f"{x[0]}th percentile: {int(x[1])}% collateral" + for x in get_percentiles([50, 75, 90, 99], counts) + ] e.description = f"Total Effective Staked RPL: {sum(bars.values()):,}" e.set_footer(text="\n".join(percentile_strings)) await interaction.followup.send(embed=e, files=[f]) diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index 198bb2a6..b62273d0 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -26,9 +26,11 @@ async def commission_history(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden(interaction)) - e = Embed(title='Commission History') + e = Embed(title="Commission History") - minipools = await self.bot.db.minipools.find().sort("validator_index", 1).to_list(None) + minipools = ( + await self.bot.db.minipools.find().sort("validator_index", 1).to_list(None) + ) # create dot chart of minipools # x-axis: validator # y-axis: node_fee @@ -69,7 +71,9 @@ async def commission_history(self, interaction: 
Interaction): e.add_field(name="Bar Width", value=f"{step_size} minipools") # send data - await interaction.followup.send(content="", embed=e, files=[File(img, filename="chart.png")]) + await interaction.followup.send( + content="", embed=e, files=[File(img, filename="chart.png")] + ) img.close() diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 64dffd5e..7a285090 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -50,14 +50,16 @@ async def _ensure_tokens(self): if self.tokens is None: self.tokens = [ str(await rp.get_address_by_name("rocketTokenRPL")).lower(), - str(await rp.get_address_by_name("rocketTokenRETH")).lower() + str(await rp.get_address_by_name("rocketTokenRETH")).lower(), ] async def _get_new_events(self) -> list[Event]: await self._ensure_tokens() await self._setup_collection() if self.state == "RUNNING": - log.error("Cow Orders plugin was interrupted while running. Re-initializing...") + log.error( + "Cow Orders plugin was interrupted while running. Re-initializing..." 
+ ) self.__init__(self.bot) self.state = "RUNNING" try: @@ -76,7 +78,10 @@ async def check_for_new_events(self): # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - async with aiohttp.ClientSession() as session, session.get("https://api.cow.fi/mainnet/api/v1/auction") as response: + async with ( + aiohttp.ClientSession() as session, + session.get("https://api.cow.fi/mainnet/api/v1/auction") as response, + ): if response.status != 200: text = await response.text() log.error("Cow API returned non-200 status code: %s", text) @@ -125,7 +130,11 @@ async def check_for_new_events(self): """ # filter all orders that do not contain RPL - cow_orders = [order for order in cow_orders if order["sellToken"] in self.tokens or order["buyToken"] in self.tokens] + cow_orders = [ + order + for order in cow_orders + if order["sellToken"] in self.tokens or order["buyToken"] in self.tokens + ] # filter all orders that are not open cow_orders = [order for order in cow_orders if order["executed"] == "0"] @@ -136,7 +145,9 @@ async def check_for_new_events(self): existing_order_uids = [order["order_uid"] async for order in existing_orders] # filter all orders that are already in the database - cow_orders = [order for order in cow_orders if order["uid"] not in existing_order_uids] + cow_orders = [ + order for order in cow_orders if order["uid"] not in existing_order_uids + ] if not cow_orders: return [] @@ -162,48 +173,72 @@ async def check_for_new_events(self): data["ratio"] = int(order["sellAmount"]) / int(order["buyAmount"]) # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["sellAmount"])) - s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["buyToken"])) + s = await rp.assemble_contract( + name="ERC20", address=w3.to_checksum_address(order["buyToken"]) + ) with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - data["otherAmount"] = 
solidity.to_float(int(order["buyAmount"]), decimals) + data["otherAmount"] = solidity.to_float( + int(order["buyAmount"]), decimals + ) else: token = "reth" if order["buyToken"] == self.tokens[1] else "rpl" data["event_name"] = f"cow_order_buy_{token}_found" # store rpl and other token amount data["ourAmount"] = solidity.to_float(int(order["buyAmount"])) - s = await rp.assemble_contract(name="ERC20", address=w3.to_checksum_address(order["sellToken"])) + s = await rp.assemble_contract( + name="ERC20", address=w3.to_checksum_address(order["sellToken"]) + ) with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - data["otherAmount"] = solidity.to_float(int(order["sellAmount"]), decimals) + data["otherAmount"] = solidity.to_float( + int(order["sellAmount"]), decimals + ) # our/other ratio data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] try: data["otherToken"] = await s.functions.symbol().call() except Exception: data["otherToken"] = "UNKWN" - if s.address == w3.to_checksum_address("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"): + if s.address == w3.to_checksum_address( + "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee" + ): data["otherToken"] = "ETH" data["deadline"] = int(order["validTo"]) # if the rpl value in usd is less than 25k, ignore it - if data["ourAmount"] * (rpl_price if token == "rpl" else reth_price) < 25000: + if ( + data["ourAmount"] * (rpl_price if token == "rpl" else reth_price) + < 25000 + ): continue # request more data from the api try: - async with aiohttp.ClientSession() as session, session.get(f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}") as t: + async with ( + aiohttp.ClientSession() as session, + session.get( + f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}" + ) as t, + ): if t.status != 200: - log.error(f"Failed to get more data from the cow api for order {order['uid']}: {await t.text()}") + log.error( + f"Failed to get more data from the cow api 
for order {order['uid']}: {await t.text()}" + ) continue extra = await t.json() except Exception as e: - log.error(f"Failed to get more data from the cow api for order {order['uid']}: {e}") + log.error( + f"Failed to get more data from the cow api for order {order['uid']}: {e}" + ) continue if extra: if extra["invalidated"]: log.info(f"Order {order['uid']} is invalidated, skipping") continue - created = datetime.fromisoformat(extra["creationDate"].replace("Z", "+00:00")) + created = datetime.fromisoformat( + extra["creationDate"].replace("Z", "+00:00") + ) if datetime.now(UTC) - created > timedelta(minutes=15): log.info(f"Order {order['uid']} is older than 15 minutes, skipping") continue @@ -211,19 +246,23 @@ async def check_for_new_events(self): data = await prepare_args(data) embed = await assemble(data) - payload.append(Event( - embed=embed, - topic="cow_orders", - block_number=self._pending_block, - event_name=data["event_name"], - unique_id=f"cow_order_found_{order['uid']}" - )) + payload.append( + Event( + embed=embed, + topic="cow_orders", + block_number=self._pending_block, + event_name=data["event_name"], + unique_id=f"cow_order_found_{order['uid']}", + ) + ) # don't emit if the db collection is empty - this is to prevent the bot from spamming the channel with stale data if not await self.collection.count_documents({}): payload = [] # insert all new orders into the database - await self.collection.insert_many([{"order_uid": order["uid"]} for order in cow_orders]) + await self.collection.insert_many( + [{"order_uid": order["uid"]} for order in cow_orders] + ) log.debug("Finished Checking Cow Orders") return payload diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index eff482e2..0181a0cd 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -36,30 +36,53 @@ async def get_dao_votes_embed(dao: DefaultDAO, full: bool) -> Embed: for state, ids in (await dao.get_proposal_ids_by_state()).items(): if state 
in current_proposals: - current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) + current_proposals[state].extend( + [await dao.fetch_proposal(pid) for pid in ids] + ) parts = [] for proposal in current_proposals[dao.ProposalState.Pending]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=False, + include_payload=full, + ) parts.append( f"**Proposal #{proposal.id}** - Pending\n```{body}```" - f"Voting starts , ends .") + f"Voting starts , ends ." + ) for proposal in current_proposals[dao.ProposalState.Active]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) - parts.append(f"**Proposal #{proposal.id}** - Active\n```{body}```Voting ends .") + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=True, + include_payload=full, + ) + parts.append( + f"**Proposal #{proposal.id}** - Active\n```{body}```Voting ends ." + ) for proposal in current_proposals[dao.ProposalState.Succeeded]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=full, + include_payload=full, + ) parts.append( - f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") + f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires ." + ) return Embed( title=f"{dao.display_name} Proposals", - description="\n\n".join(parts) or "No active proposals." 
+ description="\n\n".join(parts) or "No active proposals.", ) @staticmethod async def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: - current_proposals: dict[ProtocolDAO.ProposalState, list[ProtocolDAO.Proposal]] = { + current_proposals: dict[ + ProtocolDAO.ProposalState, list[ProtocolDAO.Proposal] + ] = { dao.ProposalState.Pending: [], dao.ProposalState.ActivePhase1: [], dao.ProposalState.ActivePhase2: [], @@ -68,41 +91,67 @@ async def get_pdao_votes_embed(dao: ProtocolDAO, full: bool) -> Embed: for state, ids in (await dao.get_proposal_ids_by_state()).items(): if state in current_proposals: - current_proposals[state].extend([await dao.fetch_proposal(pid) for pid in ids]) + current_proposals[state].extend( + [await dao.fetch_proposal(pid) for pid in ids] + ) parts = [] for proposal in current_proposals[dao.ProposalState.Pending]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=False, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=False, + include_payload=full, + ) parts.append( f"**Proposal #{proposal.id}** - Pending\n```{body}```" - f"Voting starts , ends .") + f"Voting starts , ends ." + ) for proposal in current_proposals[dao.ProposalState.ActivePhase1]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=True, + include_payload=full, + ) parts.append( f"**Proposal #{proposal.id}** - Active (Phase 1)\n```{body}```" - f"Next phase , voting ends .") + f"Next phase , voting ends ." 
+ ) for proposal in current_proposals[dao.ProposalState.ActivePhase2]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=True, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=True, + include_payload=full, + ) parts.append( - f"**Proposal #{proposal.id}** - Active (Phase 2)\n```{body}```Voting ends .") + f"**Proposal #{proposal.id}** - Active (Phase 2)\n```{body}```Voting ends ." + ) for proposal in current_proposals[dao.ProposalState.Succeeded]: - body = await dao.build_proposal_body(proposal, include_proposer=full, include_votes=full, include_payload=full) + body = await dao.build_proposal_body( + proposal, + include_proposer=full, + include_votes=full, + include_payload=full, + ) parts.append( - f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires .") + f"**Proposal #{proposal.id}** - Succeeded (Not Yet Executed)\n```{body}```Expires ." + ) return Embed( title="pDAO Proposals", - description="\n\n".join(parts) or "No active proposals." + description="\n\n".join(parts) or "No active proposals.", ) @command() @describe(dao_name="DAO to show proposals for") @describe(full="show all information (e.g. 
payload)") async def dao_votes( - self, - interaction: Interaction, - dao_name: Literal["oDAO", "pDAO", "Security Council"] = "pDAO", - full: bool = False + self, + interaction: Interaction, + dao_name: Literal["oDAO", "pDAO", "Security Council"] = "pDAO", + full: bool = False, ) -> None: """Show currently active on-chain proposals""" await interaction.response.defer(ephemeral=is_hidden(interaction)) @@ -140,7 +189,9 @@ async def _ensure_voter_list(self): return self._voter_list = await self._get_voter_list(self.proposal) - async def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['OnchainDAO.Vote']: + async def _get_voter_list( + self, proposal: ProtocolDAO.Proposal + ) -> list["OnchainDAO.Vote"]: voters: dict[ChecksumAddress, OnchainDAO.Vote] = {} dao = ProtocolDAO() proposal_contract = await dao._get_proposal_contract() @@ -149,13 +200,13 @@ async def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['Onchain proposal_contract.events.ProposalVoted, await ts_to_block(proposal.start) - 1, await ts_to_block(proposal.end_phase_2) + 1, - {"proposalID": proposal.id} + {"proposalID": proposal.id}, ): vote = OnchainDAO.Vote( vote_log.args.voter, vote_log.args.direction, solidity.to_float(vote_log.args.votingPower), - vote_log.args.time + vote_log.args.time, ) voters[vote.voter] = vote @@ -163,7 +214,7 @@ async def _get_voter_list(self, proposal: ProtocolDAO.Proposal) -> list['Onchain proposal_contract.events.ProposalVoteOverridden, await ts_to_block(proposal.end_phase_1) - 1, await ts_to_block(proposal.end_phase_2) + 1, - {"proposalID": proposal.id} + {"proposalID": proposal.id}, ): voting_power = solidity.to_float(override_log.args.votingPower) voters[override_log.args.delegate].voting_power -= voting_power @@ -178,8 +229,14 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: await self._ensure_voter_list() headers = ["#", "Voter", "Choice", "Weight"] data = [] - for i, voter in 
enumerate(self._voter_list[from_idx:(to_idx + 1)], start=from_idx): - name = (await el_explorer_url(voter.voter, prefix=-1)).split("[")[1].split("]")[0] + for i, voter in enumerate( + self._voter_list[from_idx : (to_idx + 1)], start=from_idx + ): + name = ( + (await el_explorer_url(voter.voter, prefix=-1)) + .split("[")[1] + .split("]")[0] + ) vote = ["", "Abstain", "For", "Against", "Veto"][voter.direction] voting_power = f"{voter.voting_power:,.2f}" data.append([i + 1, name, vote, voting_power]) @@ -190,7 +247,9 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: table = tabulate(data, headers, colalign=("right", "left", "left", "right")) return len(self._voter_list), f"```{table}```" - async def _get_recent_proposals(self, interaction: Interaction, current: str) -> list[Choice[int]]: + async def _get_recent_proposals( + self, interaction: Interaction, current: str + ) -> list[Choice[int]]: dao = ProtocolDAO() proposal_contract = await dao._get_proposal_contract() num_proposals = await proposal_contract.functions.getTotal().call() @@ -204,10 +263,16 @@ async def _get_recent_proposals(self, interaction: Interaction, current: str) -> else: suggestions = list(range(1, num_proposals + 1))[:-26:-1] - titles: list[str] = await rp.multicall([ - proposal_contract.functions.getMessage(proposal_id) for proposal_id in suggestions - ]) - return [Choice(name=f"#{pid}: {title}", value=pid) for pid, title in zip(suggestions, titles, strict=False)] + titles: list[str] = await rp.multicall( + [ + proposal_contract.functions.getMessage(proposal_id) + for proposal_id in suggestions + ] + ) + return [ + Choice(name=f"#{pid}: {title}", value=pid) + for pid, title in zip(suggestions, titles, strict=False) + ] @command() @describe(proposal="proposal to show voters for") diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 5490d050..871f329b 100644 --- 
a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -56,23 +56,23 @@ def safe_inv(num): def _parse_epoch(value): epoch = int(value) - return epoch if epoch < 2 ** 32 else None + return epoch if epoch < 2**32 else None def _derive_validator_status(info): - if info[9]: # dissolved + if info[9]: # dissolved return "dissolved" - if info[5]: # exited + if info[5]: # exited return "exited" - if info[6]: # inQueue + if info[6]: # inQueue return "in_queue" - if info[7]: # inPrestake + if info[7]: # inPrestake return "prestaked" if info[11]: # locked return "locked" if info[10]: # exiting return "exiting" - if info[4]: # staked + if info[4]: # staked return "staking" return "unknown" @@ -150,7 +150,8 @@ async def check_indexes(self): await self.bot.db.minipools.create_index("validator_index") await self.bot.db.minipools.create_index("beacon.status") await self.bot.db.megapool_validators.create_index( - [("megapool", pymongo.ASCENDING), ("validator_id", pymongo.ASCENDING)], unique=True + [("megapool", pymongo.ASCENDING), ("validator_id", pymongo.ASCENDING)], + unique=True, ) await self.bot.db.megapool_validators.create_index("pubkey") await self.bot.db.megapool_validators.create_index("validator_index") @@ -170,7 +171,11 @@ async def _batch_multicall_update( return total = len(items) - first_calls = await call_fn(items[0]) if asyncio.iscoroutinefunction(call_fn) else call_fn(items[0]) + first_calls = ( + await call_fn(items[0]) + if asyncio.iscoroutinefunction(call_fn) + else call_fn(items[0]) + ) batch_size = self.batch_size // len(first_calls) for i, batch in enumerate(as_chunks(items, batch_size)): if label: @@ -184,7 +189,9 @@ async def _batch_multicall_update( for t in await call_fn(item): expanded.append((item["address"], *t)) else: - expanded = [(item["address"], *t) for item in batch for t in call_fn(item)] + expanded = [ + (item["address"], *t) for item in batch for t in call_fn(item) + ] calls = 
[(e[1], e[2]) for e in expanded] results = await rp.multicall(calls) updates = defaultdict(dict) @@ -194,8 +201,11 @@ async def _batch_multicall_update( value = transform(value) updates[addr][field] = value await collection.bulk_write( - [UpdateOne({"address": addr}, {"$set": d}) for addr, d in updates.items()], - ordered=False + [ + UpdateOne({"address": addr}, {"$set": d}) + for addr, d in updates.items() + ], + ordered=False, ) # -- Node operator tasks -- @@ -205,14 +215,20 @@ async def add_untracked_node_operators(self): nm = await rp.get_contract_by_name("rocketNodeManager") latest_rp = await rp.call("rocketNodeManager.getNodeCount") - 1 latest_db = 0 - if res := await self.bot.db.node_operators.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.bot.db.node_operators.find_one( + sort=[("_id", pymongo.DESCENDING)] + ): latest_db = res["_id"] if latest_db >= latest_rp: log.debug("No new nodes") return data = {} - for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - results = await rp.multicall([nm.functions.getNodeAt(i) for i in index_batch]) + for index_batch in as_chunks( + range(latest_db + 1, latest_rp + 1), self.batch_size + ): + results = await rp.multicall( + [nm.functions.getNodeAt(i) for i in index_batch] + ) data |= dict(zip(index_batch, results, strict=False)) await self.bot.db.node_operators.insert_many( [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in data.items()] @@ -225,13 +241,31 @@ async def add_static_node_operator_data(self): def get_calls(n): return [ - (df.functions.getProxyAddress(n["address"]), True, w3.to_checksum_address, "fee_distributor.address"), - (mf.functions.getExpectedAddress(n["address"]), True, w3.to_checksum_address, "megapool.address"), + ( + df.functions.getProxyAddress(n["address"]), + True, + w3.to_checksum_address, + "fee_distributor.address", + ), + ( + mf.functions.getExpectedAddress(n["address"]), + True, + w3.to_checksum_address, + "megapool.address", + 
), ] + await self._batch_multicall_update( self.bot.db.node_operators, - {"$or": [{"fee_distributor.address": {"$exists": False}}, {"megapool.address": {"$exists": False}}]}, - get_calls, {"address": 1}, label="node operators" + { + "$or": [ + {"fee_distributor.address": {"$exists": False}}, + {"megapool.address": {"$exists": False}}, + ] + }, + get_calls, + {"address": 1}, + label="node operators", ) @timerun_async @@ -245,56 +279,220 @@ async def update_dynamic_node_operator_data(self): def get_calls(n): return [ - (nm.functions.getNodeWithdrawalAddress(n["address"]), True, w3.to_checksum_address, "withdrawal_address"), - (nm.functions.getNodeTimezoneLocation(n["address"]), True, None, "timezone_location"), - (nm.functions.getSmoothingPoolRegistrationState(n["address"]), True, None, "smoothing_pool_registration"), - (nm.functions.getAverageNodeFee(n["address"]), True, safe_to_float, "average_node_fee"), - (ns.functions.getNodeETHCollateralisationRatio(n["address"]), True, safe_inv, "effective_node_share"), - (mm.functions.getNodeStakingMinipoolCount(n["address"]), True, None, "staking_minipool_count"), - (nd.functions.getNodeDepositCredit(n["address"]), True, safe_to_float, "node_credit"), - (nd.functions.getNodeEthBalance(n["address"]), True, safe_to_float, "node_eth_balance"), - (nm.functions.getFeeDistributorInitialised(n["address"]), True, None, "fee_distributor.initialized"), - (mc.functions.getEthBalance(n["fee_distributor"]["address"]), - True, safe_to_float, "fee_distributor.eth_balance"), - (mf.functions.getMegapoolDeployed(n["address"]), True, None, "megapool.deployed"), - (mc.functions.getEthBalance(n["megapool"]["address"]), True, safe_to_float, "megapool.eth_balance"), - (ns.functions.getNodeStakedRPL(n["address"]), True, safe_to_float, "rpl.total_stake"), - (ns.functions.getNodeLegacyStakedRPL(n["address"]), True, safe_to_float, "rpl.legacy_stake"), - (ns.functions.getNodeMegapoolStakedRPL(n["address"]), True, safe_to_float, "rpl.megapool_stake"), - 
(ns.functions.getNodeLockedRPL(n["address"]), True, safe_to_float, "rpl.locked"), - (ns.functions.getNodeUnstakingRPL(n["address"]), True, safe_to_float, "rpl.unstaking"), - (ns.functions.getNodeRPLStakedTime(n["address"]), True, None, "rpl.last_stake_time"), - (ns.functions.getNodeLastUnstakeTime(n["address"]), True, None, "rpl.last_unstake_time"), + ( + nm.functions.getNodeWithdrawalAddress(n["address"]), + True, + w3.to_checksum_address, + "withdrawal_address", + ), + ( + nm.functions.getNodeTimezoneLocation(n["address"]), + True, + None, + "timezone_location", + ), + ( + nm.functions.getSmoothingPoolRegistrationState(n["address"]), + True, + None, + "smoothing_pool_registration", + ), + ( + nm.functions.getAverageNodeFee(n["address"]), + True, + safe_to_float, + "average_node_fee", + ), + ( + ns.functions.getNodeETHCollateralisationRatio(n["address"]), + True, + safe_inv, + "effective_node_share", + ), + ( + mm.functions.getNodeStakingMinipoolCount(n["address"]), + True, + None, + "staking_minipool_count", + ), + ( + nd.functions.getNodeDepositCredit(n["address"]), + True, + safe_to_float, + "node_credit", + ), + ( + nd.functions.getNodeEthBalance(n["address"]), + True, + safe_to_float, + "node_eth_balance", + ), + ( + nm.functions.getFeeDistributorInitialised(n["address"]), + True, + None, + "fee_distributor.initialized", + ), + ( + mc.functions.getEthBalance(n["fee_distributor"]["address"]), + True, + safe_to_float, + "fee_distributor.eth_balance", + ), + ( + mf.functions.getMegapoolDeployed(n["address"]), + True, + None, + "megapool.deployed", + ), + ( + mc.functions.getEthBalance(n["megapool"]["address"]), + True, + safe_to_float, + "megapool.eth_balance", + ), + ( + ns.functions.getNodeStakedRPL(n["address"]), + True, + safe_to_float, + "rpl.total_stake", + ), + ( + ns.functions.getNodeLegacyStakedRPL(n["address"]), + True, + safe_to_float, + "rpl.legacy_stake", + ), + ( + ns.functions.getNodeMegapoolStakedRPL(n["address"]), + True, + safe_to_float, + 
"rpl.megapool_stake", + ), + ( + ns.functions.getNodeLockedRPL(n["address"]), + True, + safe_to_float, + "rpl.locked", + ), + ( + ns.functions.getNodeUnstakingRPL(n["address"]), + True, + safe_to_float, + "rpl.unstaking", + ), + ( + ns.functions.getNodeRPLStakedTime(n["address"]), + True, + None, + "rpl.last_stake_time", + ), + ( + ns.functions.getNodeLastUnstakeTime(n["address"]), + True, + None, + "rpl.last_unstake_time", + ), ] + await self._batch_multicall_update( - self.bot.db.node_operators, {}, get_calls, label="node operators", - projection={"address": 1, "fee_distributor.address": 1, "megapool.address": 1} + self.bot.db.node_operators, + {}, + get_calls, + label="node operators", + projection={ + "address": 1, + "fee_distributor.address": 1, + "megapool.address": 1, + }, ) @timerun_async async def update_dynamic_megapool_data(self): async def get_calls(n): - mp = await rp.assemble_contract("rocketMegapoolDelegate", address=n["megapool"]["address"]) - proxy = await rp.assemble_contract("rocketMegapoolProxy", address=n["megapool"]["address"]) + mp = await rp.assemble_contract( + "rocketMegapoolDelegate", address=n["megapool"]["address"] + ) + proxy = await rp.assemble_contract( + "rocketMegapoolProxy", address=n["megapool"]["address"] + ) return [ - (mp.functions.getValidatorCount(), True, None, "megapool.validator_count"), - (mp.functions.getActiveValidatorCount(), True, None, "megapool.active_validator_count"), - (mp.functions.getExitingValidatorCount(), True, None, "megapool.exiting_validator_count"), - (mp.functions.getLockedValidatorCount(), True, None, "megapool.locked_validator_count"), + ( + mp.functions.getValidatorCount(), + True, + None, + "megapool.validator_count", + ), + ( + mp.functions.getActiveValidatorCount(), + True, + None, + "megapool.active_validator_count", + ), + ( + mp.functions.getExitingValidatorCount(), + True, + None, + "megapool.exiting_validator_count", + ), + ( + mp.functions.getLockedValidatorCount(), + True, + None, + 
"megapool.locked_validator_count", + ), (mp.functions.getNodeBond(), True, safe_to_float, "megapool.node_bond"), - (mp.functions.getUserCapital(), True, safe_to_float, "megapool.user_capital"), + ( + mp.functions.getUserCapital(), + True, + safe_to_float, + "megapool.user_capital", + ), (mp.functions.getDebt(), True, safe_to_float, "megapool.debt"), - (mp.functions.getRefundValue(), True, safe_to_float, "megapool.refund_value"), - (mp.functions.getPendingRewards(), True, safe_to_float, "megapool.pending_rewards"), - (mp.functions.getLastDistributionTime(), True, None, "megapool.last_distribution_time"), - (proxy.functions.getDelegate(), True, w3.to_checksum_address, "megapool.delegate"), - (proxy.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "megapool.effective_delegate"), - (proxy.functions.getUseLatestDelegate(), True, None, "megapool.use_latest_delegate"), + ( + mp.functions.getRefundValue(), + True, + safe_to_float, + "megapool.refund_value", + ), + ( + mp.functions.getPendingRewards(), + True, + safe_to_float, + "megapool.pending_rewards", + ), + ( + mp.functions.getLastDistributionTime(), + True, + None, + "megapool.last_distribution_time", + ), + ( + proxy.functions.getDelegate(), + True, + w3.to_checksum_address, + "megapool.delegate", + ), + ( + proxy.functions.getEffectiveDelegate(), + True, + w3.to_checksum_address, + "megapool.effective_delegate", + ), + ( + proxy.functions.getUseLatestDelegate(), + True, + None, + "megapool.use_latest_delegate", + ), ] + await self._batch_multicall_update( - self.bot.db.node_operators, {"megapool.deployed": True}, - get_calls, {"address": 1, "megapool.address": 1}, - label="megapools" + self.bot.db.node_operators, + {"megapool.deployed": True}, + get_calls, + {"address": 1, "megapool.address": 1}, + label="megapools", ) # -- Minipool tasks -- @@ -304,16 +502,27 @@ async def add_untracked_minipools(self): mm = await rp.get_contract_by_name("rocketMinipoolManager") latest_rp = await 
rp.call("rocketMinipoolManager.getMinipoolCount") - 1 latest_db = 0 - if res := await self.bot.db.minipools.find_one(sort=[("_id", pymongo.DESCENDING)]): + if res := await self.bot.db.minipools.find_one( + sort=[("_id", pymongo.DESCENDING)] + ): latest_db = res["_id"] if latest_db >= latest_rp: log.debug("No new minipools") return - log.debug(f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}") - for index_batch in as_chunks(range(latest_db + 1, latest_rp + 1), self.batch_size): - results = await rp.multicall([mm.functions.getMinipoolAt(i) for i in index_batch]) + log.debug( + f"Latest minipool in db: {latest_db}, latest minipool in rp: {latest_rp}" + ) + for index_batch in as_chunks( + range(latest_db + 1, latest_rp + 1), self.batch_size + ): + results = await rp.multicall( + [mm.functions.getMinipoolAt(i) for i in index_batch] + ) await self.bot.db.minipools.insert_many( - [{"_id": i, "address": w3.to_checksum_address(a)} for i, a in zip(index_batch, results, strict=False)] + [ + {"_id": i, "address": w3.to_checksum_address(a)} + for i, a in zip(index_batch, results, strict=False) + ] ) @timerun_async @@ -323,23 +532,41 @@ async def add_static_minipool_data(self): async def lamb(n): return [ ( - (await rp.assemble_contract("rocketMinipool", address=n["address"])) - .functions.getNodeAddress(), True, w3.to_checksum_address, "node_operator" + ( + await rp.assemble_contract( + "rocketMinipool", address=n["address"] + ) + ).functions.getNodeAddress(), + True, + w3.to_checksum_address, + "node_operator", + ), + ( + mm.functions.getMinipoolPubkey(n["address"]), + True, + safe_to_hex, + "pubkey", ), - (mm.functions.getMinipoolPubkey(n["address"]), True, safe_to_hex, "pubkey"), ] + await self._batch_multicall_update( self.bot.db.minipools, {"node_operator": {"$exists": False}}, - lamb, {"address": 1}, label="minipools" + lamb, + {"address": 1}, + label="minipools", ) @timerun async def add_static_minipool_deposit_data(self): - minipools = await 
self.bot.db.minipools.find( - {"deposit_amount": {"$exists": False}, "status": "initialised"}, - {"address": 1, "_id": 0, "status_time": 1} - ).sort("status_time", pymongo.ASCENDING).to_list() + minipools = ( + await self.bot.db.minipools.find( + {"deposit_amount": {"$exists": False}, "status": "initialised"}, + {"address": 1, "_id": 0, "status_time": 1}, + ) + .sort("status_time", pymongo.ASCENDING) + .to_list() + ) if not minipools: return nd = await rp.get_contract_by_name("rocketNodeDeposit") @@ -351,9 +578,13 @@ async def add_static_minipool_deposit_data(self): log.debug(f"Processing deposit data for blocks {block_start}..{block_end}") addresses = {m["address"] for m in minipool_batch} - events = get_logs(nd.events.DepositReceived, block_start, block_end) \ - + get_logs(mm.events.MinipoolCreated, block_start, block_end) - events.sort(key=lambda e: (e['blockNumber'], e['transactionIndex'], e['logIndex']), reverse=True) + events = get_logs( + nd.events.DepositReceived, block_start, block_end + ) + get_logs(mm.events.MinipoolCreated, block_start, block_end) + events.sort( + key=lambda e: (e["blockNumber"], e["transactionIndex"], e["logIndex"]), + reverse=True, + ) # pair DepositReceived + MinipoolCreated events from same transaction pairs = [] @@ -364,7 +595,9 @@ async def add_static_minipool_deposit_data(self): pairs.append([e]) else: pairs[-1] = [e] - log.info(f"replacing creation event with newly found one ({pairs[-1]})") + log.info( + f"replacing creation event with newly found one ({pairs[-1]})" + ) elif e["event"] == "DepositReceived" and last_is_creation: pairs[-1].insert(0, e) last_is_creation = e["event"] == "MinipoolCreated" @@ -376,13 +609,15 @@ async def add_static_minipool_deposit_data(self): assert pair[0]["transactionHash"] == pair[1]["transactionHash"] mp = str(pair[1]["args"]["minipool"]).lower() if mp in addresses: - data[mp] = {"deposit_amount": solidity.to_float(pair[0]["args"]["amount"])} + data[mp] = { + "deposit_amount": 
solidity.to_float(pair[0]["args"]["amount"]) + } if not data: continue await self.bot.db.minipools.bulk_write( [UpdateOne({"address": addr}, {"$set": d}) for addr, d in data.items()], - ordered=False + ordered=False, ) @timerun_async @@ -390,25 +625,97 @@ async def update_dynamic_minipool_data(self): mc = await rp.get_contract_by_name("multicall3") async def get_calls(n): - minipool_contract = await rp.assemble_contract("rocketMinipool", address=n["address"]) + minipool_contract = await rp.assemble_contract( + "rocketMinipool", address=n["address"] + ) return [ - (minipool_contract.functions.getStatus(), True, safe_state_to_str, "status"), - (minipool_contract.functions.getStatusTime(), True, None, "status_time"), + ( + minipool_contract.functions.getStatus(), + True, + safe_state_to_str, + "status", + ), + ( + minipool_contract.functions.getStatusTime(), + True, + None, + "status_time", + ), (minipool_contract.functions.getVacant(), False, is_true, "vacant"), - (minipool_contract.functions.getFinalised(), True, is_true, "finalized"), - (minipool_contract.functions.getNodeDepositBalance(), True, safe_to_float, "node_deposit_balance"), - (minipool_contract.functions.getNodeRefundBalance(), True, safe_to_float, "node_refund_balance"), - (minipool_contract.functions.getPreMigrationBalance(), False, safe_to_float, "pre_migration_balance"), - (minipool_contract.functions.getNodeFee(), True, safe_to_float, "node_fee"), - (minipool_contract.functions.getDelegate(), True, w3.to_checksum_address, "delegate"), - (minipool_contract.functions.getPreviousDelegate(), False, w3.to_checksum_address, "previous_delegate"), - (minipool_contract.functions.getEffectiveDelegate(), True, w3.to_checksum_address, "effective_delegate"), - (minipool_contract.functions.getUseLatestDelegate(), True, is_true, "use_latest_delegate"), - (minipool_contract.functions.getUserDistributed(), False, is_true, "user_distributed"), - (mc.functions.getEthBalance(n["address"]), True, safe_to_float, 
"execution_balance"), + ( + minipool_contract.functions.getFinalised(), + True, + is_true, + "finalized", + ), + ( + minipool_contract.functions.getNodeDepositBalance(), + True, + safe_to_float, + "node_deposit_balance", + ), + ( + minipool_contract.functions.getNodeRefundBalance(), + True, + safe_to_float, + "node_refund_balance", + ), + ( + minipool_contract.functions.getPreMigrationBalance(), + False, + safe_to_float, + "pre_migration_balance", + ), + ( + minipool_contract.functions.getNodeFee(), + True, + safe_to_float, + "node_fee", + ), + ( + minipool_contract.functions.getDelegate(), + True, + w3.to_checksum_address, + "delegate", + ), + ( + minipool_contract.functions.getPreviousDelegate(), + False, + w3.to_checksum_address, + "previous_delegate", + ), + ( + minipool_contract.functions.getEffectiveDelegate(), + True, + w3.to_checksum_address, + "effective_delegate", + ), + ( + minipool_contract.functions.getUseLatestDelegate(), + True, + is_true, + "use_latest_delegate", + ), + ( + minipool_contract.functions.getUserDistributed(), + False, + is_true, + "user_distributed", + ), + ( + mc.functions.getEthBalance(n["address"]), + True, + safe_to_float, + "execution_balance", + ), ] + await self._batch_multicall_update( - self.bot.db.minipools, {"finalized": {"$ne": True}}, get_calls, {"address": 1}, label="minipools" + self.bot.db.minipools, + {"finalized": {"$ne": True}}, + get_calls, + {"address": 1}, + label="minipools", ) @timerun @@ -421,8 +728,12 @@ async def update_dynamic_minipool_beacon_data(self): for i, pubkey_batch in enumerate(as_chunks(pubkeys, self.batch_size)): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) - log.info(f"Updating beacon chain data for minipools [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_by_ids("head", ids=pubkey_batch))["data"] + log.info( + f"Updating beacon chain data for minipools [{start}, {end}]/{total}" + ) + beacon_data = (await bacon.get_validators_by_ids("head", 
ids=pubkey_batch))[ + "data" + ] data = {} for d in beacon_data: v = d["validator"] @@ -431,9 +742,13 @@ async def update_dynamic_minipool_beacon_data(self): "beacon": { "status": d["status"], "balance": solidity.to_float(d["balance"], 9), - "effective_balance": solidity.to_float(v["effective_balance"], 9), + "effective_balance": solidity.to_float( + v["effective_balance"], 9 + ), "slashed": v["slashed"], - "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_eligibility_epoch": _parse_epoch( + v["activation_eligibility_epoch"] + ), "activation_epoch": _parse_epoch(v["activation_epoch"]), "exit_epoch": _parse_epoch(v["exit_epoch"]), "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), @@ -442,7 +757,7 @@ async def update_dynamic_minipool_beacon_data(self): if data: await self.bot.db.minipools.bulk_write( [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], - ordered=False + ordered=False, ) # -- Megapool validator tasks -- @@ -452,7 +767,7 @@ async def add_untracked_megapool_validators(self): # get deployed megapools with their on-chain validator count nodes = await self.bot.db.node_operators.find( {"megapool.deployed": True, "megapool.validator_count": {"$gt": 0}}, - {"address": 1, "megapool.address": 1, "megapool.validator_count": 1} + {"address": 1, "megapool.address": 1, "megapool.validator_count": 1}, ).to_list() if not nodes: return @@ -460,14 +775,20 @@ async def add_untracked_megapool_validators(self): for node in nodes: megapool_addr = node["megapool"]["address"] on_chain_count = node["megapool"]["validator_count"] - db_count = await self.bot.db.megapool_validators.count_documents({"megapool": megapool_addr}) + db_count = await self.bot.db.megapool_validators.count_documents( + {"megapool": megapool_addr} + ) if db_count >= on_chain_count: continue new_ids = list(range(db_count, on_chain_count)) - log.debug(f"Adding {len(new_ids)} new validators for megapool {megapool_addr}") + log.debug( + 
f"Adding {len(new_ids)} new validators for megapool {megapool_addr}" + ) - megapool_contract = await rp.assemble_contract("rocketMegapoolDelegate", address=megapool_addr) + megapool_contract = await rp.assemble_contract( + "rocketMegapoolDelegate", address=megapool_addr + ) for id_batch in as_chunks(new_ids, self.batch_size // 2): fns = [ fn @@ -487,20 +808,24 @@ async def add_untracked_megapool_validators(self): "megapool": megapool_addr, "node_operator": node["address"], "validator_id": vid, - "pubkey": safe_to_hex(pubkey_raw) if pubkey_raw is not None else None, + "pubkey": safe_to_hex(pubkey_raw) + if pubkey_raw is not None + else None, } info = _unpack_validator_info(info_raw) if info: doc.update(info) docs.append(doc) if docs: - await self.bot.db.megapool_validators.insert_many(docs, ordered=False) + await self.bot.db.megapool_validators.insert_many( + docs, ordered=False + ) @timerun_async async def update_dynamic_megapool_validator_data(self): validators = await self.bot.db.megapool_validators.find( {"status": {"$nin": ["exited", "dissolved"]}}, - {"megapool": 1, "validator_id": 1} + {"megapool": 1, "validator_id": 1}, ).to_list() if not validators: return @@ -511,14 +836,21 @@ async def update_dynamic_megapool_validator_data(self): end = min((i + 1) * self.batch_size, total) log.debug(f"Processing megapool validators [{start}, {end}]/{total}") fns = [ - (await rp.assemble_contract("rocketMegapoolDelegate", address=v["megapool"])) - .functions.getValidatorInfo(v["validator_id"]) + ( + await rp.assemble_contract( + "rocketMegapoolDelegate", address=v["megapool"] + ) + ).functions.getValidatorInfo(v["validator_id"]) for v in batch ] results = await rp.multicall(fns) ops = [] for v, info_raw in zip(batch, results, strict=False): - info = _unpack_validator_info_dynamic(info_raw) if info_raw is not None else None + info = ( + _unpack_validator_info_dynamic(info_raw) + if info_raw is not None + else None + ) if info is not None: ops.append(UpdateOne({"_id": 
v["_id"]}, {"$set": info})) if ops: @@ -536,8 +868,12 @@ async def update_dynamic_megapool_validator_beacon_data(self): for i, pubkey_batch in enumerate(as_chunks(pubkeys, self.batch_size)): start = i * self.batch_size + 1 end = min((i + 1) * self.batch_size, total) - log.debug(f"Updating beacon data for megapool validators [{start}, {end}]/{total}") - beacon_data = (await bacon.get_validators_by_ids("head", ids=pubkey_batch))["data"] + log.debug( + f"Updating beacon data for megapool validators [{start}, {end}]/{total}" + ) + beacon_data = (await bacon.get_validators_by_ids("head", ids=pubkey_batch))[ + "data" + ] data = {} for d in beacon_data: v = d["validator"] @@ -546,9 +882,13 @@ async def update_dynamic_megapool_validator_beacon_data(self): "beacon": { "status": d["status"], "balance": solidity.to_float(d["balance"], 9), - "effective_balance": solidity.to_float(v["effective_balance"], 9), + "effective_balance": solidity.to_float( + v["effective_balance"], 9 + ), "slashed": v["slashed"], - "activation_eligibility_epoch": _parse_epoch(v["activation_eligibility_epoch"]), + "activation_eligibility_epoch": _parse_epoch( + v["activation_eligibility_epoch"] + ), "activation_epoch": _parse_epoch(v["activation_epoch"]), "exit_epoch": _parse_epoch(v["exit_epoch"]), "withdrawable_epoch": _parse_epoch(v["withdrawable_epoch"]), @@ -557,7 +897,7 @@ async def update_dynamic_megapool_validator_beacon_data(self): if data: await self.bot.db.megapool_validators.bulk_write( [UpdateMany({"pubkey": pk}, {"$set": d}) for pk, d in data.items()], - ordered=False + ordered=False, ) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index fb2b4d18..e5d6088a 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -34,7 +34,9 @@ async def raise_exception(self, interaction: Interaction): @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def get_members_of_role(self, interaction: Interaction, guild_id: 
str, role_id: str): + async def get_members_of_role( + self, interaction: Interaction, guild_id: str, role_id: str + ): """Get members of a role""" await interaction.response.defer(ephemeral=True) try: @@ -43,9 +45,15 @@ async def get_members_of_role(self, interaction: Interaction, guild_id: str, rol role = guild.get_role(int(role_id)) log.debug(role) # print name + identifier and id of each member - members = [f"{member.name}#{member.discriminator}, ({member.id})" for member in role.members] + members = [ + f"{member.name}#{member.discriminator}, ({member.id})" + for member in role.members + ] # generate a file with a header that mentions what role and guild the members are from - content = f"Members of {role.name} ({role.id}) in {guild.name} ({guild.id})\n\n" + "\n".join(members) + content = ( + f"Members of {role.name} ({role.id}) in {guild.name} ({guild.id})\n\n" + + "\n".join(members) + ) file = File(io.StringIO(content), "members.txt") await interaction.followup.send(file=file) except Exception as err: @@ -87,7 +95,9 @@ async def delete_msg(self, interaction: Interaction, message_url: str): @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def edit_embed(self, interaction: Interaction, message_url: str, new_description: str): + async def edit_embed( + self, interaction: Interaction, message_url: str, new_description: str + ): await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) @@ -107,7 +117,9 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): await interaction.response.defer(ephemeral=True) transaction_receipt = await w3.eth.get_transaction(tnx_hash) if revert_reason := await rp.get_revert_reason(transaction_receipt): - await interaction.followup.send(content=f"```Revert reason: {revert_reason}```") + await interaction.followup.send( + content=f"```Revert reason: {revert_reason}```" + ) else: await 
interaction.followup.send(content="```No revert reason Available```") @@ -120,7 +132,9 @@ async def purge_minipools(self, interaction: Interaction, confirm: bool = False) """ await interaction.response.defer(ephemeral=True) if not confirm: - await interaction.followup.send("Not running. Set `confirm` to `true` to run.") + await interaction.followup.send( + "Not running. Set `confirm` to `true` to run." + ) return await self.bot.db.minipools.drop() await interaction.followup.send(content="Done") @@ -158,14 +172,19 @@ async def announce(self, interaction: Interaction, channel: str, message: str): await interaction.response.defer(ephemeral=True) channel = await self.bot.get_or_fetch_channel(int(channel)) e = Embed(title="Announcement", description=message) - e.add_field(name="Timestamp", value=f" ()") + e.add_field( + name="Timestamp", + value=f" ()", + ) await channel.send(embed=e) await interaction.followup.send(content="Done") @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def restore_support_template(self, interaction: Interaction, template_name: str, message_url: str): + async def restore_support_template( + self, interaction: Interaction, template_name: str, message_url: str + ): await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) @@ -179,7 +198,9 @@ async def restore_support_template(self, interaction: Interaction, template_name from datetime import datetime edit_line = template_embed.description.splitlines()[-1] - match = re.search(r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line) + match = re.search( + r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line + ) user_id = int(match.group("user")) ts = int(match.group("ts")) @@ -187,21 +208,19 @@ async def restore_support_template(self, interaction: Interaction, template_name await self.bot.db.support_bot_dumps.insert_one( { - "ts" : datetime.fromtimestamp(ts, tz=UTC), + "ts": 
datetime.fromtimestamp(ts, tz=UTC), "template": template_name, - "prev" : None, - "new" : { - "title" : template_title, - "description": template_description - }, - "author" : { - "id" : user.id, - "name": user.name - } + "prev": None, + "new": {"title": template_title, "description": template_description}, + "author": {"id": user.id, "name": user.name}, } ) await self.bot.db.support_bot.insert_one( - {"_id": template_name, "title": template_title, "description": template_description} + { + "_id": template_name, + "title": template_title, + "description": template_description, + } ) await interaction.followup.send(content="Done") @@ -221,26 +240,38 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): filtered_events = [] for event_log in (await w3.eth.get_transaction_receipt(tx_hash)).logs: - if ("topics" in event_log) and (event_log["topics"][0].hex() in events_plugin.topic_map): + if ("topics" in event_log) and ( + event_log["topics"][0].hex() in events_plugin.topic_map + ): filtered_events.append(event_log) channels = cfg.discord.channels events, _ = events_plugin.process_events(filtered_events) for event in events: - channel_candidates = [value for key, value in channels.items() if event.event_name.startswith(key)] - channel_id = channel_candidates[0] if channel_candidates else channels["default"] - await self.bot.db.event_queue.insert_one({ - "_id": event.unique_id, - "embed": pickle.dumps(event.embed), - "topic": event.topic, - "event_name": event.event_name, - "block_number": event.block_number, - "score": event.get_score(), - "time_seen": datetime.now(), - "attachment": pickle.dumps(event.attachment) if event.attachment else None, - "channel_id": channel_id, - "message_id": None - }) + channel_candidates = [ + value + for key, value in channels.items() + if event.event_name.startswith(key) + ] + channel_id = ( + channel_candidates[0] if channel_candidates else channels["default"] + ) + await self.bot.db.event_queue.insert_one( + 
{ + "_id": event.unique_id, + "embed": pickle.dumps(event.embed), + "topic": event.topic, + "event_name": event.event_name, + "block_number": event.block_number, + "score": event.get_score(), + "time_seen": datetime.now(), + "attachment": pickle.dumps(event.attachment) + if event.attachment + else None, + "channel_id": channel_id, + "message_id": None, + } + ) await interaction.followup.send(embed=event.embed) await interaction.followup.send(content="Done") diff --git a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py index 0c87baa7..3feff8eb 100644 --- a/rocketwatch/plugins/delegate_contracts/delegate_contracts.py +++ b/rocketwatch/plugins/delegate_contracts/delegate_contracts.py @@ -27,22 +27,30 @@ async def _delegate_stats( latest_contract: str, title: str, ) -> Embed: - distribution_stats = await (await collection.aggregate([ - {"$match": match_filter}, - {"$group": {"_id": f"${delegate_field}", "count": {"$sum": 1}}}, - {"$sort": {"count": -1}}, - ])).to_list() + distribution_stats = await ( + await collection.aggregate( + [ + {"$match": match_filter}, + {"$group": {"_id": f"${delegate_field}", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ] + ) + ).to_list() use_latest_counts = {True: 0, False: 0} - for d in await (await collection.aggregate([ - {"$match": match_filter}, - {"$group": {"_id": f"${use_latest_field}", "count": {"$sum": 1}}}, - ])).to_list(): + for d in await ( + await collection.aggregate( + [ + {"$match": match_filter}, + {"$group": {"_id": f"${use_latest_field}", "count": {"$sum": 1}}}, + ] + ) + ).to_list(): use_latest_counts[bool(d["_id"])] = d["count"] e = Embed() e.title = title - s = "\u00A0" * 4 + s = "\u00a0" * 4 desc = "**Effective Delegate Distribution:**\n" c_sum = sum(d["count"] for d in distribution_stats) # refresh cached address @@ -69,7 +77,11 @@ async def minipool_delegates(self, interaction: Interaction): await interaction.response.defer() e = 
await self._delegate_stats( collection=self.bot.db.minipools, - match_filter={"beacon.status": {"$in": ["pending_initialized", "pending_queued", "active_ongoing"]}}, + match_filter={ + "beacon.status": { + "$in": ["pending_initialized", "pending_queued", "active_ongoing"] + } + }, delegate_field="effective_delegate", use_latest_field="use_latest_delegate", latest_contract="rocketMinipoolDelegate", diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 36d35341..7f2719f6 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -21,11 +21,19 @@ def __init__(self, bot: RocketWatch): @staticmethod async def get_deposit_pool_stats() -> Embed: - balance_raw, max_size_raw, max_amount_raw = await rp.multicall([ - (await rp.get_contract_by_name("rocketDepositPool")).functions.getBalance(), - (await rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit")).functions.getMaximumDepositPoolSize(), - (await rp.get_contract_by_name("rocketDepositPool")).functions.getMaximumDepositAmount(), - ]) + balance_raw, max_size_raw, max_amount_raw = await rp.multicall( + [ + ( + await rp.get_contract_by_name("rocketDepositPool") + ).functions.getBalance(), + ( + await rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit") + ).functions.getMaximumDepositPoolSize(), + ( + await rp.get_contract_by_name("rocketDepositPool") + ).functions.getMaximumDepositAmount(), + ] + ) dp_balance = solidity.to_float(balance_raw) deposit_cap = solidity.to_int(max_size_raw) @@ -42,8 +50,12 @@ async def get_deposit_pool_stats() -> Embed: embed.add_field(name="Status", value=dp_status, inline=False) display_limit = 2 - exp_queue_length, exp_queue_content = await Queue.get_express_queue(display_limit) - std_queue_length, std_queue_content = await Queue.get_standard_queue(display_limit) + exp_queue_length, exp_queue_content = await Queue.get_express_queue( + display_limit + ) + 
std_queue_length, std_queue_content = await Queue.get_standard_queue( + display_limit + ) total_queue_length = exp_queue_length + std_queue_length if (total_queue_length) > 0: embed.description = "" @@ -61,7 +73,9 @@ async def get_deposit_pool_stats() -> Embed: queue_capacity = max(free_capacity - deposit_cap, 0.0) possible_assignments = min(int(dp_balance // 32), total_queue_length) - embed.description += f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." + embed.description += ( + f"Need **{queue_capacity:,.2f} ETH** to dequeue all validators." + ) if possible_assignments > 0: embed.description += f"\nSufficient balance for **{possible_assignments} deposit assignment{'s' if possible_assignments != 1 else ''}**!" else: @@ -69,7 +83,9 @@ async def get_deposit_pool_stats() -> Embed: if (num_eb4 := int(dp_balance // 28)) > 0: lines.append(f"**`{num_eb4:>4}`** 4 ETH validators (28 ETH from DP)") if (num_credit := int(dp_balance // 32)) > 0: - lines.append(f"**`{num_credit:>4}`** credit validators (32 ETH from DP)") + lines.append( + f"**`{num_credit:>4}`** credit validators (32 ETH from DP)" + ) if lines: embed.add_field(name="Enough For", value="\n".join(lines), inline=False) @@ -78,12 +94,27 @@ async def get_deposit_pool_stats() -> Embed: @staticmethod async def get_contract_collateral_stats() -> Embed: - exchange_rate, total_supply, collateral_rate_raw, target_rate_raw = await rp.multicall([ - (await rp.get_contract_by_name("rocketTokenRETH")).functions.getExchangeRate(), - (await rp.get_contract_by_name("rocketTokenRETH")).functions.totalSupply(), - (await rp.get_contract_by_name("rocketTokenRETH")).functions.getCollateralRate(), - (await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork")).functions.getTargetRethCollateralRate(), - ]) + ( + exchange_rate, + total_supply, + collateral_rate_raw, + target_rate_raw, + ) = await rp.multicall( + [ + ( + await rp.get_contract_by_name("rocketTokenRETH") + ).functions.getExchangeRate(), + ( + await 
rp.get_contract_by_name("rocketTokenRETH") + ).functions.totalSupply(), + ( + await rp.get_contract_by_name("rocketTokenRETH") + ).functions.getCollateralRate(), + ( + await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork") + ).functions.getTargetRethCollateralRate(), + ] + ) total_eth_in_reth: float = total_supply * exchange_rate / 10**36 collateral_rate: float = solidity.to_float(collateral_rate_raw) @@ -117,7 +148,9 @@ async def deposit_pool(self, interaction: Interaction) -> None: async def reth_extra_collateral(self, interaction: Interaction) -> None: """Show the amount of tokens held in the rETH contract for exit liquidity""" await interaction.response.defer(ephemeral=is_hidden(interaction)) - await interaction.followup.send(embed=await self.get_contract_collateral_stats()) + await interaction.followup.send( + embed=await self.get_contract_collateral_stats() + ) async def get_status(self) -> Embed: embed = Embed(title=":rocket: Live Protocol Status") @@ -136,13 +169,17 @@ async def get_status(self) -> Embed: embed.add_field(name="Deposits", value=field.value, inline=False) collateral_embed = await self.get_contract_collateral_stats() - embed.add_field(name="Withdrawals", value=collateral_embed.description, inline=False) + embed.add_field( + name="Withdrawals", value=collateral_embed.description, inline=False + ) if cfg.rocketpool.chain != "mainnet": return embed reth_price = await rp.get_reth_eth_price() - protocol_rate = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) + protocol_rate = solidity.to_float( + await rp.call("rocketTokenRETH.getExchangeRate") + ) relative_rate_diff = (reth_price / protocol_rate) - 1 expected_rate_diff = 0.0005 @@ -153,7 +190,11 @@ async def get_status(self) -> Embed: else: rate_status = f"at a **{-relative_rate_diff:.2%} discount**!" 
- embed.add_field(name="Secondary Market", value=f"rETH is trading {rate_status}", inline=False) + embed.add_field( + name="Secondary Market", + value=f"rETH is trading {rate_status}", + inline=False, + ) return embed diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index d5db9590..342322c0 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -84,7 +84,9 @@ async def gather_new_events(self) -> None: log.debug(f"{self.head_block = }") latest_block = await w3.eth.get_block_number() - submodules = [cog for cog in self.bot.cogs.values() if isinstance(cog, EventPlugin)] + submodules = [ + cog for cog in self.bot.cogs.values() if isinstance(cog, EventPlugin) + ] log.debug(f"Running {len(submodules)} submodules") if self.head_block == "latest": @@ -96,13 +98,20 @@ async def gather_new_events(self) -> None: self.head_block = cfg.events.genesis else: # behind chain head, let's see how far - last_event_entry = await self.bot.db.event_queue.find().sort( - "block_number", pymongo.DESCENDING - ).limit(1).to_list(None) + last_event_entry = ( + await self.bot.db.event_queue.find() + .sort("block_number", pymongo.DESCENDING) + .limit(1) + .to_list(None) + ) if last_event_entry: - self.head_block = max(self.head_block, last_event_entry[0]["block_number"]) + self.head_block = max( + self.head_block, last_event_entry[0]["block_number"] + ) - last_checked_entry = await self.bot.db.last_checked_block.find_one({"_id": "events"}) + last_checked_entry = await self.bot.db.last_checked_block.find_one( + {"_id": "events"} + ) if last_checked_entry: self.head_block = max(self.head_block, last_checked_entry["block"]) @@ -124,7 +133,9 @@ async def gather_new_events(self) -> None: coroutines = [] for sm in submodules: - coroutines.append(sm.get_past_events(from_block=from_block, to_block=to_block)) + coroutines.append( + sm.get_past_events(from_block=from_block, to_block=to_block) + 
) if target_block == "latest": sm.start_tracking(to_block + 1) @@ -142,21 +153,31 @@ async def gather_new_events(self) -> None: continue # select channel dynamically from config based on event_name prefix - channel_candidates = [value for key, value in channels.items() if event.event_name.startswith(key)] - channel_id = channel_candidates[0] if channel_candidates else channels["default"] - events.append({ - "_id": event.unique_id, - "embed": pickle.dumps(event.embed), - "topic": event.topic, - "event_name": event.event_name, - "block_number": event.block_number, - "score": event.get_score(), - "time_seen": datetime.now(), - "image": pickle.dumps(event.image) if event.image else None, - "thumbnail": pickle.dumps(event.thumbnail) if event.thumbnail else None, - "channel_id": channel_id, - "message_id": None - }) + channel_candidates = [ + value + for key, value in channels.items() + if event.event_name.startswith(key) + ] + channel_id = ( + channel_candidates[0] if channel_candidates else channels["default"] + ) + events.append( + { + "_id": event.unique_id, + "embed": pickle.dumps(event.embed), + "topic": event.topic, + "event_name": event.event_name, + "block_number": event.block_number, + "score": event.get_score(), + "time_seen": datetime.now(), + "image": pickle.dumps(event.image) if event.image else None, + "thumbnail": pickle.dumps(event.thumbnail) + if event.thumbnail + else None, + "channel_id": channel_id, + "message_id": None, + } + ) log.info(f"{len(events)} new events gathered, updating DB") if events: @@ -164,15 +185,15 @@ async def gather_new_events(self) -> None: self.head_block = target_block await self.bot.db.last_checked_block.replace_one( - {"_id": "events"}, - {"_id": "events", "block": to_block}, - upsert=True + {"_id": "events"}, {"_id": "events", "block": to_block}, upsert=True ) async def process_event_queue(self) -> None: log.debug("Processing events in queue") # get all channels with unprocessed events - channels = await 
self.bot.db.event_queue.distinct("channel_id", {"message_id": None}) + channels = await self.bot.db.event_queue.distinct( + "channel_id", {"message_id": None} + ) if not channels: log.debug("No pending events in queue") return @@ -186,14 +207,20 @@ def try_load(_entry: dict, _key: str) -> Any | None: return None for channel_id in channels: - db_events: list[dict] = await self.bot.db.event_queue.find( - {"channel_id": channel_id, "message_id": None} - ).sort("score", pymongo.ASCENDING).to_list(None) + db_events: list[dict] = ( + await self.bot.db.event_queue.find( + {"channel_id": channel_id, "message_id": None} + ) + .sort("score", pymongo.ASCENDING) + .to_list(None) + ) log.debug(f"Found {len(db_events)} events for channel {channel_id}.") channel = await self.bot.get_or_fetch_channel(channel_id) - for state_message in await self.bot.db.state_messages.find({"channel_id": channel_id}).to_list(None): + for state_message in await self.bot.db.state_messages.find( + {"channel_id": channel_id} + ).to_list(None): msg = await channel.fetch_message(state_message["message_id"]) await msg.delete() await self.bot.db.state_messages.delete_one({"channel_id": channel_id}) @@ -216,17 +243,18 @@ def try_load(_entry: dict, _key: str) -> Any | None: msg = await channel.send(embed=embed, files=files) # add message id to event await self.bot.db.event_queue.update_one( - {"_id": event_entry["_id"]}, - {"$set": {"message_id": msg.id}} + {"_id": event_entry["_id"]}, {"$set": {"message_id": msg.id}} ) log.info("Processed all events in queue") async def update_status_messages(self) -> None: configs = cfg.events.status_message - for state_message in (await self.bot.db.state_messages.find().to_list()): + for state_message in await self.bot.db.state_messages.find().to_list(): if state_message["_id"] not in configs: - log.debug(f"No config for state message ID {state_message['_id']}, removing message") + log.debug( + f"No config for state message ID {state_message['_id']}, removing message" + ) 
await self._replace_or_add_status("", None, state_message) for channel_name, config in configs.items(): @@ -239,7 +267,9 @@ async def _update_status_message(self, channel_name: str, config) -> None: age = datetime.now() - state_message["sent_at"] cooldown = timedelta(seconds=config.cooldown) if (age < cooldown) and (state_message["state"] == str(self.State.OK)): - log.debug(f"State message for {channel_name} not past cooldown: {age} < {cooldown}") + log.debug( + f"State message for {channel_name} not past cooldown: {age} < {cooldown}" + ) return if not (embed := await generate_template_embed(self.bot.db, "announcement")): @@ -251,26 +281,31 @@ async def _update_status_message(self, channel_name: str, config) -> None: return embed.timestamp = datetime.now() - embed.set_footer(text=f"Tracking {cfg.rocketpool.chain} using {len(self.bot.cogs)} plugins") + embed.set_footer( + text=f"Tracking {cfg.rocketpool.chain} using {len(self.bot.cogs)} plugins" + ) for field in config.fields: embed.add_field(**field) await self._replace_or_add_status(channel_name, embed, state_message) async def show_service_interrupt(self) -> None: - embed = await assemble(MutableAttributeDict({"event_name": "service_interrupted"})) + embed = await assemble( + MutableAttributeDict({"event_name": "service_interrupted"}) + ) for channel_name in cfg.events.status_message: - state_message = await self.bot.db.state_messages.find_one({"_id": channel_name}) + state_message = await self.bot.db.state_messages.find_one( + {"_id": channel_name} + ) if (not state_message) or (state_message["state"] != str(self.state.ERROR)): await self._replace_or_add_status(channel_name, embed, state_message) async def _replace_or_add_status( - self, - target_channel: str, - embed: Embed | None, - prev_status: dict | None + self, target_channel: str, embed: Embed | None, prev_status: dict | None ) -> None: - target_channel_id = self.channels.get(target_channel) or self.channels["default"] + target_channel_id = ( + 
self.channels.get(target_channel) or self.channels["default"] + ) if embed and prev_status and (prev_status["channel_id"] == target_channel_id): log.debug(f"Replacing existing status message for channel {target_channel}") @@ -280,7 +315,7 @@ async def _replace_or_add_status( await msg.edit(embed=embed) await self.bot.db.state_messages.update_one( prev_status, - {"$set": {"sent_at": datetime.now(), "state": str(self.state)}} + {"$set": {"sent_at": datetime.now(), "state": str(self.state)}}, ) return except discord.errors.NotFound: @@ -299,13 +334,15 @@ async def _replace_or_add_status( log.debug(f"Creating new status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(target_channel_id) msg = await channel.send(embed=embed, silent=True) - await self.bot.db.state_messages.insert_one({ - "_id" : target_channel, - "channel_id": target_channel_id, - "message_id": msg.id, - "sent_at" : datetime.now(), - "state" : str(self.state) - }) + await self.bot.db.state_messages.insert_one( + { + "_id": target_channel, + "channel_id": target_channel_id, + "message_id": msg.id, + "sent_at": datetime.now(), + "state": str(self.state), + } + ) async def setup(bot): diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index d19189eb..9377ac6b 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -28,7 +28,10 @@ log = logging.getLogger("rocketwatch.events") -PartialFilter = Callable[[BlockNumber, BlockNumber | Literal["latest"]], Coroutine[None, None, list[LogReceipt | EventData]]] +PartialFilter = Callable[ + [BlockNumber, BlockNumber | Literal["latest"]], + Coroutine[None, None, list[LogReceipt | EventData]], +] class Events(EventPlugin): @@ -69,11 +72,13 @@ async def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: try: log.info(f"Adding filter for {contract_name}.{event_name}") event_abi = contract.events[event_name].abi - input_types = 
','.join(i['type'] for i in event_abi['inputs']) + input_types = ",".join(i["type"] for i in event_abi["inputs"]) topic = w3.keccak(text=f"{event_name}({input_types})").hex() except Exception as e: log.exception(e) - log.warning(f"Couldn't find event {event_name} ({event['name']}) in the contract") + log.warning( + f"Couldn't find event {event_name} ({event['name']}) in the contract" + ) continue aggregated_topics.add(topic) @@ -81,13 +86,19 @@ async def _parse_event_config(self) -> tuple[list[PartialFilter], dict, dict]: topic_map[topic] = event_name if addresses: - async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[LogReceipt]: - return await w3.eth.get_logs({ - "address" : list(addresses), - "topics" : [list(aggregated_topics)], - "fromBlock": _from, - "toBlock" : _to - }) + + async def build_direct_filter( + _from: BlockNumber, _to: BlockNumber | Literal["latest"] + ) -> list[LogReceipt]: + return await w3.eth.get_logs( + { + "address": list(addresses), + "topics": [list(aggregated_topics)], + "fromBlock": _from, + "toBlock": _to, + } + ) + partial_filters.append(build_direct_filter) # generate filters for global events @@ -103,18 +114,28 @@ async def build_direct_filter(_from: BlockNumber, _to: BlockNumber | Literal["la def super_builder(_contract, _event) -> PartialFilter: # this is needed to pin nonlocal variables - async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["latest"]) -> list[EventData]: + async def build_topic_filter( + _from: BlockNumber, _to: BlockNumber | Literal["latest"] + ) -> list[EventData]: event_cls = _contract.events[_event["event_name"]] event_abi = event_cls.abi - input_types = ','.join(i['type'] for i in event_abi['inputs']) - topic0 = w3.keccak(text=f"{_event['event_name']}({input_types})").hex() - raw_logs = await w3.eth.get_logs({ - "topics" : [topic0], - "fromBlock": _from, - "toBlock" : _to, - }) - return [event_cls().process_log(raw_log) for raw_log in 
raw_logs] + input_types = ",".join(i["type"] for i in event_abi["inputs"]) + topic0 = w3.keccak( + text=f"{_event['event_name']}({input_types})" + ).hex() + raw_logs = await w3.eth.get_logs( + { + "topics": [topic0], + "fromBlock": _from, + "toBlock": _to, + } + ) + return [ + event_cls().process_log(raw_log) for raw_log in raw_logs + ] + return build_topic_filter + partial_filters.append(super_builder(contract, event)) return partial_filters, event_map, topic_map @@ -123,25 +144,24 @@ async def build_topic_filter(_from: BlockNumber, _to: BlockNumber | Literal["lat @guilds(cfg.discord.owner.server_id) @is_owner() async def trigger_event( - self, - interaction: Interaction, - contract: str, - event: str, - json_args: str = "{}", - block_number: int = 0 + self, + interaction: Interaction, + contract: str, + event: str, + json_args: str = "{}", + block_number: int = 0, ): await interaction.response.defer() try: - default_args = { - "tnx_fee": 0, - "tnx_fee_usd": 0 - } - event_obj = aDict({ - "event": event, - "transactionHash": aDict({"hex": lambda: '0' * 64}), - "blockNumber": block_number, - "args": aDict(default_args | json.loads(json_args)) - }) + default_args = {"tnx_fee": 0, "tnx_fee_usd": 0} + event_obj = aDict( + { + "event": event, + "transactionHash": aDict({"hex": lambda: "0" * 64}), + "blockNumber": block_number, + "args": aDict(default_args | json.loads(json_args)), + } + ) except json.JSONDecodeError: return await interaction.followup.send(content="Invalid JSON args!") @@ -182,7 +202,9 @@ async def replay_events(self, interaction: Interaction, tx_hash: str): responses, _ = await self.process_events(filtered_events) if responses: - await interaction.followup.send(embeds=[response.embed for response in responses]) + await interaction.followup.send( + embeds=[response.embed for response in responses] + ) else: await interaction.followup.send(content="No events found.") @@ -190,7 +212,9 @@ async def _get_new_events(self) -> list[Event]: from_block = 
self.last_served_block + 1 - self.lookback_distance return await self.get_past_events(from_block, self._pending_block) - async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + async def get_past_events( + self, from_block: BlockNumber, to_block: BlockNumber + ) -> list[Event]: log.debug(f"Fetching events in [{from_block}, {to_block}]") log.debug(f"Using {len(self._partial_filters)} filters") @@ -202,20 +226,26 @@ async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) if not contract_upgrade_block: return messages - log.info(f"Detected contract upgrade at block {contract_upgrade_block}, reinitializing") + log.info( + f"Detected contract upgrade at block {contract_upgrade_block}, reinitializing" + ) old_config = self._partial_filters, self.event_map, self.topic_map try: await rp.flush() self.__init__(self.bot) await self.async_init() - return messages + await self.get_past_events(contract_upgrade_block + 1, to_block) + return messages + await self.get_past_events( + contract_upgrade_block + 1, to_block + ) except Exception as err: # rollback to pre upgrade config if this goes wrong self._partial_filters, self.event_map, self.topic_map = old_config raise err - async def process_events(self, events: list[LogReceipt | EventData]) -> tuple[list[Event], BlockNumber | None]: + async def process_events( + self, events: list[LogReceipt | EventData] + ) -> tuple[list[Event], BlockNumber | None]: events.sort(key=lambda e: (e.blockNumber, e.logIndex)) messages = [] upgrade_block = None @@ -263,11 +293,18 @@ def hash_args(_args: aDict, _hash=args_hash) -> None: log.warning(f"Skipping unknown event {n}.{event.event}") elif event.get("event") in self.event_map: event_name = self.event_map[event.event] - if event_name in ["odao_contract_upgraded_event", "odao_contract_added_event"]: + if event_name in [ + "odao_contract_upgraded_event", + "odao_contract_added_event", + ]: log.info("detected contract upgrade") 
upgrade_block = event.blockNumber - if event_name in ["odao_upgrade_pending_event", "sdao_upgrade_vetoed_event", - "odao_contract_added_event", "odao_contract_upgraded_event"]: + if event_name in [ + "odao_upgrade_pending_event", + "sdao_upgrade_vetoed_event", + "odao_contract_added_event", + "odao_contract_upgraded_event", + ]: event.args = aDict(event.args) hash_args(event.args) embed = await self.handle_event(event_name, event) @@ -284,8 +321,11 @@ def hash_args(_args: aDict, _hash=args_hash) -> None: # get the event offset based on the lowest event log index of events with the same txn hashes and block hashes identical_events = filter( - lambda e: (e.transactionHash == event.transactionHash) and (e.blockHash == event.blockHash), - events + lambda e: ( + (e.transactionHash == event.transactionHash) + and (e.blockHash == event.blockHash) + ), + events, ) tx_log_index = event.logIndex - min(e.logIndex for e in identical_events) @@ -296,13 +336,15 @@ def hash_args(_args: aDict, _hash=args_hash) -> None: unique_id=f"{event.transactionHash.hex()}:{event_name}:{args_hash.hexdigest()}:{tx_log_index}", block_number=event.blockNumber, transaction_index=event.transactionIndex, - event_index=event.logIndex + event_index=event.logIndex, ) messages.append(response) return messages, upgrade_block - async def aggregate_events(self, events: list[LogReceipt | EventData]) -> list[aDict]: + async def aggregate_events( + self, events: list[LogReceipt | EventData] + ) -> list[aDict]: # aggregate and deduplicate events within the same transaction events_by_tx = {} for event in reversed(events): @@ -314,7 +356,7 @@ async def aggregate_events(self, events: list[LogReceipt | EventData]) -> list[a aggregation_attributes = { "rocketDepositPool.DepositAssigned": "assignment_count", - "unstETH.WithdrawalRequested": "amountOfStETH" + "unstETH.WithdrawalRequested": "amountOfStETH", } async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: @@ -347,9 +389,14 @@ async def 
get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: # sum up the amount of stETH withdrawn in this transaction if amount := tx_aggregates.get(full_event_name, 0): events.remove(event) - tx_aggregates[full_event_name] = amount + _event["args"]["amountOfStETH"] + tx_aggregates[full_event_name] = ( + amount + _event["args"]["amountOfStETH"] + ) elif full_event_name == "rocketTokenRETH.Transfer": - conflicting_events = ["rocketTokenRETH.TokensBurned", "rocketDepositPool.DepositReceived"] + conflicting_events = [ + "rocketTokenRETH.TokensBurned", + "rocketDepositPool.DepositReceived", + ] if any(event in events_by_name for event in conflicting_events): events.remove(event) continue @@ -357,7 +404,9 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: # only keep largest rETH transfer contract = await rp.get_contract_by_address(event["address"]) _event = aDict(contract.events[event_name]().process_log(event)) - _prev_event = aDict(contract.events[event_name]().process_log(event)) + _prev_event = aDict( + contract.events[event_name]().process_log(event) + ) if _prev_event["args"]["value"] > _event["args"]["value"]: events.remove(event) event = prev_event @@ -368,17 +417,28 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: if "MinipoolScrubbed" in events_by_name: events.remove(event) continue - elif full_event_name == "rocketDAOProtocolProposal.ProposalVoteOverridden": + elif ( + full_event_name + == "rocketDAOProtocolProposal.ProposalVoteOverridden" + ): # override is emitted first, thus only seen here after the main vote event # remove last seen vote event - vote_event = events_by_name.get("rocketDAOProtocolProposal.ProposalVoted", [None]).pop() + vote_event = events_by_name.get( + "rocketDAOProtocolProposal.ProposalVoted", [None] + ).pop() if vote_event is not None: events.remove(vote_event) elif full_event_name == "MinipoolPrestaked": - for assign_event in 
events_by_name.get("rocketDepositPool.DepositAssigned", []).copy(): - assigned_minipool = w3.to_checksum_address(assign_event["topics"][1][-20:]) + for assign_event in events_by_name.get( + "rocketDepositPool.DepositAssigned", [] + ).copy(): + assigned_minipool = w3.to_checksum_address( + assign_event["topics"][1][-20:] + ) if event["address"] == assigned_minipool: - events_by_name["rocketDepositPool.DepositAssigned"].remove(assign_event) + events_by_name["rocketDepositPool.DepositAssigned"].remove( + assign_event + ) events.remove(assign_event) tx_aggregates["rocketDepositPool.DepositAssigned"] -= 1 elif full_event_name in aggregation_attributes: @@ -388,7 +448,9 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: tx_aggregates[full_event_name] = count + 1 else: # count, but report as individual events - tx_aggregates[full_event_name] = tx_aggregates.get(full_event_name, 0) + 1 + tx_aggregates[full_event_name] = ( + tx_aggregates.get(full_event_name, 0) + 1 + ) if event in events: events_by_name[full_event_name].append(event) @@ -400,7 +462,9 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: continue tx_hash = event["transactionHash"] - if (aggregated_value := aggregates[tx_hash].get(full_event_name, None)) is None: + if ( + aggregated_value := aggregates[tx_hash].get(full_event_name, None) + ) is None: continue event[aggregation_attributes[full_event_name]] = aggregated_value @@ -410,17 +474,25 @@ async def get_event_name(_event: LogReceipt | EventData) -> tuple[str, str]: async def handle_global_event(self, event_name: str, event: aDict) -> Embed | None: receipt = await w3.eth.get_transaction_receipt(event.transactionHash) - is_minipool_event = await rp.is_minipool(event.address) or await rp.is_minipool(receipt.to) - is_megapool_event = await rp.is_megapool(event.address) or await rp.is_megapool(receipt.to) - - if not any([ - is_minipool_event, - is_megapool_event, - rp.get_name_by_address(receipt.to) not 
in [None, "multicall3"], - rp.get_name_by_address(event.address) - ]): + is_minipool_event = await rp.is_minipool(event.address) or await rp.is_minipool( + receipt.to + ) + is_megapool_event = await rp.is_megapool(event.address) or await rp.is_megapool( + receipt.to + ) + + if not any( + [ + is_minipool_event, + is_megapool_event, + rp.get_name_by_address(receipt.to) not in [None, "multicall3"], + rp.get_name_by_address(event.address), + ] + ): # some random contract we don't care about - log.warning(f"Skipping {event.transactionHash.hex()} because the called contract is not a minipool") + log.warning( + f"Skipping {event.transactionHash.hex()} because the called contract is not a minipool" + ) return None pubkey = None @@ -431,14 +503,18 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Embed | No # maybe the contract has it stored? if not pubkey: - pubkey = (await rp.call("rocketMinipoolManager.getMinipoolPubkey", event.address)).hex() + pubkey = ( + await rp.call("rocketMinipoolManager.getMinipoolPubkey", event.address) + ).hex() # maybe it's in the transaction? 
if not pubkey: with warnings.catch_warnings(): warnings.simplefilter("ignore") deposit_contract = await rp.get_contract_by_name("casperDeposit") - processed_logs = deposit_contract.events.DepositEvent().process_receipt(receipt) + processed_logs = deposit_contract.events.DepositEvent().process_receipt( + receipt + ) # attempt to retrieve the pubkey if processed_logs: @@ -450,7 +526,9 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Embed | No # while we are at it add the sender address, so it shows up event.args["from"] = receipt["from"] - if (n := rp.get_name_by_address(receipt["to"])) is None or not n.startswith("rocket"): + if (n := rp.get_name_by_address(receipt["to"])) is None or not n.startswith( + "rocket" + ): event.args["from"] = receipt["to"] event.args["caller"] = receipt["from"] @@ -459,7 +537,9 @@ async def handle_global_event(self, event_name: str, event: aDict) -> Embed | No event.args.minipool = event.address if is_megapool_event: event.args.megapool = event.address - event.args.node = await rp.call("rocketMegapoolDelegate.getNodeAddress", address=event.address) + event.args.node = await rp.call( + "rocketMegapoolDelegate.getNodeAddress", address=event.address + ) return await self.handle_event(event_name, event) @@ -467,19 +547,33 @@ async def handle_event(self, event_name: str, event: aDict) -> Embed | None: args = aDict(event.args) if "negative_rETH_ratio_update_event" in event_name: - args.currRETHRate = solidity.to_float( - args.totalEth) / solidity.to_float(args.rethSupply) if args.rethSupply > 0 else 1 - args.prevRETHRate = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate", block=event.blockNumber - 1)) + args.currRETHRate = ( + solidity.to_float(args.totalEth) / solidity.to_float(args.rethSupply) + if args.rethSupply > 0 + else 1 + ) + args.prevRETHRate = solidity.to_float( + await rp.call( + "rocketTokenRETH.getExchangeRate", block=event.blockNumber - 1 + ) + ) d = args.currRETHRate - args.prevRETHRate 
if d > 0 or abs(d) < 0.00001: return None elif "price_update_event" in event_name: args.value = args.rplPrice - period_start = await rp.call("rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber) - period_length = await rp.call("rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber) + period_start = await rp.call( + "rocketRewardsPool.getClaimIntervalTimeStart", block=event.blockNumber + ) + period_length = await rp.call( + "rocketRewardsPool.getClaimIntervalTime", block=event.blockNumber + ) args.rewardPeriodEnd = period_start + period_length # in seconds - update_rate = await rp.call("rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", block=event.blockNumber) + update_rate = await rp.call( + "rocketDAOProtocolSettingsNetwork.getSubmitPricesFrequency", + block=event.blockNumber, + ) # get timestamp of event block ts = await block_to_ts(event.blockNumber) # check if the next update is after the next period ts @@ -503,46 +597,55 @@ async def handle_event(self, event_name: str, event: aDict) -> Embed | None: value = w3.to_checksum_address(value_raw) case _: value = "???" 
- description_parts.append( - f"`{args.settingPaths[i]}` set to `{value}`" - ) + description_parts.append(f"`{args.settingPaths[i]}` set to `{value}`") args.description = "\n".join(description_parts) elif event_name == "bootstrap_pdao_claimer_event": + def share_repr(percentage: float) -> str: max_width = 35 num_points = round(max_width * percentage / 100) - return '*' * num_points - - node_share = args.nodePercent / 10 ** 16 - pdao_share = args.protocolPercent / 10 ** 16 - odao_share = args.trustedNodePercent / 10 ** 16 - - args.description = '\n'.join([ - "Node Operator Share", - f"{share_repr(node_share)} {node_share:.1f}%", - "Protocol DAO Share", - f"{share_repr(pdao_share)} {pdao_share:.1f}%", - "Oracle DAO Share", - f"{share_repr(odao_share)} {odao_share:.1f}%", - ]) + return "*" * num_points + + node_share = args.nodePercent / 10**16 + pdao_share = args.protocolPercent / 10**16 + odao_share = args.trustedNodePercent / 10**16 + + args.description = "\n".join( + [ + "Node Operator Share", + f"{share_repr(node_share)} {node_share:.1f}%", + "Protocol DAO Share", + f"{share_repr(pdao_share)} {pdao_share:.1f}%", + "Oracle DAO Share", + f"{share_repr(odao_share)} {odao_share:.1f}%", + ] + ) elif event_name == "bootstrap_sdao_member_kick_event": - args.memberAddress = await el_explorer_url(args.memberAddress, block=(event.blockNumber - 1)) + args.memberAddress = await el_explorer_url( + args.memberAddress, block=(event.blockNumber - 1) + ) elif event_name in [ "odao_member_leave_event", "odao_member_kick_event", "sdao_member_leave_event", - "sdao_member_request_leave_event" + "sdao_member_request_leave_event", ]: - args.nodeAddress = await el_explorer_url(args.nodeAddress, block=(event.blockNumber - 1)) - elif any([ - event_name.startswith("cs_deposit"), - event_name.startswith("cs_withdraw"), - event_name.startswith("rocksolid_deposit") - ]): + args.nodeAddress = await el_explorer_url( + args.nodeAddress, block=(event.blockNumber - 1) + ) + elif any( + [ + 
event_name.startswith("cs_deposit"), + event_name.startswith("cs_withdraw"), + event_name.startswith("rocksolid_deposit"), + ] + ): args.assets = solidity.to_float(args.assets) args.shares = solidity.to_float(args.shares) elif event_name.startswith("rocksolid_withdraw"): - assets = await rp.call("RockSolidVault.convertToAssets", args.shares, block=event.blockNumber) + assets = await rp.call( + "RockSolidVault.convertToAssets", args.shares, block=event.blockNumber + ) args.assets = solidity.to_float(assets) args.shares = solidity.to_float(args.shares) elif event_name == "cs_max_validator_change_event": @@ -552,25 +655,34 @@ def share_repr(percentage: float) -> str: elif args.newLimit < args.oldLimit: event_name = event_name.replace("change", "decrease") elif event_name == "cs_operator_added_event": - args.address = await w3.eth.get_transaction_receipt(event.transactionHash)["from"] + args.address = await w3.eth.get_transaction_receipt(event.transactionHash)[ + "from" + ] elif event_name == "cs_rpl_treasury_fee_change_event": args.oldFee = 100 * solidity.to_float(args.oldFee) args.newFee = 100 * solidity.to_float(args.newFee) elif "event_name" in [ "cs_eth_treasury_fee_change_event", "cs_eth_no_fee_change_event", - "cs_eth_mint_fee_change_event" + "cs_eth_mint_fee_change_event", ]: args.oldFee = 100 * solidity.to_float(args.oldValue) args.newFee = 100 * solidity.to_float(args.newValue) elif event_name.startswith("cs_operators"): - args.operatorList = "\n".join([await el_explorer_url(address) for address in args.operators]) - elif event_name in ["cs_rpl_min_ratio_change_event", "cs_rpl_target_ratio_change_event"]: + args.operatorList = "\n".join( + [await el_explorer_url(address) for address in args.operators] + ) + elif event_name in [ + "cs_rpl_min_ratio_change_event", + "cs_rpl_target_ratio_change_event", + ]: args.oldRatio = 100 * solidity.to_float(args.oldRatio) args.newRatio = 100 * solidity.to_float(args.newRatio) if "submission" in args: - args.submission = 
aDict(dict(zip(SUBMISSION_KEYS, args.submission, strict=False))) + args.submission = aDict( + dict(zip(SUBMISSION_KEYS, args.submission, strict=False)) + ) if "otc_swap" in event_name: # signer = seller @@ -600,15 +712,21 @@ def share_repr(percentage: float) -> str: args.otherToken = args.sellToken if args.otherToken.lower() == "wETH": # get exchange rate from rp - args.marketExchangeRate = await rp.call("rocketNetworkPrices.getRPLPrice") + args.marketExchangeRate = await rp.call( + "rocketNetworkPrices.getRPLPrice" + ) # calculate the discount received compared to the market price - args.discountAmount = (1 - args.exchangeRate / solidity.to_float(args.marketExchangeRate)) * 100 + args.discountAmount = ( + 1 - args.exchangeRate / solidity.to_float(args.marketExchangeRate) + ) * 100 receipt = None if cfg.rocketpool.chain == "mainnet": receipt = await w3.eth.get_transaction_receipt(event.transactionHash) args.tnx_fee = receipt["gasUsed"] * receipt["effectiveGasPrice"] - args.tnx_fee_usd = round(await rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2) + args.tnx_fee_usd = round( + await rp.get_eth_usdc_price() * args.tnx_fee / 10**18, 2 + ) args.caller = receipt["from"] # add transaction hash and block number to args @@ -617,27 +735,54 @@ def share_repr(percentage: float) -> str: # add proposal message manually if the event contains a proposal if "pdao_proposal" in event_name: - proposal_id = event.args.proposalID if "proposalID" in event.args else event.args.proposalId + proposal_id = ( + event.args.proposalID + if "proposalID" in event.args + else event.args.proposalId + ) if "root" in event_name: # not interesting if the root wasn't submitted in response to a challenge # ChallengeState.Challenged = 1 challenge_state = await rp.call( - "rocketDAOProtocolVerifier.getChallengeState", proposal_id, args.index, block=event.blockNumber + "rocketDAOProtocolVerifier.getChallengeState", + proposal_id, + args.index, + block=event.blockNumber, ) if challenge_state != 1: 
return None if "add" in event_name or "destroy" in event_name: - args.proposalBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) + args.proposalBond = solidity.to_int( + await rp.call( + "rocketDAOProtocolVerifier.getProposalBond", proposal_id + ) + ) elif "root" in event_name or "challenge" in event_name: - args.proposalBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getProposalBond", proposal_id)) - args.challengeBond = solidity.to_int(await rp.call("rocketDAOProtocolVerifier.getChallengeBond", proposal_id)) - args.challengePeriod = await rp.call("rocketDAOProtocolVerifier.getChallengePeriod", proposal_id) + args.proposalBond = solidity.to_int( + await rp.call( + "rocketDAOProtocolVerifier.getProposalBond", proposal_id + ) + ) + args.challengeBond = solidity.to_int( + await rp.call( + "rocketDAOProtocolVerifier.getChallengeBond", proposal_id + ) + ) + args.challengePeriod = await rp.call( + "rocketDAOProtocolVerifier.getChallengePeriod", proposal_id + ) # create human-readable decision for votes if "direction" in args: - args.decision = ["invalid", "abstain", "for", "against", "against with veto"][args.direction] + args.decision = [ + "invalid", + "abstain", + "for", + "against", + "against with veto", + ][args.direction] if "votingPower" in args: args.votingPower = solidity.to_float(args.votingPower) @@ -645,9 +790,13 @@ def share_repr(percentage: float) -> str: # not interesting return None elif "vote_override" in event_name: - proposal_block = await rp.call("rocketDAOProtocolProposal.getProposalBlock", proposal_id) + proposal_block = await rp.call( + "rocketDAOProtocolProposal.getProposalBlock", proposal_id + ) args.votingPower = solidity.to_float( - await rp.call("rocketNetworkVoting.getVotingPower", args.voter, proposal_block) + await rp.call( + "rocketNetworkVoting.getVotingPower", args.voter, proposal_block + ) ) if args.votingPower < 100: # not interesting @@ -659,7 +808,10 @@ def 
share_repr(percentage: float) -> str: proposal, include_proposer=False, include_payload=("add" in event_name), - include_votes=all(kw not in event_name for kw in ("add", "challenge", "root", "destroy")), + include_votes=all( + kw not in event_name + for kw in ("add", "challenge", "root", "destroy") + ), ) elif "dao_proposal" in event_name: proposal_id = event.args.proposalID @@ -670,10 +822,13 @@ def share_repr(percentage: float) -> str: # change prefix for DAO-specific event dao_name = await rp.call("rocketDAOProposal.getDAO", proposal_id) - event_name = event_name.replace("dao", { - "rocketDAONodeTrustedProposals": "odao", - "rocketDAOSecurityProposals": "sdao" - }[dao_name]) + event_name = event_name.replace( + "dao", + { + "rocketDAONodeTrustedProposals": "odao", + "rocketDAOSecurityProposals": "sdao", + }[dao_name], + ) dao = DefaultDAO(dao_name) proposal = await dao.fetch_proposal(proposal_id) @@ -685,16 +840,25 @@ def share_repr(percentage: float) -> str: ) # add inflation and new supply if inflation occurred elif "rpl_inflation" in event_name: - args.total_supply = int(solidity.to_float(await rp.call("rocketTokenRPL.totalSupply"))) + args.total_supply = int( + solidity.to_float(await rp.call("rocketTokenRPL.totalSupply")) + ) args.inflation = round(await rp.get_annual_rpl_inflation() * 100, 4) elif "auction_bid_event" in event_name: eth = solidity.to_float(args.bidAmount) price = solidity.to_float( - await rp.call("rocketAuctionManager.getLotPriceAtBlock", args.lotIndex, args.blockNumber)) + await rp.call( + "rocketAuctionManager.getLotPriceAtBlock", + args.lotIndex, + args.blockNumber, + ) + ) args.rplAmount = eth / price if event_name in ["rpl_stake_event", "rpl_withdraw_event"]: # get eth price by multiplying the amount by the current RPL ratio - rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + rpl_ratio = solidity.to_float( + await rp.call("rocketNetworkPrices.getRPLPrice") + ) args.amount = 
solidity.to_float(args.amount) args.ethAmount = args.amount * rpl_ratio elif event_name in ["node_merkle_rewards_claimed"]: @@ -704,7 +868,9 @@ def share_repr(percentage: float) -> str: args.amount = args.value / 10**18 if args["from"] in cfg.rocketpool.dao_multisigs: event_name = "pdao_erc20_transfer_event" - token_contract = await rp.assemble_contract(name="ERC20", address=event["address"]) + token_contract = await rp.assemble_contract( + name="ERC20", address=event["address"] + ) args.symbol = await token_contract.functions.symbol().call() elif token_prefix != "reth": return None @@ -718,16 +884,24 @@ def share_repr(percentage: float) -> str: event_name = "validator_deposit_event" # reject if the amount is not major - if any([event_name == "reth_transfer_event" and args.amount < 1000, + if any( + [ + event_name == "reth_transfer_event" and args.amount < 1000, event_name == "rpl_stake_event" and args.amount < 1000, event_name == "rpl_stake_event" and args.amount < 1000, - event_name == "node_merkle_rewards_claimed" and args.ethAmount < 5 and args.amountETH < 5, - event_name == "rpl_withdraw_event" and args.ethAmount < 16]): + event_name == "node_merkle_rewards_claimed" + and args.ethAmount < 5 + and args.amountETH < 5, + event_name == "rpl_withdraw_event" and args.ethAmount < 16, + ] + ): amounts = {} for arg in ["ethAmount", "amount", "amountETH"]: if arg in args: amounts[arg] = args[arg] - log.debug(f"Skipping {event_name} because the event ({amounts}) is too small to be interesting") + log.debug( + f"Skipping {event_name} because the event ({amounts}) is too small to be interesting" + ) return None if "claimingContract" in args and args.claimingAddress == args.claimingContract: @@ -744,29 +918,41 @@ def share_repr(percentage: float) -> str: if event_name == "odao_upgrade_pending_event": args.contractName = await rp.call( - "rocketDAONodeTrustedUpgrade.getName", args.upgradeProposalID, block=event.blockNumber + "rocketDAONodeTrustedUpgrade.getName", + 
args.upgradeProposalID, + block=event.blockNumber, ) args.contractAddress = await rp.call( - "rocketDAONodeTrustedUpgrade.getUpgradeAddress", args.upgradeProposalID, block=event.blockNumber + "rocketDAONodeTrustedUpgrade.getUpgradeAddress", + args.upgradeProposalID, + block=event.blockNumber, ) args.vetoDeadline = await rp.call( - "rocketDAONodeTrustedUpgrade.getEnd", args.upgradeProposalID, block=event.blockNumber + "rocketDAONodeTrustedUpgrade.getEnd", + args.upgradeProposalID, + block=event.blockNumber, ) if args.contractAddress == "0x0000000000000000000000000000000000000000": del args.contractAddress event_name = "upgrade_pending_abi_event" elif event_name == "sdao_upgrade_vetoed_event": args.contractName = await rp.call( - "rocketDAONodeTrustedUpgrade.getName", args.upgradeProposalID, block=event.blockNumber + "rocketDAONodeTrustedUpgrade.getName", + args.upgradeProposalID, + block=event.blockNumber, ) elif event_name == "odao_contract_upgraded_event": args.contractName = rp.get_name_by_address(args.oldAddress) or "Unknown" elif event_name == "odao_contract_added_event": args.contractName = rp.get_name_by_address(args.newAddress) or "Unknown" if "node_register_event" in event_name: - args.timezone = await rp.call("rocketNodeManager.getNodeTimezoneLocation", args.node) + args.timezone = await rp.call( + "rocketNodeManager.getNodeTimezoneLocation", args.node + ) if "odao_member_challenge_event" in event_name: - args.challengeDeadline = args.time + await rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") + args.challengeDeadline = args.time + await rp.call( + "rocketDAONodeTrustedSettingsMembers.getChallengeWindow" + ) if "odao_member_challenge_decision_event" in event_name: if args.success: event_name = "odao_member_challenge_accepted_event" @@ -775,17 +961,24 @@ def share_repr(percentage: float) -> str: await rp.call( "rocketDAONodeTrusted.getMemberRPLBondAmount", args.nodeChallengedAddress, - block=args.blockNumber - 1 + block=args.blockNumber - 
1, ) ) args.sender = args.nodeChallengeDeciderAddress else: event_name = "odao_member_challenge_rejected_event" if "node_smoothing_pool_state_changed" in event_name: - validator_count = await rp.call("rocketMinipoolManager.getNodeMinipoolCount", args.node) - megapool_address = await rp.call("rocketNodeManager.getMegapoolAddress", args.node) + validator_count = await rp.call( + "rocketMinipoolManager.getNodeMinipoolCount", args.node + ) + megapool_address = await rp.call( + "rocketNodeManager.getMegapoolAddress", args.node + ) if megapool_address != "0x0000000000000000000000000000000000000000": - validator_count += await rp.call("rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address) + validator_count += await rp.call( + "rocketMegapoolDelegate.getActiveValidatorCount", + address=megapool_address, + ) args.validatorCount = validator_count if args.state: event_name = "node_smoothing_pool_joined" @@ -798,16 +991,24 @@ def share_repr(percentage: float) -> str: event_name = "node_merkle_rewards_claimed_rpl" if "minipool_deposit_received_event" in event_name: - contract = await rp.assemble_contract("rocketMinipoolDelegate", args.minipool) - args.commission = solidity.to_float(await contract.functions.getNodeFee().call()) + contract = await rp.assemble_contract( + "rocketMinipoolDelegate", args.minipool + ) + args.commission = solidity.to_float( + await contract.functions.getNodeFee().call() + ) # get the transaction receipt args.depositAmount = await rp.call( - "rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber + "rocketMinipool.getNodeDepositBalance", + address=args.minipool, + block=args.blockNumber, ) user_deposit = args.depositAmount receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] - ee = (await rp.get_contract_by_name("rocketNodeDeposit")).events.DepositReceived() + ee = ( + await rp.get_contract_by_name("rocketNodeDeposit") + ).events.DepositReceived() with 
warnings.catch_warnings(): warnings.simplefilter("ignore") processed_logs = ee.process_receipt(receipt) @@ -818,19 +1019,26 @@ def share_repr(percentage: float) -> str: if user_deposit < args.depositAmount: args.creditAmount = args.depositAmount - user_deposit args.balanceAmount = 0 - e = (await rp.get_contract_by_name("rocketVault")).events.EtherWithdrawn() + e = ( + await rp.get_contract_by_name("rocketVault") + ).events.EtherWithdrawn() with warnings.catch_warnings(): warnings.simplefilter("ignore") processed_logs = e.process_receipt(receipt) - deposit_contract = bytes(w3.solidity_keccak(["string"], ["rocketNodeDeposit"])) + deposit_contract = bytes( + w3.solidity_keccak(["string"], ["rocketNodeDeposit"]) + ) for withdraw_event in processed_logs: # event.logindex 44, withdraw_event.logindex 50, rough distance like that # reminder order is different than the previous example - if event.logIndex - 7 < withdraw_event.logIndex < event.logIndex and withdraw_event.args["by"] == deposit_contract: - args.balanceAmount = withdraw_event.args["amount"] - args.creditAmount -= args.balanceAmount - break + if ( + event.logIndex - 7 < withdraw_event.logIndex < event.logIndex + and withdraw_event.args["by"] == deposit_contract + ): + args.balanceAmount = withdraw_event.args["amount"] + args.creditAmount -= args.balanceAmount + break if args.balanceAmount == 0: event_name += "_credit" @@ -845,18 +1053,29 @@ def share_repr(percentage: float) -> str: case _: return None - args.operator = await rp.call("rocketMinipoolDelegate.getNodeAddress", address=args.minipool) + args.operator = await rp.call( + "rocketMinipoolDelegate.getNodeAddress", address=args.minipool + ) - if event_name in ["minipool_bond_reduce_event", "minipool_vacancy_prepared_event", - "minipool_withdrawal_processed_event", "minipool_bond_reduction_started_event", - "pool_deposit_assigned_event"]: + if event_name in [ + "minipool_bond_reduce_event", + "minipool_vacancy_prepared_event", + 
"minipool_withdrawal_processed_event", + "minipool_bond_reduction_started_event", + "pool_deposit_assigned_event", + ]: # get the node operator address from minipool contract contract = await rp.assemble_contract("rocketMinipool", args.minipool) args.node = await contract.functions.getNodeAddress().call() if "minipool_bond_reduction_started_event" in event_name: # get the previousBondAmount from the minipool contract args.previousBondAmount = solidity.to_float( - await rp.call("rocketMinipool.getNodeDepositBalance", address=args.minipool, block=args.blockNumber - 1)) + await rp.call( + "rocketMinipool.getNodeDepositBalance", + address=args.minipool, + block=args.blockNumber - 1, + ) + ) elif event_name == "minipool_withdrawal_processed_event": args.totalAmount = args.nodeAmount + args.userAmount elif event_name == "pool_deposit_assigned_event": @@ -866,7 +1085,9 @@ def share_repr(percentage: float) -> str: args.assignmentCount = event["assignment_count"] else: return None - elif "minipool_scrub" in event_name and await rp.call("rocketMinipoolDelegate.getVacant", address=args.minipool): + elif "minipool_scrub" in event_name and await rp.call( + "rocketMinipoolDelegate.getVacant", address=args.minipool + ): event_name = f"vacant_{event_name}" if event_name == "vacant_minipool_scrub_event": # let's try to determine the reason. there are 4 reasons a vacant minipool can get scrubbed: @@ -877,28 +1098,49 @@ def share_repr(percentage: float) -> str: # 4. 
the migration could have timed out, the oDAO will scrub minipools # after they have passed half of the migration window # get pubkey from minipool contract - pubkey = (await rp.call("rocketMinipoolManager.getMinipoolPubkey", args.minipool)).hex() + pubkey = ( + await rp.call( + "rocketMinipoolManager.getMinipoolPubkey", args.minipool + ) + ).hex() vali_info = (await bacon.get_validator(f"0x{pubkey}"))["data"] reason = "joe fucking up (unknown reason)" if vali_info: # check for #1 - if all([vali_info["validator"]["withdrawal_credentials"][:4] == "0x01", - vali_info["validator"]["withdrawal_credentials"][-40:] != args.minipool[2:]]): + if all( + [ + vali_info["validator"]["withdrawal_credentials"][:4] + == "0x01", + vali_info["validator"]["withdrawal_credentials"][-40:] + != args.minipool[2:], + ] + ): reason = "having invalid withdrawal credentials set on the beacon chain" # check for #2 configured_balance = solidity.to_float( - await rp.call("rocketMinipoolDelegate.getPreMigrationBalance", address=args.minipool, - block=args.blockNumber - 1)) - if (solidity.to_float(vali_info["balance"], 9) - configured_balance) < -0.01: + await rp.call( + "rocketMinipoolDelegate.getPreMigrationBalance", + address=args.minipool, + block=args.blockNumber - 1, + ) + ) + if ( + solidity.to_float(vali_info["balance"], 9) - configured_balance + ) < -0.01: reason = "having a balance lower than configured in the minipool contract on the beacon chain" # check for #3 if vali_info["status"] != "active_ongoing": reason = "not being active on the beacon chain" # check for #4 - scrub_period = await rp.call("rocketDAONodeTrustedSettingsMinipool.getPromotionScrubPeriod", - block=args.blockNumber - 1) - minipool_creation = await rp.call("rocketMinipoolDelegate.getStatusTime", address=args.minipool, - block=args.blockNumber - 1) + scrub_period = await rp.call( + "rocketDAONodeTrustedSettingsMinipool.getPromotionScrubPeriod", + block=args.blockNumber - 1, + ) + minipool_creation = await rp.call( + 
"rocketMinipoolDelegate.getStatusTime", + address=args.minipool, + block=args.blockNumber - 1, + ) block_time = await block_to_ts(args.blockNumber - 1) if block_time - minipool_creation > scrub_period // 2: reason = "taking too long to migrate their withdrawal credentials on the beacon chain" diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index b69d3838..562be60d 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -20,7 +20,9 @@ def __init__(self, bot: RocketWatch): self.bot = bot @command() - async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", "pie"]): + async def fee_distribution( + self, interaction: Interaction, mode: Literal["tree", "pie"] + ): """ Show the distribution of minipool commission percentages. """ @@ -33,30 +35,30 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", fig, axs = plt.subplots(1, 2) for i, bond in enumerate([8, 16]): - result = await self.bot.db.minipools.aggregate([ - { - "$match": { - "node_deposit_balance": bond, - "beacon.status": "active_ongoing" - } - }, - { - "$group": { - "_id" : {"$round": ["$node_fee", 2]}, - "count": {"$sum": 1} - } - }, - { - "$sort": {"_id": 1} - } - ]) + result = await self.bot.db.minipools.aggregate( + [ + { + "$match": { + "node_deposit_balance": bond, + "beacon.status": "active_ongoing", + } + }, + { + "$group": { + "_id": {"$round": ["$node_fee", 2]}, + "count": {"$sum": 1}, + } + }, + {"$sort": {"_id": 1}}, + ] + ) labels = [] sizes = [] subtree = {} for entry in await result.to_list(): - fee_percentage = entry['_id'] * 100 + fee_percentage = entry["_id"] * 100 labels.append(f"{fee_percentage:.0f}%") sizes.append(entry["count"]) subtree[labels[-1]] = sizes[-1] @@ -71,7 +73,13 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", labels[i] = "" 
ax.set_title(f"{bond} ETH") - ax.pie(sizes, labels=labels, autopct=lambda p, _total=total: f"{p * _total / 100:.0f}" if (p >= 5) else "") + ax.pie( + sizes, + labels=labels, + autopct=lambda p, _total=total: ( + f"{p * _total / 100:.0f}" if (p >= 5) else "" + ), + ) if mode == "tree": e.description = f"```\n{render_tree_legacy(tree, 'Minipools')}\n```" @@ -79,7 +87,7 @@ async def fee_distribution(self, interaction: Interaction, mode: Literal["tree", elif mode == "pie": img = BytesIO() fig.tight_layout() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() plt.close() diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index e6e3f619..7b7a903a 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -66,16 +66,18 @@ def datetime_to_epoch(_dt: str) -> int: topics = [] for topic_dict in topic_list: - topics.append(Forum.Topic( - id=topic_dict["id"], - title=topic_dict["fancy_title"], - slug=topic_dict["slug"], - post_count=topic_dict["posts_count"], - created_at=datetime_to_epoch(topic_dict["created_at"]), - last_post_at=datetime_to_epoch(topic_dict["last_posted_at"]), - views=topic_dict["views"], - like_count=topic_dict["like_count"] - )) + topics.append( + Forum.Topic( + id=topic_dict["id"], + title=topic_dict["fancy_title"], + slug=topic_dict["slug"], + post_count=topic_dict["posts_count"], + created_at=datetime_to_epoch(topic_dict["created_at"]), + last_post_at=datetime_to_epoch(topic_dict["last_posted_at"]), + views=topic_dict["views"], + like_count=topic_dict["like_count"], + ) + ) return topics @staticmethod @@ -100,26 +102,30 @@ async def get_recent_topics() -> list[Topic]: @retry_async(tries=3, delay=2, backoff=2) async def get_top_users(period: Period, order_by: UserMetric) -> list[User]: async with aiohttp.ClientSession() as session: - response = await session.get(f"{Forum.DOMAIN}/directory_items.json?period={period}&order={order_by}") + response = await 
session.get( + f"{Forum.DOMAIN}/directory_items.json?period={period}&order={order_by}" + ) data = await response.json() users = [] for user_dict in data["directory_items"]: - users.append(Forum.User( - id=user_dict["id"], - username=user_dict["user"]["username"], - name=user_dict["user"]["name"] if user_dict["user"]["name"] else None, - topic_count=user_dict["topic_count"], - post_count=user_dict["post_count"], - likes_received=user_dict["likes_received"] - )) + users.append( + Forum.User( + id=user_dict["id"], + username=user_dict["user"]["username"], + name=user_dict["user"]["name"] + if user_dict["user"]["name"] + else None, + topic_count=user_dict["topic_count"], + post_count=user_dict["post_count"], + likes_received=user_dict["likes_received"], + ) + ) return users @command() async def top_forum_posts( - self, - interaction: Interaction, - period: Period = "monthly" + self, interaction: Interaction, period: Period = "monthly" ) -> None: """Get the most popular topics from the forum""" await interaction.response.defer(ephemeral=is_hidden(interaction)) @@ -147,7 +153,7 @@ async def top_forum_users( self, interaction: Interaction, period: Period = "monthly", - order_by: UserMetric = "likes_received" + order_by: UserMetric = "likes_received", ) -> None: """Get the most active forum users""" await interaction.response.defer(ephemeral=is_hidden(interaction)) diff --git a/rocketwatch/plugins/governance/governance.py b/rocketwatch/plugins/governance/governance.py index e0162a2d..8afad26b 100644 --- a/rocketwatch/plugins/governance/governance.py +++ b/rocketwatch/plugins/governance/governance.py @@ -22,18 +22,26 @@ class Governance(StatusPlugin): @staticmethod - async def _get_active_pdao_proposals(dao: ProtocolDAO) -> list[ProtocolDAO.Proposal]: + async def _get_active_pdao_proposals( + dao: ProtocolDAO, + ) -> list[ProtocolDAO.Proposal]: proposal_ids = await dao.get_proposal_ids_by_state() active_proposal_ids = [] active_proposal_ids += 
proposal_ids[dao.ProposalState.ActivePhase1] active_proposal_ids += proposal_ids[dao.ProposalState.ActivePhase2] - return [await dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] + return [ + await dao.fetch_proposal(proposal_id) + for proposal_id in reversed(active_proposal_ids) + ] @staticmethod async def _get_active_dao_proposals(dao: DefaultDAO) -> list[DefaultDAO.Proposal]: proposal_ids = await dao.get_proposal_ids_by_state() active_proposal_ids = proposal_ids[dao.ProposalState.Active] - return [await dao.fetch_proposal(proposal_id) for proposal_id in reversed(active_proposal_ids)] + return [ + await dao.fetch_proposal(proposal_id) + for proposal_id in reversed(active_proposal_ids) + ] @staticmethod async def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: @@ -41,7 +49,9 @@ async def _get_tx_hash_for_proposal(dao: DAO, proposal: DAO.Proposal) -> HexStr: to_block = (await ts_to_block(proposal.created)) + 1 log.info(f"Looking for proposal {proposal} in [{from_block},{to_block}]") - for receipt in dao.proposal_contract.events.ProposalAdded().get_logs(from_block=from_block, to_block=to_block): + for receipt in dao.proposal_contract.events.ProposalAdded().get_logs( + from_block=from_block, to_block=to_block + ): log.info(f"Found receipt {receipt}") if receipt.args.proposalID == proposal.id: return receipt.transactionHash.hex() @@ -58,7 +68,11 @@ async def _get_active_snapshot_proposals(self) -> list[Snapshot.Proposal]: async def _get_draft_rpips(self) -> list[RPIPs.RPIP]: try: statuses = {"Draft", "Review"} - return [rpip for rpip in await RPIPs.get_all_rpips() if (rpip.status in statuses)][::-1] + return [ + rpip + for rpip in await RPIPs.get_all_rpips() + if (rpip.status in statuses) + ][::-1] except Exception as e: await self.bot.report_error(e) return [] @@ -68,7 +82,11 @@ async def _get_latest_forum_topics(self, days: int) -> list[Forum.Topic]: topics = await Forum.get_recent_topics() now = 
datetime.now().timestamp() # only get topics from within a week - topics = [t for t in topics if (now - t.last_post_at) <= timedelta(days=days).total_seconds()] + topics = [ + t + for t in topics + if (now - t.last_post_at) <= timedelta(days=days).total_seconds() + ] return topics except Exception as e: await self.bot.report_error(e) @@ -84,7 +102,7 @@ def sanitize(text: str, max_length: int = 50) -> str: text = text.replace("https://", "") text = escape_markdown(text) if len(text) > max_length: - text = text[:(max_length - 1)] + "…" + text = text[: (max_length - 1)] + "…" return text async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: @@ -131,7 +149,9 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: section_content += "- **RPIPs in review or draft status**\n" for i, rpip in enumerate(draft_rpips, start=1): title = sanitize(rpip.title, 40) - section_content += f" {i}. [{title}]({rpip.url}) (RPIP-{rpip.number})\n" + section_content += ( + f" {i}. [{title}]({rpip.url}) (RPIP-{rpip.number})\n" + ) if section_content: embed.description += "### Protocol DAO\n" @@ -145,7 +165,7 @@ async def print_proposals(_dao: DAO, _proposals: list[DAO.Proposal]) -> str: embed.description += f"- **Recently active topics ({num_days}d)**\n" for i, topic in enumerate(topics[:10], start=1): title = sanitize(topic.title, 40) - embed.description += f" {i}. [{title}]({topic.url}) [`{topic.post_count-1}\u202f💬`]\n" + embed.description += f" {i}. 
[{title}]({topic.url}) [`{topic.post_count - 1}\u202f💬`]\n" if not embed.description: embed.set_image(url="https://c.tenor.com/PVf-csSHmu8AAAAd/tenor.gif") diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 39e9a60d..1f2891c8 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -36,16 +36,20 @@ async def load_sync_committee(self, period): assert period in ["latest", "next"] await self._check_indexes() h = await bacon.get_block("head") - sync_period = int(h['data']['message']['slot']) // 32 // 256 + sync_period = int(h["data"]["message"]["slot"]) // 32 // 256 if period == "next": sync_period += 1 data = (await bacon.get_sync_committee(sync_period * 256))["data"] - await self.bot.db.sync_committee_stats.replace_one({"period": period}, - {"period" : period, - "start_epoch": sync_period * 256, - "end_epoch" : (sync_period + 1) * 256, - "sync_period": sync_period * 256, - }, upsert=True) + await self.bot.db.sync_committee_stats.replace_one( + {"period": period}, + { + "period": period, + "start_epoch": sync_period * 256, + "end_epoch": (sync_period + 1) * 256, + "sync_period": sync_period * 256, + }, + upsert=True, + ) validators = data["validators"] col = self.bot.db[f"sync_committee_{period}"] # get unique validators from collection @@ -62,40 +66,29 @@ async def load_sync_committee(self, period): await col.bulk_write(payload) async def get_validators_for_sync_committee_period(self, period): - data = await self.bot.db[f"sync_committee_{period}"].aggregate([ - { - '$lookup': { - 'from' : 'minipools', - 'localField' : 'validator', - 'foreignField': 'validator_index', - 'as' : 'entry' - } - }, { - '$match': { - 'entry': { - '$ne': [] - } - } - }, { - '$replaceRoot': { - 'newRoot': { - '$first': '$entry' + data = await self.bot.db[f"sync_committee_{period}"].aggregate( + [ + { + "$lookup": { + "from": "minipools", + "localField": "validator", + "foreignField": "validator_index", 
+ "as": "entry", } - } - }, { - '$project': { - '_id' : 0, - 'validator' : "$validator_index", - 'pubkey' : 1, - 'node_operator': 1 - } - }, { - '$match': { - 'node_operator': { - '$ne': None + }, + {"$match": {"entry": {"$ne": []}}}, + {"$replaceRoot": {"newRoot": {"$first": "$entry"}}}, + { + "$project": { + "_id": 0, + "validator": "$validator_index", + "pubkey": 1, + "node_operator": 1, } - } - }]) + }, + {"$match": {"node_operator": {"$ne": None}}}, + ] + ) return await data.to_list() async def generate_sync_committee_description(self, period): @@ -104,27 +97,39 @@ async def generate_sync_committee_description(self, period): # get stats about the current period stats = await self.bot.db.sync_committee_stats.find_one({"period": period}) perc = len(validators) / 512 - description = f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" - start_timestamp = BEACON_START_DATE + (stats['start_epoch'] * BEACON_EPOCH_LENGTH) + description = ( + f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" + ) + start_timestamp = BEACON_START_DATE + ( + stats["start_epoch"] * BEACON_EPOCH_LENGTH + ) description += f"_Start:_ Epoch {stats['start_epoch']} ()\n" - end_timestamp = BEACON_START_DATE + (stats['end_epoch'] * BEACON_EPOCH_LENGTH) + end_timestamp = BEACON_START_DATE + (stats["end_epoch"] * BEACON_EPOCH_LENGTH) description += f"_End:_ Epoch {stats['end_epoch']} ()\n" # validators (called minipools here) # sort validators - validators.sort(key=lambda x: x['validator']) - description += f"_Minipools:_ `{', '.join(str(v['validator']) for v in validators)}`\n" + validators.sort(key=lambda x: x["validator"]) + description += ( + f"_Minipools:_ `{', '.join(str(v['validator']) for v in validators)}`\n" + ) # node operators # gather count per node_operators = {} for v in validators: - if v['node_operator'] not in node_operators: - node_operators[v['node_operator']] = 0 - node_operators[v['node_operator']] += 1 + if v["node_operator"] not in 
node_operators: + node_operators[v["node_operator"]] = 0 + node_operators[v["node_operator"]] += 1 # sort by count - node_operators = sorted(node_operators.items(), key=lambda x: x[1], reverse=True) + node_operators = sorted( + node_operators.items(), key=lambda x: x[1], reverse=True + ) description += "_Node Operators:_ " - description += ", ".join([f"{count}x {await el_explorer_url(node_operator)}" for node_operator, count in - node_operators]) + description += ", ".join( + [ + f"{count}x {await el_explorer_url(node_operator)}" + for node_operator, count in node_operators + ] + ) return description @command() @@ -134,8 +139,14 @@ async def lottery(self, interaction: Interaction): """ await interaction.response.defer(ephemeral=is_hidden(interaction)) embeds = [ - Embed(title="Current sync committee:", description=await self.generate_sync_committee_description("latest")), - Embed(title="Next sync committee:", description=await self.generate_sync_committee_description("next")) + Embed( + title="Current sync committee:", + description=await self.generate_sync_committee_description("latest"), + ), + Embed( + title="Next sync committee:", + description=await self.generate_sync_committee_description("next"), + ), ] await interaction.followup.send(embeds=embeds) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 8315cdf5..f2e2d1f9 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -33,46 +33,71 @@ async def metrics(self, interaction: Interaction): start = datetime.utcnow() - timedelta(days=7) # get the total number of processed events from the event_queue in the last 7 days - total_events_processed = await self.bot.db.event_queue.count_documents({'time_seen': {'$gte': start}}) + total_events_processed = await self.bot.db.event_queue.count_documents( + {"time_seen": {"$gte": start}} + ) desc += f"Total Events Processed:\n\t{total_events_processed}\n\n" # get the total number 
of handled commands in the last 7 days - total_commands_handled = await self.collection.count_documents({'timestamp': {'$gte': start}}) + total_commands_handled = await self.collection.count_documents( + {"timestamp": {"$gte": start}} + ) desc += f"Total Commands Handled:\n\t{total_commands_handled}\n\n" # get the average command response time in the last 7 days - avg_response_time = await (await self.collection.aggregate([ - {'$match': {'timestamp': {'$gte': start}}}, - {'$group': {'_id': None, 'avg': {'$avg': '$took'}}} - ])).to_list(length=1) - if avg_response_time[0]['avg'] is not None: + avg_response_time = await ( + await self.collection.aggregate( + [ + {"$match": {"timestamp": {"$gte": start}}}, + {"$group": {"_id": None, "avg": {"$avg": "$took"}}}, + ] + ) + ).to_list(length=1) + if avg_response_time[0]["avg"] is not None: desc += f"Average Command Response Time:\n\t{avg_response_time[0]['avg']:.03} seconds\n\n" # get completed rate in the last 7 days - completed_rate = await (await self.collection.aggregate([ - {'$match': {'timestamp': {'$gte': start}, 'status': 'completed'}}, - {'$group': {'_id': None, 'count': {'$sum': 1}}} - ])).to_list(length=1) + completed_rate = await ( + await self.collection.aggregate( + [ + { + "$match": { + "timestamp": {"$gte": start}, + "status": "completed", + } + }, + {"$group": {"_id": None, "count": {"$sum": 1}}}, + ] + ) + ).to_list(length=1) if completed_rate: - percent = completed_rate[0]['count'] / (total_commands_handled - 1) + percent = completed_rate[0]["count"] / (total_commands_handled - 1) desc += f"Command Success Rate:\n\t{percent:.03%}\n\n" # get the 5 most used commands of the last 7 days - most_used_commands = await (await self.collection.aggregate([ - {'$match': {'timestamp': {'$gte': start}}}, - {'$group': {'_id': '$command', 'count': {'$sum': 1}}}, - {'$sort': {'count': -1}} - ])).to_list(length=5) + most_used_commands = await ( + await self.collection.aggregate( + [ + {"$match": {"timestamp": {"$gte": 
start}}}, + {"$group": {"_id": "$command", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ] + ) + ).to_list(length=5) desc += "Top 5 Commands based on usage:\n" for command in most_used_commands: desc += f" - {command['_id']}: {command['count']}\n" # get the top 5 channels of the last 7 days - top_channels = await (await self.collection.aggregate([ - {'$match': {'timestamp': {'$gte': start}}}, - {'$group': {'_id': '$channel', 'count': {'$sum': 1}}}, - {'$sort': {'count': -1}} - ])).to_list(length=5) + top_channels = await ( + await self.collection.aggregate( + [ + {"$match": {"timestamp": {"$gte": start}}}, + {"$group": {"_id": "$channel", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ] + ) + ).to_list(length=5) desc += "\nTop 5 Channels based on commands handled:\n" for channel in top_channels: desc += f" - {channel['_id']['name']}: {channel['count']}\n" @@ -87,54 +112,70 @@ async def metrics_chart(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) # generate mathplotlib chart that shows monthly command usage and monthly event emission, in separate subplots - command_usage = await (await self.collection.aggregate([ - { - '$group': { - '_id' : { - 'year' : {'$year': '$timestamp'}, - 'month': {'$month': '$timestamp'} + command_usage = await ( + await self.collection.aggregate( + [ + { + "$group": { + "_id": { + "year": {"$year": "$timestamp"}, + "month": {"$month": "$timestamp"}, + }, + "total": {"$sum": 1}, + } }, - 'total': {'$sum': 1} - } - }, - { - '$sort': SON([('_id.year', 1), ('_id.month', 1)]) - } - ])).to_list(None) - event_emission = await (await self.bot.db.event_queue.aggregate([ - { - '$group': { - '_id' : { - 'year' : {'$year': '$time_seen'}, - 'month': {'$month': '$time_seen'} + {"$sort": SON([("_id.year", 1), ("_id.month", 1)])}, + ] + ) + ).to_list(None) + event_emission = await ( + await self.bot.db.event_queue.aggregate( + [ + { + "$group": { + "_id": { + "year": {"$year": 
"$time_seen"}, + "month": {"$month": "$time_seen"}, + }, + "total": {"$sum": 1}, + } }, - 'total': {'$sum': 1} - } - }, - { - '$sort': SON([('_id.year', 1), ('_id.month', 1)]) - } - ])).to_list(None) + {"$sort": SON([("_id.year", 1), ("_id.month", 1)])}, + ] + ) + ).to_list(None) # create a new figure _fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) # plot the command usage as bars - ax1.bar([f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in command_usage], [x['total'] for x in command_usage]) + ax1.bar( + [f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in command_usage], + [x["total"] for x in command_usage], + ) ax1.set_title("Command Usage") - ax1.set_xticklabels([f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in command_usage], rotation=45) + ax1.set_xticklabels( + [f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in command_usage], + rotation=45, + ) # plot the event usage - ax2.bar([f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in event_emission], [x['total'] for x in event_emission]) + ax2.bar( + [f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in event_emission], + [x["total"] for x in event_emission], + ) ax2.set_title("Event Emission") - ax2.set_xticklabels([f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in event_emission], rotation=45) + ax2.set_xticklabels( + [f"{x['_id']['year']}-{x['_id']['month']:0>2}" for x in event_emission], + rotation=45, + ) # use minimal whitespace plt.tight_layout() # store the graph in an file object file = BytesIO() - plt.savefig(file, format='png') + plt.savefig(file, format="png") file.seek(0) # clear plot from memory @@ -143,7 +184,9 @@ async def metrics_chart(self, interaction: Interaction): e = Embed(title="Command Usage and Event ") e.set_image(url="attachment://metrics.png") - await interaction.followup.send(embed=e, file=File(file, filename="metrics.png")) + await interaction.followup.send( + embed=e, file=File(file, filename="metrics.png") + ) async def setup(bot): diff --git 
a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index ad273db2..cc94325b 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -23,7 +23,9 @@ def __init__(self, bot: RocketWatch): async def _get_new_events(self) -> list[Event]: if self.state == "RUNNING": - log.error("Milestones plugin was interrupted while running. Re-initializing...") + log.error( + "Milestones plugin was interrupted while running. Re-initializing..." + ) self.__init__(self.bot) self.state = "RUNNING" @@ -55,24 +57,32 @@ async def check_for_new_events(self): previous_milestone = state["current_goal"] else: log.debug( - f"First time we have processed Milestones for milestone {milestone.id}. Adding it to the Database.") - await self.collection.insert_one({"_id": milestone["id"], "current_goal": latest_goal}) + f"First time we have processed Milestones for milestone {milestone.id}. Adding it to the Database." + ) + await self.collection.insert_one( + {"_id": milestone["id"], "current_goal": latest_goal} + ) previous_milestone = milestone.min if previous_milestone < latest_goal: - log.info(f"Goal for milestone {milestone.id} has increased. Triggering Milestone!") - embed = await assemble(aDict({ - "event_name" : milestone.id, - "result_value": value - })) - payload.append(Event( - embed=embed, - topic="milestones", - block_number=self._pending_block, - event_name=milestone.id, - unique_id=f"{milestone.id}:{latest_goal}", - )) + log.info( + f"Goal for milestone {milestone.id} has increased. Triggering Milestone!" 
+ ) + embed = await assemble( + aDict({"event_name": milestone.id, "result_value": value}) + ) + payload.append( + Event( + embed=embed, + topic="milestones", + block_number=self._pending_block, + event_name=milestone.id, + unique_id=f"{milestone.id}:{latest_goal}", + ) + ) # update the current goal in collection - await self.collection.update_one({"_id": milestone["id"]}, {"$set": {"current_goal": latest_goal}}) + await self.collection.update_one( + {"_id": milestone["id"]}, {"$set": {"current_goal": latest_goal}} + ) log.debug("Finished Checking Milestones") return payload diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index b269635f..2df4f839 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -19,7 +19,7 @@ def get_percentiles(percentiles, counts): for p in percentiles: - yield p, np.percentile(counts, p, method='nearest') + yield p, np.percentile(counts, p, method="nearest") async def minipool_distribution_raw(interaction: Interaction, distribution): @@ -27,8 +27,9 @@ async def minipool_distribution_raw(interaction: Interaction, distribution): e.title = "Minipool Distribution" description = "```\n" for minipools, nodes in distribution: - description += f"{p.no('minipool', minipools):>14}: " \ - f"{nodes:>4} {p.plural('node', nodes)}\n" + description += ( + f"{p.no('minipool', minipools):>14}: {nodes:>4} {p.plural('node', nodes)}\n" + ) description += "```" e.description = description await interaction.followup.send(embed=e) @@ -47,32 +48,19 @@ async def get_minipool_counts_per_node(self): # 3 nodes have 3 minipools pipeline = [ { - '$match': { - 'beacon.status': { - '$not': re.compile(r"(?:withdraw|exit|init)") - }, - 'status': 'staking' + "$match": { + "beacon.status": {"$not": re.compile(r"(?:withdraw|exit|init)")}, + "status": "staking", } - }, { - 
'$group': { - '_id': '$node_operator', - 'count': { - '$sum': 1 - } - } - }, { - '$sort': { - 'count': 1 - } - } + }, + {"$group": {"_id": "$node_operator", "count": {"$sum": 1}}}, + {"$sort": {"count": 1}}, ] return [x["count"] async for x in self.bot.db.minipools.aggregate(pipeline)] @command() @describe(raw="Show the raw Distribution Data") - async def minipool_distribution(self, - interaction: Interaction, - raw: bool = False): + async def minipool_distribution(self, interaction: Interaction, raw: bool = False): """Show the distribution of minipools per node.""" await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() @@ -101,12 +89,12 @@ async def minipool_distribution(self, ax.bar_label(rects, rotation=90, padding=3, fontsize=7) ax.set_ylabel("Total Minipools") # tilt the x axis labels - ax.tick_params(axis='x', labelrotation=90, labelsize=7) + ax.tick_params(axis="x", labelrotation=90, labelsize=7) # Add a 5% buffer to the ylim to help fit all the bar labels ax.set_ylim(top=(ax.get_ylim()[1] * 1.1)) fig.tight_layout() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) fig.clear() @@ -115,8 +103,11 @@ async def minipool_distribution(self, e.title = "Minipool Distribution" e.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") - percentile_strings = [f"{x[0]}th percentile: {p.no('minipool', int(x[1]))} per node" for x in - get_percentiles([50, 75, 90, 99], counts) if x[1]] + percentile_strings = [ + f"{x[0]}th percentile: {p.no('minipool', int(x[1]))} per node" + for x in get_percentiles([50, 75, 90, 99], counts) + if x[1] + ] percentile_strings.append(f"Max: {distribution[-1][0]} minipools per node") percentile_strings.append(f"Total: {p.no('minipool', sum(counts))}") e.set_footer(text="\n".join(percentile_strings)) @@ -144,7 +135,10 @@ async def node_gini(self, interaction: Interaction, raw: bool = False): # calculate gini coefficient from sorted list counts_nz = 
minipool_counts[minipool_counts != 0] n_nz = counts_nz.size - gini = -(((2 * np.arange(1, n_nz + 1) - n_nz - 1) * counts_nz).sum() / (n_nz * counts_nz.sum())) + gini = -( + ((2 * np.arange(1, n_nz + 1) - n_nz - 1) * counts_nz).sum() + / (n_nz * counts_nz.sum()) + ) e.set_footer(text=f"Gini coefficient: {gini:.4f}") @@ -179,7 +173,7 @@ def draw_threshold(threshold: float, color: str) -> None: x_pos = x[index] percentage = round(100 * threshold) x_ticks.append(x_pos) - ax.axvline(x=x_pos, linestyle='--', c=color, label=f'{percentage}%') + ax.axvline(x=x_pos, linestyle="--", c=color, label=f"{percentage}%") draw_threshold(1 / 3, "tab:green") draw_threshold(0.5, "tab:olive") diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 3857c932..205037a8 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -32,8 +32,13 @@ async def run_loop(self): messages = await self.bot.db.pinned_messages.find().to_list() for message in messages: # if it's older than 6 hours and not disabled, mark as disabled - if message["created_at"] + timedelta(hours=6) < datetime.utcnow() and not message["disabled"]: - await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) + if ( + message["created_at"] + timedelta(hours=6) < datetime.utcnow() + and not message["disabled"] + ): + await self.bot.db.pinned_messages.update_one( + {"_id": message["_id"]}, {"$set": {"disabled": True}} + ) message["disabled"] = True try: # check if it's marked as disabled but not cleaned_up @@ -45,13 +50,17 @@ async def run_loop(self): # delete message await msg.delete() # mark as cleaned_up - await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"cleaned_up": True}}) + await self.bot.db.pinned_messages.update_one( + {"_id": message["_id"]}, {"$set": {"cleaned_up": True}} + ) elif not 
message["disabled"]: # delete and resend message channel = self.bot.get_channel(message["channel_id"]) # check if we have message sent already and if its the latest message in the channel if message.get("message_id"): - messages = [message async for message in channel.history(limit=5)] + messages = [ + message async for message in channel.history(limit=5) + ] # if it isnt within the last 5 messages, we need to resend it if any(m.id == message["message_id"] for m in messages): continue @@ -64,16 +73,21 @@ async def run_loop(self): text=( "This message has been pinned by Invis." " Will be automatically removed if not updated within 6 hours." - )) + ) + ) m = await channel.send(embed=e) - await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"message_id": m.id}}) + await self.bot.db.pinned_messages.update_one( + {"_id": message["_id"]}, {"$set": {"message_id": m.id}} + ) except Exception as err: await self.bot.report_error(err) @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def pin(self, interaction: Interaction, channel_id: int, title: str, description: str): + async def pin( + self, interaction: Interaction, channel_id: int, title: str, description: str + ): await interaction.response.defer() # check if channel exists channel = self.bot.get_channel(channel_id) @@ -84,16 +98,34 @@ async def pin(self, interaction: Interaction, channel_id: int, title: str, descr message = await self.bot.db.pinned_messages.find_one({"channel_id": channel.id}) if message: # update message - await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, { - "$set": {"title" : title, "content": description, "disabled": False, "cleaned_up": False, - "message_id": None, "created_at": datetime.utcnow()}}) + await self.bot.db.pinned_messages.update_one( + {"_id": message["_id"]}, + { + "$set": { + "title": title, + "content": description, + "disabled": False, + "cleaned_up": False, + "message_id": None, + "created_at": datetime.utcnow(), + 
} + }, + ) # rest is done by the run_loop await interaction.followup.send("Updated pinned message") return # create new message await self.bot.db.pinned_messages.insert_one( - {"channel_id": channel.id, "message_id": None, "title": title, "content": description, "disabled": False, - "cleaned_up": False, "created_at": datetime.utcnow()}) + { + "channel_id": channel.id, + "message_id": None, + "title": title, + "content": description, + "disabled": False, + "cleaned_up": False, + "created_at": datetime.utcnow(), + } + ) # rest is done by the run_loop await interaction.followup.send("Created pinned message") @@ -117,7 +149,9 @@ async def unpin(self, interaction: Interaction, channel_id: str): await interaction.followup.send("Pinned message already disabled") return # soft delete - await self.bot.db.pinned_messages.update_one({"_id": message["_id"]}, {"$set": {"disabled": True}}) + await self.bot.db.pinned_messages.update_one( + {"_id": message["_id"]}, {"$set": {"disabled": True}} + ) # rest is done by the run_loop await interaction.followup.send("Disabled pinned message") diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 7cc807e4..1140c8b0 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -31,52 +31,56 @@ "P": "Prysm", "L": "Lighthouse", "T": "Teku", - "S": "Lodestar" + "S": "Lodestar", }, "execution": { "G": "Geth", "B": "Besu", "N": "Nethermind", "R": "Reth", - "X": "External" - } + "X": "External", + }, } COLORS = { - "Nimbus" : "#CC9133", - "Prysm" : "#40BFBF", - "Lighthouse" : "#9933CC", - "Teku" : "#3357CC", - "Lodestar" : "#FB5B9D", - - "Geth" : "#40BFBF", - "Besu" : "#55AA7A", - "Nethermind" : "#2688D9", - "Reth" : "#760910", - "External" : "#808080", - - "Smart Node" : "#CC6E33", - "Allnodes" : "#4533cc", + "Nimbus": "#CC9133", + "Prysm": "#40BFBF", + "Lighthouse": "#9933CC", + "Teku": "#3357CC", + "Lodestar": "#FB5B9D", + "Geth": "#40BFBF", + 
"Besu": "#55AA7A", + "Nethermind": "#2688D9", + "Reth": "#760910", + "External": "#808080", + "Smart Node": "#CC6E33", + "Allnodes": "#4533cc", "No proposals yet": "#E0E0E0", - "Unknown" : "#AAAAAA", + "Unknown": "#AAAAAA", } PROPOSAL_TEMPLATE = { - "type" : "Unknown", + "type": "Unknown", "consensus_client": "Unknown", "execution_client": "Unknown", } # noinspection RegExpUnnecessaryNonCapturingGroup -SMARTNODE_REGEX = re.compile(r"^RP(?:(?:-)([A-Z])([A-Z])?)? (?:v)?(\d+\.\d+\.\d+(?:-\w+)?)(?:(?:(?: \()|(?: gw:))(.+)(?:\)))?") +SMARTNODE_REGEX = re.compile( + r"^RP(?:(?:-)([A-Z])([A-Z])?)? (?:v)?(\d+\.\d+\.\d+(?:-\w+)?)(?:(?:(?: \()|(?: gw:))(.+)(?:\)))?" +) def parse_proposal(beacon_block: dict) -> dict: - graffiti = bytes.fromhex(beacon_block["body"]["graffiti"][2:]).decode("utf-8").rstrip('\x00') + graffiti = ( + bytes.fromhex(beacon_block["body"]["graffiti"][2:]) + .decode("utf-8") + .rstrip("\x00") + ) data = { - "slot" : int(beacon_block["slot"]), + "slot": int(beacon_block["slot"]), "validator": int(beacon_block["proposer_index"]), - "graffiti" : graffiti, + "graffiti": graffiti, } | PROPOSAL_TEMPLATE if m := SMARTNODE_REGEX.findall(graffiti): groups = m[0] @@ -141,7 +145,9 @@ async def check_indexes(self): try: await self.bot.db.proposals.create_index("validator") await self.bot.db.proposals.create_index("slot", unique=True) - await self.bot.db.proposals.create_index([("validator", ASCENDING), ("slot", DESCENDING)]) + await self.bot.db.proposals.create_index( + [("validator", ASCENDING), ("slot", DESCENDING)] + ) except Exception as e: log.warning(f"Could not create indexes: {e}") @@ -151,15 +157,25 @@ async def fetch_proposals(self): else: last_checked_slot = 4700012 # last slot before merge - latest_slot = int((await bacon.get_block_header("finalized"))["data"]["header"]["message"]["slot"]) - for slots in as_chunks(range(last_checked_slot + 1, latest_slot + 1), self.batch_size): + latest_slot = int( + (await 
bacon.get_block_header("finalized"))["data"]["header"]["message"][ + "slot" + ] + ) + for slots in as_chunks( + range(last_checked_slot + 1, latest_slot + 1), self.batch_size + ): log.info(f"Fetching proposals for slots {slots[0]} to {slots[-1]}") await asyncio.gather(*[self.fetch_proposal(s) for s in slots]) - await self.bot.db.last_checked_block.replace_one({"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True) + await self.bot.db.last_checked_block.replace_one( + {"_id": cog_id}, {"_id": cog_id, "slot": slots[-1]}, upsert=True + ) async def fetch_proposal(self, slot: int) -> None: try: - beacon_header = (await bacon.get_block_header(str(slot)))["data"]["header"]["message"] + beacon_header = (await bacon.get_block_header(str(slot)))["data"]["header"][ + "message" + ] except ClientResponseError as e: if e.status == 404: return None @@ -169,119 +185,117 @@ async def fetch_proposal(self, slot: int) -> None: validator_index = int(beacon_header["proposer_index"]) query = {"validator_index": validator_index} is_megapool = await self.bot.db.minipools.count_documents(query, limit=1) - is_minipool = await self.bot.db.megapool_validators.count_documents(query, limit=1) + is_minipool = await self.bot.db.megapool_validators.count_documents( + query, limit=1 + ) if not (is_minipool or is_megapool): return None beacon_block = (await bacon.get_block(str(slot)))["data"]["message"] proposal_data = parse_proposal(beacon_block) - await self.bot.db.proposals.update_one({"slot": slot}, {"$set": proposal_data}, upsert=True) + await self.bot.db.proposals.update_one( + {"slot": slot}, {"$set": proposal_data}, upsert=True + ) async def create_latest_proposal_view(self): log.info("creating latest proposals view") pipeline = [ { - '$match': { - 'node_operator': {'$ne': None}, - 'beacon.status' : 'active_ongoing' + "$match": { + "node_operator": {"$ne": None}, + "beacon.status": "active_ongoing", } }, { - '$unionWith': { - 'coll': 'minipools', - 'pipeline': [ + "$unionWith": { + 
"coll": "minipools", + "pipeline": [ { - '$match': { - 'node_operator': {'$ne': None}, - 'beacon.status' : 'active_ongoing' + "$match": { + "node_operator": {"$ne": None}, + "beacon.status": "active_ongoing", } } - ] + ], } }, { - '$lookup': { - 'from' : 'proposals', - 'localField' : 'validator_index', - 'foreignField': 'validator', - 'as' : 'proposals', - 'pipeline' : [ - {'$sort': {'slot': -1}}, - {'$limit': 1} - ] + "$lookup": { + "from": "proposals", + "localField": "validator_index", + "foreignField": "validator", + "as": "proposals", + "pipeline": [{"$sort": {"slot": -1}}, {"$limit": 1}], } }, + {"$unwind": {"path": "$proposals", "preserveNullAndEmptyArrays": True}}, { - '$unwind': { - 'path': '$proposals', - 'preserveNullAndEmptyArrays': True + "$group": { + "_id": "$node_operator", + "validator_count": {"$sum": 1}, + "latest_proposal": {"$first": "$proposals"}, } }, + {"$match": {"latest_proposal": {"$ne": None}}}, { - '$group': { - '_id' : '$node_operator', - 'validator_count': {'$sum': 1}, - 'latest_proposal': {'$first': '$proposals'} + "$project": { + "_id": "$_id", + "node_operator": "$_id", + "validator_count": 1, + "latest_proposal": 1, } }, - { - '$match': {'latest_proposal': {'$ne': None}} - }, - { - '$project': { - '_id': '$_id', - 'node_operator': '$_id', - 'validator_count': 1, - 'latest_proposal': 1 - } - } ] await self.bot.db.latest_proposals.drop() - await self.bot.db.create_collection("latest_proposals", viewOn="megapool_validators", pipeline=pipeline) + await self.bot.db.create_collection( + "latest_proposals", viewOn="megapool_validators", pipeline=pipeline + ) @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): # Build the match stage to filter out Allnodes if needed match_stage = {} if remove_allnodes: - match_stage['$match'] = {'latest_proposal.type': {'$ne': 'Allnodes'}} + match_stage["$match"] = {"latest_proposal.type": {"$ne": "Allnodes"}} pipeline = [ { - '$project': { - 'attribute' : 
f'$latest_proposal.{attribute}', - 'type' : '$latest_proposal.type', - 'validator_count': 1 + "$project": { + "attribute": f"$latest_proposal.{attribute}", + "type": "$latest_proposal.type", + "validator_count": 1, } }, { - '$group': { - '_id' : {'attribute': '$attribute', 'type': '$type'}, - 'count' : {'$sum': 1}, - 'validator_count': {'$sum': '$validator_count'} + "$group": { + "_id": {"attribute": "$attribute", "type": "$type"}, + "count": {"$sum": 1}, + "validator_count": {"$sum": "$validator_count"}, } - } + }, ] # Add match stage at the beginning if filtering Allnodes if remove_allnodes: pipeline.insert(0, match_stage) - distribution = await (await self.bot.db.latest_proposals.aggregate(pipeline)).to_list() + distribution = await ( + await self.bot.db.latest_proposals.aggregate(pipeline) + ).to_list() if remove_allnodes: - d = {'remove_from_total': {'count': 0, 'validator_count': 0}} + d = {"remove_from_total": {"count": 0, "validator_count": 0}} for entry in distribution: - d[entry['_id']['attribute']] = entry + d[entry["_id"]["attribute"]] = entry return d else: # Convert nested _id structure and merge by attribute d = {} for entry in distribution: - key = entry['_id']['attribute'] + key = entry["_id"]["attribute"] if key in d: - d[key]['count'] += entry['count'] - d[key]['validator_count'] += entry['validator_count'] + d[key]["count"] += entry["count"] + d[key]["validator_count"] += entry["validator_count"] else: d[key] = entry return d @@ -303,35 +317,38 @@ async def version_chart(self, interaction: Interaction, days: int = 90): ) # get proposals # limit to specified number of days - proposals = await self.bot.db.proposals.find( - { - "version": {"$exists": 1}, - "slot" : {"$gt": date_to_beacon_block((datetime.now() - timedelta(days=days)).timestamp())} - }).sort("slot", 1).to_list(None) + proposals = ( + await self.bot.db.proposals.find( + { + "version": {"$exists": 1}, + "slot": { + "$gt": date_to_beacon_block( + (datetime.now() - 
timedelta(days=days)).timestamp() + ) + }, + } + ) + .sort("slot", 1) + .to_list(None) + ) max_slot = proposals[-1]["slot"] # get versions used after max_slot - window start_slot = max_slot - int(5 * 60 * 24 * window_length) - recent_versions = await (await self.bot.db.proposals.aggregate([ - { - '$match': { - 'slot' : { - '$gte': start_slot + recent_versions = await ( + await self.bot.db.proposals.aggregate( + [ + { + "$match": { + "slot": {"$gte": start_slot}, + "version": {"$exists": 1}, + } }, - 'version': { - '$exists': 1 - } - } - }, { - '$group': { - '_id' : '$version' - } - }, { - '$sort': { - '_id': -1 - } - } - ])).to_list() - recent_versions = [v['_id'] for v in recent_versions] + {"$group": {"_id": "$version"}}, + {"$sort": {"_id": -1}}, + ] + ) + ).to_list() + recent_versions = [v["_id"] for v in recent_versions] data = {} versions = [] proposal_buffer = [] @@ -363,7 +380,10 @@ async def version_chart(self, interaction: Interaction, days: int = 90): # generate enough distinct colors for all recent versions cmap = plt.cm.tab20 - recent_colors = [cmap(i / max(len(recent_versions) - 1, 1)) for i in range(len(recent_versions))] + recent_colors = [ + cmap(i / max(len(recent_versions) - 1, 1)) + for i in range(len(recent_versions)) + ] # generate color mapping colors = ["darkgray"] * len(versions) for i, version in enumerate(versions): @@ -372,12 +392,17 @@ async def version_chart(self, interaction: Interaction, days: int = 90): last_slot_data = data[max(x)] last_slot_data = {v: last_slot_data[v] for v in recent_versions} - labels = [f"{v} ({last_slot_data[v]:.2%})" if v in recent_versions else "_nolegend_" for v in versions] + labels = [ + f"{v} ({last_slot_data[v]:.2%})" if v in recent_versions else "_nolegend_" + for v in versions + ] # add percentage to labels ax = plt.subplot(111, frameon=False) plt.stackplot(x, *y.values(), labels=labels, colors=colors) # hide y axis - plt.tick_params(axis='y', which='both', left=False, right=False, labelleft=False) + 
plt.tick_params( + axis="y", which="both", left=False, right=False, labelleft=False + ) plt.gcf().autofmt_xdate() handles, legend_labels = ax.get_legend_handles_labels() ax.legend(reversed(handles), reversed(legend_labels), loc="upper left") @@ -400,11 +425,17 @@ async def version_chart(self, interaction: Interaction, days: int = 90): await interaction.followup.send(embed=e, file=File(img, filename="chart.png")) img.close() - async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = False): + async def plot_axes_with_data( + self, attr: str, ax1, ax2, remove_allnodes: bool = False + ): # group by client and get count data = await self.gather_attribute(attr, remove_allnodes) - minipools = [(x, y["validator_count"]) for x, y in data.items() if x != "remove_from_total"] + minipools = [ + (x, y["validator_count"]) + for x, y in data.items() + if x != "remove_from_total" + ] minipools = sorted(minipools, key=lambda x: x[1]) # get total minipool count from rocketpool @@ -416,38 +447,64 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = unobserved_minipools -= data["remove_from_total"]["validator_count"] minipools.insert(0, ("No proposals yet", unobserved_minipools)) # move "Unknown" to be before "No proposals yet" - minipools.insert(1, minipools.pop(next(i for i, (x, y) in enumerate(minipools) if x == "Unknown"))) + minipools.insert( + 1, + minipools.pop( + next(i for i, (x, y) in enumerate(minipools) if x == "Unknown") + ), + ) # move "External (if it exists) to be before "Unknown" # minipools is a list of tuples (name, count) if "External" in [x for x, y in minipools]: - minipools.insert(2, minipools.pop(next(i for i, (x, y) in enumerate(minipools) if x == "External"))) + minipools.insert( + 2, + minipools.pop( + next(i for i, (x, y) in enumerate(minipools) if x == "External") + ), + ) # get node operators - node_operators = [(x, y["count"]) for x, y in data.items() if x != "remove_from_total"] + node_operators = 
[ + (x, y["count"]) for x, y in data.items() if x != "remove_from_total" + ] node_operators = sorted(node_operators, key=lambda x: x[1]) # get total node operator count from rp distinct_nos = await self.bot.db.minipools.find( {"beacon.status": "active_ongoing", "status": "staking"} ).distinct("node_operator") - unobserved_node_operators = len(distinct_nos) - sum(d[1] for d in node_operators) + unobserved_node_operators = len(distinct_nos) - sum( + d[1] for d in node_operators + ) if "remove_from_total" in data: unobserved_node_operators -= data["remove_from_total"]["count"] node_operators.insert(0, ("No proposals yet", unobserved_node_operators)) # move "Unknown" to be before "No proposals yet" - node_operators.insert(1, node_operators.pop(next(i for i, (x, y) in enumerate(node_operators) if x == "Unknown"))) + node_operators.insert( + 1, + node_operators.pop( + next(i for i, (x, y) in enumerate(node_operators) if x == "Unknown") + ), + ) # move "External (if it exists) to be before "Unknown" # node_operators is a list of tuples (name, count) if "External" in [x for x, y in node_operators]: - node_operators.insert(2, node_operators.pop(next(i for i, (x, y) in enumerate(node_operators) if x == "External"))) + node_operators.insert( + 2, + node_operators.pop( + next( + i for i, (x, y) in enumerate(node_operators) if x == "External" + ) + ), + ) # sort data ax1.pie( [x[1] for x in minipools], colors=[COLORS.get(x[0], "red") for x in minipools], - autopct=lambda pct: (f'{pct:.1f}%') if pct > 5 else '', + autopct=lambda pct: (f"{pct:.1f}%") if pct > 5 else "", startangle=90, - textprops={'fontsize': '12'}, + textprops={"fontsize": "12"}, ) # legend total_minipols = sum(x[1] for x in minipools) @@ -456,28 +513,33 @@ async def plot_axes_with_data(self, attr: str, ax1, ax2, remove_allnodes: bool = [f"{x[1]} {x[0]} ({x[1] / total_minipols:.2%})" for x in minipools], loc="lower left", bbox_to_anchor=(0, -0.1), - fontsize=11 + fontsize=11, ) ax1.set_title("Minipools", 
fontsize=22) ax2.pie( [x[1] for x in node_operators], colors=[COLORS.get(x[0], "red") for x in node_operators], - autopct=lambda pct: (f'{pct:.1f}%') if pct > 5 else '', + autopct=lambda pct: (f"{pct:.1f}%") if pct > 5 else "", startangle=90, - textprops={'fontsize': '12'}, + textprops={"fontsize": "12"}, ) # legend total_node_operators = sum(x[1] for x in node_operators) ax2.legend( - [f"{x[1]} {x[0]} ({x[1] / total_node_operators:.2%})" for x in node_operators], + [ + f"{x[1]} {x[0]} ({x[1] / total_node_operators:.2%})" + for x in node_operators + ], loc="lower left", bbox_to_anchor=(0, -0.1), - fontsize=11 + fontsize=11, ) ax2.set_title("Node Operators", fontsize=22) - async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnodes: bool = False): + async def proposal_vs_node_operators_embed( + self, attribute, name, remove_allnodes: bool = False + ): fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 8)) # iterate axes in pairs title = f"Rocket Pool {name} Distribution {'without Allnodes' if remove_allnodes else ''}" @@ -502,14 +564,21 @@ async def proposal_vs_node_operators_embed(self, attribute, name, remove_allnode return e, f @command() - async def client_distribution(self, interaction: Interaction, remove_allnodes: bool = False): + async def client_distribution( + self, interaction: Interaction, remove_allnodes: bool = False + ): """ Generate a distribution graph of clients. 
""" await interaction.response.defer(ephemeral=is_hidden(interaction)) embeds, files = [], [] - for attr, name in [["consensus_client", "Consensus Client"], ["execution_client", "Execution Client"]]: - e, f = await self.proposal_vs_node_operators_embed(attr, name, remove_allnodes) + for attr, name in [ + ["consensus_client", "Consensus Client"], + ["execution_client", "Execution Client"], + ]: + e, f = await self.proposal_vs_node_operators_embed( + attr, name, remove_allnodes + ) embeds.append(e) files.append(f) await interaction.followup.send(embeds=embeds, files=files) @@ -525,45 +594,55 @@ async def operator_type_distribution(self, interaction: Interaction): @command() async def client_combo_ranking( - self, interaction: Interaction, remove_allnodes: bool = False, group_by_node_operators: bool = False + self, + interaction: Interaction, + remove_allnodes: bool = False, + group_by_node_operators: bool = False, ): """ Generate a ranking of most used execution and consensus clients. """ await interaction.response.defer(ephemeral=is_hidden(interaction)) # aggregate [consensus, execution] pair counts - client_pairs = await (await self.bot.db.latest_proposals.aggregate([ - { - "$match": { - "latest_proposal.consensus_client": {"$ne": "Unknown"}, - "latest_proposal.execution_client": {"$ne": "Unknown"}, - "latest_proposal.type" : {"$ne": "Allnodes"} if remove_allnodes else {"$ne": "deadbeef"} - } - }, { - "$group": { - "_id" : { - "consensus": "$latest_proposal.consensus_client", - "execution": "$latest_proposal.execution_client" + client_pairs = await ( + await self.bot.db.latest_proposals.aggregate( + [ + { + "$match": { + "latest_proposal.consensus_client": {"$ne": "Unknown"}, + "latest_proposal.execution_client": {"$ne": "Unknown"}, + "latest_proposal.type": {"$ne": "Allnodes"} + if remove_allnodes + else {"$ne": "deadbeef"}, + } }, - "count": { - "$sum": 1 if group_by_node_operators else "$validator_count" - } - } - }, - { - "$sort": { - "count": -1 - } - } - 
])).to_list() + { + "$group": { + "_id": { + "consensus": "$latest_proposal.consensus_client", + "execution": "$latest_proposal.execution_client", + }, + "count": { + "$sum": 1 + if group_by_node_operators + else "$validator_count" + }, + } + }, + {"$sort": {"count": -1}}, + ] + ) + ).to_list() - e = Embed(title=f"Client Combo Ranking{' without Allnodes' if remove_allnodes else ''}") + e = Embed( + title=f"Client Combo Ranking{' without Allnodes' if remove_allnodes else ''}" + ) # generate max width of both columns max_widths = [ - max(len(x['_id']['consensus']) for x in client_pairs), - max(len(x['_id']['execution']) for x in client_pairs), - max(len(str(x['count'])) for x in client_pairs) + max(len(x["_id"]["consensus"]) for x in client_pairs), + max(len(x["_id"]["execution"]) for x in client_pairs), + max(len(str(x["count"])) for x in client_pairs), ] desc = "".join( diff --git a/rocketwatch/plugins/queue/queue.py b/rocketwatch/plugins/queue/queue.py index 188ef0e7..d7ffa8bb 100644 --- a/rocketwatch/plugins/queue/queue.py +++ b/rocketwatch/plugins/queue/queue.py @@ -53,14 +53,18 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: @staticmethod @cached(key_builder=lambda _, address, prefix="": (address, prefix)) async def _cached_el_url(address, prefix="") -> str: - return await el_explorer_url(address, name_fmt=lambda n: f"`{n}`", prefix=prefix) + return await el_explorer_url( + address, name_fmt=lambda n: f"`{n}`", prefix=prefix + ) @staticmethod async def _megapool_to_node(megapool_address) -> ChecksumAddress: - return await rp.call("rocketMegapoolDelegate.getNodeAddress", address=megapool_address) + return await rp.call( + "rocketMegapoolDelegate.getNodeAddress", address=megapool_address + ) @staticmethod - async def __format_queue_entry(entry: 'Queue.Entry') -> str: + async def __format_queue_entry(entry: "Queue.Entry") -> str: node_address = await Queue._megapool_to_node(entry.megapool) node_label = await 
Queue._cached_el_url(node_address) return f"{node_label} #`{entry.validator_id}`" @@ -76,9 +80,13 @@ async def get_express_queue(limit: int, start: int = 0) -> tuple[int, str]: return await Queue._get_queue("deposit.queue.express", limit, start) @staticmethod - async def _scan_list(namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier) -> list['Queue.Entry']: + async def _scan_list( + namespace: bytes, start: int, limit: int, block_identifier: BlockIdentifier + ) -> list["Queue.Entry"]: list_contract = await rp.get_contract_by_name("linkedListStorage") - raw_entries, _ = await list_contract.functions.scan(namespace, 0, start + limit).call(block_identifier=block_identifier) + raw_entries, _ = await list_contract.functions.scan( + namespace, 0, start + limit + ).call(block_identifier=block_identifier) return [Queue.Entry(*entry) for entry in raw_entries][start:] @staticmethod @@ -91,27 +99,32 @@ async def _get_queue(namespace: str, limit: int, start: int = 0) -> tuple[int, s start = max(start, 0) latest_block = await w3.eth.get_block_number() - q_len = await list_contract.functions.getLength(queue_namespace).call(block_identifier=latest_block) + q_len = await list_contract.functions.getLength(queue_namespace).call( + block_identifier=latest_block + ) if start >= q_len: return q_len, "" - queue_entries = await Queue._scan_list(queue_namespace, start, limit, latest_block) + queue_entries = await Queue._scan_list( + queue_namespace, start, limit, latest_block + ) content = "" for i, entry in enumerate(queue_entries): entry_str = await Queue.__format_queue_entry(entry) - content += f"{start+i+1}. {entry_str}\n" + content += f"{start + i + 1}. 
{entry_str}\n" return q_len, content @staticmethod def _get_entries_used_in_interval( - start: int, end: int, len_express: int, len_standard: int, express_rate: int + start: int, end: int, len_express: int, len_standard: int, express_rate: int ) -> tuple[int, int]: log.debug( f"Calculating entries used in interval [{start}, {end}] with express_rate {express_rate}" - f" and queue lengths {len_express} (express) and {len_standard} (standard)") + f" and queue lengths {len_express} (express) and {len_standard} (standard)" + ) total_entries = end - start + 1 # end is inclusive num_standard = total_entries // (express_rate + 1) @@ -134,15 +147,25 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: """Get the next {limit} validators in the combined queue (express + standard)""" latest_block = await w3.eth.get_block_number() - express_queue_rate = await rp.call("rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block) - queue_index = await rp.call("rocketDepositPool.getQueueIndex", block=latest_block) + express_queue_rate = await rp.call( + "rocketDAOProtocolSettingsDeposit.getExpressQueueRate", block=latest_block + ) + queue_index = await rp.call( + "rocketDepositPool.getQueueIndex", block=latest_block + ) list_contract = await rp.get_contract_by_name("linkedListStorage") exp_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.express"])) - std_namespace = bytes(w3.solidity_keccak(["string"], ["deposit.queue.standard"])) + std_namespace = bytes( + w3.solidity_keccak(["string"], ["deposit.queue.standard"]) + ) - express_queue_length = await list_contract.functions.getLength(exp_namespace).call(block_identifier=latest_block) - standard_queue_length = await list_contract.functions.getLength(std_namespace).call(block_identifier=latest_block) + express_queue_length = await list_contract.functions.getLength( + exp_namespace + ).call(block_identifier=latest_block) + standard_queue_length = await 
list_contract.functions.getLength( + std_namespace + ).call(block_identifier=latest_block) q_len = express_queue_length + standard_queue_length if start >= q_len: @@ -152,7 +175,8 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: queue_index, queue_index + start - 1, express_queue_length, - standard_queue_length, express_queue_rate + standard_queue_length, + express_queue_rate, ) log.debug(f"{start_express_queue = }") log.debug(f"{start_standard_queue = }") @@ -161,22 +185,28 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: queue_index + start + limit - 1, express_queue_length - start_express_queue, standard_queue_length - start_standard_queue, - express_queue_rate + express_queue_rate, ) log.debug(f"{limit_express_queue = }") log.debug(f"{limit_standard_queue = }") express_entries_rev = ( - await Queue._scan_list(exp_namespace, start_express_queue, limit_express_queue, latest_block) + await Queue._scan_list( + exp_namespace, start_express_queue, limit_express_queue, latest_block + ) )[::-1] standard_entries_rev = ( - await Queue._scan_list(std_namespace, start_standard_queue, limit_standard_queue, latest_block) + await Queue._scan_list( + std_namespace, start_standard_queue, limit_standard_queue, latest_block + ) )[::-1] content = "" for i in range(len(express_entries_rev) + len(standard_entries_rev)): effective_queue_index = queue_index + start + i - is_express = (effective_queue_index % (express_queue_rate + 1)) != express_queue_rate + is_express = ( + effective_queue_index % (express_queue_rate + 1) + ) != express_queue_rate if (is_express and express_entries_rev) or (not standard_entries_rev): entry = express_entries_rev.pop() lane_pos = "🐇" @@ -192,7 +222,11 @@ async def get_combined_queue(limit: int, start: int = 0) -> tuple[int, str]: @command() @describe(lane="type of queue to display") - async def queue(self, interaction: Interaction, lane: Literal["combined", "standard", "express"] = 
"combined"): + async def queue( + self, + interaction: Interaction, + lane: Literal["combined", "standard", "express"] = "combined", + ): """Show the RP validator queue""" await interaction.response.defer(ephemeral=is_hidden(interaction)) view = Queue.ValidatorPageView(lane) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 6b3ed5cb..02d45963 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -62,7 +62,9 @@ async def burn_reason(self, interaction: Interaction): data = await resp.json() e = Embed() - e.set_author(name="🔗 Data from ultrasound.money", url="https://ultrasound.money") + e.set_author( + name="🔗 Data from ultrasound.money", url="https://ultrasound.money" + ) description = "**ETH Burned:**\n```" feesburned = data["feesBurned"] for span in ["5m", "1h", "24h"]: @@ -92,7 +94,7 @@ async def burn_reason(self, interaction: Interaction): e.add_field( name="Current Base Fee", - value=f"`{solidity.to_float(data['latestBlockFees'][0]['baseFeePerGas'], 9):,.2f} GWEI`" + value=f"`{solidity.to_float(data['latestBlockFees'][0]['baseFeePerGas'], 9):,.2f} GWEI`", ) e.description = description await interaction.followup.send(embed=e) @@ -111,18 +113,28 @@ async def dev_time(self, interaction: Interaction): uint_day = int(percentage_of_day * 65535) # generate binary string binary_day = f"{uint_day:016b}" - e.add_field(name="Coordinated Universal Time", - value=f"{dev_time.strftime(time_format)}\n" - f"`{binary_day} (0x{uint_day:04x})`") - head_slot = int((await bacon.get_block_header("head"))["data"]["header"]["message"]["slot"]) + e.add_field( + name="Coordinated Universal Time", + value=f"{dev_time.strftime(time_format)}\n" + f"`{binary_day} (0x{uint_day:04x})`", + ) + head_slot = int( + (await bacon.get_block_header("head"))["data"]["header"]["message"]["slot"] + ) b = solidity.slot_to_beacon_day_epoch_slot(head_slot) e.add_field(name="Beacon Time", value=f"Day {b[0]}, 
{b[1]}:{b[2]}") dev_time = datetime.now(tz=pytz.timezone("Australia/Lindeman")) - e.add_field(name="Most of the core team", value=dev_time.strftime(time_format), inline=False) + e.add_field( + name="Most of the core team", + value=dev_time.strftime(time_format), + inline=False, + ) fornax_time = datetime.now(tz=pytz.timezone("America/Sao_Paulo")) - e.add_field(name="Fornax", value=fornax_time.strftime(time_format), inline=False) + e.add_field( + name="Fornax", value=fornax_time.strftime(time_format), inline=False + ) e.add_field(name="Mav", value="Who even knows", inline=False) await interaction.response.send_message(embed=e) @@ -146,17 +158,32 @@ async def sea_creatures(self, interaction: Interaction, address: str | None = No e.description = f"No sea creature for {address}" else: # get the required holding from the dictionary - required_holding = next(h for h, c in sea_creatures.items() if c == creature[0]) - e.add_field(name="Visualization", value=await el_explorer_url(address, prefix=creature), inline=False) - e.add_field(name="Required holding for emoji", value=f"{required_holding * len(creature)} ETH", inline=False) + required_holding = next( + h for h, c in sea_creatures.items() if c == creature[0] + ) + e.add_field( + name="Visualization", + value=await el_explorer_url(address, prefix=creature), + inline=False, + ) + e.add_field( + name="Required holding for emoji", + value=f"{required_holding * len(creature)} ETH", + inline=False, + ) holding = await get_holding_for_address(address) - e.add_field(name="Actual Holding", value=f"{holding:.0f} ETH", inline=False) + e.add_field( + name="Actual Holding", value=f"{holding:.0f} ETH", inline=False + ) else: e.title = "Possible Sea Creatures" e.description = "RPL (both old and new), rETH and ETH are consider as assets for the sea creature determination!" 
for holding_value, sea_creature in sea_creatures.items(): - e.add_field(name=f"{sea_creature}:", value=f"holds over {holding_value} ETH worth of assets", - inline=False) + e.add_field( + name=f"{sea_creature}:", + value=f"holds over {holding_value} ETH worth of assets", + inline=False, + ) await interaction.followup.send(embed=e) @command() @@ -165,85 +192,78 @@ async def smoothie(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed(title="Smoothing Pool") - smoothie_eth = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) - data = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'beacon.status': { - '$nin': [ - 'exited_unslashed', 'exited_slashed', 'withdrawal_possible', 'withdrawal_done', - 'pending_initialized' - ] - } - } - }, { - '$group': { - '_id' : '$node_operator', - 'count': { - '$sum': 1 - } - } - }, { - '$lookup': { - 'from' : 'node_operators', - 'localField' : '_id', - 'foreignField': 'address', - 'as' : 'meta' - } - }, { - '$unwind': { - 'path' : '$meta', - 'preserveNullAndEmptyArrays': True - } - }, { - '$project': { - '_id' : 1, - 'count' : 1, - 'smoothie': '$meta.smoothing_pool_registration' - } - }, { - '$group': { - '_id' : '$smoothie', - 'count' : { - '$sum': '$count' + smoothie_eth = solidity.to_float( + await w3.eth.get_balance( + await rp.get_address_by_name("rocketSmoothingPool") + ) + ) + data = await ( + await self.bot.db.minipools.aggregate( + [ + { + "$match": { + "beacon.status": { + "$nin": [ + "exited_unslashed", + "exited_slashed", + "withdrawal_possible", + "withdrawal_done", + "pending_initialized", + ] + } + } }, - 'node_count': { - '$sum': 1 + {"$group": {"_id": "$node_operator", "count": {"$sum": 1}}}, + { + "$lookup": { + "from": "node_operators", + "localField": "_id", + "foreignField": "address", + "as": "meta", + } }, - 'counts' : { - '$addToSet': { - 'count' : '$count', - 'address': '$_id' + {"$unwind": 
{"path": "$meta", "preserveNullAndEmptyArrays": True}}, + { + "$project": { + "_id": 1, + "count": 1, + "smoothie": "$meta.smoothing_pool_registration", } - } - } - }, { - '$project': { - '_id' : 1, - 'count' : 1, - 'node_count': 1, - 'counts' : { - '$sortArray': { - 'input' : '$counts', - 'sortBy': { - 'count': -1 - } + }, + { + "$group": { + "_id": "$smoothie", + "count": {"$sum": "$count"}, + "node_count": {"$sum": 1}, + "counts": { + "$addToSet": {"count": "$count", "address": "$_id"} + }, + } + }, + { + "$project": { + "_id": 1, + "count": 1, + "node_count": 1, + "counts": { + "$sortArray": { + "input": "$counts", + "sortBy": {"count": -1}, + } + }, + } + }, + { + "$project": { + "_id": 1, + "count": 1, + "node_count": 1, + "counts": {"$slice": ["$counts", 5]}, } - } - } - }, { - '$project': { - '_id' : 1, - 'count' : 1, - 'node_count': 1, - 'counts' : { - '$slice': [ - '$counts', 5 - ] - } - } - } - ])).to_list() + }, + ] + ) + ).to_list() if not data: await interaction.followup.send("no minipools found", ephemeral=True) return @@ -254,16 +274,22 @@ async def smoothie(self, interaction: Interaction): # minipool counts total_minipool_count = data[True]["count"] + data[False]["count"] smoothie_minipool_count = data[True]["count"] - d = datetime.now().timestamp() - await rp.call("rocketRewardsPool.getClaimIntervalTimeStart") - e.description = f"`{smoothie_node_count}/{total_node_count}` nodes (`{smoothie_node_count / total_node_count:.2%}`)" \ - f" have joined the smoothing pool.\n" \ - f" That is `{smoothie_minipool_count}/{total_minipool_count}` minipools " \ - f"(`{smoothie_minipool_count / total_minipool_count:.2%}`).\n" \ - f"The current (not overall) balance is **`{smoothie_eth:,.2f}` ETH.**\n" \ - f"This is over a span of `{pretty_time(d)}`.\n\n" \ - f"{min(smoothie_node_count, 5)} largest nodes:\n" - lines = [f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" for d in - data[True]["counts"][:min(smoothie_node_count, 5)]] + d = 
datetime.now().timestamp() - await rp.call( + "rocketRewardsPool.getClaimIntervalTimeStart" + ) + e.description = ( + f"`{smoothie_node_count}/{total_node_count}` nodes (`{smoothie_node_count / total_node_count:.2%}`)" + f" have joined the smoothing pool.\n" + f" That is `{smoothie_minipool_count}/{total_minipool_count}` minipools " + f"(`{smoothie_minipool_count / total_minipool_count:.2%}`).\n" + f"The current (not overall) balance is **`{smoothie_eth:,.2f}` ETH.**\n" + f"This is over a span of `{pretty_time(d)}`.\n\n" + f"{min(smoothie_node_count, 5)} largest nodes:\n" + ) + lines = [ + f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" + for d in data[True]["counts"][: min(smoothie_node_count, 5)] + ] e.description += "\n".join(lines) await interaction.followup.send(embed=e) @@ -273,11 +299,18 @@ async def odao_challenges(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) c = await rp.get_contract_by_name("rocketDAONodeTrustedActions") # get challenges made - events = list(c.events["ActionChallengeMade"].get_logs( - from_block=(await w3.eth.get_block("latest")).number - 7 * 24 * 60 * 60 // 12)) + events = list( + c.events["ActionChallengeMade"].get_logs( + from_block=(await w3.eth.get_block("latest")).number + - 7 * 24 * 60 * 60 // 12 + ) + ) # remove all events of nodes that aren't challenged anymore for event in events: - if not await rp.call("rocketDAONodeTrusted.getMemberIsChallenged", event.args.nodeChallengedAddress): + if not await rp.call( + "rocketDAONodeTrusted.getMemberIsChallenged", + event.args.nodeChallengedAddress, + ): events.remove(event) # sort by block number events.sort(key=lambda x: x.blockNumber) @@ -287,7 +320,9 @@ async def odao_challenges(self, interaction: Interaction): e = Embed(title="Active oDAO Challenges") e.description = "" # get duration of challenge period - challenge_period = await rp.call("rocketDAONodeTrustedSettingsMembers.getChallengeWindow") + 
challenge_period = await rp.call( + "rocketDAONodeTrustedSettingsMembers.getChallengeWindow" + ) for event in events: latest_block = await w3.eth.get_block("latest") time_left = challenge_period - (latest_block.timestamp - event.args.time) @@ -304,7 +339,12 @@ async def asian_restaurant_name(self, interaction: Interaction): Randomly generated Asian restaurant names """ await interaction.response.defer(ephemeral=is_hidden(interaction)) - async with aiohttp.ClientSession() as session, session.get("https://www.dotomator.com/api/random_name.json?type=asian") as resp: + async with ( + aiohttp.ClientSession() as session, + session.get( + "https://www.dotomator.com/api/random_name.json?type=asian" + ) as resp, + ): a = (await resp.json())["name"] await interaction.followup.send(a) @@ -319,10 +359,7 @@ async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int) found_ts = await block_to_ts(block) if found_ts == timestamp: - text = ( - f"Found perfect match for timestamp {timestamp}:\n" - f"Block: {block}" - ) + text = f"Found perfect match for timestamp {timestamp}:\nBlock: {block}" else: text = ( f"Found close match for timestamp {timestamp}:\n" @@ -335,10 +372,14 @@ async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int) @command() async def get_abi_of_contract(self, interaction: Interaction, contract: str): """Retrieve the latest ABI for a contract""" - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + await interaction.response.defer( + ephemeral=is_hidden_role_controlled(interaction) + ) try: abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) - file = File(io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") + file = File( + io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json" + ) await interaction.followup.send(file=file) except Exception as err: await interaction.followup.send(content=f"```Exception: {err!r}```") @@ -346,7 
+387,9 @@ async def get_abi_of_contract(self, interaction: Interaction, contract: str): @command() async def get_address_of_contract(self, interaction: Interaction, contract: str): """Retrieve the latest address for a contract""" - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + await interaction.response.defer( + ephemeral=is_hidden_role_controlled(interaction) + ) try: address = cfg.rocketpool.manual_addresses.get(contract) if not address: @@ -366,11 +409,15 @@ async def get_address_of_contract(self, interaction: Interaction, contract: str) await interaction.followup.send(content=m) @command() - async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_name: str | None = None): + async def decode_tnx( + self, interaction: Interaction, tnx_hash: str, contract_name: str | None = None + ): """ Decode transaction calldata """ - await interaction.response.defer(ephemeral=is_hidden_role_controlled(interaction)) + await interaction.response.defer( + ephemeral=is_hidden_role_controlled(interaction) + ) tnx = await w3.eth.get_transaction(tnx_hash) if contract_name: contract = await rp.get_contract_by_name(contract_name) @@ -384,8 +431,14 @@ async def decode_tnx(self, interaction: Interaction, tnx_hash: str, contract_nam @get_address_of_contract.autocomplete("contract") @get_abi_of_contract.autocomplete("contract") @decode_tnx.autocomplete("contract_name") - async def match_contract_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: - return [Choice(name=name, value=name) for name in self.contract_names if current.lower() in name.lower()][:25] + async def match_contract_names( + self, interaction: Interaction, current: str + ) -> list[Choice[str]]: + return [ + Choice(name=name, value=name) + for name in self.contract_names + if current.lower() in name.lower() + ][:25] async def setup(self): diff --git a/rocketwatch/plugins/releases/releases.py b/rocketwatch/plugins/releases/releases.py index 
8318a710..97efdcc3 100644 --- a/rocketwatch/plugins/releases/releases.py +++ b/rocketwatch/plugins/releases/releases.py @@ -25,7 +25,9 @@ async def latest_release(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) async with aiohttp.ClientSession() as session: - res = await session.get("https://api.github.com/repos/rocket-pool/smartnode-install/tags") + res = await session.get( + "https://api.github.com/repos/rocket-pool/smartnode-install/tags" + ) res = await res.json() latest_release = None for tag in res: @@ -34,7 +36,9 @@ async def latest_release(self, interaction: Interaction): break e = Embed() - e.add_field(name="Latest Smart Node Release", value=latest_release, inline=False) + e.add_field( + name="Latest Smart Node Release", value=latest_release, inline=False + ) await interaction.followup.send(embed=e) diff --git a/rocketwatch/plugins/reloader/reloader.py b/rocketwatch/plugins/reloader/reloader.py index 5f102d91..8c15ed40 100644 --- a/rocketwatch/plugins/reloader/reloader.py +++ b/rocketwatch/plugins/reloader/reloader.py @@ -18,14 +18,26 @@ class Reloader(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - async def _get_loaded_extensions(self, interaction: Interaction, current: str) -> list[Choice[str]]: + async def _get_loaded_extensions( + self, interaction: Interaction, current: str + ) -> list[Choice[str]]: loaded = {ext.split(".")[-1] for ext in self.bot.extensions} - return [Choice(name=plugin, value=plugin) for plugin in loaded if current.lower() in plugin.lower()][:25] + return [ + Choice(name=plugin, value=plugin) + for plugin in loaded + if current.lower() in plugin.lower() + ][:25] - async def _get_unloaded_extensions(self, interaction: Interaction, current: str) -> list[Choice[str]]: + async def _get_unloaded_extensions( + self, interaction: Interaction, current: str + ) -> list[Choice[str]]: loaded = {ext.split(".")[-1] for ext in self.bot.extensions} - all = {path.stem for path in 
Path("plugins").glob('**/*.py')} - return [Choice(name=plugin, value=plugin) for plugin in (all - loaded) if current.lower() in plugin.lower()][:25] + all = {path.stem for path in Path("plugins").glob("**/*.py")} + return [ + Choice(name=plugin, value=plugin) + for plugin in (all - loaded) + if current.lower() in plugin.lower() + ][:25] @command() @guilds(cfg.discord.owner.server_id) @@ -39,7 +51,9 @@ async def load(self, interaction: Interaction, module: str): await interaction.followup.send(content=f"Loaded plugin `{module}`!") await self.bot.sync_commands() except ExtensionAlreadyLoaded: - await interaction.followup.send(content=f"Plugin `{module}` already loaded!") + await interaction.followup.send( + content=f"Plugin `{module}` already loaded!" + ) except ExtensionNotFound: await interaction.followup.send(content=f"Plugin `{module}` not found!") diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 8981f60c..446e098b 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -41,7 +41,9 @@ async def _make_request(self, address) -> dict: response = await session.get(f"https://sprocketpool.net/api/node/{address}") return await response.json() - async def get_estimated_rewards(self, interaction: Interaction, address: str) -> RewardEstimate | None: + async def get_estimated_rewards( + self, interaction: Interaction, address: str + ) -> RewardEstimate | None: if not await rp.call("rocketNodeManager.getNodeExists", address): await interaction.followup.send(f"{address} is not a registered node.") return None @@ -50,13 +52,18 @@ async def get_estimated_rewards(self, interaction: Interaction, address: str) -> patches_res = await self._make_request(address) except Exception as e: await self.bot.report_error(e, interaction) - await interaction.followup.send("Error fetching node data from Sprocket Pool API. 
Blame Patches.") + await interaction.followup.send( + "Error fetching node data from Sprocket Pool API. Blame Patches." + ) return None data_block = await ts_to_block(patches_res["time"]) rpl_rewards: int = patches_res[address].get("collateralRpl", 0) eth_rewards: int = patches_res[address].get("smoothingPoolEth", 0) - interval_time = await rp.call("rocketDAOProtocolSettingsRewards.getRewardsClaimIntervalTime", block=data_block) + interval_time = await rp.call( + "rocketDAOProtocolSettingsRewards.getRewardsClaimIntervalTime", + block=data_block, + ) return Rewards.RewardEstimate( address=address, @@ -67,7 +74,7 @@ async def get_estimated_rewards(self, interaction: Interaction, address: str) -> end_time=patches_res["startTime"] + interval_time, rpl_rewards=solidity.to_float(rpl_rewards), eth_rewards=solidity.to_float(eth_rewards), - system_weight=solidity.to_float(patches_res["totalNodeWeight"]) + system_weight=solidity.to_float(patches_res["totalNodeWeight"]), ) @staticmethod @@ -82,8 +89,12 @@ def create_embed(title: str, rewards: RewardEstimate) -> Embed: @command() @describe(node_address="address of node to show rewards for") - @describe(extrapolate="whether to extrapolate partial rewards for the entire period") - async def upcoming_rewards(self, interaction: Interaction, node_address: str, extrapolate: bool = True): + @describe( + extrapolate="whether to extrapolate partial rewards for the entire period" + ) + async def upcoming_rewards( + self, interaction: Interaction, node_address: str, extrapolate: bool = True + ): """ Show estimated RPL and smoothing pool rewards for this period. 
""" @@ -97,9 +108,13 @@ async def upcoming_rewards(self, interaction: Interaction, node_address: str, ex return if extrapolate: - registration_time = await rp.call("rocketNodeManager.getNodeRegistrationTime", address) + registration_time = await rp.call( + "rocketNodeManager.getNodeRegistrationTime", address + ) reward_start_time = max(registration_time, rewards.start_time) - proj_factor = (rewards.end_time - reward_start_time) / (rewards.data_time - reward_start_time) + proj_factor = (rewards.end_time - reward_start_time) / ( + rewards.data_time - reward_start_time + ) rewards.rpl_rewards *= proj_factor rewards.eth_rewards *= proj_factor @@ -115,15 +130,15 @@ async def upcoming_rewards(self, interaction: Interaction, node_address: str, ex node_address="address of node to simulate rewards for", rpl_stake="amount of staked RPL to simulate", num_leb8="number of 8 ETH minipools to simulate", - num_eb16="number of 16 ETH minipools to simulate" + num_eb16="number of 16 ETH minipools to simulate", ) async def simulate_rewards( - self, - interaction: Interaction, - node_address: str, - rpl_stake: int = 0, - num_leb8: int = 0, - num_eb16: int = 0 + self, + interaction: Interaction, + node_address: str, + rpl_stake: int = 0, + num_leb8: int = 0, + num_eb16: int = 0, ): """ Simulate RPL rewards for this period @@ -145,16 +160,32 @@ async def simulate_rewards( data_block: int = rewards.data_block reward_start_block = await ts_to_block(rewards.start_time) - rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice", block=data_block)) + rpl_ratio = solidity.to_float( + await rp.call("rocketNetworkPrices.getRPLPrice", block=data_block) + ) actual_borrowed_eth = solidity.to_float( - await rp.call("rocketNodeStaking.getNodeETHBorrowed", address, block=data_block) + await rp.call( + "rocketNodeStaking.getNodeETHBorrowed", address, block=data_block + ) + ) + actual_rpl_stake = solidity.to_float( + await rp.call( + "rocketNodeStaking.getNodeStakedRPL", address, 
block=data_block + ) ) - actual_rpl_stake = solidity.to_float(await rp.call("rocketNodeStaking.getNodeStakedRPL", address, block=data_block)) - inflation_rate: int = await rp.call("rocketTokenRPL.getInflationIntervalRate", block=data_block) - inflation_interval: int = await rp.call("rocketTokenRPL.getInflationIntervalTime", block=data_block) - num_inflation_intervals: int = (rewards.end_time - rewards.start_time) // inflation_interval - total_supply: int = await rp.call("rocketTokenRPL.totalSupply", block=reward_start_block) + inflation_rate: int = await rp.call( + "rocketTokenRPL.getInflationIntervalRate", block=data_block + ) + inflation_interval: int = await rp.call( + "rocketTokenRPL.getInflationIntervalTime", block=data_block + ) + num_inflation_intervals: int = ( + rewards.end_time - rewards.start_time + ) // inflation_interval + total_supply: int = await rp.call( + "rocketTokenRPL.totalSupply", block=reward_start_block + ) period_inflation: int = total_supply for _i in range(num_inflation_intervals): @@ -167,13 +198,17 @@ def node_weight(_stake: float, _borrowed_eth: float) -> float: if collateral_ratio <= 0.15: return 100 * rpl_value else: - return (13.6137 + 2 * np.log(100 * collateral_ratio - 13)) * _borrowed_eth + return ( + 13.6137 + 2 * np.log(100 * collateral_ratio - 13) + ) * _borrowed_eth def rewards_at(_stake: float, _borrowed_eth: float) -> float: weight = node_weight(_stake, _borrowed_eth) base_weight = node_weight(actual_rpl_stake, _borrowed_eth) new_system_weight = rewards.system_weight + weight - base_weight - return solidity.to_float(0.7 * period_inflation * weight / new_system_weight) + return solidity.to_float( + 0.7 * period_inflation * weight / new_system_weight + ) fig, ax = plt.subplots(figsize=(5, 2.5)) ax.grid() @@ -187,7 +222,9 @@ def rewards_at(_stake: float, _borrowed_eth: float) -> float: cur_color, cur_label, cur_ls = "#eb8e55", "current", "solid" sim_color, sim_label, sim_ls = "darkred", "simulated", "dashed" - def 
draw_reward_curve(_color: str, _label: str | None, _line_style: str, _borrowed_eth: float) -> None: + def draw_reward_curve( + _color: str, _label: str | None, _line_style: str, _borrowed_eth: float + ) -> None: step_size = max(1, (x_max - x_min) // 1000) x = np.arange(x_min, x_max, step_size, dtype=int) y = np.array([rewards_at(x, _borrowed_eth) for x in x]) @@ -202,7 +239,7 @@ def plot_point(_pt_color: str, _pt_label: str, _x: int) -> None: (_x, _y), textcoords="offset points", xytext=(5, -10 if _y > 0 else 5), - ha="left" + ha="left", ) plot_point(cur_color, cur_label, actual_rpl_stake) @@ -217,7 +254,9 @@ def plot_point(_pt_color: str, _pt_label: str, _x: int) -> None: elif borrowed_eth > 0: draw_reward_curve(sim_color, None, sim_ls, borrowed_eth) else: - await interaction.followup.send("Empty node. Choose another one or specify the minipool count.") + await interaction.followup.send( + "Empty node. Choose another one or specify the minipool count." + ) return def formatter(_x, _pos) -> str: @@ -234,7 +273,9 @@ def formatter(_x, _pos) -> str: ax.set_ylabel("rewards") ax.xaxis.set_major_formatter(formatter) - y_min = min(rewards_at(x_min, borrowed_eth), rewards_at(x_min, actual_borrowed_eth)) + y_min = min( + rewards_at(x_min, borrowed_eth), rewards_at(x_min, actual_borrowed_eth) + ) _, y_max = ax.get_ylim() ax.set_ylim((y_min, y_max)) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index e203c236..4152347f 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -44,7 +44,9 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: updates.append((doc["time"], doc["assets"])) db_operations = [] - for event_log in get_logs(vault_contract.events.TotalAssetsUpdated, b_from, b_to): + for event_log in get_logs( + vault_contract.events.TotalAssetsUpdated, b_from, b_to + ): ts = await block_to_ts(event_log.blockNumber) assets = 
solidity.to_float(event_log.args.totalAssets) updates.append((ts, assets)) @@ -55,9 +57,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: if db_operations: await self.bot.db.rocksolid.bulk_write(db_operations) await self.bot.db.last_checked_block.replace_one( - {"_id": cog_id}, - {"_id": cog_id, "block": b_to}, - upsert=True + {"_id": cog_id}, {"_id": cog_id, "block": b_to}, upsert=True ) return updates @@ -74,16 +74,26 @@ async def rocksolid(self, interaction: Interaction): async def get_eth_rate(block_number: int) -> int: block_number = max(block_number, self.deployment_block) - reth_value = await rp.call("RockSolidVault.convertToAssets", 10**18, block=block_number) - return await rp.call("rocketTokenRETH.getEthValue", reth_value, block=block_number) + reth_value = await rp.call( + "RockSolidVault.convertToAssets", 10**18, block=block_number + ) + return await rp.call( + "rocketTokenRETH.getEthValue", reth_value, block=block_number + ) current_eth_rate = await get_eth_rate(current_block) async def get_apy(days: int) -> float | None: - reference_block = await ts_to_block(now - timedelta(days=days).total_seconds()) + reference_block = await ts_to_block( + now - timedelta(days=days).total_seconds() + ) if reference_block < self.deployment_block: return None - return (current_eth_rate / await get_eth_rate(reference_block) - 1) * (365 / days) * 100 + return ( + (current_eth_rate / await get_eth_rate(reference_block) - 1) + * (365 / days) + * 100 + ) apy_7d = await get_apy(days=7) apy_30d = await get_apy(days=30) @@ -93,7 +103,9 @@ async def get_apy(days: int) -> float | None: tvl_rock_reth = solidity.to_float(await rp.call("RockSolidVault.totalSupply")) asset_updates: list[tuple[int, float]] = await self._fetch_asset_updates() - current_date = datetime.fromtimestamp(asset_updates[0][0]).date() - timedelta(days=1) + current_date = datetime.fromtimestamp(asset_updates[0][0]).date() - timedelta( + days=1 + ) current_assets = 0.0 x, y = [], [] @@ 
-121,7 +133,7 @@ async def get_apy(days: int) -> float | None: img = BytesIO() fig.tight_layout() - fig.savefig(img, format='png') + fig.savefig(img, format="png") img.seek(0) plt.clf() @@ -132,11 +144,19 @@ async def get_apy(days: int) -> float | None: embed.add_field(name="7d APY", value=f"{apy_7d:.2f}%" if apy_7d else "-") embed.add_field(name="30d APY", value=f"{apy_30d:.2f}%" if apy_30d else "-") embed.add_field(name="90d APY", value=f"{apy_90d:.2f}%" if apy_90d else "-") - embed.add_field(name="TVL", value=f"`{tvl_reth:,.2f}` {await el_explorer_url(ca_reth, name=' rETH')}") - embed.add_field(name="Supply", value=f"`{tvl_rock_reth:,.2f}` {await el_explorer_url(ca_rock_reth, name=' rock.rETH')}") + embed.add_field( + name="TVL", + value=f"`{tvl_reth:,.2f}` {await el_explorer_url(ca_reth, name=' rETH')}", + ) + embed.add_field( + name="Supply", + value=f"`{tvl_rock_reth:,.2f}` {await el_explorer_url(ca_rock_reth, name=' rock.rETH')}", + ) embed.set_image(url="attachment://rocksolid-tvl.png") - await interaction.followup.send(embed=embed, file=File(img, "rocksolid-tvl.png")) + await interaction.followup.send( + embed=embed, file=File(img, "rocksolid-tvl.png") + ) async def setup(bot): diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 796df2de..0ec29dea 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -24,9 +24,13 @@ async def rpip(self, interaction: Interaction, name: str): """Show information about a specific RPIP.""" await interaction.response.defer() embed = Embed() - embed.set_author(name="🔗 Data from rpips.rocketpool.net", url="https://rpips.rocketpool.net") + embed.set_author( + name="🔗 Data from rpips.rocketpool.net", url="https://rpips.rocketpool.net" + ) - rpips_by_name: dict[str, RPIPs.RPIP] = {rpip.full_title: rpip for rpip in await self.get_all_rpips()} + rpips_by_name: dict[str, RPIPs.RPIP] = { + rpip.full_title: rpip for rpip in await self.get_all_rpips() + } if rpip 
:= rpips_by_name.get(name): details = await rpip.fetch_details() embed.title = name @@ -41,7 +45,9 @@ async def rpip(self, interaction: Interaction, name: str): embed.add_field(name="Status", value=rpip.status) embed.add_field(name="Created", value=details["created"]) - embed.add_field(name="Discussion Link", value=details["discussion"], inline=False) + embed.add_field( + name="Discussion Link", value=details["discussion"], inline=False + ) else: embed.description = "No matching RPIPs." @@ -61,13 +67,18 @@ def __str__(self) -> str: @cached(ttl=300, key_builder=lambda _, rpip: rpip.number) @retry_async(tries=3, delay=1) async def fetch_details(self) -> dict: - async with aiohttp.ClientSession() as session, session.get(self.url) as resp: + async with ( + aiohttp.ClientSession() as session, + session.get(self.url) as resp, + ): html = await resp.text() soup = BeautifulSoup(html, "html.parser") metadata = {} - for field in soup.main.find("table", {"class": "rpip-preamble"}).find_all("tr"): + for field in soup.main.find("table", {"class": "rpip-preamble"}).find_all( + "tr" + ): match field_name := field.th.text: case "Discussion": metadata[field_name] = field.td.a["href"] @@ -81,7 +92,7 @@ async def fetch_details(self) -> dict: "authors": metadata.get("Author"), "created": metadata.get("Created"), "discussion": metadata.get("Discussion"), - "description": soup.find("big", {"class": "rpip-description"}).text + "description": soup.find("big", {"class": "rpip-description"}).text, } @property @@ -93,7 +104,9 @@ def url(self) -> str: return f"https://rpips.rocketpool.net/RPIPs/RPIP-{self.number}" @rpip.autocomplete("name") - async def _get_rpip_names(self, interaction: Interaction, current: str) -> list[Choice[str]]: + async def _get_rpip_names( + self, interaction: Interaction, current: str + ) -> list[Choice[str]]: choices = [] for rpip in await self.get_all_rpips(): if current.lower() in (name := rpip.full_title).lower(): @@ -103,8 +116,11 @@ async def 
_get_rpip_names(self, interaction: Interaction, current: str) -> list[ @staticmethod @cached(ttl=60) @retry_async(tries=3, delay=1) - async def get_all_rpips() -> list['RPIPs.RPIP']: - async with aiohttp.ClientSession() as session, session.get("https://rpips.rocketpool.net/all") as resp: + async def get_all_rpips() -> list["RPIPs.RPIP"]: + async with ( + aiohttp.ClientSession() as session, + session.get("https://rpips.rocketpool.net/all") as resp, + ): html = await resp.text() soup = BeautifulSoup(html, "html.parser") diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 4e15b03d..7fbf3d98 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -27,19 +27,27 @@ async def staked_rpl(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) rpl_supply = solidity.to_float(await rp.call("rocketTokenRPL.totalSupply")) - legacy_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) - megapool_staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) + legacy_staked_rpl = solidity.to_float( + await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL") + ) + megapool_staked_rpl = solidity.to_float( + await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL") + ) staked_rpl = legacy_staked_rpl + megapool_staked_rpl - unstaking_rpl = (await (await self.bot.db.node_operators.aggregate([ - { - '$group': { - '_id' : 'out', - 'total_unstaking_rpl_': { - '$sum': '$rpl.unstaking' - } - } - } - ])).next())['total_unstaking_rpl_'] + unstaking_rpl = ( + await ( + await self.bot.db.node_operators.aggregate( + [ + { + "$group": { + "_id": "out", + "total_unstaking_rpl_": {"$sum": "$rpl.unstaking"}, + } + } + ] + ) + ).next() + )["total_unstaking_rpl_"] unstaked_rpl = rpl_supply - staked_rpl - unstaking_rpl def fmt(v): @@ -89,39 +97,38 @@ async def withdrawable_rpl(self, interaction: Interaction): """ await 
interaction.response.defer(ephemeral=is_hidden(interaction)) - data = await (await self.bot.db.node_operators.aggregate([ - { - '$match': { - 'staking_minipool_count': { - '$ne': 0 - } - } - }, { - '$project': { - 'eth_stake': { - '$multiply': [ - '$effective_node_share', { - '$multiply': [ - '$staking_minipool_count', 32 + data = await ( + await self.bot.db.node_operators.aggregate( + [ + {"$match": {"staking_minipool_count": {"$ne": 0}}}, + { + "$project": { + "eth_stake": { + "$multiply": [ + "$effective_node_share", + {"$multiply": ["$staking_minipool_count", 32]}, ] - } - ] + }, + "rpl_stake": "$rpl.legacy_stake", + } }, - 'rpl_stake': "$rpl.legacy_stake" - } - } - ])).to_list() - rpl_eth_price = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + ] + ) + ).to_list() + rpl_eth_price = solidity.to_float( + await rp.call("rocketNetworkPrices.getRPLPrice") + ) # calculate withdrawable RPL at various RPL ETH prices # i/10 is the ratio of the price checked to the actual RPL ETH price free_rpl_liquidity = {} - max_collateral = solidity.to_float(await rp.call("rocketDAOProtocolSettingsNode.getMinimumLegacyRPLStake")) + max_collateral = solidity.to_float( + await rp.call("rocketDAOProtocolSettingsNode.getMinimumLegacyRPLStake") + ) current_withdrawable_rpl = 0 for i in range(1, 31): - - test_ratio = (i / 10) + test_ratio = i / 10 rpl_eth_test_price = rpl_eth_price * test_ratio liquid_rpl = 0 @@ -141,7 +148,9 @@ async def withdrawable_rpl(self, interaction: Interaction): if collateral_percentage < max_collateral: continue - liquid_rpl += ((collateral_percentage - max_collateral) / collateral_percentage) * rpl_stake + liquid_rpl += ( + (collateral_percentage - max_collateral) / collateral_percentage + ) * rpl_stake free_rpl_liquidity[i] = (rpl_eth_test_price, liquid_rpl) if test_ratio == 1: @@ -154,14 +163,23 @@ async def withdrawable_rpl(self, interaction: Interaction): # plot the data plt.plot(x, y, color=str(embed.color)) - plt.plot(rpl_eth_price, 
current_withdrawable_rpl, 'bo') + plt.plot(rpl_eth_price, current_withdrawable_rpl, "bo") plt.xlim(min(x), max(x)) - plt.annotate(f"{rpl_eth_price:.4f}", (rpl_eth_price, current_withdrawable_rpl), - textcoords="offset points", xytext=(-10, -5), ha='right') - plt.annotate(f"{current_withdrawable_rpl / 1000000:.2f} million RPL withdrawable", - (rpl_eth_price, current_withdrawable_rpl), textcoords="offset points", xytext=(10, -5), - ha='left') + plt.annotate( + f"{rpl_eth_price:.4f}", + (rpl_eth_price, current_withdrawable_rpl), + textcoords="offset points", + xytext=(-10, -5), + ha="right", + ) + plt.annotate( + f"{current_withdrawable_rpl / 1000000:.2f} million RPL withdrawable", + (rpl_eth_price, current_withdrawable_rpl), + textcoords="offset points", + xytext=(10, -5), + ha="left", + ) plt.grid() ax = plt.gca() @@ -172,7 +190,7 @@ async def withdrawable_rpl(self, interaction: Interaction): img = BytesIO() plt.tight_layout() - plt.savefig(img, format='png') + plt.savefig(img, format="png") img.seek(0) plt.close() diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 611c5b3e..c29dd366 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -46,17 +46,19 @@ class Color: @staticmethod def is_reputable(user: Member) -> bool: - return any(( - user.id == cfg.discord.owner.user_id, - user.id in cfg.rocketpool.support.user_ids, - {role.id for role in user.roles} & set(cfg.rocketpool.support.role_ids), - user.guild_permissions.moderate_members - )) + return any( + ( + user.id == cfg.discord.owner.user_id, + user.id in cfg.rocketpool.support.user_ids, + {role.id for role in user.roles} & set(cfg.rocketpool.support.role_ids), + user.guild_permissions.moderate_members, + ) + ) class RemovalVoteView(ui.View): THRESHOLD = 5 - def __init__(self, plugin: 'ScamDetection', reportable: Message | Thread): + def __init__(self, plugin: 
"ScamDetection", reportable: Message | Thread): super().__init__(timeout=None) self.plugin = plugin self.reportable = reportable @@ -64,16 +66,24 @@ def __init__(self, plugin: 'ScamDetection', reportable: Message | Thread): @ui.button(label="Mark Safu", style=ButtonStyle.blurple) async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: - log.info(f"User {interaction.user.id} marked message {interaction.message.id} as safe") + log.info( + f"User {interaction.user.id} marked message {interaction.message.id} as safe" + ) reportable_repr = type(self.reportable).__name__.lower() if interaction.user.id in self.safu_votes: - log.debug(f"User {interaction.user.id} already voted on {reportable_repr}") - await interaction.response.send_message(content="You already voted!", ephemeral=True) + log.debug( + f"User {interaction.user.id} already voted on {reportable_repr}" + ) + await interaction.response.send_message( + content="You already voted!", ephemeral=True + ) return if interaction.user.is_timed_out(): - log.debug(f"Timed-out user {interaction.user.id} tried to vote on {self.reportable}") + log.debug( + f"Timed-out user {interaction.user.id} tried to vote on {self.reportable}" + ) return None if isinstance(self.reportable, Message): @@ -89,10 +99,12 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: return None if interaction.user == reported_user: - log.debug(f"User {interaction.user.id} tried to mark their own {reportable_repr} as safe") + log.debug( + f"User {interaction.user.id} tried to mark their own {reportable_repr} as safe" + ) await interaction.response.send_message( content=f"You can't vote on your own {reportable_repr}!", - ephemeral=True + ephemeral=True, ) return @@ -111,10 +123,15 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: async with required_lock: report = await self.plugin.bot.db.scam_reports.find_one(db_filter) - await self.plugin._update_report(report, f"This 
has been marked as safe by {user_repr}.") - await self.plugin.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None}}) - await interaction.response.send_message(content="Warning removed!", ephemeral=True) - + await self.plugin._update_report( + report, f"This has been marked as safe by {user_repr}." + ) + await self.plugin.bot.db.scam_reports.update_one( + db_filter, {"$set": {"warning_id": None}} + ) + await interaction.response.send_message( + content="Warning removed!", ephemeral=True + ) def __init__(self, bot: RocketWatch): self.bot = bot @@ -122,19 +139,24 @@ def __init__(self, bot: RocketWatch): self._thread_report_lock = asyncio.Lock() self._user_report_lock = asyncio.Lock() self._message_react_cache = TTLCache(maxsize=1000, ttl=300) - self.markdown_link_pattern = re.compile(r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)") - self.basic_url_pattern = re.compile(r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+") + self.markdown_link_pattern = re.compile( + r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)" + ) + self.basic_url_pattern = re.compile( + r"https?:\/\/?([/\\@\-_0-9a-zA-Z]+\.)+[\\@\-_0-9a-zA-Z]+" + ) self.invite_pattern = re.compile( - r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)") + r"((discord(app)?\.com\/(invite|oauth2))|((dsc|dcd|discord)\.gg))(\\|\/)(?P[a-zA-Z0-9]+)" + ) # Detects URLs broken across lines (with optional blockquote "> " prefixes) to evade filters _brk = r"(?:[\s>\u2060\u200b\ufeff]*\n[\s>\u2060\u200b\ufeff]*)" # newline with optional blockquote/zero-width chars _ws = r"[\s>]*" self.obfuscated_url_pattern = re.compile( - rf"<{_ws}ht{_brk}tp|" # tp - rf"<{_ws}ma{_ws}i{_brk}l{_ws}t{_ws}o|" # i\n> L\n> To (mailto) - rf" tp + rf"<{_ws}ma{_ws}i{_brk}l{_ws}t{_ws}o|" # i\n> L\n> To (mailto) + rf" None: - self.bot.tree.remove_command(self.message_report_menu.name, type=self.message_report_menu.type) - self.bot.tree.remove_command(self.user_report_menu.name, 
type=self.user_report_menu.type) + self.bot.tree.remove_command( + self.message_report_menu.name, type=self.message_report_menu.type + ) + self.bot.tree.remove_command( + self.user_report_menu.name, type=self.user_report_menu.type + ) @staticmethod - def _get_message_content(message: Message, *, preserve_formatting: bool = False) -> str: + def _get_message_content( + message: Message, *, preserve_formatting: bool = False + ) -> str: text = "" if message.content: content = message.content @@ -184,7 +212,9 @@ def _get_message_content(message: Message, *, preserve_formatting: bool = False) return text - async def _generate_message_report(self, message: Message, reason: str) -> tuple[Embed, Embed, File] | None: + async def _generate_message_report( + self, message: Message, reason: str + ) -> tuple[Embed, Embed, File] | None: try: message = await message.channel.fetch_message(message.id) if isinstance(message, DeletedReferencedMessage): @@ -192,7 +222,9 @@ async def _generate_message_report(self, message: Message, reason: str) -> tuple except errors.NotFound: return None - if await self.bot.db.scam_reports.find_one({"type": "message", "message_id": message.id}): + if await self.bot.db.scam_reports.find_one( + {"type": "message", "message_id": message.id} + ): log.info(f"Found existing report for message {message.id} in database") return None @@ -201,7 +233,9 @@ async def _generate_message_report(self, message: Message, reason: str) -> tuple warning.description = f"**Reason**: {reason}\n" report = warning.copy() - warning.set_footer(text="This message will be deleted once the suspicious message is removed.") + warning.set_footer( + text="This message will be deleted once the suspicious message is removed." 
+ ) report.description += ( "\n" @@ -219,13 +253,17 @@ async def _generate_message_report(self, message: Message, reason: str) -> tuple return warning, report, attachment - async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Embed, Embed] | None: + async def _generate_thread_report( + self, thread: Thread, reason: str + ) -> tuple[Embed, Embed] | None: try: thread = await thread.guild.fetch_channel(thread.id) except (errors.NotFound, errors.Forbidden): return None - if await self.bot.db.scam_reports.find_one({"type": "thread", "channel_id": thread.id}): + if await self.bot.db.scam_reports.find_one( + {"type": "thread", "channel_id": thread.id} + ): log.info(f"Found existing report for thread {thread.id} in database") return None @@ -234,10 +272,12 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Em warning.description = f"**Reason**: {reason}\n" report = warning.copy() - warning.set_footer(text=( - "There is no ticket system for support on this server.\n" - "Ignore this thread and any invites or DMs you may receive." - )) + warning.set_footer( + text=( + "There is no ticket system for support on this server.\n" + "Ignore this thread and any invites or DMs you may receive." 
+ ) + ) thread_owner = await self.bot.get_or_fetch_user(thread.owner_id) report.description += ( "\n" @@ -249,21 +289,29 @@ async def _generate_thread_report(self, thread: Thread, reason: str) -> tuple[Em ) return warning, report - async def _add_message_report_to_db(self, message: Message, reason: str, warning_msg: Message | None, report_msg: Message) -> None: - await self.bot.db.scam_reports.insert_one({ - "type" : "message", - "guild_id" : message.guild.id, - "channel_id" : message.channel.id, - "message_id" : message.id, - "user_id" : message.author.id, - "reason" : reason, - "content" : message.content, - "embeds" : [embed.to_dict() for embed in message.embeds], - "warning_id" : warning_msg.id if warning_msg else None, - "report_id" : report_msg.id, - "user_banned": False, - "removed" : False, - }) + async def _add_message_report_to_db( + self, + message: Message, + reason: str, + warning_msg: Message | None, + report_msg: Message, + ) -> None: + await self.bot.db.scam_reports.insert_one( + { + "type": "message", + "guild_id": message.guild.id, + "channel_id": message.channel.id, + "message_id": message.id, + "user_id": message.author.id, + "reason": reason, + "content": message.content, + "embeds": [embed.to_dict() for embed in message.embeds], + "warning_id": warning_msg.id if warning_msg else None, + "report_id": report_msg.id, + "user_banned": False, + "removed": False, + } + ) async def report_message(self, message: Message, reason: str) -> None: async with self._message_report_lock: @@ -274,23 +322,35 @@ async def report_message(self, message: Message, reason: str) -> None: try: view = self.RemovalVoteView(self, message) - warning_msg = await message.reply(embed=warning, view=view, mention_author=False) + warning_msg = await message.reply( + embed=warning, view=view, mention_author=False + ) except errors.Forbidden: warning_msg = None log.warning(f"Failed to send warning message in reply to {message.id}") - report_channel = await 
self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) report_msg = await report_channel.send(embed=report, file=attachment) - await self._add_message_report_to_db(message, reason, warning_msg, report_msg) + await self._add_message_report_to_db( + message, reason, warning_msg, report_msg + ) - async def manual_message_report(self, interaction: Interaction, message: Message) -> None: + async def manual_message_report( + self, interaction: Interaction, message: Message + ) -> None: await interaction.response.defer(ephemeral=True) if message.author.bot: - return await interaction.followup.send(content="Bot messages can't be reported.") + return await interaction.followup.send( + content="Bot messages can't be reported." + ) if message.author == interaction.user: - return await interaction.followup.send(content="Did you just report yourself?") + return await interaction.followup.send( + content="Did you just report yourself?" 
+ ) async with self._message_report_lock: reason = f"Manual report by {interaction.user.mention}" @@ -301,18 +361,24 @@ async def manual_message_report(self, interaction: Interaction, message: Message warning, report, attachment = components - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) report_msg = await report_channel.send(embed=report, file=attachment) - moderator = await self.bot.get_or_fetch_user(cfg.rocketpool.support.moderator_id) + moderator = await self.bot.get_or_fetch_user( + cfg.rocketpool.support.moderator_id + ) view = self.RemovalVoteView(self, message) warning_msg = await message.reply( content=f"{moderator.mention} {report_msg.jump_url}", embed=warning, view=view, - mention_author=False + mention_author=False, + ) + await self._add_message_report_to_db( + message, reason, warning_msg, report_msg ) - await self._add_message_report_to_db(message, reason, warning_msg, report_msg) await interaction.followup.send(content="Thanks for reporting!") def _discord_invite(self, message: Message) -> str | None: @@ -326,8 +392,12 @@ def _discord_invite(self, message: Message) -> str | None: if match := self.invite_pattern.search(content): link = match.group(0) trusted_domains = [ - "youtu.be", "youtube.com", "tenor.com", "giphy.com", - "imgur.com", "bluesky.app" + "youtu.be", + "youtube.com", + "tenor.com", + "giphy.com", + "imgur.com", + "bluesky.app", ] if not any(domain in link for domain in trusted_domains): return "Invite to external server" @@ -335,9 +405,7 @@ def _discord_invite(self, message: Message) -> str | None: def _tap_on_this(self, message: Message) -> str | None: txt = self._get_message_content(message) - keywords = ( - [("tap on", "click on"), "proper"] - ) + keywords = [("tap on", "click on"), "proper"] return "Tap on deez nuts nerd" if self.__txt_contains(txt, keywords) else None def 
_obfuscated_url(self, message: Message) -> str | None: @@ -372,19 +440,19 @@ def _ticket_system(self, message: Message) -> str | None: # High-confidence scam indicators (don't need URL trust check) strong_keywords = ( - ("support team", "supp0rt", "🎫", ":ticket:", "🎟️", ":tickets:", "m0d", "tlcket"), - [ - ("relay"), - ("query", "question", "inquiry") - ], - [ - ("instant", "live"), - "chat" - ], - [ - ("submit"), - ("question", "issue", "query") - ] + ( + "support team", + "supp0rt", + "🎫", + ":ticket:", + "🎟️", + ":tickets:", + "m0d", + "tlcket", + ), + [("relay"), ("query", "question", "inquiry")], + [("instant", "live"), "chat"], + [("submit"), ("question", "issue", "query")], ) if self.__txt_contains(txt, strong_keywords): return default_reason @@ -402,11 +470,27 @@ def _ticket_system(self, message: Message) -> str | None: if len(content_only_txt) > 500: return None trusted_url_domains = ( - "youtu.be", "youtube.com", "twitter.com", "x.com", "fxtwitter.com", - "fixvx.com", "fxbsky.app", "reddit.com", "github.com", "etherscan.io", - "beaconcha.in", "rocketpool.net", "docs.rocketpool.net", "rocketpool.support", - "xcancel.com", "steely-test.org", "validatorqueue.com", "checkpointz", - "discord.com", "forms.gle", "google.com", + "youtu.be", + "youtube.com", + "twitter.com", + "x.com", + "fxtwitter.com", + "fixvx.com", + "fxbsky.app", + "reddit.com", + "github.com", + "etherscan.io", + "beaconcha.in", + "rocketpool.net", + "docs.rocketpool.net", + "rocketpool.support", + "xcancel.com", + "steely-test.org", + "validatorqueue.com", + "checkpointz", + "discord.com", + "forms.gle", + "google.com", ) content_urls = list(self.basic_url_pattern.finditer(content_only_txt)) if not content_urls or all( @@ -416,13 +500,16 @@ def _ticket_system(self, message: Message) -> str | None: return None weak_keywords = ( + [("support", "open", "create", "raise", "raisse"), "ticket"], [ - ("support", "open", "create", "raise", "raisse"), - "ticket" - ], - [ - ("contact", "reach out", 
"report", [("talk", "speak"), ("to", "with")], "ask"), - ("admin", "mod", "administrator", "moderator", "team") + ( + "contact", + "reach out", + "report", + [("talk", "speak"), ("to", "with")], + "ask", + ), + ("admin", "mod", "administrator", "moderator", "team"), ], ) if self.__txt_contains(content_only_txt, weak_keywords): @@ -480,7 +567,11 @@ def _spam_wall(self, message: Message) -> str | None: if len(stripped) < 10: return "Spoiler wall spam" # Invisible character wall: mostly blank/invisible characters - visible = re.sub(r"[\s\u2800\u200b\u200c\u200d\u2060\ufeff\U000e0000-\U000e007f]", "", content) + visible = re.sub( + r"[\s\u2800\u200b\u200c\u200d\u2060\ufeff\U000e0000-\U000e007f]", + "", + content, + ) if len(visible) < 10 and len(content) > 200: return "Invisible character spam" return None @@ -507,14 +598,18 @@ async def _reaction_spam(self, reaction: Reaction, user: User) -> str | None: if reactions is None: reactions = {} for msg_reaction in reaction.message.reactions: - reactions[msg_reaction.emoji] = {user async for user in msg_reaction.users()} + reactions[msg_reaction.emoji] = { + user async for user in msg_reaction.users() + } self._message_react_cache[reaction.message.id] = reactions elif reaction.emoji not in reactions: reactions[reaction.emoji] = {user} else: reactions[reaction.emoji].add(user) - reaction_count = len([r for r in reactions.values() if user in r and len(r) == 1]) + reaction_count = len( + [r for r in reactions.values() if user in r and len(r) == 1] + ) log.debug(f"{reaction_count} reactions on message {reaction.message.id}") # if there are 8 reactions done by the author of the message, report it return "Reaction spam by message author" if (reaction_count >= 8) else None @@ -523,7 +618,8 @@ async def _reaction_spam(self, reaction: Reaction, user: User) -> str | None: async def on_message(self, message: Message) -> None: log.debug( f"Message(id={message.id}, author={message.author}, channel={message.channel}," - f" 
content=\"{message.content}\", embeds={message.embeds})") + f' content="{message.content}", embeds={message.embeds})' + ) if message.author.bot: log.warning("Ignoring message sent by bot") @@ -565,9 +661,7 @@ async def on_reaction_add(self, reaction: Reaction, user: User) -> None: log.warning(f"Ignoring reaction in {reaction.message.guild.id}") return - checks = [ - self._reaction_spam(reaction, user) - ] + checks = [self._reaction_spam(reaction, user)] for reason in await asyncio.gather(*checks): if reason: await self.report_message(reaction.message, reason) @@ -578,8 +672,12 @@ async def on_raw_message_delete(self, event: RawMessageDeleteEvent) -> None: await self._on_message_delete(event.message_id) @Cog.listener() - async def on_raw_bulk_message_delete(self, event: RawBulkMessageDeleteEvent) -> None: - await asyncio.gather(*[self._on_message_delete(msg_id) for msg_id in event.message_ids]) + async def on_raw_bulk_message_delete( + self, event: RawBulkMessageDeleteEvent + ) -> None: + await asyncio.gather( + *[self._on_message_delete(msg_id) for msg_id in event.message_ids] + ) async def _on_message_delete(self, message_id: int) -> None: async with self._message_report_lock: @@ -588,30 +686,44 @@ async def _on_message_delete(self, message_id: int) -> None: return channel = await self.bot.get_or_fetch_channel(report["channel_id"]) - with contextlib.suppress(errors.NotFound, errors.Forbidden, errors.HTTPException): + with contextlib.suppress( + errors.NotFound, errors.Forbidden, errors.HTTPException + ): message = await channel.fetch_message(report["warning_id"]) await message.delete() await self._update_report(report, "Original message has been deleted.") - await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) + await self.bot.db.scam_reports.update_one( + db_filter, {"$set": {"warning_id": None, "removed": True}} + ) @Cog.listener() async def on_member_ban(self, guild: Guild, user: User) -> None: - async with 
self._message_report_lock, self._thread_report_lock, self._user_report_lock: + async with ( + self._message_report_lock, + self._thread_report_lock, + self._user_report_lock, + ): reports = await self.bot.db.scam_reports.find( {"guild_id": guild.id, "user_id": user.id, "user_banned": False} ).to_list(None) for report in reports: await self._update_report(report, "User has been banned.") - await self.bot.db.scam_reports.update_one(report, {"$set": {"user_banned": True}}) + await self.bot.db.scam_reports.update_one( + report, {"$set": {"user_banned": True}} + ) async def _update_report(self, report: dict, note: str) -> None: - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) try: message = await report_channel.fetch_message(report["report_id"]) embed = message.embeds[0] embed.description += f"\n\n**{note}**" - embed.color = self.Color.WARN if (embed.color == self.Color.ALERT) else self.Color.OK + embed.color = ( + self.Color.WARN if (embed.color == self.Color.ALERT) else self.Color.OK + ) await message.edit(embed=embed) except Exception as e: await self.bot.report_error(e) @@ -630,20 +742,24 @@ async def report_thread(self, thread: Thread, reason: str) -> None: log.warning(f"Failed to send warning message in thread {thread.id}") warning_msg = None - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) report_msg = await report_channel.send(embed=report) - await self.bot.db.scam_reports.insert_one({ - "type" : "thread", - "guild_id" : thread.guild.id, - "channel_id" : thread.id, - "user_id" : thread.owner_id, - "reason" : reason, - "content" : thread.name, - "warning_id" : warning_msg.id if warning_msg else None, - "report_id" : report_msg.id, - "user_banned": False, - "removed" : False, - }) + 
await self.bot.db.scam_reports.insert_one( + { + "type": "thread", + "guild_id": thread.guild.id, + "channel_id": thread.id, + "user_id": thread.owner_id, + "reason": reason, + "content": thread.name, + "warning_id": warning_msg.id if warning_msg else None, + "report_id": report_msg.id, + "user_banned": False, + "removed": False, + } + ) @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: @@ -662,7 +778,10 @@ async def on_thread_create(self, thread: Thread) -> None: # Dash-digits near end of name (scam: "user-0816"; skip: "RIP-1559: ...") or ( (m := re.search(r"(-|–|—)\d{3,}", thread.name)) # noqa: RUF001 - and (m.end() >= len(thread.name.strip()) - 2 or len(thread.name.strip()) < 30) + and ( + m.end() >= len(thread.name.strip()) - 2 + or len(thread.name.strip()) < 30 + ) ) # Exact suspicious names or lower in (".", "!", "///") @@ -684,7 +803,9 @@ async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: async with self._thread_report_lock: if report := await self.bot.db.scam_reports.find_one(db_filter): await self._update_report(report, "Thread has been deleted.") - await self.bot.db.scam_reports.update_one(db_filter, {"$set": {"warning_id": None, "removed": True}}) + await self.bot.db.scam_reports.update_one( + db_filter, {"$set": {"warning_id": None, "removed": True}} + ) @command() @guilds(cfg.rocketpool.support.server_id) @@ -699,7 +820,9 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No return await interaction.followup.send(content="Bots can't be reported.") if user == interaction.user: - return await interaction.followup.send(content="Did you just report yourself?") + return await interaction.followup.send( + content="Did you just report yourself?" + ) async with self._user_report_lock: reason = f"Manual report by {interaction.user.mention}" @@ -708,18 +831,22 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No content="Failed to report user. 
They may have already been reported or banned." ) - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) report_msg = await report_channel.send(embed=report) - await self.bot.db.scam_reports.insert_one({ - "type" : "user", - "guild_id" : user.guild.id, - "user_id" : user.id, - "reason" : reason, - "content" : user.display_name, - "warning_id" : None, - "report_id" : report_msg.id, - "user_banned": False, - }) + await self.bot.db.scam_reports.insert_one( + { + "type": "user", + "guild_id": user.guild.id, + "user_id": user.id, + "reason": reason, + "content": user.display_name, + "warning_id": None, + "report_id": report_msg.id, + "user_banned": False, + } + ) await interaction.followup.send(content="Thanks for reporting!") async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index b659dde5..a5c7f5a4 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -19,9 +19,15 @@ def __init__(self, bot: RocketWatch): self.failure_cooldown = timedelta(days=1) async def send_warning(self, user) -> None: - support_channel = await self.bot.get_or_fetch_channel(cfg.rocketpool.support.channel_id) - report_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["report_scams"]) - resource_channel = await self.bot.get_or_fetch_channel(cfg.discord.channels["resources"]) + support_channel = await self.bot.get_or_fetch_channel( + cfg.rocketpool.support.channel_id + ) + report_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) + resource_channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["resources"] + ) embed = Embed() embed.title = "**Stay Safe on Rocket Pool Discord**" @@ -76,7 
+82,9 @@ async def on_message(self, message) -> None: return msg_time = message.created_at.replace(tzinfo=None) - db_entry = (await self.bot.db.scam_warning.find_one({"_id": message.author.id})) or {} + db_entry = ( + await self.bot.db.scam_warning.find_one({"_id": message.author.id}) + ) or {} cooldown_end = datetime.fromtimestamp(0) if last_failure_time := db_entry.get("last_failure"): @@ -95,8 +103,12 @@ async def on_message(self, message) -> None: await self.bot.db.scam_warning.replace_one( {"_id": message.author.id}, - {"_id": message.author.id, "last_message": msg_time, "last_failure": last_failure_time}, - upsert=True + { + "_id": message.author.id, + "last_message": msg_time, + "last_failure": last_failure_time, + }, + upsert=True, ) diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index ae6c7c75..e6897956 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -38,7 +38,10 @@ def __init__(self, bot: RocketWatch): async def _query_api(query: Query) -> list[dict] | dict | None: query_json = {"query": Operation(type="query", queries=[query]).render()} log.debug(f"Snapshot query: {query_json}") - async with aiohttp.ClientSession() as session, session.get("https://hub.snapshot.org/graphql", json=query_json) as resp: + async with ( + aiohttp.ClientSession() as session, + session.get("https://hub.snapshot.org/graphql", json=query_json) as resp, + ): response = await resp.json() if "errors" in response: raise Exception(response["errors"]) @@ -73,7 +76,9 @@ def predict_render_height(self, with_title: bool = True) -> int: height = 0 if with_title: height = self._TITLE_SIZE + self._V_SPACE_LARGE - height += len(self.choices) * (self._predict_choice_height() + self._V_SPACE_MEDIUM) + height += len(self.choices) * ( + self._predict_choice_height() + self._V_SPACE_MEDIUM + ) height += self._V_SPACE_SMALL + self._HEADER_SIZE + self._V_SPACE_SMALL height += self._BAR_SIZE + 
self._V_SPACE_LARGE height += self._TEXT_SIZE @@ -83,13 +88,13 @@ def reached_quorum(self) -> bool: return sum(self.scores) >= self.quorum def render_to( - self, - canvas: ImageCanvas, - width: int, - x_offset: int = 0, - y_offset: int = 0, - *, - include_title: bool = True + self, + canvas: ImageCanvas, + width: int, + x_offset: int = 0, + y_offset: int = 0, + *, + include_title: bool = True, ) -> int: def safe_div(x, y): return (x / y) if y else 0 @@ -97,12 +102,14 @@ def safe_div(x, y): label_offset = self._BAR_SIZE / 2 label_font_variant = FontVariant.BOLD - def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) -> int: + def render_choice( + _choice: str, _score: float, _x_offset: int, _y_offset: int + ) -> int: color: Color = (128, 128, 128) # slate gray choice_colors = { - "for": (4, 99, 7), # green + "for": (4, 99, 7), # green "against": (156, 0, 47), # red - "abstain": (114, 121, 138) + "abstain": (114, 121, 138), } for k, v in choice_colors.items(): # assign color based on keywords @@ -116,32 +123,40 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - _choice, self._TEXT_SIZE, max_width=(width / 2), - anchor="lt" + anchor="lt", ) choice_height += self._TEXT_SIZE + self._V_SPACE_SMALL - divisor = max(self.scores) if len(self.scores) >= 5 else sum(self.scores) + divisor = ( + max(self.scores) if len(self.scores) >= 5 else sum(self.scores) + ) canvas.progress_bar( (_x_offset, _y_offset + choice_height), (width, self._BAR_SIZE), safe_div(_score, divisor), - fill_color=color + fill_color=color, ) canvas.dynamic_text( - (_x_offset + label_offset, _y_offset + choice_height + (self._BAR_SIZE / 2)), + ( + _x_offset + label_offset, + _y_offset + choice_height + (self._BAR_SIZE / 2), + ), f"{safe_div(_score, sum(self.scores)):.2%}", self._LABEL_SIZE, font_variant=label_font_variant, max_width=((width / 2) - label_offset), - anchor="lm" + anchor="lm", ) canvas.dynamic_text( - (_x_offset + width - label_offset, 
_y_offset + choice_height + (self._BAR_SIZE / 2)), + ( + _x_offset + width - label_offset, + _y_offset + choice_height + (self._BAR_SIZE / 2), + ), f"{_score:,.2f}", self._LABEL_SIZE, font_variant=label_font_variant, max_width=((width / 2) - label_offset), - anchor="rm" + anchor="rm", ) choice_height += self._BAR_SIZE return choice_height @@ -154,7 +169,7 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - self.title, self._TITLE_SIZE, max_width=width, - anchor="mt" + anchor="mt", ) proposal_height += self._TITLE_SIZE + self._V_SPACE_LARGE @@ -162,7 +177,9 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - choice_scores = list(zip(self.choices, self.scores, strict=False)) choice_scores.sort(key=lambda x: x[1], reverse=True) for choice, score in choice_scores: - proposal_height += render_choice(choice, score, x_offset, y_offset + proposal_height) + proposal_height += render_choice( + choice, score, x_offset, y_offset + proposal_height + ) proposal_height += self._V_SPACE_MEDIUM proposal_height += self._V_SPACE_SMALL @@ -173,7 +190,7 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - "Quorum", self._HEADER_SIZE, max_width=(width / 2), - anchor="lt" + anchor="lt", ) proposal_height += self._HEADER_SIZE + self._V_SPACE_SMALL quorum_perc: float = safe_div(sum(self.scores), self.quorum) @@ -185,25 +202,31 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - (x_offset, y_offset + proposal_height), (width, self._BAR_SIZE), min(quorum_perc, 1), - fill_color=pb_color + fill_color=pb_color, ) canvas.dynamic_text( - (x_offset + label_offset, y_offset + proposal_height + (self._BAR_SIZE / 2)), + ( + x_offset + label_offset, + y_offset + proposal_height + (self._BAR_SIZE / 2), + ), f"{quorum_perc:.2%}", self._LABEL_SIZE, font_variant=label_font_variant, max_width=((width / 2) - label_offset), anchor="lm", - color=label_color + color=label_color, ) 
canvas.dynamic_text( - (x_offset + width - label_offset, y_offset + proposal_height + (self._BAR_SIZE / 2)), + ( + x_offset + width - label_offset, + y_offset + proposal_height + (self._BAR_SIZE / 2), + ), f"{sum(self.scores):,.0f} / {self.quorum:,.0f}", self._LABEL_SIZE, font_variant=label_font_variant, max_width=((width / 2) - label_offset), anchor="rm", - color=label_color + color=label_color, ) proposal_height += self._BAR_SIZE + self._V_SPACE_LARGE @@ -214,7 +237,7 @@ def render_choice(_choice: str, _score: float, _x_offset: int, _y_offset: int) - f"{pretty_time(rem_time)} left" if (rem_time >= 0) else "Final Result", self._TEXT_SIZE, max_width=width, - anchor="mt" + anchor="mt", ) proposal_height += self._TEXT_SIZE return proposal_height @@ -233,8 +256,12 @@ def create_image(self, *, include_title: bool) -> Image: pad_left, pad_right = 20, 20 width = 800 height = self.predict_render_height(include_title) - canvas = ImageCanvas(width + pad_left + pad_right, height + pad_top + pad_bottom) - self.render_to(canvas, width, pad_left, pad_top, include_title=include_title) + canvas = ImageCanvas( + width + pad_left + pad_right, height + pad_top + pad_bottom + ) + self.render_to( + canvas, width, pad_left, pad_top, include_title=include_title + ) return canvas.image async def create_start_event(self) -> Event: @@ -246,7 +273,7 @@ async def create_start_event(self) -> Event: block_number=await ts_to_block(self.start), event_name="pdao_snapshot_vote_start", unique_id=f"snapshot_vote_start:{self.id}", - image=self.create_image(include_title=True) + image=self.create_image(include_title=True), ) def create_reached_quorum_event(self, block_number: BlockNumber) -> Event: @@ -258,7 +285,7 @@ def create_reached_quorum_event(self, block_number: BlockNumber) -> Event: block_number=block_number, event_name="pdao_snapshot_vote_quorum", unique_id=f"snapshot_vote_quorum:{self.id}", - image=self.create_image(include_title=True) + image=self.create_image(include_title=True), ) async 
def create_end_event(self) -> Event: @@ -281,7 +308,7 @@ async def create_end_event(self) -> Event: block_number=await ts_to_block(self.end), event_name="pdao_snapshot_vote_end", unique_id=f"snapshot_vote_end:{self.id}", - image=self.create_image(include_title=True) + image=self.create_image(include_title=True), ) @dataclass(frozen=True, slots=True) @@ -290,9 +317,9 @@ class Vote: MultiChoice = list[SingleChoice] # weighted votes use strings as keys for some reason WeightedChoice = dict[str, int] - Choice = (SingleChoice | MultiChoice | WeightedChoice) + Choice = SingleChoice | MultiChoice | WeightedChoice - proposal: 'Snapshot.Proposal' + proposal: "Snapshot.Proposal" id: str voter: ChecksumAddress created: int @@ -301,7 +328,7 @@ class Vote: reason: str def pretty_print(self) -> str | None: - match (raw_choice := self.choice): + match raw_choice := self.choice: case int(): return self._format_single_choice(raw_choice) case list(): @@ -336,18 +363,22 @@ def _format_multiple_choice(self, choice: MultiChoice) -> str: def _format_weighted_choice(self, choice: WeightedChoice) -> str: labels = {self._label_choice(int(c)): w for c, w in choice.items()} total_weight = sum(labels.values()) - choice_perc = [(c, round(100 * w / total_weight)) for c, w in labels.items()] + choice_perc = [ + (c, round(100 * w / total_weight)) for c, w in labels.items() + ] choice_perc.sort(key=lambda x: x[1], reverse=True) graph = tpl.figure() graph.barh( [x[1] for x in choice_perc], [x[0] for x in choice_perc], force_ascii=True, - max_width=15 + max_width=15, ) return "```" + graph.get_string().replace("]", "%]") + "```" - async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | None: + async def create_event( + self, prev_vote: Optional["Snapshot.Vote"] + ) -> Event | None: node = await rp.call("rocketSignerRegistry.signerToNode", self.voter) signer = await el_explorer_url(self.voter) voter = signer if (node == ADDRESS_ZERO) else await el_explorer_url(node) @@ -364,14 
+395,21 @@ async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | No embed.description = separator.join([f"{voter} voted", vote_fmt]) elif self.choice != prev_vote.choice: prev_vote_fmt = prev_vote.pretty_print() - parts = [f"{voter} changed their vote from", prev_vote_fmt, "to", vote_fmt] + parts = [ + f"{voter} changed their vote from", + prev_vote_fmt, + "to", + vote_fmt, + ] separator = " " if (len(vote_fmt) + len(prev_vote_fmt) <= 20) else "\n" embed.description = separator.join(parts) elif self.reason != prev_vote.reason: embed.description = ( - f"{voter} " - "changed the reason for their vote" if prev_vote.reason else "added context to their vote" - f" ({vote_fmt})" if (len(vote_fmt) <= 20) else f":\n{vote_fmt}" + f"{voter} changed the reason for their vote" + if prev_vote.reason + else f"added context to their vote ({vote_fmt})" + if (len(vote_fmt) <= 20) + else f":\n{vote_fmt}" ) else: log.debug("Same vote as before, skipping event") @@ -383,7 +421,7 @@ async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | No if len(embed.description) + len(reason) > max_length: suffix = "..." 
overage = len(embed.description) + len(reason) - max_length - reason = reason[:-(overage + len(suffix))] + suffix + reason = reason[: -(overage + len(suffix))] + suffix embed.description += f" ```{reason}```" @@ -394,12 +432,12 @@ async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | No if self.vp >= 250: conditional_args = { "event_name": "pdao_snapshot_vote", - "image": self.proposal.create_image(include_title=False) + "image": self.proposal.create_image(include_title=False), } else: conditional_args = { "event_name": "snapshot_vote", - "thumbnail": self.proposal.create_image(include_title=False) + "thumbnail": self.proposal.create_image(include_title=False), } return Event( @@ -407,26 +445,22 @@ async def create_event(self, prev_vote: Optional['Snapshot.Vote']) -> Event | No topic="snapshot", block_number=await ts_to_block(self.created), unique_id=f"snapshot_vote:{self.proposal.id}:{self.voter}:{self.created}", - **conditional_args + **conditional_args, ) @staticmethod async def fetch_proposal(proposal_id: str) -> Proposal | None: query = Query( name="proposal", - arguments=[Argument(name="id", value=f"\"{proposal_id}\"")], - fields=["id", "title", "choices", "start", "end", "scores", "quorum"] + arguments=[Argument(name="id", value=f'"{proposal_id}"')], + fields=["id", "title", "choices", "start", "end", "scores", "quorum"], ) response: dict | None = await Snapshot._query_api(query) return Snapshot.Proposal(**response) if response else None @staticmethod async def fetch_proposals( - state: Proposal.State, - *, - reverse: bool = False, - limit: int = 25, - skip: int = 0 + state: Proposal.State, *, reverse: bool = False, limit: int = 25, skip: int = 0 ) -> list[Proposal]: query = Query( name="proposals", @@ -436,26 +470,26 @@ async def fetch_proposals( Argument( name="where", value=[ - Argument(name="space_in", value=["\"rocketpool-dao.eth\""]), - Argument(name="state", value=f"\"{state}\"") - ] + Argument(name="space_in", 
value=['"rocketpool-dao.eth"']), + Argument(name="state", value=f'"{state}"'), + ], ), - Argument(name="orderBy", value="\"created\""), - Argument(name="orderDirection", value="desc" if reverse else "asc") + Argument(name="orderBy", value='"created"'), + Argument(name="orderDirection", value="desc" if reverse else "asc"), ], - fields=["id", "title", "choices", "start", "end", "scores", "quorum"] + fields=["id", "title", "choices", "start", "end", "scores", "quorum"], ) response: list[dict] = await Snapshot._query_api(query) return [Snapshot.Proposal(**d) for d in response] @staticmethod async def fetch_votes( - proposal: Proposal, - *, - created_after: int = 0, - reverse: bool = False, - limit: int = 100, - skip: int = 0 + proposal: Proposal, + *, + created_after: int = 0, + reverse: bool = False, + limit: int = 100, + skip: int = 0, ) -> list[Vote]: query = Query( name="votes", @@ -465,14 +499,14 @@ async def fetch_votes( Argument( name="where", value=[ - Argument(name="proposal", value=f"\"{proposal.id}\""), - Argument(name="created_gt", value=created_after) - ] + Argument(name="proposal", value=f'"{proposal.id}"'), + Argument(name="created_gt", value=created_after), + ], ), - Argument(name="orderBy", value="\"created\""), - Argument(name="orderDirection", value="desc" if reverse else "asc") + Argument(name="orderBy", value='"created"'), + Argument(name="orderDirection", value="desc" if reverse else "asc"), ], - fields=["id", "voter", "created", "vp", "choice", "reason"] + fields=["id", "voter", "created", "vp", "choice", "reason"], ) response: list[dict] = await Snapshot._query_api(query) return [Snapshot.Vote(proposal=proposal, **d) for d in response] @@ -505,39 +539,50 @@ async def _get_new_events(self) -> list[Event]: log.info(f"Found new proposal: {proposal}") event = await proposal.create_start_event() proposal_dict = { - "_id" : proposal.id, - "start" : proposal.start, - "end" : proposal.end, - "quorum": proposal.reached_quorum() + "_id": proposal.id, + 
"start": proposal.start, + "end": proposal.end, + "quorum": proposal.reached_quorum(), } proposal_db_changes.append(InsertOne(proposal_dict)) known_active_proposals[proposal.id] = proposal_dict events.append(event) - elif proposal.reached_quorum() and (not known_active_proposals[proposal.id]["quorum"]): + elif proposal.reached_quorum() and ( + not known_active_proposals[proposal.id]["quorum"] + ): log.info(f"Proposal {proposal} has reached quorum") event = proposal.create_reached_quorum_event(self._pending_block) - proposal_db_changes.append(UpdateOne( - {"_id": proposal.id}, - {"$set": {"quorum": True}} - )) + proposal_db_changes.append( + UpdateOne({"_id": proposal.id}, {"$set": {"quorum": True}}) + ) events.append(event) try: - last_vote_entry = await self.vote_db.find( - {"proposal_id": proposal.id} - ).sort({"created": DESCENDING}).limit(1).to_list() + last_vote_entry = ( + await self.vote_db.find({"proposal_id": proposal.id}) + .sort({"created": DESCENDING}) + .limit(1) + .to_list() + ) last_vote_ts = last_vote_entry[0]["created"] except IndexError: last_vote_ts = 0 - current_votes: list[Snapshot.Vote] = await self.fetch_votes(proposal, created_after=last_vote_ts) + current_votes: list[Snapshot.Vote] = await self.fetch_votes( + proposal, created_after=last_vote_ts + ) for vote in current_votes: log.debug(f"Processing vote {vote}") try: - stored_vote = (await self.vote_db.find( - {"proposal_id": proposal.id, "voter": vote.voter} - ).sort({"created": DESCENDING}).limit(1).to_list())[0] + stored_vote = ( + await self.vote_db.find( + {"proposal_id": proposal.id, "voter": vote.voter} + ) + .sort({"created": DESCENDING}) + .limit(1) + .to_list() + )[0] prev_vote = Snapshot.Vote( id=stored_vote["_id"], proposal=proposal, @@ -545,7 +590,7 @@ async def _get_new_events(self) -> list[Event]: created=stored_vote["created"], vp=stored_vote["vp"], choice=stored_vote["choice"], - reason=stored_vote["reason"] + reason=stored_vote["reason"], ) except IndexError: prev_vote = 
None @@ -555,15 +600,17 @@ async def _get_new_events(self) -> list[Event]: continue events.append(event) - db_update = InsertOne({ - "_id" : vote.id, - "proposal_id": vote.proposal.id, - "voter" : vote.voter, - "created" : vote.created, - "vp" : vote.vp, - "choice" : vote.choice, - "reason" : vote.reason, - }) + db_update = InsertOne( + { + "_id": vote.id, + "proposal_id": vote.proposal.id, + "voter": vote.voter, + "created": vote.created, + "vp": vote.vp, + "choice": vote.choice, + "reason": vote.reason, + } + ) vote_db_changes.append(db_update) if proposal_db_changes: @@ -580,7 +627,9 @@ async def snapshot_votes(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) embed = Embed(title="Snapshot Proposals") - embed.set_author(name="🔗 Data from snapshot.org", url="https://vote.rocketpool.net") + embed.set_author( + name="🔗 Data from snapshot.org", url="https://vote.rocketpool.net" + ) proposals = (await self.fetch_proposals("active", reverse=True))[::-1] if not proposals: @@ -604,7 +653,7 @@ async def snapshot_votes(self, interaction: Interaction): total_height = v_spacing * (num_rows - 1) proposal_grid: list[list[Snapshot.Proposal]] = [] for row_idx in range(num_rows): - row = proposals[row_idx * num_cols:(row_idx + 1) * num_cols] + row = proposals[row_idx * num_cols : (row_idx + 1) * num_cols] proposal_grid.append(row) # row height is equal to height of its tallest proposal total_height += max(p.predict_render_height() for p in row) @@ -614,7 +663,9 @@ async def snapshot_votes(self, interaction: Interaction): proposal_width = (total_height - h_spacing * (num_cols - 1)) // num_cols total_width = (proposal_width * num_cols) + h_spacing * (num_cols - 1) - canvas = ImageCanvas(total_width + pad_left + pad_right, total_height + pad_top + pad_bottom) + canvas = ImageCanvas( + total_width + pad_left + pad_right, total_height + pad_top + pad_bottom + ) # draw proposals in num_rows x num_cols grid y_offset = pad_top diff --git 
a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index dd96d4fe..2b09cb8e 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -19,11 +19,10 @@ async def generate_template_embed(db, template_name: str): if not template: return None # get the last log entry from the db - dumps_col = db.support_bot_dumps.with_options(codec_options=CodecOptions(tz_aware=True)) - last_edit = await dumps_col.find_one( - {"template": template_name}, - sort=[("ts", -1)] + dumps_col = db.support_bot_dumps.with_options( + codec_options=CodecOptions(tz_aware=True) ) + last_edit = await dumps_col.find_one({"template": template_name}, sort=[("ts", -1)]) e = Embed(title=template["title"], description=template["description"]) if last_edit and template_name != "announcement": e.description += f"\n\n*Last Edited by <@{last_edit['author']['id']}> *" @@ -37,12 +36,14 @@ def __init__(self, db, template_name: str): self.db = db self.template_name = template_name - @ui.button(label='Edit', style=ButtonStyle.blurple) + @ui.button(label="Edit", style=ButtonStyle.blurple) async def edit(self, interaction: Interaction, button: ui.Button): - template = await self.db.support_bot.find_one({'_id': self.template_name}) + template = await self.db.support_bot.find_one({"_id": self.template_name}) # Make sure to update the message with our update await interaction.response.send_modal( - AdminModal(template["title"], template["description"], self.db, self.template_name) + AdminModal( + template["title"], template["description"], self.db, self.template_name + ) ) @@ -55,7 +56,9 @@ def __init__(self, user: User): async def delete(self, interaction: Interaction, button: ui.Button): if (interaction.user == self.user) or has_perms(interaction): await interaction.message.delete() - log.warning(f"Support template message deleted by {interaction.user} in {interaction.channel}") + log.warning( + 
f"Support template message deleted by {interaction.user} in {interaction.channel}" + ) class AdminModal(ui.Modal, title="Change Template Message"): @@ -66,25 +69,26 @@ def __init__(self, old_title, old_description, db, template_name): self.old_description = old_description self.template_name = template_name self.title_field = ui.TextInput( - label="Title", - placeholder="Enter a title", - default=old_title + label="Title", placeholder="Enter a title", default=old_title ) self.description_field = ui.TextInput( label="Description", placeholder="Enter a description", default=old_description, style=TextStyle.paragraph, - max_length=4000 + max_length=4000, ) self.add_item(self.title_field) self.add_item(self.description_field) async def on_submit(self, interaction: Interaction) -> None: # get the data from the db - template = await self.db.support_bot.find_one({'_id': self.template_name}) + template = await self.db.support_bot.find_one({"_id": self.template_name}) # verify that no changes were made while we were editing - if template["title"] != self.old_title or template["description"] != self.old_description: + if ( + template["title"] != self.old_title + or template["description"] != self.old_description + ): # dump the description into a memory file await interaction.response.edit_message( embed=Embed( @@ -92,51 +96,67 @@ async def on_submit(self, interaction: Interaction) -> None: "Someone made changes while you were editing. Please try again.\n" "Your pending changes have been attached to this message." 
), - view=None + view=None, ) ) a = await interaction.original_response() - file = File(io.StringIO(self.description_field.value), f"{self.title_field.value}.txt") + file = File( + io.StringIO(self.description_field.value), + f"{self.title_field.value}.txt", + ) await a.add_files(file) return try: await self.db.support_bot_dumps.insert_one( { - "ts" : datetime.now(UTC), + "ts": datetime.now(UTC), "template": self.template_name, - "prev" : template, - "new" : { - "title" : self.title_field.value, - "description": self.description_field.value + "prev": template, + "new": { + "title": self.title_field.value, + "description": self.description_field.value, }, - "author" : { - "id" : interaction.user.id, - "name": interaction.user.name - } - }) + "author": { + "id": interaction.user.id, + "name": interaction.user.name, + }, + } + ) except Exception as e: log.error(e) await self.db.support_bot.update_one( {"_id": self.template_name}, - {"$set": {"title": self.title_field.value, "description": self.description_field.value}} + { + "$set": { + "title": self.title_field.value, + "description": self.description_field.value, + } + }, ) content = ( f"This is a preview of the `{self.template_name}` template.\n" f"You can change it using the `Edit` button." 
) embed = await generate_template_embed(self.db, self.template_name) - await interaction.response.edit_message(content=content, embed=embed, view=AdminView(self.db, self.template_name)) + await interaction.response.edit_message( + content=content, embed=embed, view=AdminView(self.db, self.template_name) + ) def has_perms(interaction: Interaction): - return any([ - interaction.user.id in cfg.rocketpool.support.user_ids, - any(r.id in cfg.rocketpool.support.role_ids for r in interaction.user.roles), - cfg.discord.owner.user_id == interaction.user.id, - interaction.user.guild_permissions.moderate_members and interaction.guild.id == cfg.rocketpool.support.server_id - ]) + return any( + [ + interaction.user.id in cfg.rocketpool.support.user_ids, + any( + r.id in cfg.rocketpool.support.role_ids for r in interaction.user.roles + ), + cfg.discord.owner.user_id == interaction.user.id, + interaction.user.guild_permissions.moderate_members + and interaction.guild.id == cfg.rocketpool.support.server_id, + ] + ) async def _use(db, interaction: Interaction, name: str, mention: User | None): @@ -146,9 +166,9 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): await interaction.response.send_message( embed=Embed( title="Error", - description=f"A template with the name '{name}' does not exist." + description=f"A template with the name '{name}' does not exist.", ), - ephemeral=True + ephemeral=True, ) return @@ -157,15 +177,15 @@ async def _use(db, interaction: Interaction, name: str, mention: User | None): await interaction.response.send_message( content=mention.mention if mention else "", embed=e, - view=DeletableView(interaction.user) + view=DeletableView(interaction.user), ) else: await interaction.response.send_message( embed=Embed( title="Error", - description="An error occurred while generating the template embed." 
+ description="An error occurred while generating the template embed.", ), - ephemeral=True + ephemeral=True, ) @@ -180,9 +200,8 @@ async def _use(self, interaction: Interaction, name: str, mention: User | None): @_use.autocomplete("name") async def match_template(self, interaction: Interaction, current: str): return [ - Choice( - name=c["_id"], value=c["_id"] - ) for c in await self.bot.db.support_bot.find( + Choice(name=c["_id"], value=c["_id"]) + for c in await self.bot.db.support_bot.find( {"_id": {"$regex": current, "$options": "i"}} ).to_list(25) ] @@ -190,9 +209,9 @@ async def match_template(self, interaction: Interaction, current: str): class SupportUtils(GroupCog, name="support"): subgroup = Group( - name='template', - description='various templates used by active support members', - guild_ids=[cfg.rocketpool.support.server_id] + name="template", + description="various templates used by active support members", + guild_ids=[cfg.rocketpool.support.server_id], ) def __init__(self, bot: RocketWatch): @@ -202,7 +221,12 @@ def __init__(self, bot: RocketWatch): async def add(self, interaction: Interaction, name: str): if not has_perms(interaction): await interaction.response.send_message( - embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) + embed=Embed( + title="Error", + description="You do not have permission to use this command.", + ), + ephemeral=True, + ) return await interaction.response.defer(ephemeral=True) # check if the template already exists in the db @@ -210,26 +234,37 @@ async def add(self, interaction: Interaction, name: str): await interaction.edit_original_response( embed=Embed( title="Error", - description=f"A template with the name '{name}' already exists." 
+ description=f"A template with the name '{name}' already exists.", ), ) return # create the template in the db await self.bot.db.support_bot.insert_one( - {"_id": name, "title": "Insert Title here", "description": "Insert Description here"} + { + "_id": name, + "title": "Insert Title here", + "description": "Insert Description here", + } ) content = ( f"This is a preview of the `{name}` template.\n" f"You can change it using the `Edit` button." ) embed = await generate_template_embed(self.bot.db, name) - await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.bot.db, name)) + await interaction.edit_original_response( + content=content, embed=embed, view=AdminView(self.bot.db, name) + ) @subgroup.command() async def edit(self, interaction: Interaction, name: str): if not has_perms(interaction): await interaction.response.send_message( - embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) + embed=Embed( + title="Error", + description="You do not have permission to use this command.", + ), + ephemeral=True, + ) return await interaction.response.defer(ephemeral=True) # check if the template exists in the db @@ -239,7 +274,7 @@ async def edit(self, interaction: Interaction, name: str): await interaction.edit_original_response( embed=Embed( title="Error", - description=f"A template with the name '{name}' does not exist." + description=f"A template with the name '{name}' does not exist.", ), ) return @@ -249,13 +284,20 @@ async def edit(self, interaction: Interaction, name: str): f"You can change it using the `Edit` button." 
) embed = await generate_template_embed(self.bot.db, name) - await interaction.edit_original_response(content=content, embed=embed, view=AdminView(self.bot.db, name)) + await interaction.edit_original_response( + content=content, embed=embed, view=AdminView(self.bot.db, name) + ) @subgroup.command() async def remove(self, interaction: Interaction, name: str): if not has_perms(interaction): await interaction.response.send_message( - embed=Embed(title="Error", description="You do not have permission to use this command."), ephemeral=True) + embed=Embed( + title="Error", + description="You do not have permission to use this command.", + ), + ephemeral=True, + ) return await interaction.response.defer(ephemeral=True) # check if the template exists in the db @@ -264,55 +306,59 @@ async def remove(self, interaction: Interaction, name: str): await interaction.edit_original_response( embed=Embed( title="Error", - description=f"A template with the name '{name}' does not exist." + description=f"A template with the name '{name}' does not exist.", ), ) return # remove the template from the db await self.bot.db.support_bot.delete_one({"_id": name}) await interaction.edit_original_response( - embed=Embed( - title="Success", - description=f"Template '{name}' removed." 
- ), + embed=Embed(title="Success", description=f"Template '{name}' removed."), ) @subgroup.command() @choices( order_by=[ Choice(name="Name", value="_id"), - Choice(name="Last Edited Date", value="last_edited_date") + Choice(name="Last Edited Date", value="last_edited_date"), ] ) async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection - templates = await (await self.bot.db.support_bot.aggregate([ - { - "$lookup": { - "from": "support_bot_dumps", - "localField": "_id", - "foreignField": "template", - "as": "dump" - } - }, - { - "$project": { - "_id": 1, - "last_edited_date": {"$arrayElemAt": ["$dump.ts", 0]} - } - } - ])).to_list() + templates = await ( + await self.bot.db.support_bot.aggregate( + [ + { + "$lookup": { + "from": "support_bot_dumps", + "localField": "_id", + "foreignField": "template", + "as": "dump", + } + }, + { + "$project": { + "_id": 1, + "last_edited_date": {"$arrayElemAt": ["$dump.ts", 0]}, + } + }, + ] + ) + ).to_list() # sort the templates by the specified order if isinstance(order_by, Choice): order_by = order_by.value templates.sort(key=lambda x: x[order_by]) # create the embed embed = Embed(title="Templates") - embed.description = "".join( - f"\n`{template['_id']}` - " - for template in templates - ) + "" + embed.description = ( + "".join( + f"\n`{template['_id']}` - " + for template in templates + ) + + "" + ) # split the embed into multiple embeds if it is too long embeds = [embed] while len(embeds[-1]) > 6000: @@ -331,16 +377,9 @@ async def use(self, interaction: Interaction, name: str, mention: User | None): @use.autocomplete("name") async def match_template(self, interaction: Interaction, current: str): return [ - Choice( - name=c["_id"], - value=c["_id"] - ) for c in await self.bot.db.support_bot.find( - { - "_id": { - "$regex": current, - "$options": "i" - } - } + 
Choice(name=c["_id"], value=c["_id"]) + for c in await self.bot.db.support_bot.find( + {"_id": {"$regex": current, "$options": "i"}} ).to_list(25) ] diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 12665511..b4652671 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -53,20 +53,24 @@ async def _parse_transaction_config() -> tuple[list[ChecksumAddress], dict]: @guilds(cfg.discord.owner.server_id) @is_owner() async def trigger_tx( - self, - interaction: Interaction, - contract: str, - function: str, - json_args: str = "{}", - block_number: int = 0 + self, + interaction: Interaction, + contract: str, + function: str, + json_args: str = "{}", + block_number: int = 0, ) -> None: await interaction.response.defer() try: - event_obj = aDict({ - "hash": aDict({"hex": lambda: '0x0000000000000000000000000000000000000000'}), - "blockNumber": block_number, - "args": json.loads(json_args) | {"function_name": function} - }) + event_obj = aDict( + { + "hash": aDict( + {"hex": lambda: "0x0000000000000000000000000000000000000000"} + ), + "blockNumber": block_number, + "args": json.loads(json_args) | {"function_name": function}, + } + ) except json.JSONDecodeError: await interaction.followup.send(content="Invalid JSON args!") return @@ -86,9 +90,13 @@ async def replay_tx(self, interaction: Interaction, tx_hash: str): tnx = await w3.eth.get_transaction(tx_hash) block = await w3.eth.get_block(tnx.blockHash) - responses: list[Event] = await self.process_transaction(block, tnx, tnx.to, tnx.input) + responses: list[Event] = await self.process_transaction( + block, tnx, tnx.to, tnx.input + ) if responses: - await interaction.followup.send(embeds=[response.embed for response in responses]) + await interaction.followup.send( + embeds=[response.embed for response in responses] + ) else: await interaction.followup.send(content="No events found.") @@ -103,7 
+111,9 @@ async def _get_new_events(self) -> list[Event]: self.addresses = old_addresses raise err - async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + async def get_past_events( + self, from_block: BlockNumber, to_block: BlockNumber + ) -> list[Event]: await self._ensure_config() events = [] for block in range(from_block, to_block): @@ -121,7 +131,9 @@ async def get_events_for_block(self, block_number: BlockIdentifier) -> list[Even events = [] for tnx in block.transactions: if "to" in tnx: - events.extend(await self.process_transaction(block, tnx, tnx.to, tnx.input)) + events.extend( + await self.process_transaction(block, tnx, tnx.to, tnx.input) + ) else: log.debug( f"Skipping transaction {tnx.hash.hex()} as it has no `to` parameter. " @@ -149,7 +161,11 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: args.delegator = receipt["from"] args.delegate = args.get("delegate") or args.get("newDelegate") args.votingPower = solidity.to_float( - await rp.call("rocketNetworkVoting.getVotingPower", args.delegator, args.blockNumber) + await rp.call( + "rocketNetworkVoting.getVotingPower", + args.delegator, + args.blockNumber, + ) ) if (args.votingPower < 50) or (args.delegate == args.delegator): return [] @@ -160,7 +176,9 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: elif "deposit_pool_queue" in event_name: receipt = await w3.eth.get_transaction_receipt(args.transactionHash) args.node = receipt["from"] - event = (await rp.get_contract_by_name("rocketMinipoolQueue")).events.MinipoolDequeued() + event = ( + await rp.get_contract_by_name("rocketMinipoolQueue") + ).events.MinipoolDequeued() # get the amount of dequeues that happened in this transaction using the event logs with warnings.catch_warnings(): warnings.simplefilter("ignore") @@ -171,23 +189,26 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: # this is duplicated for now because 
boostrap events are in events.py # and there is no good spot in utils for it elif event_name == "pdao_claimer": + def share_repr(percentage: float) -> str: max_width = 35 num_points = round(max_width * percentage / 100) - return '*' * num_points - - node_share = args.nodePercent / 10 ** 16 - pdao_share = args.protocolPercent / 10 ** 16 - odao_share = args.trustedNodePercent / 10 ** 16 - - args.description = '\n'.join([ - "Node Operator Share", - f"{share_repr(node_share)} {node_share:.1f}%", - "Protocol DAO Share", - f"{share_repr(pdao_share)} {pdao_share:.1f}%", - "Oracle DAO Share", - f"{share_repr(odao_share)} {odao_share:.1f}%", - ]) + return "*" * num_points + + node_share = args.nodePercent / 10**16 + pdao_share = args.protocolPercent / 10**16 + odao_share = args.trustedNodePercent / 10**16 + + args.description = "\n".join( + [ + "Node Operator Share", + f"{share_repr(node_share)} {node_share:.1f}%", + "Protocol DAO Share", + f"{share_repr(pdao_share)} {pdao_share:.1f}%", + "Oracle DAO Share", + f"{share_repr(odao_share)} {odao_share:.1f}%", + ] + ) elif event_name == "pdao_setting_multi": description_parts = [] for i in range(len(args.settingContractNames)): @@ -204,19 +225,23 @@ def share_repr(percentage: float) -> str: value = w3.to_checksum_address(value_raw) case _: value = "???" 
- description_parts.append( - f"`{args.settingPaths[i]}` set to `{value}`" - ) + description_parts.append(f"`{args.settingPaths[i]}` set to `{value}`") args.description = "\n".join(description_parts) elif event_name == "sdao_member_kick": - args.memberAddress = await el_explorer_url(args.memberAddress, block=(args.blockNumber - 1)) + args.memberAddress = await el_explorer_url( + args.memberAddress, block=(args.blockNumber - 1) + ) elif event_name == "sdao_member_replace": - args.existingMemberAddress = await el_explorer_url(args.existingMemberAddress, block=(args.blockNumber - 1)) + args.existingMemberAddress = await el_explorer_url( + args.existingMemberAddress, block=(args.blockNumber - 1) + ) elif event_name == "sdao_member_kick_multi": - args.member_list = ", ".join([ - await el_explorer_url(member_address, block=(args.blockNumber - 1)) - for member_address in args.memberAddresses - ]) + args.member_list = ", ".join( + [ + await el_explorer_url(member_address, block=(args.blockNumber - 1)) + for member_address in args.memberAddresses + ] + ) elif event_name == "bootstrap_odao_network_upgrade": if args.type == "addContract": args.description = f"Contract `{args.name}` has been added!" 
@@ -232,9 +257,15 @@ def share_repr(percentage: float) -> str: embeds = [] for contract_name in args.contractNames: # (recipient, amount, period_length, start, periods_total, periods_paid) - get_contract = await rp.get_function("rocketClaimDAO.getContract", contract_name) - contract_pre = await get_contract.call(block_identifier=(args.blockNumber - 1)) - contract_post = await get_contract.call(block_identifier=args.blockNumber) + get_contract = await rp.get_function( + "rocketClaimDAO.getContract", contract_name + ) + contract_pre = await get_contract.call( + block_identifier=(args.blockNumber - 1) + ) + contract_post = await get_contract.call( + block_identifier=args.blockNumber + ) args.contract_name = contract_name args.periodLength = contract_post[2] @@ -245,11 +276,17 @@ def share_repr(percentage: float) -> str: periods_left: int = contract_post[4] - contract_post[5] if periods_left == 0: - args.contract_validity = "This was the final claim for this payment contract!" + args.contract_validity = ( + "This was the final claim for this payment contract!" + ) elif periods_left == 1: - args.contract_validity = "The contract is valid for one more period!" + args.contract_validity = ( + "The contract is valid for one more period!" + ) else: - args.contract_validity = f"The contract is valid for {periods_left} more periods." + args.contract_validity = ( + f"The contract is valid for {periods_left} more periods." 
+ ) embed = await assemble(await prepare_args(args)) embeds.append(embed) @@ -259,7 +296,9 @@ def share_repr(percentage: float) -> str: args = await prepare_args(args) return [await assemble(args)] - async def process_transaction(self, block, tnx, contract_address, fn_input) -> list[Event]: + async def process_transaction( + self, block, tnx, contract_address, fn_input + ) -> list[Event]: if contract_address not in self.addresses: return [] @@ -298,12 +337,17 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l return [] if event_name == "dao_proposal_execute": - dao_name = await rp.call("rocketDAOProposal.getDAO", event.args["proposalID"]) + dao_name = await rp.call( + "rocketDAOProposal.getDAO", event.args["proposalID"] + ) # change prefix for DAO-specific event - event_name = event_name.replace("dao", { - "rocketDAONodeTrustedProposals": "odao", - "rocketDAOSecurityProposals": "sdao" - }[dao_name]) + event_name = event_name.replace( + "dao", + { + "rocketDAONodeTrustedProposals": "odao", + "rocketDAOSecurityProposals": "sdao", + }[dao_name], + ) responses = [] @@ -313,14 +357,18 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l proposal_id = event.args["proposalID"] if "pdao" in event_name: dao = ProtocolDAO() - payload = await rp.call("rocketDAOProtocolProposal.getPayload", proposal_id) + payload = await rp.call( + "rocketDAOProtocolProposal.getPayload", proposal_id + ) else: dao = DefaultDAO(await rp.call("rocketDAOProposal.getDAO", proposal_id)) payload = await rp.call("rocketDAOProposal.getPayload", proposal_id) event.args["executor"] = event["from"] proposal = await dao.fetch_proposal(proposal_id) - event.args["proposal_body"] = dao.build_proposal_body(proposal, include_proposer=False) + event.args["proposal_body"] = dao.build_proposal_body( + proposal, include_proposer=False + ) dao_address = dao.contract.address responses = await self.process_transaction(block, tnx, dao_address, payload) @@ 
-341,7 +389,9 @@ async def process_transaction(self, block, tnx, contract_address, fn_input) -> l new_responses.append(response) if "upgrade_triggered" in event_name: - log.info(f"Detected contract upgrade at block {response.block_number}, reinitializing") + log.info( + f"Detected contract upgrade at block {response.block_number}, reinitializing" + ) await rp.flush() self.__init__(self.bot) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 646a080c..1ed1187c 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -18,16 +18,7 @@ def minipool_split_rewards_logic(balance, node_share, commission, force_base=False): - d = { - "base" : { - "reth": 0, - "node": 0 - }, - "rewards": { - "reth": 0, - "node": 0 - } - } + d = {"base": {"reth": 0, "node": 0}, "rewards": {"reth": 0, "node": 0}} node_balance = 32 * node_share reth_balance = 32 - node_balance if balance >= 8 or force_base: @@ -44,7 +35,10 @@ def minipool_split_rewards_logic(balance, node_share, commission, force_base=Fal d["rewards"]["reth"] = balance * (1 - node_ownership_share) return d -def megapool_split_rewards(rewards, capital_ratio, node_commission, voter_share, dao_share): + +def megapool_split_rewards( + rewards, capital_ratio, node_commission, voter_share, dao_share +): borrowed_portion = rewards * (1 - capital_ratio) reth_commission = 1 - node_commission - voter_share - dao_share reth = borrowed_portion * reth_commission @@ -53,6 +47,7 @@ def megapool_split_rewards(rewards, capital_ratio, node_commission, voter_share, node = rewards - reth - voter - dao return {"node": node, "reth": reth, "voter": voter, "dao": dao} + class TVL(Cog): def __init__(self, bot: RocketWatch): self.bot = bot @@ -66,43 +61,43 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): await interaction.response.defer(ephemeral=is_hidden(interaction)) data = { "Total RPL Locked": { - "Staked RPL" : { - "Minipools": {}, # accurate, live - "Megapools": 
{}, # accurate, live - "oDAO Bond" : {}, # accurate, live + "Staked RPL": { + "Minipools": {}, # accurate, live + "Megapools": {}, # accurate, live + "oDAO Bond": {}, # accurate, live }, "Unclaimed Rewards": { "Node Operators & oDAO": {}, # accurate, live - "pDAO" : {}, # accurate, live + "pDAO": {}, # accurate, live }, - "Slashed RPL" : {}, # accurate, live - "Unused Inflation" : {}, # accurate, live + "Slashed RPL": {}, # accurate, live + "Unused Inflation": {}, # accurate, live }, "Total ETH Locked": { "Minipool Stake": { "Dissolved Minipools": { "Locked on Beacon Chain": {}, # accurate, db - "Contract Balance" : {}, # accurate, db + "Contract Balance": {}, # accurate, db }, - "Staking Minipools" : { + "Staking Minipools": { "rETH Share": {"_val": 0}, # done, db "Node Share": {"_val": 0}, # done, db - } + }, }, "Megapool Stake": { - "Pending Validators" : {}, + "Pending Validators": {}, "Dissolved Validators": {}, - "Staking Validators" : { + "Staking Validators": { "rETH Share": {"_val": 0}, "Node Share": {"_val": 0}, }, - "Exiting Validators" : { + "Exiting Validators": { "rETH Share": {"_val": 0}, "Node Share": {"_val": 0}, - } + }, }, "rETH Collateral": { - "Deposit Pool" : {}, # accurate, live + "Deposit Pool": {}, # accurate, live "Extra Collateral": {}, # accurate, live }, "Undistributed Balances": { @@ -119,19 +114,19 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): "Node Share": {"_val": 0}, # done, db }, "Megapool Contract Balances": { - "rETH Share" : {"_val": 0}, - "Node Share" : {"_val": 0}, + "rETH Share": {"_val": 0}, + "Node Share": {"_val": 0}, "Voter Share": {"_val": 0}, - "DAO Share" : {"_val": 0}, + "DAO Share": {"_val": 0}, }, - "Beacon Chain Rewards" : { - "rETH Share" : {"_val": 0}, - "Node Share" : {"_val": 0}, + "Beacon Chain Rewards": { + "rETH Share": {"_val": 0}, + "Node Share": {"_val": 0}, "Voter Share": {"_val": 0}, - "DAO Share" : {"_val": 0}, + "DAO Share": {"_val": 0}, }, }, - "Unclaimed Rewards" : {}, # 
accurate, live + "Unclaimed Rewards": {}, # accurate, live }, } # note: _value in each dict will store the final string that gets rendered in the render @@ -148,111 +143,151 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # - They have 1 ETH locked on the Beacon Chain, not earning any rewards. # - The 31 ETH that was waiting in their address was moved back to the Deposit Pool (This can cause the Deposit Pool # to grow beyond its Cap, check the below comment for information about that). - tmp = await (await self.bot.db.minipools.aggregate([ - { - '$match': { - 'status': 'dissolved', - 'vacant': False - } - }, { - '$group': { - '_id' : 'total', - 'beacon_balance' : { - '$sum': '$beacon.balance' + tmp = await ( + await self.bot.db.minipools.aggregate( + [ + {"$match": {"status": "dissolved", "vacant": False}}, + { + "$group": { + "_id": "total", + "beacon_balance": {"$sum": "$beacon.balance"}, + "execution_balance": {"$sum": "$execution_balance"}, + } }, - 'execution_balance': { - '$sum': '$execution_balance' - } - } - } - ])).to_list(1) + ] + ) + ).to_list(1) if len(tmp) > 0: tmp = tmp[0] - data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"]["Locked on Beacon Chain"]["_val"] = tmp[ - "beacon_balance"] - data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"]["Contract Balance"]["_val"] = tmp[ - "execution_balance"] + data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"][ + "Locked on Beacon Chain" + ]["_val"] = tmp["beacon_balance"] + data["Total ETH Locked"]["Minipool Stake"]["Dissolved Minipools"][ + "Contract Balance" + ]["_val"] = tmp["execution_balance"] # Staking Minipools: - minipools = await self.bot.db.minipools.find({ - 'status': {"$nin": ["initialised", "prelaunch", "dissolved"]}, - 'node_deposit_balance': {"$exists": True}, - }).to_list(None) + minipools = await self.bot.db.minipools.find( + { + "status": {"$nin": ["initialised", "prelaunch", "dissolved"]}, + "node_deposit_balance": {"$exists": 
True}, + } + ).to_list(None) for minipool in minipools: node_share = minipool["node_deposit_balance"] / 32 commission = minipool["node_fee"] refund_balance = minipool["node_refund_balance"] contract_balance = minipool["execution_balance"] - beacon_balance = minipool["beacon"]["balance"] if "beacon" in minipool else 32 + beacon_balance = ( + minipool["beacon"]["balance"] if "beacon" in minipool else 32 + ) # if there is a refund_balance, we first try to pay that off using the contract balance if refund_balance > 0: if contract_balance > 0: if contract_balance >= refund_balance: contract_balance -= refund_balance - data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["Node Share"][ - "_val"] += refund_balance + data["Total ETH Locked"]["Undistributed Balances"][ + "Minipool Contract Balances" + ]["Node Share"]["_val"] += refund_balance refund_balance = 0 else: refund_balance -= contract_balance - data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["Node Share"][ - "_val"] += contract_balance + data["Total ETH Locked"]["Undistributed Balances"][ + "Minipool Contract Balances" + ]["Node Share"]["_val"] += contract_balance contract_balance = 0 # if there is still a refund balance, we try to pay it off using the beacon balance if refund_balance > 0 and beacon_balance > 0: - if beacon_balance >= refund_balance: - beacon_balance -= refund_balance - data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"][ - "_val"] += refund_balance - refund_balance = 0 - else: - refund_balance -= beacon_balance - data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"][ - "_val"] += beacon_balance - beacon_balance = 0 + if beacon_balance >= refund_balance: + beacon_balance -= refund_balance + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"][ + "Node Share" + ]["_val"] += refund_balance + refund_balance = 0 + else: + refund_balance -= beacon_balance + data["Total ETH 
Locked"]["Minipool Stake"]["Staking Minipools"][ + "Node Share" + ]["_val"] += beacon_balance + beacon_balance = 0 if beacon_balance > 0: - d = minipool_split_rewards_logic(beacon_balance, node_share, commission, force_base=True) - data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["Node Share"]["_val"] += d["base"]["node"] - data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"]["rETH Share"]["_val"] += d["base"]["reth"] - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Node Share"]["_val"] += \ - d["rewards"]["node"] - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["rETH Share"]["_val"] += \ - d["rewards"]["reth"] + d = minipool_split_rewards_logic( + beacon_balance, node_share, commission, force_base=True + ) + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"][ + "Node Share" + ]["_val"] += d["base"]["node"] + data["Total ETH Locked"]["Minipool Stake"]["Staking Minipools"][ + "rETH Share" + ]["_val"] += d["base"]["reth"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["Node Share"]["_val"] += d["rewards"]["node"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["rETH Share"]["_val"] += d["rewards"]["reth"] if contract_balance > 0: - d = minipool_split_rewards_logic(contract_balance, node_share, commission) - data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["Node Share"][ - "_val"] += d["base"]["node"] + d["rewards"]["node"] - data["Total ETH Locked"]["Undistributed Balances"]["Minipool Contract Balances"]["rETH Share"][ - "_val"] += d["base"]["reth"] + d["rewards"]["reth"] + d = minipool_split_rewards_logic( + contract_balance, node_share, commission + ) + data["Total ETH Locked"]["Undistributed Balances"][ + "Minipool Contract Balances" + ]["Node Share"]["_val"] += d["base"]["node"] + d["rewards"]["node"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Minipool 
Contract Balances" + ]["rETH Share"]["_val"] += d["base"]["reth"] + d["rewards"]["reth"] # Megapool commission settings - network_settings = await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork") - node_share = solidity.to_float(await network_settings.functions.getNodeShare().call()) - voter_share = solidity.to_float(await network_settings.functions.getVoterShare().call()) - dao_share = solidity.to_float(await network_settings.functions.getProtocolDAOShare().call()) + network_settings = await rp.get_contract_by_name( + "rocketDAOProtocolSettingsNetwork" + ) + node_share = solidity.to_float( + await network_settings.functions.getNodeShare().call() + ) + voter_share = solidity.to_float( + await network_settings.functions.getVoterShare().call() + ) + dao_share = solidity.to_float( + await network_settings.functions.getProtocolDAOShare().call() + ) # Pending Megapool Validators: prestaked validators have deposit_value locked # (1 ETH on beacon + 31 ETH in contract as assignedValue) # in_queue validators are skipped — their ETH is in the Deposit Pool (already counted) - tmp = await (await self.bot.db.megapool_validators.aggregate([ - {'$match': {'status': 'prestaked'}}, - {'$count': 'count'} - ])).to_list(1) + tmp = await ( + await self.bot.db.megapool_validators.aggregate( + [{"$match": {"status": "prestaked"}}, {"$count": "count"}] + ) + ).to_list(1) if tmp: - data["Total ETH Locked"]["Megapool Stake"]["Pending Validators"]["_val"] = tmp[0]["count"] * 32 + data["Total ETH Locked"]["Megapool Stake"]["Pending Validators"]["_val"] = ( + tmp[0]["count"] * 32 + ) # Dissolved Megapool Validators: 1 ETH stuck on beacon chain, 31 ETH returned to DP - tmp = await (await self.bot.db.megapool_validators.aggregate([ - {'$match': {'status': 'dissolved'}}, - {'$group': {'_id': 'total', 'beacon_balance': {'$sum': '$beacon.balance'}}} - ])).to_list(1) + tmp = await ( + await self.bot.db.megapool_validators.aggregate( + [ + {"$match": {"status": "dissolved"}}, + { + 
"$group": { + "_id": "total", + "beacon_balance": {"$sum": "$beacon.balance"}, + } + }, + ] + ) + ).to_list(1) if tmp: - data["Total ETH Locked"]["Megapool Stake"]["Dissolved Validators"]["_val"] = tmp[0]["beacon_balance"] + data["Total ETH Locked"]["Megapool Stake"]["Dissolved Validators"][ + "_val" + ] = tmp[0]["beacon_balance"] # Staking, Locked & Exiting Megapool Validators: beacon balance split by capital ratio # locked = exit requested but not yet confirmed on beacon chain, treated as exiting megapool_validators = await self.bot.db.megapool_validators.find( - {'status': {'$in': ['staking', 'locked', 'exiting']}} + {"status": {"$in": ["staking", "locked", "exiting"]}} ).to_list(None) for v in megapool_validators: capital_ratio = v["requested_bond"] / 32 @@ -266,50 +301,89 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): shortfall = 32 - base node_base = max(0, node_base - shortfall) reth_base = base - node_base - target = "Staking Validators" if (status == "staking") else "Exiting Validators" - data["Total ETH Locked"]["Megapool Stake"][target]["rETH Share"]["_val"] += reth_base - data["Total ETH Locked"]["Megapool Stake"][target]["Node Share"]["_val"] += node_base + target = ( + "Staking Validators" if (status == "staking") else "Exiting Validators" + ) + data["Total ETH Locked"]["Megapool Stake"][target]["rETH Share"][ + "_val" + ] += reth_base + data["Total ETH Locked"]["Megapool Stake"][target]["Node Share"][ + "_val" + ] += node_base # beacon chain rewards (anything over 32) if beacon_balance > 32: rewards = beacon_balance - 32 - split = megapool_split_rewards(rewards, capital_ratio, node_share, voter_share, dao_share) - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Node Share"]["_val"] += split["node"] - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["rETH Share"]["_val"] += split["reth"] - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["Voter 
Share"]["_val"] += split["voter"] - data["Total ETH Locked"]["Undistributed Balances"]["Beacon Chain Rewards"]["DAO Share"]["_val"] += split["dao"] + split = megapool_split_rewards( + rewards, capital_ratio, node_share, voter_share, dao_share + ) + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["Node Share"]["_val"] += split["node"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["rETH Share"]["_val"] += split["reth"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["Voter Share"]["_val"] += split["voter"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Beacon Chain Rewards" + ]["DAO Share"]["_val"] += split["dao"] # Megapool Contract Balances: eth_balance = assignedValue + refundValue + pendingRewards # assignedValue already counted in Queued Validators, so we split the rest: # refundValue (minus debt) → Node Share # pendingRewards → split by commission (node/rETH/voter/DAO) - megapool_balances = await (await self.bot.db.node_operators.aggregate([ - {'$match': {'megapool.deployed': True, 'megapool.eth_balance': {'$gt': 0}}}, - { - '$project': { - 'refund_value': '$megapool.refund_value', - 'debt': '$megapool.debt', - 'pending_rewards': '$megapool.pending_rewards', - 'node_bond': '$megapool.node_bond', - 'user_capital': '$megapool.user_capital', - } - } - ])).to_list() + megapool_balances = await ( + await self.bot.db.node_operators.aggregate( + [ + { + "$match": { + "megapool.deployed": True, + "megapool.eth_balance": {"$gt": 0}, + } + }, + { + "$project": { + "refund_value": "$megapool.refund_value", + "debt": "$megapool.debt", + "pending_rewards": "$megapool.pending_rewards", + "node_bond": "$megapool.node_bond", + "user_capital": "$megapool.user_capital", + } + }, + ] + ) + ).to_list() for mp in megapool_balances: refund_value = mp.get("refund_value", 0) debt_val = mp.get("debt", 0) pending_rewards = mp.get("pending_rewards", 0) # refundValue minus 
debt → Node Share node_refund = max(0, refund_value - debt_val) - data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Node Share"]["_val"] += node_refund + data["Total ETH Locked"]["Undistributed Balances"][ + "Megapool Contract Balances" + ]["Node Share"]["_val"] += node_refund # pendingRewards → split by commission if pending_rewards > 0: total_capital = mp.get("node_bond", 0) + mp.get("user_capital", 0) - capital_ratio = mp.get("node_bond", 0) / total_capital if total_capital > 0 else 0 - split = megapool_split_rewards(pending_rewards, capital_ratio, node_share, voter_share, dao_share) - data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Node Share"]["_val"] += split["node"] - data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["rETH Share"]["_val"] += split["reth"] - data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["Voter Share"]["_val"] += split["voter"] - data["Total ETH Locked"]["Undistributed Balances"]["Megapool Contract Balances"]["DAO Share"]["_val"] += split["dao"] + capital_ratio = ( + mp.get("node_bond", 0) / total_capital if total_capital > 0 else 0 + ) + split = megapool_split_rewards( + pending_rewards, capital_ratio, node_share, voter_share, dao_share + ) + data["Total ETH Locked"]["Undistributed Balances"][ + "Megapool Contract Balances" + ]["Node Share"]["_val"] += split["node"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Megapool Contract Balances" + ]["rETH Share"]["_val"] += split["reth"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Megapool Contract Balances" + ]["Voter Share"]["_val"] += split["voter"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Megapool Contract Balances" + ]["DAO Share"]["_val"] += split["dao"] # Deposit Pool Balance: calls the contract and asks what its balance is, simple enough. # ETH in here has been swapped for rETH and is waiting to be matched with a minipool. 
@@ -319,111 +393,142 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): # surpasses the configured targetCollateralRate, # which is 10% at the time of writing. Once this occurs the ETH gets moved # from the rETH contract to the Deposit Pool. - data["Total ETH Locked"]["rETH Collateral"]["Deposit Pool"]["_val"] = solidity.to_float( - await rp.call("rocketDepositPool.getBalance")) + data["Total ETH Locked"]["rETH Collateral"]["Deposit Pool"]["_val"] = ( + solidity.to_float(await rp.call("rocketDepositPool.getBalance")) + ) # Extra Collateral: This is ETH stored in the rETH contract from Minipools that have been withdrawn from. # This value has a cap - read the above comment for more information about that. - data["Total ETH Locked"]["rETH Collateral"]["Extra Collateral"]["_val"] = solidity.to_float( - await w3.eth.get_balance(await rp.get_address_by_name("rocketTokenRETH"))) + data["Total ETH Locked"]["rETH Collateral"]["Extra Collateral"]["_val"] = ( + solidity.to_float( + await w3.eth.get_balance( + await rp.get_address_by_name("rocketTokenRETH") + ) + ) + ) # Smoothing Pool Balance: This is ETH from Proposals by minipools that have joined the Smoothing Pool. - smoothie_balance = solidity.to_float(await w3.eth.get_balance(await rp.get_address_by_name("rocketSmoothingPool"))) - data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"]["_val"] = smoothie_balance + smoothie_balance = solidity.to_float( + await w3.eth.get_balance( + await rp.get_address_by_name("rocketSmoothingPool") + ) + ) + data["Total ETH Locked"]["Undistributed Balances"]["Smoothing Pool Balance"][ + "_val" + ] = smoothie_balance # Unclaimed Smoothing Pool Rewards: This is ETH from the previous Reward Periods that have not been claimed yet. 
data["Total ETH Locked"]["Unclaimed Rewards"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet")) + await rp.call("rocketVault.balanceOf", "rocketMerkleDistributorMainnet") + ) # Staked RPL: This is all ETH that has been staked by node operators. data["Total RPL Locked"]["Staked RPL"]["Minipools"]["_val"] = solidity.to_float( - await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL")) + await rp.call("rocketNodeStaking.getTotalLegacyStakedRPL") + ) data["Total RPL Locked"]["Staked RPL"]["Megapools"]["_val"] = solidity.to_float( - await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL")) + await rp.call("rocketNodeStaking.getTotalMegapoolStakedRPL") + ) # oDAO bonded RPL: RPL oDAO Members have to lock up to join it. This RPL can be slashed if they misbehave. data["Total RPL Locked"]["Staked RPL"]["oDAO Bond"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOfToken", "rocketDAONodeTrustedActions", rpl_address)) + await rp.call( + "rocketVault.balanceOfToken", "rocketDAONodeTrustedActions", rpl_address + ) + ) # Unclaimed RPL Rewards: RPL rewards that have been earned by Node Operators but have not been claimed yet. - data["Total RPL Locked"]["Unclaimed Rewards"]["Node Operators & oDAO"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOfToken", "rocketMerkleDistributorMainnet", rpl_address)) + data["Total RPL Locked"]["Unclaimed Rewards"]["Node Operators & oDAO"][ + "_val" + ] = solidity.to_float( + await rp.call( + "rocketVault.balanceOfToken", + "rocketMerkleDistributorMainnet", + rpl_address, + ) + ) # Undistributed pDAO Rewards: RPL rewards that have been earned by the pDAO but have not been distributed yet. 
- data["Total RPL Locked"]["Unclaimed Rewards"]["pDAO"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOfToken", "rocketClaimDAO", rpl_address)) + data["Total RPL Locked"]["Unclaimed Rewards"]["pDAO"]["_val"] = ( + solidity.to_float( + await rp.call( + "rocketVault.balanceOfToken", "rocketClaimDAO", rpl_address + ) + ) + ) # Unused Inflation: RPL that has been minted but not yet been used for rewards. # This is (or was) an issue as the snapshots didn't account for the last day of inflation. # Joe is already looking into this. data["Total RPL Locked"]["Unused Inflation"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOfToken", "rocketRewardsPool", rpl_address)) + await rp.call( + "rocketVault.balanceOfToken", "rocketRewardsPool", rpl_address + ) + ) # Slashed RPL: RPL that is slashed gets moved to the Auction Manager Contract. # This RPL will be sold using a Dutch Auction for ETH, which the gets moved to the rETH contract to be used as # extra rETH collateral. data["Total RPL Locked"]["Slashed RPL"]["_val"] = solidity.to_float( - await rp.call("rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address)) + await rp.call( + "rocketVault.balanceOfToken", "rocketAuctionManager", rpl_address + ) + ) # create _value string for each branch. 
the _value is the sum of all _val or _val values in the children - tmp = await (await self.bot.db.node_operators.aggregate([ - { - '$match': { - 'fee_distributor.eth_balance': { - '$gt': 0 - } - } - }, { - '$project': { - 'fee_distributor.eth_balance': 1, - 'node_share' : { - '$sum': [ - '$effective_node_share', { - '$multiply': [ + tmp = await ( + await self.bot.db.node_operators.aggregate( + [ + {"$match": {"fee_distributor.eth_balance": {"$gt": 0}}}, + { + "$project": { + "fee_distributor.eth_balance": 1, + "node_share": { + "$sum": [ + "$effective_node_share", { - '$subtract': [ - 1, '$effective_node_share' + "$multiply": [ + {"$subtract": [1, "$effective_node_share"]}, + "$average_node_fee", ] - }, '$average_node_fee' + }, ] - } - ] - } - } - }, { - '$project': { - 'node_share': { - '$multiply': [ - '$fee_distributor.eth_balance', '$node_share' - ] + }, + } }, - 'reth_share': { - '$multiply': [ - '$fee_distributor.eth_balance', { - '$subtract': [ - 1, '$node_share' + { + "$project": { + "node_share": { + "$multiply": [ + "$fee_distributor.eth_balance", + "$node_share", ] - } - ] - } - } - }, { - '$group': { - '_id' : None, - 'node_share': { - '$sum': '$node_share' + }, + "reth_share": { + "$multiply": [ + "$fee_distributor.eth_balance", + {"$subtract": [1, "$node_share"]}, + ] + }, + } }, - 'reth_share': { - '$sum': '$reth_share' - } - } - } - ])).to_list() + { + "$group": { + "_id": None, + "node_share": {"$sum": "$node_share"}, + "reth_share": {"$sum": "$reth_share"}, + } + }, + ] + ) + ).to_list() if len(tmp) > 0: - data["Total ETH Locked"]["Undistributed Balances"]["Node Distributor Contracts"]["Node Share"]["_val"] = tmp[0][ - "node_share"] - data["Total ETH Locked"]["Undistributed Balances"]["Node Distributor Contracts"]["rETH Share"]["_val"] = tmp[0][ - "reth_share"] + data["Total ETH Locked"]["Undistributed Balances"][ + "Node Distributor Contracts" + ]["Node Share"]["_val"] = tmp[0]["node_share"] + data["Total ETH Locked"]["Undistributed Balances"][ + 
"Node Distributor Contracts" + ]["rETH Share"]["_val"] = tmp[0]["reth_share"] def set_val_of_branch(branch, unit): val = 0 @@ -445,13 +550,16 @@ def set_val_of_branch(branch, unit): set_val_of_branch(data["Total ETH Locked"], "ETH") set_val_of_branch(data["Total RPL Locked"], "RPL") # calculate total tvl - total_tvl = data["Total ETH Locked"]["_val"] + (data["Total RPL Locked"]["_val"] * rpl_price) + total_tvl = data["Total ETH Locked"]["_val"] + ( + data["Total RPL Locked"]["_val"] * rpl_price + ) usdc_total_tvl = total_tvl * eth_price data["_value"] = f"{total_tvl:,.2f} ETH" test = render_tree(data, "Total Locked Value", max_depth=0 if show_all else 2) # send embed with tvl closer = f"or about {Style.BRIGHT}{humanize.intword(usdc_total_tvl, format='%.3f')} USDC{Style.RESET_ALL}".rjust( - max([len(line) for line in test.split("\n")]) - 1) + max([len(line) for line in test.split("\n")]) - 1 + ) embed = Embed(title="Protocol TVL", description=f"```ansi\n{test}\n{closer}```") await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 46a5ee59..b91b4939 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -20,7 +20,9 @@ class InstructionsView(ui.View): - def __init__(self, eligible: list[dict], distributable: list[dict], instruction_timeout: int): + def __init__( + self, eligible: list[dict], distributable: list[dict], instruction_timeout: int + ): super().__init__(timeout=instruction_timeout) self.eligible = eligible self.distributable = distributable @@ -28,8 +30,12 @@ def __init__(self, eligible: list[dict], distributable: list[dict], instruction_ @ui.button(label="Instructions", style=ButtonStyle.blurple) async def instructions(self, interaction: Interaction, _) -> None: mp_contract = await rp.assemble_contract("rocketMinipoolDelegate") - bud_calldata = 
bytes.fromhex(mp_contract.encodeABI(fn_name="beginUserDistribute")[2:]) - dist_calldata = bytes.fromhex(mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:]) + bud_calldata = bytes.fromhex( + mp_contract.encodeABI(fn_name="beginUserDistribute")[2:] + ) + dist_calldata = bytes.fromhex( + mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:] + ) calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] calls += [(mp["address"], True, bud_calldata) for mp in self.eligible] @@ -41,7 +47,9 @@ async def instructions(self, interaction: Interaction, _) -> None: tuple_strs = [] for address, allow_failure, calldata in calls: - tuple_strs.append(f"[\"{address}\", {str(allow_failure).lower()}, 0x{calldata.hex()}]") + tuple_strs.append( + f'["{address}", {str(allow_failure).lower()}, 0x{calldata.hex()}]' + ) input_data = "[" + ",".join(tuple_strs) + "]" etherscan_url = f"https://etherscan.io/address/{multicall_contract.address}#writeContract#F2" @@ -57,9 +65,13 @@ async def instructions(self, interaction: Interaction, _) -> None: actions = [] if (count := len(self.distributable)) > 0: - actions.append(f"distribute the balance of **{count}** minipool{'s' if count != 1 else ''}") + actions.append( + f"distribute the balance of **{count}** minipool{'s' if count != 1 else ''}" + ) if (count := len(self.eligible)) > 0: - actions.append(f"begin the user distribution process for **{count}** minipool{'s' if count != 1 else ''}") + actions.append( + f"begin the user distribution process for **{count}** minipool{'s' if count != 1 else ''}" + ) embed.description += "\nThis will " + " and ".join(actions) + "." 
embed.description += f"\nEstimated cost: **{cost_eth:,.6f} ETH** ({gas_used:,} gas @ {(gas_price / 1e9):.2f} gwei)" @@ -67,7 +79,7 @@ async def instructions(self, interaction: Interaction, _) -> None: await interaction.response.send_message( embed=embed, file=discord.File(StringIO(input_data), filename="input_data.txt"), - ephemeral=True + ephemeral=True, ) @@ -100,7 +112,12 @@ async def task(self): f"The next window closes !" ) - await channel.send(embed=embed, view=InstructionsView([], distributable[:100], instruction_timeout=(4 * 3600))) + await channel.send( + embed=embed, + view=InstructionsView( + [], distributable[:100], instruction_timeout=(4 * 3600) + ), + ) @task.before_loop async def before_task(self): @@ -115,20 +132,30 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: current_epoch = int(head["data"]["header"]["message"]["slot"]) // 32 threshold_epoch = current_epoch - 5000 - minipools = await self.bot.db.minipools.find({ - "user_distributed": False, - "status": "staking", - "execution_balance": {"$gte": 8}, - "beacon.withdrawable_epoch": {"$lt": threshold_epoch} - }).sort("beacon.withdrawable_epoch", ASCENDING).to_list() + minipools = ( + await self.bot.db.minipools.find( + { + "user_distributed": False, + "status": "staking", + "execution_balance": {"$gte": 8}, + "beacon.withdrawable_epoch": {"$lt": threshold_epoch}, + } + ) + .sort("beacon.withdrawable_epoch", ASCENDING) + .to_list() + ) eligible = [] pending = [] distributable = [] current_time = int(time.time()) - ud_window_start = await rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart") - ud_window_end = ud_window_start + await rp.call("rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength") + ud_window_start = await rp.call( + "rocketDAOProtocolSettingsMinipool.getUserDistributeWindowStart" + ) + ud_window_end = ud_window_start + await rp.call( + "rocketDAOProtocolSettingsMinipool.getUserDistributeWindowLength" + ) for mp in 
minipools: mp["address"] = w3.to_checksum_address(mp["address"]) @@ -142,7 +169,9 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) # double check, DB may lag behind - elif not await rp.call("rocketMinipoolDelegate.getUserDistributed", address=mp["address"]): + elif not await rp.call( + "rocketMinipoolDelegate.getUserDistributed", address=mp["address"] + ): mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) @@ -163,7 +192,7 @@ async def user_distribute_status(self, interaction: Interaction): embed.add_field( name="Eligible", value=f"**{len(eligible)}** minipool{'s' if len(eligible) != 1 else ''}", - inline=False + inline=False, ) if pending: @@ -174,7 +203,7 @@ async def user_distribute_status(self, interaction: Interaction): f"**{len(pending)}** minipool{'s' if len(pending) != 1 else ''}" f" · next window opens " ), - inline=False + inline=False, ) else: embed.add_field(name="Pending", value="**0** minipools", inline=False) @@ -187,7 +216,7 @@ async def user_distribute_status(self, interaction: Interaction): f"**{len(distributable)}** minipool{'s' if len(distributable) != 1 else ''}" f" · next window closes " ), - inline=False + inline=False, ) else: embed.add_field(name="Distributable", value="**0** minipools", inline=False) @@ -195,7 +224,10 @@ async def user_distribute_status(self, interaction: Interaction): if eligible or distributable: # limit the number of distributions to not run out of gas await interaction.followup.send( - embed=embed, view=InstructionsView(eligible[:50], distributable[:100], instruction_timeout=300) + embed=embed, + view=InstructionsView( + eligible[:50], distributable[:100], instruction_timeout=300 + ), ) else: await interaction.followup.send(embed=embed) diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 
9d4c38d3..7c26a527 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -13,7 +13,11 @@ log = logging.getLogger("rocketwatch.validator_states") -_BEACON_PENDING = {"in_queue": "unassigned", "prestaked": "prestaked", "staking": "staked"} +_BEACON_PENDING = { + "in_queue": "unassigned", + "prestaked": "prestaked", + "staking": "staked", +} def _classify_beacon_validator(beacon, contract_status): @@ -51,7 +55,7 @@ def _empty_state_tree(): "exiting": {}, "exited": {}, "withdrawn": {}, - "closed": {} + "closed": {}, } @@ -118,7 +122,13 @@ async def validator_states(self, interaction: Interaction): minipools = await self.bot.db.minipools.find( {"beacon.status": {"$exists": True}}, - {"beacon": 1, "status": 1, "finalized": 1, "node_operator": 1, "validator_index": 1} + { + "beacon": 1, + "status": 1, + "finalized": 1, + "node_operator": 1, + "validator_index": 1, + }, ).to_list(None) megapool_vals = await self.bot.db.megapool_validators.find( {}, {"beacon": 1, "status": 1, "node_operator": 1, "validator_index": 1} @@ -136,7 +146,7 @@ async def validator_states(self, interaction: Interaction): "megapools": _collapse_tree(mg_data), } - embed = Embed(title="Validator States", color=0x00ff00) + embed = Embed(title="Validator States", color=0x00FF00) description = "```\n" description += render_tree_legacy(tree, "Validators") @@ -172,35 +182,50 @@ async def validator_states(self, interaction: Interaction): exiting_node_operators, withdrawn_node_operators = node_operators max_total_list_length = 16 - if len(exiting_node_operators) + len(withdrawn_node_operators) <= max_total_list_length: + if ( + len(exiting_node_operators) + len(withdrawn_node_operators) + <= max_total_list_length + ): num_exiting = len(exiting_node_operators) num_withdrawn = len(withdrawn_node_operators) elif len(exiting_node_operators) >= len(withdrawn_node_operators): - num_withdrawn = min(len(withdrawn_node_operators), 
max_total_list_length // 2) + num_withdrawn = min( + len(withdrawn_node_operators), max_total_list_length // 2 + ) num_exiting = max_total_list_length - num_withdrawn else: - num_exiting = min(len(exiting_node_operators), max_total_list_length // 2) + num_exiting = min( + len(exiting_node_operators), max_total_list_length // 2 + ) num_withdrawn = max_total_list_length - num_exiting if num_exiting > 0: description += "\n**Exiting Node Operators**\n" - description += ", ".join([ - f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" - for v, c in exiting_node_operators[:num_exiting] - ]) + description += ", ".join( + [ + f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" + for v, c in exiting_node_operators[:num_exiting] + ] + ) if remaining_no := exiting_node_operators[num_exiting:]: num_remaining_valis = sum([c for _, c in remaining_no]) - description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += ( + f", and {len(remaining_no)} more ({num_remaining_valis})" + ) description += "\n" if num_withdrawn > 0: description += "\n**Withdrawn Node Operators**\n" - description += ", ".join([ - f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" - for v, c in withdrawn_node_operators[:num_withdrawn] - ]) + description += ", ".join( + [ + f"{await el_explorer_url(w3.to_checksum_address(v))} ({c})" + for v, c in withdrawn_node_operators[:num_withdrawn] + ] + ) if remaining_no := withdrawn_node_operators[num_withdrawn:]: num_remaining_valis = sum([c for _, c in remaining_no]) - description += f", and {len(remaining_no)} more ({num_remaining_valis})" + description += ( + f", and {len(remaining_no)} more ({num_remaining_valis})" + ) description += "\n" embed.description = description diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index aeb69cb0..77d0e6f7 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -76,23 +76,35 @@ def __init__(self, bot: RocketWatch): 
async def _get_dex(self) -> set[DEX]: if self.dex is None: self.dex = { - BalancerV2([ - await BalancerV2.WeightedPool.create( - HexStr("0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462") - ) - ]), - await UniswapV3.create([ - cast(ChecksumAddress, "0xe42318eA3b998e8355a3Da364EB9D48eC725Eb45"), - cast(ChecksumAddress, "0xcf15aD9bE9d33384B74b94D63D06B4A9Bd82f640") - ]) + BalancerV2( + [ + await BalancerV2.WeightedPool.create( + HexStr( + "0x9f9d900462492d4c21e9523ca95a7cd86142f298000200000000000000000462" + ) + ) + ] + ), + await UniswapV3.create( + [ + cast( + ChecksumAddress, + "0xe42318eA3b998e8355a3Da364EB9D48eC725Eb45", + ), + cast( + ChecksumAddress, + "0xcf15aD9bE9d33384B74b94D63D06B4A9Bd82f640", + ), + ] + ), } return self.dex @staticmethod def _get_market_depth_and_liquidity( - markets: dict[Market | DEX.LiquidityPool, Liquidity], - x: np.ndarray, - rpl_usd: float + markets: dict[Market | DEX.LiquidityPool, Liquidity], + x: np.ndarray, + rpl_usd: float, ) -> tuple[np.ndarray, float]: depth = np.zeros_like(x) liquidity = 0 @@ -100,41 +112,57 @@ def _get_market_depth_and_liquidity( for liq in markets.values(): conv = liq.price / rpl_usd depth += np.array(list(map(liq.depth_at, x * conv))) / conv - liquidity += (liq.depth_at(float(x[0] * conv)) + liq.depth_at(float(x[-1] * conv))) / conv + liquidity += ( + liq.depth_at(float(x[0] * conv)) + liq.depth_at(float(x[-1] * conv)) + ) / conv return depth, liquidity @timerun_async - async def _get_cex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[CEX, np.ndarray]: + async def _get_cex_data( + self, x: np.ndarray, rpl_usd: float + ) -> OrderedDict[CEX, np.ndarray]: depth: dict[CEX, np.ndarray] = {} liquidity: dict[CEX, float] = {} async with aiohttp.ClientSession() as session: requests = [cex.get_liquidity(session) for cex in self.cex] - for result in zip(self.cex, await asyncio.gather(*requests, return_exceptions=True), strict=False): + for result in zip( + self.cex, + await 
asyncio.gather(*requests, return_exceptions=True), + strict=False, + ): if not isinstance(result, Exception): cex, markets = result - depth[cex], liquidity[cex] = self._get_market_depth_and_liquidity(markets, x, rpl_usd) + depth[cex], liquidity[cex] = self._get_market_depth_and_liquidity( + markets, x, rpl_usd + ) else: log.error(f"Failed to get liquidity data for {cex}") await self.bot.report_error(result) - return OrderedDict(sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True)) + return OrderedDict( + sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True) + ) @timerun - async def _get_dex_data(self, x: np.ndarray, rpl_usd: float) -> OrderedDict[DEX, np.ndarray]: + async def _get_dex_data( + self, x: np.ndarray, rpl_usd: float + ) -> OrderedDict[DEX, np.ndarray]: depth: dict[DEX, np.ndarray] = {} liquidity: dict[DEX, float] = {} for dex in await self._get_dex(): if pools := await dex.get_liquidity(): - depth[dex], liquidity[dex] = self._get_market_depth_and_liquidity(pools, x, rpl_usd) + depth[dex], liquidity[dex] = self._get_market_depth_and_liquidity( + pools, x, rpl_usd + ) - return OrderedDict(sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True)) + return OrderedDict( + sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True) + ) @staticmethod def _label_exchange_data( - data: OrderedDict[Exchange, np.ndarray], - max_unique: int, - color_other: str + data: OrderedDict[Exchange, np.ndarray], max_unique: int, color_other: str ) -> list[tuple[np.ndarray, str, str]]: ret = [] for exchange, depth in list(data.items())[:max_unique]: @@ -148,11 +176,11 @@ def _label_exchange_data( @staticmethod def _plot_data( - x: np.ndarray, - rpl_usd: float, - rpl_eth: float, - cex_data: OrderedDict[CEX, np.ndarray], - dex_data: OrderedDict[DEX, np.ndarray], + x: np.ndarray, + rpl_usd: float, + rpl_eth: float, + cex_data: OrderedDict[CEX, np.ndarray], + dex_data: OrderedDict[DEX, np.ndarray], ) -> figure.Figure: fig, ax = 
plt.subplots(figsize=(10, 5)) @@ -174,13 +202,17 @@ def _plot_data( dex_data_aggr = Wall._label_exchange_data(dex_data, max_unique, "#777777") y_offset = 0.0 - max_label_length: int = np.max([len(t[1]) for t in (cex_data_aggr + dex_data_aggr)]) + max_label_length: int = np.max( + [len(t[1]) for t in (cex_data_aggr + dex_data_aggr)] + ) - def add_data(_data: list[tuple[np.ndarray, str, str]], _name: str | None) -> None: + def add_data( + _data: list[tuple[np.ndarray, str, str]], _name: str | None + ) -> None: labels, handles = [], [] for y_values, label, color in _data: y.append(y_values) - labels.append(f"{label:\u00A0<{max_label_length}}") + labels.append(f"{label:\u00a0<{max_label_length}}") colors.append(color) handles.append(plt.Rectangle((0, 0), 1, 1, color=color)) @@ -191,7 +223,7 @@ def add_data(_data: list[tuple[np.ndarray, str, str]], _name: str | None) -> Non title=_name, loc="upper left", bbox_to_anchor=(0, 1 - y_offset), - prop=fm.FontProperties(family="monospace", size=10) + prop=fm.FontProperties(family="monospace", size=10), ) ax.add_artist(legend) y_offset += 0.025 + 0.055 * (len(_data) + int(_name is not None)) @@ -204,16 +236,14 @@ def add_data(_data: list[tuple[np.ndarray, str, str]], _name: str | None) -> Non else: add_data(cex_data_aggr, None) - ax.stackplot(x, np.array(y[::-1]), colors=colors[::-1], edgecolor="black", linewidth=0.3) + ax.stackplot( + x, np.array(y[::-1]), colors=colors[::-1], edgecolor="black", linewidth=0.3 + ) ax.axvline(rpl_usd, color="black", linestyle="--", linewidth=1) def get_formatter(base_fmt: str, *, scale=1.0, prefix="", suffix=""): def formatter(_x, _pos) -> str: - levels = [ - (1_000_000_000, "B"), - (1_000_000, "M"), - (1_000, "K") - ] + levels = [(1_000_000_000, "B"), (1_000_000, "M"), (1_000, "K")] modifier = "" base_value = _x * scale @@ -223,28 +253,41 @@ def formatter(_x, _pos) -> str: base_value /= m break - return prefix + f"{base_value:{base_fmt}}".rstrip(".") + modifier + suffix + return ( + prefix + 
f"{base_value:{base_fmt}}".rstrip(".") + modifier + suffix + ) + return ticker.FuncFormatter(formatter) range_size = x[-1] - x[0] x_ticks = ax.get_xticks() - ax.set_xticks([t for t in x_ticks if abs(t - rpl_usd) >= range_size / 20] + [rpl_usd]) + ax.set_xticks( + [t for t in x_ticks if abs(t - rpl_usd) >= range_size / 20] + [rpl_usd] + ) ax.set_xlim((x[0], x[-1])) - ax.xaxis.set_major_formatter(get_formatter(".2f" if (range_size >= 0.1) else ".3f", prefix="$")) + ax.xaxis.set_major_formatter( + get_formatter(".2f" if (range_size >= 0.1) else ".3f", prefix="$") + ) ax.yaxis.set_major_formatter(get_formatter("#.3g", prefix="$")) ax_top = ax.twiny() ax_top.minorticks_on() - ax_top.set_xticks([t for t in x_ticks if abs(t - rpl_usd) >= range_size / 10] + [rpl_usd]) + ax_top.set_xticks( + [t for t in x_ticks if abs(t - rpl_usd) >= range_size / 10] + [rpl_usd] + ) ax_top.set_xlim(ax.get_xlim()) - ax_top.xaxis.set_major_formatter(get_formatter(".5f", prefix="Ξ ", scale=(rpl_eth / rpl_usd))) + ax_top.xaxis.set_major_formatter( + get_formatter(".5f", prefix="Ξ ", scale=(rpl_eth / rpl_usd)) + ) ax_right = ax.twinx() ax_right.minorticks_on() ax_right.set_yticks(ax.get_yticks()) ax_right.set_ylim(ax.get_ylim()) - ax_right.yaxis.set_major_formatter(get_formatter("#.3g", prefix="Ξ ", scale=(rpl_eth / rpl_usd))) + ax_right.yaxis.set_major_formatter( + get_formatter("#.3g", prefix="Ξ ", scale=(rpl_eth / rpl_usd)) + ) return fig @@ -253,25 +296,31 @@ def formatter(_x, _pos) -> str: @describe(max_price="upper end of price range in USD") @describe(sources="choose places to pull liquidity data from") async def wall( - self, - interaction: Interaction, - min_price: float = 0.0, - max_price: float | None = None, - sources: Literal["All", "CEX", "DEX"] = "All" + self, + interaction: Interaction, + min_price: float = 0.0, + max_price: float | None = None, + sources: Literal["All", "CEX", "DEX"] = "All", ) -> None: """Show the current RPL market depth across exchanges""" await 
interaction.response.defer(ephemeral=is_hidden(interaction)) embed = Embed(title="RPL Market Depth") async def on_fail() -> None: - embed.set_image(url="https://media1.giphy.com/media/hEc4k5pN17GZq/giphy.gif") + embed.set_image( + url="https://media1.giphy.com/media/hEc4k5pN17GZq/giphy.gif" + ) await interaction.followup.send(embed=embed) return None try: async with aiohttp.ClientSession() as session: # use Binance as USD price oracle - rpl_usd = next(iter((await Binance("RPL", ["USDT"]).get_liquidity(session)).values())).price + rpl_usd = next( + iter( + (await Binance("RPL", ["USDT"]).get_liquidity(session)).values() + ) + ).price eth_usd = await rp.get_eth_usdc_price() rpl_eth = rpl_usd / eth_usd except Exception as e: @@ -320,7 +369,10 @@ async def on_fail() -> None: embed.set_author(name="🔗 Data from CEX APIs and Mainnet") embed.add_field(name="Current Price", value=f"${rpl_usd:,.2f} | Ξ{rpl_eth:.5f}") - embed.add_field(name="Observed Liquidity", value=f"${liquidity_usd:,.0f} | Ξ{liquidity_eth:,.0f}") + embed.add_field( + name="Observed Liquidity", + value=f"${liquidity_usd:,.0f} | Ξ{liquidity_eth:,.0f}", + ) embed.add_field(name="Sources", value=", ".join(source_desc)) file_name = "wall.png" diff --git a/rocketwatch/strings.py b/rocketwatch/strings.py index 4c8097f4..0ca2ddbf 100644 --- a/rocketwatch/strings.py +++ b/rocketwatch/strings.py @@ -1,7 +1,7 @@ import i18n -i18n.load_path.append('./strings/') -i18n.set('skip_locale_root_data', True) -i18n.set('error_on_missing_translation', False) -i18n.set('file_format', 'json') +i18n.load_path.append("./strings/") +i18n.set("skip_locale_root_data", True) +i18n.set("error_on_missing_translation", False) +i18n.set("file_format", "json") _ = i18n.t diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 23dc38e7..84217647 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -37,7 +37,9 @@ async def ts_to_block(target_ts: int) -> int: # l == r, highest 
block number below target block = hi - if abs(await block_to_ts(block + 1) - target_ts) < abs(await block_to_ts(block) - target_ts): + if abs(await block_to_ts(block + 1) - target_ts) < abs( + await block_to_ts(block) - target_ts + ): block += 1 log.debug(f"Closest match: block {block} @ {await block_to_ts(block)}") diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 80dfa08a..08418daf 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -28,7 +28,9 @@ async def _get_contract(self): async def _get_proposal_contract(self): if self._proposal_contract is None: - self._proposal_contract = await rp.get_contract_by_name(self._proposal_contract_name) + self._proposal_contract = await rp.get_contract_by_name( + self._proposal_contract_name + ) return self._proposal_contract @dataclass(frozen=True, slots=True) @@ -51,16 +53,16 @@ def _build_vote_graph(self, proposal: Proposal) -> str: def sanitize(message: str) -> str: max_length = 150 if len(message) > max_length: - message = message[:(max_length - 1)] + "…" + message = message[: (max_length - 1)] + "…" return message async def build_proposal_body( - self, - proposal: Proposal, - *, - include_proposer=True, - include_payload=True, - include_votes=True + self, + proposal: Proposal, + *, + include_proposer=True, + include_payload=True, + include_votes=True, ) -> str: body_repr = f"Description:\n{self.sanitize(proposal.message)}" @@ -78,7 +80,9 @@ async def build_proposal_body( except Exception: # if this goes wrong, just use the raw payload log.exception("Failed to decode proposal payload") - body_repr += f"\n\nRaw Payload (failed to decode):\n{proposal.payload.hex()}" + body_repr += ( + f"\n\nRaw Payload (failed to decode):\n{proposal.payload.hex()}" + ) if include_votes: body_repr += f"\n\nVotes:\n{self._build_vote_graph(proposal)}" @@ -87,7 +91,12 @@ async def build_proposal_body( class DefaultDAO(DAO): - def __init__(self, contract_name: Literal["rocketDAONodeTrustedProposals", 
"rocketDAOSecurityProposals"]): + def __init__( + self, + contract_name: Literal[ + "rocketDAONodeTrustedProposals", "rocketDAOSecurityProposals" + ], + ): if contract_name == "rocketDAONodeTrustedProposals": self.display_name = "oDAO" elif contract_name == "rocketDAOSecurityProposals": @@ -117,36 +126,60 @@ class Proposal(DAO.Proposal): async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: proposal_contract = await self._get_proposal_contract() num_proposals = await proposal_contract.functions.getTotal().call() - proposal_dao_names = await rp.multicall([ - proposal_contract.functions.getDAO(proposal_id) for proposal_id in range(1, num_proposals + 1) - ]) + proposal_dao_names = await rp.multicall( + [ + proposal_contract.functions.getDAO(proposal_id) + for proposal_id in range(1, num_proposals + 1) + ] + ) - relevant_proposals = [(i + 1) for (i, dao_name) in enumerate(proposal_dao_names) if (dao_name == self.contract_name)] - proposal_states = await rp.multicall([ - proposal_contract.functions.getState(proposal_id) for proposal_id in relevant_proposals - ]) + relevant_proposals = [ + (i + 1) + for (i, dao_name) in enumerate(proposal_dao_names) + if (dao_name == self.contract_name) + ] + proposal_states = await rp.multicall( + [ + proposal_contract.functions.getState(proposal_id) + for proposal_id in relevant_proposals + ] + ) proposals = {state: [] for state in DefaultDAO.ProposalState} - for proposal_id, state in zip(relevant_proposals, proposal_states, strict=False): + for proposal_id, state in zip( + relevant_proposals, proposal_states, strict=False + ): proposals[state].append(proposal_id) return proposals async def fetch_proposal(self, proposal_id: int) -> Proposal: proposal_contract = await self._get_proposal_contract() - (proposer, message, payload, created, start, end, expires, - votes_for_raw, votes_against_raw, votes_required_raw) = await rp.multicall([ - proposal_contract.functions.getProposer(proposal_id), - 
proposal_contract.functions.getMessage(proposal_id), - proposal_contract.functions.getPayload(proposal_id), - proposal_contract.functions.getCreated(proposal_id), - proposal_contract.functions.getStart(proposal_id), - proposal_contract.functions.getEnd(proposal_id), - proposal_contract.functions.getExpires(proposal_id), - proposal_contract.functions.getVotesFor(proposal_id), - proposal_contract.functions.getVotesAgainst(proposal_id), - proposal_contract.functions.getVotesRequired(proposal_id) - ]) + ( + proposer, + message, + payload, + created, + start, + end, + expires, + votes_for_raw, + votes_against_raw, + votes_required_raw, + ) = await rp.multicall( + [ + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getEnd(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotesFor(proposal_id), + proposal_contract.functions.getVotesAgainst(proposal_id), + proposal_contract.functions.getVotesRequired(proposal_id), + ] + ) return DefaultDAO.Proposal( id=proposal_id, proposer=cast(ChecksumAddress, proposer), @@ -158,7 +191,7 @@ async def fetch_proposal(self, proposal_id: int) -> Proposal: expires=expires, votes_for=solidity.to_int(votes_for_raw), votes_against=solidity.to_int(votes_against_raw), - votes_required=solidity.to_float(votes_required_raw) + votes_required=solidity.to_float(votes_required_raw), ) def _build_vote_graph(self, proposal: Proposal) -> str: @@ -170,13 +203,13 @@ def _build_vote_graph(self, proposal: Proposal) -> str: graph.barh( [votes_for, votes_against, max([votes_for, votes_against, votes_required])], ["For", "Against", ""], - max_width=12 + max_width=12, ) graph_bars = graph.get_string().split("\n") quorum_perc = max(votes_for, votes_against) / 
votes_required return ( - f"{graph_bars[0] : <{len(graph_bars[2])}}{'▏' if votes_for >= votes_against else ''}\n" - f"{graph_bars[1] : <{len(graph_bars[2])}}{'▏' if votes_for <= votes_against else ''}\n" + f"{graph_bars[0]: <{len(graph_bars[2])}}{'▏' if votes_for >= votes_against else ''}\n" + f"{graph_bars[1]: <{len(graph_bars[2])}}{'▏' if votes_for <= votes_against else ''}\n" f"Quorum: {quorum_perc:.0%}{' ✔' if (quorum_perc >= 1) else ''}" ) @@ -227,9 +260,12 @@ def votes_total(self): async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: proposal_contract = await self._get_proposal_contract() num_proposals = await proposal_contract.functions.getTotal().call() - proposal_states = await rp.multicall([ - proposal_contract.functions.getState(proposal_id) for proposal_id in range(1, num_proposals + 1) - ]) + proposal_states = await rp.multicall( + [ + proposal_contract.functions.getState(proposal_id) + for proposal_id in range(1, num_proposals + 1) + ] + ) proposals = {state: [] for state in ProtocolDAO.ProposalState} for proposal_id in range(1, num_proposals + 1): @@ -240,24 +276,39 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: async def fetch_proposal(self, proposal_id: int) -> Proposal: proposal_contract = await self._get_proposal_contract() - (proposer, message, payload, created, start, phase1_end, phase2_end, - expires, vp_for_raw, vp_against_raw, vp_veto_raw, vp_abstain_raw, - vp_required_raw, veto_quorum_raw) = await rp.multicall([ - proposal_contract.functions.getProposer(proposal_id), - proposal_contract.functions.getMessage(proposal_id), - proposal_contract.functions.getPayload(proposal_id), - proposal_contract.functions.getCreated(proposal_id), - proposal_contract.functions.getStart(proposal_id), - proposal_contract.functions.getPhase1End(proposal_id), - proposal_contract.functions.getPhase2End(proposal_id), - proposal_contract.functions.getExpires(proposal_id), - 
proposal_contract.functions.getVotingPowerFor(proposal_id), - proposal_contract.functions.getVotingPowerAgainst(proposal_id), - proposal_contract.functions.getVotingPowerVeto(proposal_id), - proposal_contract.functions.getVotingPowerAbstained(proposal_id), - proposal_contract.functions.getVotingPowerRequired(proposal_id), - proposal_contract.functions.getVetoQuorum(proposal_id) - ]) + ( + proposer, + message, + payload, + created, + start, + phase1_end, + phase2_end, + expires, + vp_for_raw, + vp_against_raw, + vp_veto_raw, + vp_abstain_raw, + vp_required_raw, + veto_quorum_raw, + ) = await rp.multicall( + [ + proposal_contract.functions.getProposer(proposal_id), + proposal_contract.functions.getMessage(proposal_id), + proposal_contract.functions.getPayload(proposal_id), + proposal_contract.functions.getCreated(proposal_id), + proposal_contract.functions.getStart(proposal_id), + proposal_contract.functions.getPhase1End(proposal_id), + proposal_contract.functions.getPhase2End(proposal_id), + proposal_contract.functions.getExpires(proposal_id), + proposal_contract.functions.getVotingPowerFor(proposal_id), + proposal_contract.functions.getVotingPowerAgainst(proposal_id), + proposal_contract.functions.getVotingPowerVeto(proposal_id), + proposal_contract.functions.getVotingPowerAbstained(proposal_id), + proposal_contract.functions.getVotingPowerRequired(proposal_id), + proposal_contract.functions.getVetoQuorum(proposal_id), + ] + ) return ProtocolDAO.Proposal( id=proposal_id, proposer=cast(ChecksumAddress, proposer), @@ -273,7 +324,7 @@ async def fetch_proposal(self, proposal_id: int) -> Proposal: votes_veto=solidity.to_float(vp_veto_raw), votes_abstain=solidity.to_float(vp_abstain_raw), quorum=solidity.to_float(vp_required_raw), - veto_quorum=solidity.to_float(veto_quorum_raw) + veto_quorum=solidity.to_float(veto_quorum_raw), ) def _build_vote_graph(self, proposal: Proposal) -> str: @@ -283,15 +334,17 @@ def _build_vote_graph(self, proposal: Proposal) -> str: 
round(proposal.votes_for), round(proposal.votes_against), round(proposal.votes_abstain), - round(max(proposal.votes_total, proposal.quorum)) + round(max(proposal.votes_total, proposal.quorum)), ], ["For", "Against", "Abstain", ""], - max_width=12 + max_width=12, ) main_quorum_perc = proposal.votes_total / proposal.quorum lines = graph.get_string().split("\n")[:-1] - lines.append(f"Quorum: {main_quorum_perc:.2%}{' ✔' if (main_quorum_perc >= 1) else ''}") + lines.append( + f"Quorum: {main_quorum_perc:.2%}{' ✔' if (main_quorum_perc >= 1) else ''}" + ) if proposal.votes_veto > 0: graph = tpl.figure() @@ -300,14 +353,16 @@ def _build_vote_graph(self, proposal: Proposal) -> str: round(proposal.votes_veto), round(max(proposal.votes_veto, proposal.veto_quorum)), ], - [f"{'Veto' : <{len('Against')}}", ""], - max_width=12 + [f"{'Veto': <{len('Against')}}", ""], + max_width=12, ) veto_graph_bars = graph.get_string().split("\n") veto_quorum_perc = proposal.votes_veto / proposal.veto_quorum lines.append("") - lines.append(f"{veto_graph_bars[0] : <{len(veto_graph_bars[1])}}▏") - lines.append(f"Quorum: {veto_quorum_perc:.2%}{' ✔' if (veto_quorum_perc >= 1) else ''}") + lines.append(f"{veto_graph_bars[0]: <{len(veto_graph_bars[1])}}▏") + lines.append( + f"Quorum: {veto_quorum_perc:.2%}{' ✔' if (veto_quorum_perc >= 1) else ''}" + ) return "\n".join(lines) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 3a52cbfc..6b87581b 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -84,7 +84,10 @@ async def resolve_ens(interaction, node_address): async def get_pdao_delegates() -> dict[str, str]: global _pdao_delegates try: - async with aiohttp.ClientSession() as session, session.get("https://delegates.rocketpool.net/api/delegates") as resp: + async with ( + aiohttp.ClientSession() as session, + session.get("https://delegates.rocketpool.net/api/delegates") as resp, + ): _pdao_delegates = {d["nodeAddress"]: d["name"] for d in await 
resp.json()} except Exception: log.warning("Failed to fetch pDAO delegates.") @@ -92,11 +95,11 @@ async def get_pdao_delegates() -> dict[str, str]: async def el_explorer_url( - target: str, - name: str = "", - prefix: str | Literal[-1] = "", - name_fmt: Callable[[str], str] | None = None, - block="latest" + target: str, + name: str = "", + prefix: str | Literal[-1] = "", + name_fmt: Callable[[str], str] | None = None, + block="latest", ): if w3.is_address(target): # sanitize address @@ -107,7 +110,9 @@ async def el_explorer_url( dashboard_network = "" if (chain == "mainnet") else f"?network={chain}" if await rp.is_node(target): - megapool_address = await rp.call("rocketNodeManager.getMegapoolAddress", target) + megapool_address = await rp.call( + "rocketNodeManager.getMegapoolAddress", target + ) if megapool_address != "0x0000000000000000000000000000000000000000": url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" @@ -121,15 +126,25 @@ async def el_explorer_url( if not name and (n := _(n_key)) != n_key: name = n - if prefix != -1 and await rp.call("rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block): + if prefix != -1 and await rp.call( + "rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block + ): prefix += ":cup_with_straw:" - if not name and (member_id := await rp.call("rocketDAONodeTrusted.getMemberID", target, block=block)): + if not name and ( + member_id := await rp.call( + "rocketDAONodeTrusted.getMemberID", target, block=block + ) + ): if prefix != -1: prefix += "🔮" name = member_id - if not name and (member_id := await rp.call("rocketDAOSecurity.getMemberID", target, block=block)): + if not name and ( + member_id := await rp.call( + "rocketDAOSecurity.getMemberID", target, block=block + ) + ): if prefix != -1: prefix += "🔒" name = member_id @@ -145,10 +160,16 @@ async def el_explorer_url( if not name: a = Addresses.get(target) # don't apply name if it has label is one with the id 
"take-action", as these don't show up on the explorer - if all(( - (not a.labels or len(a.labels) != 1 or a.labels[0].id != "take-action"), - a.name and ("alert" not in a.name.lower()) - )): + if all( + ( + ( + not a.labels + or len(a.labels) != 1 + or a.labels[0].id != "take-action" + ), + a.name and ("alert" not in a.name.lower()), + ) + ): name = a.name if not name: # not an odao member, try to get their ens @@ -157,28 +178,52 @@ async def el_explorer_url( if code := await w3.eth.get_code(target): if prefix != -1: prefix += "📄" - if ((not name) and (w3.keccak(text=code.hex()).hex() in cfg.other.mev_hashes)): + if (not name) and ( + w3.keccak(text=code.hex()).hex() in cfg.other.mev_hashes + ): name = "MEV Bot Contract" if not name: with contextlib.suppress(Exception): - c = w3.eth.contract(address=target, abi=[{"inputs" : [], - "name" : "name", - "outputs" : [{"internalType": "string", - "name" : "", - "type" : "string"}], - "stateMutability": "view", - "type" : "function"}]) + c = w3.eth.contract( + address=target, + abi=[ + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string", + } + ], + "stateMutability": "view", + "type": "function", + } + ], + ) n = await c.functions.name().call() # make sure nobody is trying to inject a custom link, as there was a guy that made the name of his contract # 'RocketSwapRouter](https://etherscan.io/search?q=0x16d5a408e807db8ef7c578279beeee6b228f1c1c)[', # in an attempt to get people to click on his contract # first, if the name has a link in it, we ignore it - if any(keyword in n.lower() for keyword in - ["http", "discord", "airdrop", "telegram", "twitter", "youtube"]): + if any( + keyword in n.lower() + for keyword in [ + "http", + "discord", + "airdrop", + "telegram", + "twitter", + "youtube", + ] + ): log.warning(f"Contract {target} has a suspicious name: {n}") else: - name = f"{discord.utils.remove_markdown(n, ignore_links=False)}*" + name = ( + 
f"{discord.utils.remove_markdown(n, ignore_links=False)}*" + ) else: # transaction hash url = f"{cfg.execution_layer.explorer}/tx/{target}" @@ -199,9 +244,20 @@ async def prepare_args(args): args[f"{arg_key}_raw"] = arg_value # handle numbers - numeric_keywords = ["amount", "value", "rate", "totaleth", "stakingeth", "rethsupply", "rplprice", "profit"] - if any(keyword in arg_key.lower() for keyword in numeric_keywords) and isinstance(arg_value, int): - args[arg_key] = arg_value / 10 ** 18 + numeric_keywords = [ + "amount", + "value", + "rate", + "totaleth", + "stakingeth", + "rethsupply", + "rplprice", + "profit", + ] + if any( + keyword in arg_key.lower() for keyword in numeric_keywords + ) and isinstance(arg_value, int): + args[arg_key] = arg_value / 10**18 # handle timestamps if "deadline" in arg_key.lower() and isinstance(arg_value, int): @@ -209,9 +265,9 @@ async def prepare_args(args): # handle percentages if "perc" in arg_key.lower(): - args[arg_key] = arg_value / 10 ** 16 + args[arg_key] = arg_value / 10**16 if arg_key.lower() in ["rate", "penalty"]: - args[f"{arg_key}_perc"] = arg_value / 10 ** 16 + args[f"{arg_key}_perc"] = arg_value / 10**16 # handle hex strings if str(arg_value).startswith("0x"): @@ -231,7 +287,9 @@ async def prepare_args(args): args[arg_key] = await el_explorer_url(arg_value, prefix=prefix) args[f"{arg_key}_clean"] = await el_explorer_url(arg_value) if len(arg_value) == 66: - args[f'{arg_key}_small'] = await el_explorer_url(arg_value, name="[tnx]") + args[f"{arg_key}_small"] = await el_explorer_url( + arg_value, name="[tnx]" + ) if "from" in args: args["fancy_from"] = args["from"] if "caller" in args and args["from"] != args["caller"]: @@ -245,7 +303,10 @@ async def assemble(args) -> Embed: e.colour = Color.from_rgb(235, 86, 86) if "sell_rpl" in args.event_name: e.colour = Color.from_rgb(235, 86, 86) - if "buy_rpl" in args.event_name or "finality_delay_recover_event" in args.event_name: + if ( + "buy_rpl" in args.event_name + or 
"finality_delay_recover_event" in args.event_name + ): e.colour = Color.from_rgb(86, 235, 86) if "price_update_event" in args.event_name: e.colour = Color.from_rgb(86, 235, 235) @@ -255,18 +316,32 @@ async def assemble(args) -> Embed: # raise Exception(str((args, args.assets, args.event_name))) if ("pool_deposit" in args.event_name) and (amount >= 1000): e.set_image(url="https://media.giphy.com/media/VIX2atZr8dCKk5jF6L/giphy.gif") - elif any(kw in args.event_name for kw in ["_scrub_event", "_dissolve_event", "_slash_event", "finality_delay_event"]): + elif any( + kw in args.event_name + for kw in [ + "_scrub_event", + "_dissolve_event", + "_slash_event", + "finality_delay_event", + ] + ): e.set_image(url="https://c.tenor.com/p3hWK5YRo6IAAAAC/this-is-fine-dog.gif") elif "_penalty" in args.event_name: e.set_image(url="https://i.giphy.com/jmSjPi6soIoQCFwaXJ.webp") elif "_proposal_smoothie_" in args.event_name: - e.set_image(url="https://cdn.discordapp.com/attachments/812745786638336021/1106983677130461214/butta-commie-filter.png") + e.set_image( + url="https://cdn.discordapp.com/attachments/812745786638336021/1106983677130461214/butta-commie-filter.png" + ) elif "sdao_member_kick" in args.event_name: - e.set_image(url="https://media1.tenor.com/m/Xuv3IEoH1a4AAAAC/youre-fired-donald-trump.gif") + e.set_image( + url="https://media1.tenor.com/m/Xuv3IEoH1a4AAAAC/youre-fired-donald-trump.gif" + ) match args.event_name: case "cs_max_validator_increase_event": - e.set_image(url="https://media1.tenor.com/m/Yp6Yeiufb04AAAAd/piranhas-feeding.gif") + e.set_image( + url="https://media1.tenor.com/m/Yp6Yeiufb04AAAAd/piranhas-feeding.gif" + ) case "redstone_upgrade_triggered": url = "https://cdn.dribbble.com/users/187497/screenshots/2284528/media/123903807d334c15aa105b44f2bd9252.gif" e.set_image(url=url) @@ -285,17 +360,22 @@ async def assemble(args) -> Embed: match args.event_name: case "pdao_set_delegate": - use_large = (args.votingPower >= 200) + use_large = args.votingPower >= 
200 case "eth_deposit_event": - use_large = (amount >= 32) + use_large = amount >= 32 case "rpl_stake_event": - use_large = (amount >= ((3 * 2.4) / solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")))) + use_large = amount >= ( + (3 * 2.4) + / solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) + ) case "rpl_migration_event": - use_large = (amount >= 1000) + use_large = amount >= 1000 case "cs_deposit_eth_event" | "cs_withdraw_eth_event": - use_large = (args["assets"] >= 100) + use_large = args["assets"] >= 100 case "cs_deposit_rpl_event" | "cs_withdraw_rpl_event": - use_large = (args["assets"] >= 16 / solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice"))) + use_large = args["assets"] >= 16 / solidity.to_float( + await rp.call("rocketNetworkPrices.getRPLPrice") + ) case "rocksolid_deposit_event": use_large = args["assets"] >= 50 case "rocksolid_withdrawal_event": @@ -303,12 +383,25 @@ async def assemble(args) -> Embed: case "validator_multi_deposit_event": use_large = args["numberOfValidators"] >= 5 case _: - use_large = (amount >= 100) + use_large = amount >= 100 # make numbers look nice for arg_key, arg_value in list(args.items()): - if any(keyword in arg_key.lower() for keyword in - ["amount", "value", "total_supply", "perc", "tnx_fee", "rate", "votingpower", "assets", "shares", "profit"]): + if any( + keyword in arg_key.lower() + for keyword in [ + "amount", + "value", + "total_supply", + "perc", + "tnx_fee", + "rate", + "votingpower", + "assets", + "shares", + "profit", + ] + ): if not isinstance(arg_value, (int, float)) or "raw" in arg_key: continue if arg_value: @@ -319,8 +412,14 @@ async def assemble(args) -> Embed: arg_value = int(arg_value) args[arg_key] = humanize.intcomma(arg_value) - has_small = _(f"embeds.{args.event_name}.description_small") != f"embeds.{args.event_name}.description_small" - has_large = _(f"embeds.{args.event_name}.description") != f"embeds.{args.event_name}.description" + has_small = ( + 
_(f"embeds.{args.event_name}.description_small") + != f"embeds.{args.event_name}.description_small" + ) + has_large = ( + _(f"embeds.{args.event_name}.description") + != f"embeds.{args.event_name}.description" + ) if has_small and not (has_large and use_large): e.description = _(f"embeds.{args.event_name}.description_small", **args) @@ -334,17 +433,19 @@ async def assemble(args) -> Embed: e.description = _(f"embeds.{args.event_name}.description", **args) if "cow_uid" in args: - e.add_field(name="Cow Order", - value=args.cow_uid, - inline=False) + e.add_field(name="Cow Order", value=args.cow_uid, inline=False) if "exchangeRate" in args: - e.add_field(name="Exchange Rate", - value=f"`{args.exchangeRate} RPL/{args.otherToken}`" + ( - f" (`{args.discountAmount}%` Discount, oDAO: `{args.marketExchangeRate} RPL/ETH`)" - if "discountAmount" in args else "" - ), - inline=False) + e.add_field( + name="Exchange Rate", + value=f"`{args.exchangeRate} RPL/{args.otherToken}`" + + ( + f" (`{args.discountAmount}%` Discount, oDAO: `{args.marketExchangeRate} RPL/ETH`)" + if "discountAmount" in args + else "" + ), + inline=False, + ) """ # show public key if we have one @@ -355,21 +456,19 @@ async def assemble(args) -> Embed: """ if "epoch" in args: - e.add_field(name="Epoch", - value=f"[{args.epoch}](https://{cfg.consensus_layer.explorer}/epoch/{args.epoch})") + e.add_field( + name="Epoch", + value=f"[{args.epoch}](https://{cfg.consensus_layer.explorer}/epoch/{args.epoch})", + ) if "timezone" in args: - e.add_field(name="Timezone", - value=f"`{args.timezone}`", - inline=False) + e.add_field(name="Timezone", value=f"`{args.timezone}`", inline=False) if "node_operator" in args: - e.add_field(name="Node Operator", - value=args.node_operator) + e.add_field(name="Node Operator", value=args.node_operator) if "slashing_type" in args: - e.add_field(name="Reason", - value=f"`{args.slashing_type} Violation`") + e.add_field(name="Reason", value=f"`{args.slashing_type} Violation`") """ if 
"commission" in args: @@ -379,78 +478,62 @@ async def assemble(args) -> Embed: """ if "invoiceID" in args: - e.add_field( - name="Invoice ID", - value=f"`{args.invoiceID}`", - inline=False - ) + e.add_field(name="Invoice ID", value=f"`{args.invoiceID}`", inline=False) if "settingContractName" in args: - e.add_field(name="Contract", - value=f"`{args.settingContractName}`", - inline=False) + e.add_field( + name="Contract", value=f"`{args.settingContractName}`", inline=False + ) if "periodLength" in args: e.add_field( name="Payment Interval", value=humanize.naturaldelta(datetime.timedelta(seconds=args.periodLength)), - inline=False + inline=False, ) if "startTime" in args: e.add_field( name="First Payment", value=f"", - inline=False + inline=False, ) if "index" in args: - e.add_field( - name="Index", - value=args.index, - inline=True - ) + e.add_field(name="Index", value=args.index, inline=True) if "challengePeriod" in args: e.add_field( name="Challenge Period", - value=humanize.naturaldelta(datetime.timedelta(seconds=args.challengePeriod)), - inline=True + value=humanize.naturaldelta( + datetime.timedelta(seconds=args.challengePeriod) + ), + inline=True, ) if "proposalBond" in args: - e.add_field( - name="Proposal Bond", - value=f"{args.proposalBond} RPL", - inline=True - ) + e.add_field(name="Proposal Bond", value=f"{args.proposalBond} RPL", inline=True) if "challengeBond" in args: e.add_field( - name="Challenge Bond", - value=f"{args.challengeBond} RPL", - inline=True + name="Challenge Bond", value=f"{args.challengeBond} RPL", inline=True ) if "contractAddress" in args and "Contract" in args.get("type", ""): - e.add_field(name="Contract Address", - value=args.contractAddress, - inline=False) + e.add_field(name="Contract Address", value=args.contractAddress, inline=False) if "url" in args: - e.add_field(name="URL", - value=args.url, - inline=False) + e.add_field(name="URL", value=args.url, inline=False) # show current inflation if "inflation" in args: - 
e.add_field(name="Current Inflation", - value=f"{args.inflation}%", - inline=False) + e.add_field(name="Current Inflation", value=f"{args.inflation}%", inline=False) if "submission" in args and "merkleTreeCID" in args.submission: n = f"0x{s_hex(args.submission.merkleRoot.hex())}" - e.add_field(name="Merkle Tree", - value=f"[{n}](https://gateway.ipfs.io/ipfs/{args.submission.merkleTreeCID})") + e.add_field( + name="Merkle Tree", + value=f"[{n}](https://gateway.ipfs.io/ipfs/{args.submission.merkleTreeCID})", + ) # show transaction hash if possible if "transactionHash" in args: @@ -458,34 +541,35 @@ async def assemble(args) -> Embed: e.add_field(name="Transaction Hash", value=content) # show sender address - if senders := [value for key, value in args.items() if key.lower() in ["sender", "from"]]: + if senders := [ + value for key, value in args.items() if key.lower() in ["sender", "from"] + ]: sender = senders[0] v = sender # if args["origin"] is an address and does not match the sender, show both if "caller" in args and args["caller"] != sender and "0x" in args["caller"]: v = f"{args.caller} ({sender})" - e.add_field(name="Sender Address", - value=v) + e.add_field(name="Sender Address", value=v) # show block number el_explorer = cfg.execution_layer.explorer if "block_number" in args: - e.add_field(name="Block Number", - value=f"[{args.blockNumber}]({el_explorer}/block/{args.blockNumber})") + e.add_field( + name="Block Number", + value=f"[{args.blockNumber}]({el_explorer}/block/{args.blockNumber})", + ) cl_explorer = cfg.consensus_layer.explorer if "slot" in args: - e.add_field(name="Slot", - value=f"[{args.slot}]({cl_explorer}/slot/{args.slot})") + e.add_field(name="Slot", value=f"[{args.slot}]({cl_explorer}/slot/{args.slot})") if "smoothie_amount" in args: - e.add_field(name="Smoothing Pool Balance", - value=f"||{args.smoothie_amount}|| ETH") + e.add_field( + name="Smoothing Pool Balance", value=f"||{args.smoothie_amount}|| ETH" + ) if args.get("reason"): - 
e.add_field(name="Likely Revert Reason", - value=f"`{args.reason}`", - inline=False) + e.add_field(name="Likely Revert Reason", value=f"`{args.reason}`", inline=False) # show timestamp if "time" in args: @@ -497,9 +581,7 @@ async def assemble(args) -> Embed: times += [await block_to_ts(block)] time = times[0] if times else int(datetime.datetime.now().timestamp()) - e.add_field(name="Timestamp", - value=f" ()", - inline=False) + e.add_field(name="Timestamp", value=f" ()", inline=False) # show the transaction fees if "tnx_fee" in args: diff --git a/rocketwatch/utils/etherscan.py b/rocketwatch/utils/etherscan.py index 7749cfb5..523ee451 100644 --- a/rocketwatch/utils/etherscan.py +++ b/rocketwatch/utils/etherscan.py @@ -16,18 +16,22 @@ async def get_recent_account_transactions(address, block_count=44800): lowest_block = highest_block - block_count async with aiohttp.ClientSession() as session: - resp = await session.get(ETHERSCAN_URL, params={"address" : address, - "page" : page, - "apikey" : cfg.execution_layer.etherscan_secret, - "module" : "account", - "action" : "txlist", - "sort" : "desc", - "startblock": lowest_block, - "endblock" : highest_block}) + resp = await session.get( + ETHERSCAN_URL, + params={ + "address": address, + "page": page, + "apikey": cfg.execution_layer.etherscan_secret, + "module": "account", + "action": "txlist", + "sort": "desc", + "startblock": lowest_block, + "endblock": highest_block, + }, + ) if resp.status != 200: - log.debug( - f"Error querying etherscan, unexpected HTTP {resp.status!s}") + log.debug(f"Error querying etherscan, unexpected HTTP {resp.status!s}") return parsed = await resp.json() @@ -42,4 +46,6 @@ def valid_tx(tx): return False return int(tx["isError"]) == 0 - return {result["hash"]: result for result in parsed["result"] if valid_tx(result)} + return { + result["hash"]: result for result in parsed["result"] if valid_tx(result) + } diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 
6b0f2683..cd1e94e6 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -25,7 +25,11 @@ class Event: thumbnail: Image | None = None def get_score(self): - return (10**9 * self.block_number) + (10**5 * self.transaction_index) + self.event_index + return ( + (10**9 * self.block_number) + + (10**5 * self.transaction_index) + + self.event_index + ) class EventPlugin(commands.Cog): @@ -55,5 +59,7 @@ async def get_new_events(self) -> list[Event]: async def _get_new_events(self) -> list[Event]: pass - async def get_past_events(self, from_block: BlockNumber, to_block: BlockNumber) -> list[Event]: + async def get_past_events( + self, from_block: BlockNumber, to_block: BlockNumber + ) -> list[Event]: return [] diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index df763e4b..95b55674 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -12,7 +12,7 @@ def get_logs( event: ContractEvent, from_block: BlockNumber, to_block: BlockNumber, - arg_filters: dict[str, Any] | None = None + arg_filters: dict[str, Any] | None = None, ) -> list[LogReceipt]: start_block = from_block end_block = to_block @@ -29,7 +29,7 @@ def get_logs( logs += event.get_logs( from_block=from_block, to_block=min(to_block, end_block), - argument_filters=arg_filters + argument_filters=arg_filters, ) from_block = to_block + 1 diff --git a/rocketwatch/utils/image.py b/rocketwatch/utils/image.py index eddc2282..1725cd00 100644 --- a/rocketwatch/utils/image.py +++ b/rocketwatch/utils/image.py @@ -34,17 +34,17 @@ class FontVariant(StrEnum): class ImageCanvas(ImageDraw): # default color matches Discord mobile dark mode Embed def __init__(self, width: int, height: int, bg_color: Color = (57, 58, 64)): - p_img = PillowImage.new('RGB', (width, height), color=bg_color) + p_img = PillowImage.new("RGB", (width, height), color=bg_color) super().__init__(p_img) self.image = Image(p_img) def progress_bar( - self, - xy: tuple[float, float], - 
size: tuple[float, float], - progress: float, - fill_color: Color, - bg_color : Color = (0, 0, 0) + self, + xy: tuple[float, float], + size: tuple[float, float], + progress: float, + fill_color: Color, + bg_color: Color = (0, 0, 0), ) -> None: x, y = xy width, height = size @@ -59,33 +59,45 @@ def progress_bar( # left semicircle fill_perc = min(1.0, fill_width / radius) angle = 90 * (1 + 2 * math.acos(fill_perc) / math.pi) - self.chord((x, y, x + 2 * radius, y + height), angle, 360 - angle, fill_color) + self.chord( + (x, y, x + 2 * radius, y + height), angle, 360 - angle, fill_color + ) if fill_width > radius: # main bar - self.rectangle((x + radius, y, x + min(fill_width, width - radius), y + height), fill_color) + self.rectangle( + (x + radius, y, x + min(fill_width, width - radius), y + height), + fill_color, + ) if fill_width > (width - radius): # right semicircle fill_perc = min(1.0, (fill_width - width + radius) / radius) angle = 90 * (2 * math.acos(fill_perc) / math.pi) - self.chord((x + width - 2 * radius, y, x + width, y + height), angle, 360 - angle, fill_color) + self.chord( + (x + width - 2 * radius, y, x + width, y + height), + angle, + 360 - angle, + fill_color, + ) @staticmethod @cache - def _get_font(name: str, variant: FontVariant, size: float) -> ImageFont.FreeTypeFont: + def _get_font( + name: str, variant: FontVariant, size: float + ) -> ImageFont.FreeTypeFont: return ImageFont.truetype(f"fonts/{name}-{variant}.ttf", size) def dynamic_text( - self, - xy: tuple[float, float], - text: str, - font_size: float, - font_name: Font = Font.INTER, - font_variant: FontVariant = FontVariant.REGULAR, - color: Color = (255, 255, 255), - max_width: float | None = None, - anchor: str = "lt" + self, + xy: tuple[float, float], + text: str, + font_size: float, + font_name: Font = Font.INTER, + font_variant: FontVariant = FontVariant.REGULAR, + color: Color = (255, 255, 255), + max_width: float | None = None, + anchor: str = "lt", ) -> None: font = 
self._get_font(font_name, font_variant, font_size) if max_width is not None: diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 744c1c73..00b8b279 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -72,20 +72,22 @@ def _get_asks(self, api_response: dict) -> dict[float, float]: @retry_async(tries=3, delay=1) async def _get_order_book( - self, - market: Market, - session: aiohttp.ClientSession + self, market: Market, session: aiohttp.ClientSession ) -> tuple[dict[float, float], dict[float, float]]: params = self._get_request_params(market) url = self._api_base_url + self._get_request_path(market) - response = await session.get(url, params=params, headers={"User-Agent": "Rocket Watch"}) + response = await session.get( + url, params=params, headers={"User-Agent": "Rocket Watch"} + ) log.debug(f"response from {url}: {response}") data = await response.json() bids = OrderedDict(sorted(self._get_bids(data).items(), reverse=True)) asks = OrderedDict(sorted(self._get_asks(data).items())) return bids, asks - async def _get_liquidity(self, market: Market, session: aiohttp.ClientSession) -> Liquidity | None: + async def _get_liquidity( + self, market: Market, session: aiohttp.ClientSession + ) -> Liquidity | None: bids, asks = await self._get_order_book(market, session) if not (bids and asks): log.warning("Empty order book") @@ -114,7 +116,9 @@ def depth_at(_price: float) -> float: return Liquidity(price, depth_at) - async def get_liquidity(self, session: aiohttp.ClientSession) -> dict[Market, Liquidity]: + async def get_liquidity( + self, session: aiohttp.ClientSession + ) -> dict[Market, Liquidity]: markets = {} for market in self.markets: if liq := await self._get_liquidity(market, session): @@ -164,10 +168,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"product_id": f"{market.major}-{market.minor}"} def _get_bids(self, api_response: dict) -> dict[float, float]: - return 
{float(bid["price"]): float(bid["size"]) for bid in api_response["pricebook"]["bids"]} + return { + float(bid["price"]): float(bid["size"]) + for bid in api_response["pricebook"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(ask["price"]): float(ask["size"]) for ask in api_response["pricebook"]["asks"]} + return { + float(ask["price"]): float(ask["size"]) + for ask in api_response["pricebook"]["asks"] + } class Deepcoin(CEX): @@ -188,10 +198,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"instId": f"{market.major}-{market.minor}", "sz": 400} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["bids"]} + return { + float(price): float(size) for price, size in api_response["data"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["asks"]} + return { + float(price): float(size) for price, size in api_response["data"]["asks"] + } class GateIO(CEX): @@ -236,10 +250,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"instId": f"{market.major}-{market.minor}", "sz": 400} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _, _ in api_response["data"][0]["bids"]} + return { + float(price): float(size) + for price, size, _, _ in api_response["data"][0]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _, _ in api_response["data"][0]["asks"]} + return { + float(price): float(size) + for price, size, _, _ in api_response["data"][0]["asks"] + } class Bitget(CEX): @@ -260,10 +280,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}{market.minor}", "limit": 150} def _get_bids(self, api_response: 
dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["bids"]} + return { + float(price): float(size) for price, size in api_response["data"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["asks"]} + return { + float(price): float(size) for price, size in api_response["data"]["asks"] + } class MEXC(CEX): @@ -305,13 +329,21 @@ def _get_request_path(market: Market) -> str: @staticmethod def _get_request_params(market: Market) -> dict[str, str | int]: - return {"category": "spot", "symbol": f"{market.major}{market.minor}", "limit": 200} + return { + "category": "spot", + "symbol": f"{market.major}{market.minor}", + "limit": 200, + } def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["result"]["b"]} + return { + float(price): float(size) for price, size in api_response["result"]["b"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["result"]["a"]} + return { + float(price): float(size) for price, size in api_response["result"]["a"] + } class CryptoDotCom(CEX): @@ -335,10 +367,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"instrument_name": f"{market.major}_{market.minor}", "depth": 150} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in api_response["result"]["data"][0]["bids"]} + return { + float(price): float(size) + for price, size, _ in api_response["result"]["data"][0]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in api_response["result"]["data"][0]["asks"]} + return { + float(price): float(size) + for price, size, _ in api_response["result"]["data"][0]["asks"] + } 
class Kraken(CEX): @@ -359,10 +397,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"pair": f"{market.major}{market.minor}", "count": 500} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in next(iter(api_response["result"].values()))["bids"]} + return { + float(price): float(size) + for price, size, _ in next(iter(api_response["result"].values()))["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size, _ in next(iter(api_response["result"].values()))["asks"]} + return { + float(price): float(size) + for price, size, _ in next(iter(api_response["result"].values()))["asks"] + } class Kucoin(CEX): @@ -383,10 +427,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}-{market.minor}"} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["bids"]} + return { + float(price): float(size) for price, size in api_response["data"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["asks"]} + return { + float(price): float(size) for price, size in api_response["data"]["asks"] + } class Bithumb(CEX): @@ -407,10 +455,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"markets": f"{market.minor}-{market.major}"} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {entry["bid_price"]: entry["bid_size"] for entry in api_response[0]["orderbook_units"]} + return { + entry["bid_price"]: entry["bid_size"] + for entry in api_response[0]["orderbook_units"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {entry["ask_price"]: entry["ask_size"] for entry in api_response[0]["orderbook_units"]} + return { + 
entry["ask_price"]: entry["ask_size"] + for entry in api_response[0]["orderbook_units"] + } class BingX(CEX): @@ -431,10 +485,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}-{market.minor}", "limit": 1000} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["bids"]} + return { + float(price): float(size) for price, size in api_response["data"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["asks"]} + return { + float(price): float(size) for price, size in api_response["data"]["asks"] + } class Bitvavo(CEX): @@ -476,13 +534,20 @@ def _get_request_path(market: Market) -> str: @staticmethod def _get_request_params(market: Market) -> dict[str, str | int]: - return {"symbol": f"{market.major.lower()}{market.minor.lower()}", "type": "step0"} + return { + "symbol": f"{market.major.lower()}{market.minor.lower()}", + "type": "step0", + } def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["tick"]["bids"]} + return { + float(entry[0]): float(entry[1]) for entry in api_response["tick"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["tick"]["asks"]} + return { + float(entry[0]): float(entry[1]) for entry in api_response["tick"]["asks"] + } class BitMart(CEX): @@ -503,10 +568,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}_{market.minor}", "limit": 50} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["data"]["bids"]} + return { + float(entry[0]): float(entry[1]) for entry in api_response["data"]["bids"] + } def 
_get_asks(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["data"]["asks"]} + return { + float(entry[0]): float(entry[1]) for entry in api_response["data"]["asks"] + } class Bitrue(CEX): @@ -527,10 +596,16 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}{market.minor}"} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["data"]["tick"]["b"]} + return { + float(entry[0]): float(entry[1]) + for entry in api_response["data"]["tick"]["b"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(entry[0]): float(entry[1]) for entry in api_response["data"]["tick"]["a"]} + return { + float(entry[0]): float(entry[1]) + for entry in api_response["data"]["tick"]["a"] + } class CoinTR(CEX): @@ -551,10 +626,14 @@ def _get_request_params(market: Market) -> dict[str, str | int]: return {"symbol": f"{market.major}{market.minor}", "limit": 150} def _get_bids(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["bids"]} + return { + float(price): float(size) for price, size in api_response["data"]["bids"] + } def _get_asks(self, api_response: dict) -> dict[float, float]: - return {float(price): float(size) for price, size in api_response["data"]["asks"]} + return { + float(price): float(size) for price, size in api_response["data"]["asks"] + } class DigiFinex(CEX): @@ -588,13 +667,12 @@ def __init__(self, address: ChecksumAddress, symbol: str, decimals: int): self.decimals = decimals @classmethod - async def create(cls, address: ChecksumAddress) -> 'ERC20Token': + async def create(cls, address: ChecksumAddress) -> "ERC20Token": address = w3.to_checksum_address(address) contract = await rp.assemble_contract("ERC20", address, mainnet=True) - symbol, decimals = await rp.multicall([ - 
contract.functions.symbol(), - contract.functions.decimals() - ]) + symbol, decimals = await rp.multicall( + [contract.functions.symbol(), contract.functions.decimals()] + ) return cls(address, symbol, decimals) def __str__(self) -> str: @@ -631,14 +709,16 @@ async def get_liquidity(self) -> dict[LiquidityPool, Liquidity]: class BalancerV2(DEX): class WeightedPool(DEX.LiquidityPool): - def __init__(self, pool_id: HexStr, vault, token_0: ERC20Token, token_1: ERC20Token): + def __init__( + self, pool_id: HexStr, vault, token_0: ERC20Token, token_1: ERC20Token + ): self.id = pool_id self.vault = vault self.token_0 = token_0 self.token_1 = token_1 @classmethod - async def create(cls, pool_id: HexStr) -> 'BalancerV2.WeightedPool': + async def create(cls, pool_id: HexStr) -> "BalancerV2.WeightedPool": vault = await rp.get_contract_by_name("BalancerVault", mainnet=True) tokens = (await vault.functions.getPoolTokens(pool_id).call())[0] token_0 = await ERC20Token.create(tokens[0]) @@ -650,10 +730,14 @@ async def get_price(self) -> float: return balances[1] / balances[0] if (balances[0] > 0) else 0 async def get_normalized_price(self) -> float: - return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) + return await self.get_price() * 10 ** ( + self.token_0.decimals - self.token_1.decimals + ) async def get_liquidity(self) -> Liquidity | None: - balance_0, balance_1 = (await self.vault.functions.getPoolTokens(self.id).call())[1] + balance_0, balance_1 = ( + await self.vault.functions.getPoolTokens(self.id).call() + )[1] if (balance_0 == 0) or (balance_1 == 0): log.warning("Empty token balances") return None @@ -665,7 +749,7 @@ async def get_liquidity(self) -> Liquidity | None: def depth_at(_price: float) -> float: invariant = balance_0 * balance_1 new_balance_0 = math.sqrt(_price * invariant / balance_norm) - return abs(new_balance_0 - balance_0) / (10 ** self.token_0.decimals) + return abs(new_balance_0 - balance_0) / 
(10**self.token_0.decimals) return Liquidity(price, depth_at) @@ -688,15 +772,21 @@ class UniswapV3(DEX): @staticmethod def tick_to_price(tick: int) -> float: - return 1.0001 ** tick + return 1.0001**tick @staticmethod def price_to_tick(price: float) -> float: return math.log(price, 1.0001) class Pool(DEX.LiquidityPool): - def __init__(self, pool_address: ChecksumAddress, contract, tick_spacing: int, - token_0: ERC20Token, token_1: ERC20Token): + def __init__( + self, + pool_address: ChecksumAddress, + contract, + tick_spacing: int, + token_0: ERC20Token, + token_1: ERC20Token, + ): self.pool_address = pool_address self.contract = contract self.tick_spacing = tick_spacing @@ -704,13 +794,17 @@ def __init__(self, pool_address: ChecksumAddress, contract, tick_spacing: int, self.token_1 = token_1 @classmethod - async def create(cls, pool_address: ChecksumAddress) -> 'UniswapV3.Pool': - contract = await rp.assemble_contract("UniswapV3Pool", pool_address, mainnet=True) - tick_spacing, token_0_addr, token_1_addr = await rp.multicall([ - contract.functions.tickSpacing(), - contract.functions.token0(), - contract.functions.token1() - ]) + async def create(cls, pool_address: ChecksumAddress) -> "UniswapV3.Pool": + contract = await rp.assemble_contract( + "UniswapV3Pool", pool_address, mainnet=True + ) + tick_spacing, token_0_addr, token_1_addr = await rp.multicall( + [ + contract.functions.tickSpacing(), + contract.functions.token0(), + contract.functions.token1(), + ] + ) token_0 = await ERC20Token.create(token_0_addr) token_1 = await ERC20Token.create(token_1_addr) return cls(pool_address, contract, tick_spacing, token_0, token_1) @@ -725,7 +819,9 @@ def tick_to_word_and_bit(self, tick: int) -> tuple[int, int]: return word_position, bit_position async def get_ticks_net_liquidity(self, ticks: list[int]) -> dict[int, int]: - results = await rp.multicall([self.contract.functions.ticks(tick) for tick in ticks]) + results = await rp.multicall( + 
[self.contract.functions.ticks(tick) for tick in ticks] + ) return dict(zip(ticks, [r[1] for r in results], strict=False)) async def get_initialized_ticks(self, current_tick: int) -> list[int]: @@ -733,9 +829,9 @@ async def get_initialized_ticks(self, current_tick: int) -> list[int]: active_word, b = self.tick_to_word_and_bit(current_tick) word_range = list(range(active_word - 5, active_word + 5)) - bitmaps = await rp.multicall([ - self.contract.functions.tickBitmap(word) for word in word_range - ]) + bitmaps = await rp.multicall( + [self.contract.functions.tickBitmap(word) for word in word_range] + ) for word, tick_bitmap in zip(word_range, bitmaps, strict=False): if not tick_bitmap: @@ -748,24 +844,28 @@ async def get_initialized_ticks(self, current_tick: int) -> list[int]: return ticks - def liquidity_to_tokens(self, liquidity: int, tick_lower: int, tick_upper: int) -> tuple[float, float]: + def liquidity_to_tokens( + self, liquidity: int, tick_lower: int, tick_upper: int + ) -> tuple[float, float]: sqrtp_lower = math.sqrt(UniswapV3.tick_to_price(tick_lower)) sqrtp_upper = math.sqrt(UniswapV3.tick_to_price(tick_upper)) delta_x = (1 / sqrtp_lower - 1 / sqrtp_upper) * liquidity delta_y = (sqrtp_upper - sqrtp_lower) * liquidity - balance_0 = float(delta_x / (10 ** self.token_0.decimals)) - balance_1 = float(delta_y / (10 ** self.token_1.decimals)) + balance_0 = float(delta_x / (10**self.token_0.decimals)) + balance_1 = float(delta_y / (10**self.token_1.decimals)) return balance_0, balance_1 async def get_price(self) -> float: sqrt96x = (await self.contract.functions.slot0().call())[0] - return (sqrt96x ** 2) / (2 ** 192) + return (sqrt96x**2) / (2**192) async def get_normalized_price(self) -> float: - return await self.get_price() * 10 ** (self.token_0.decimals - self.token_1.decimals) + return await self.get_price() * 10 ** ( + self.token_0.decimals - self.token_1.decimals + ) async def get_liquidity(self) -> Liquidity | None: price = await self.get_price() @@ 
-786,16 +886,22 @@ async def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: last_tick = calculated_tick active_liquidity = initial_liquidity - net_liquidity: dict[int, int] = await self.get_ticks_net_liquidity(_ticks) + net_liquidity: dict[int, int] = await self.get_ticks_net_liquidity( + _ticks + ) liquidity = [] # assume liquidity in token 0 for now for tick in _ticks: if tick > last_tick: - liq_0, _ = self.liquidity_to_tokens(active_liquidity, last_tick, tick) + liq_0, _ = self.liquidity_to_tokens( + active_liquidity, last_tick, tick + ) active_liquidity += net_liquidity[tick] else: - liq_0, _ = self.liquidity_to_tokens(active_liquidity, tick, last_tick) + liq_0, _ = self.liquidity_to_tokens( + active_liquidity, tick, last_tick + ) active_liquidity -= net_liquidity[tick] cumulative_liquidity += liq_0 @@ -804,7 +910,9 @@ async def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: return liquidity - ask_ticks = [t for t in reversed(ticks) if t <= current_tick] + [UniswapV3.MIN_TICK] + ask_ticks = [t for t in reversed(ticks) if t <= current_tick] + [ + UniswapV3.MIN_TICK + ] ask_liquidity = [0] + await get_cumulative_liquidity(ask_ticks) ask_ticks.insert(0, calculated_tick) @@ -832,7 +940,9 @@ def depth_at(_price: float) -> float: if i >= len(liquidity_levels): return liquidity_levels[-1] - range_share = abs(tick - liq_ticks[i - 1]) / abs(liq_ticks[i] - liq_ticks[i - 1]) + range_share = abs(tick - liq_ticks[i - 1]) / abs( + liq_ticks[i] - liq_ticks[i - 1] + ) range_liquidity = abs(liquidity_levels[i] - liquidity_levels[i - 1]) # linear interpolation should be fine since ticks are exponential return liquidity_levels[i - 1] + range_share * range_liquidity @@ -843,7 +953,7 @@ def __init__(self, pools: list[Pool]): super().__init__(pools) @classmethod - async def create(cls, pool_addresses: list[ChecksumAddress]) -> 'UniswapV3': + async def create(cls, pool_addresses: list[ChecksumAddress]) -> "UniswapV3": pools = [await 
UniswapV3.Pool.create(addr) for addr in pool_addresses] return cls(pools) diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 5b0ac0ff..9e86e1ed 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -27,18 +27,18 @@ def pretty_time(time: int | float) -> str: days, time = divmod(int(time), units.days) if days: - parts.append(f'{days} day{"s" if days != 1 else ""}') + parts.append(f"{days} day{'s' if days != 1 else ''}") hours, time = divmod(time, units.hours) if hours: - parts.append(f'{hours} hour{"s" if hours != 1 else ""}') + parts.append(f"{hours} hour{'s' if hours != 1 else ''}") minutes, time = divmod(time, units.minutes) if minutes: - parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}') + parts.append(f"{minutes} minute{'s' if minutes != 1 else ''}") if time or not parts: - parts.append(f'{time:.0f} seconds') + parts.append(f"{time:.0f} seconds") return " ".join(parts[:2]) @@ -125,29 +125,77 @@ def render_branch(k, v, prefix, current_depth=0, max_depth=0, reverse=False, m_p p = p[::-1] p += "├─" if i != len(v) - 1 else f"{m}─" # last connection if not reverse: - a = list(render_branch(sk, sv, p, current_depth + 1, max_depth=max_depth, reverse=False, m_prev=m)) + a + a = ( + list( + render_branch( + sk, + sv, + p, + current_depth + 1, + max_depth=max_depth, + reverse=False, + m_prev=m, + ) + ) + + a + ) else: - a.extend(render_branch(sk, sv, p, current_depth + 1, max_depth=max_depth, reverse=False, m_prev=m)) + a.extend( + render_branch( + sk, + sv, + p, + current_depth + 1, + max_depth=max_depth, + reverse=False, + m_prev=m, + ) + ) return a def render_tree(data: dict, name: str, max_depth: int = 0) -> str: # remove empty states data = {k: v for k, v in data.items() if v} - lines, values, depths = map(list, zip(*list(reversed(render_branch(name, data, "", max_depth=max_depth, reverse=True))), strict=False)) + lines, values, depths = map( + list, + zip( + *list( + reversed( + 
render_branch(name, data, "", max_depth=max_depth, reverse=True) + ) + ), + strict=False, + ), + ) max_right_len, max_left_len = [], [] # longest string offset per depth - max_left_len = max(max(len(s) for s, d in zip(lines, depths, strict=False) if d == depth) for depth in set(depths)) + max_left_len = max( + max(len(s) for s, d in zip(lines, depths, strict=False) if d == depth) + for depth in set(depths) + ) # same for right - max_right_len = max(max(len(str(v)) for v, d in zip(values, depths, strict=False) if d == depth) for depth in set(depths)) + max_right_len = max( + max(len(str(v)) for v, d in zip(values, depths, strict=False) if d == depth) + for depth in set(depths) + ) max_right_len += 2 - COLORS = [Style.BRIGHT, Style.BRIGHT, Fore.RESET, Fore.BLACK, Fore.BLACK, Fore.BLACK] + COLORS = [ + Style.BRIGHT, + Style.BRIGHT, + Fore.RESET, + Fore.BLACK, + Fore.BLACK, + Fore.BLACK, + ] for i, (v, d) in enumerate(zip(values, depths, strict=False)): _v = v _v = f"{COLORS[d]}{v}{Style.RESET_ALL}" - lines[i] = f"{lines[i].ljust(max_left_len, ' ')}{' ' * (max_right_len - len(str(v)))}{_v}" + lines[i] = ( + f"{lines[i].ljust(max_left_len, ' ')}{' ' * (max_right_len - len(str(v)))}{_v}" + ) # replace all spaces with non-breaking spaces lines = [line.replace(" ", "\u00a0") for line in lines] return "\n".join(lines) diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 03daf3ff..62f78941 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -11,9 +11,16 @@ def retry( tries: int = -1, delay: float = 0, max_delay: float | None = None, - backoff: float = 1 + backoff: float = 1, ) -> Callable[..., Any]: - return __retry(exceptions, is_async=False, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) + return __retry( + exceptions, + is_async=False, + tries=tries, + delay=delay, + max_delay=max_delay, + backoff=backoff, + ) def retry_async( @@ -22,6 +29,13 @@ def retry_async( tries: int = -1, delay: float = 0, max_delay: 
float | None = None, - backoff: float = 1 + backoff: float = 1, ) -> Callable[..., Any]: - return __retry(exceptions, is_async=True, tries=tries, delay=delay, max_delay=max_delay, backoff=backoff) + return __retry( + exceptions, + is_async=True, + tries=tries, + delay=delay, + max_delay=max_delay, + backoff=backoff, + ) diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index fa56d62c..4fbc00ad 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -49,7 +49,7 @@ async def _init_contract_addresses(self) -> None: self._multicall = await self.get_contract_by_name("multicall3") log.info("Indexing Rocket Pool contracts...") - for path in Path("contracts/rocketpool/contracts/contract").rglob('*.sol'): + for path in Path("contracts/rocketpool/contracts/contract").rglob("*.sol"): file_name = path.stem contract = file_name[0].lower() + file_name[1:] try: @@ -61,12 +61,22 @@ async def _init_contract_addresses(self) -> None: try: cs_dir, cs_prefix = "ConstellationDirectory", "Constellation" self.addresses |= { - f"{cs_prefix}.SuperNodeAccount": await self.call(f"{cs_dir}.getSuperNodeAddress"), - f"{cs_prefix}.OperatorDistributor": await self.call(f"{cs_dir}.getOperatorDistributorAddress"), - f"{cs_prefix}.Whitelist": await self.call(f"{cs_dir}.getWhitelistAddress"), - f"{cs_prefix}.ETHVault": await self.call(f"{cs_dir}.getWETHVaultAddress"), - f"{cs_prefix}.RPLVault": await self.call(f"{cs_dir}.getRPLVaultAddress"), - "WETH": await self.call(f"{cs_dir}.getWETHAddress") + f"{cs_prefix}.SuperNodeAccount": await self.call( + f"{cs_dir}.getSuperNodeAddress" + ), + f"{cs_prefix}.OperatorDistributor": await self.call( + f"{cs_dir}.getOperatorDistributorAddress" + ), + f"{cs_prefix}.Whitelist": await self.call( + f"{cs_dir}.getWhitelistAddress" + ), + f"{cs_prefix}.ETHVault": await self.call( + f"{cs_dir}.getWETHVaultAddress" + ), + f"{cs_prefix}.RPLVault": await self.call( + f"{cs_dir}.getRPLVaultAddress" + ), + "WETH": 
await self.call(f"{cs_dir}.getWETHAddress"), } except NoAddressFound: log.warning("Failed to find address for Constellation contracts") @@ -108,7 +118,10 @@ def _normalize_calls(calls, default_require_success): async def multicall(self, calls, require_success=True) -> list: """Multicall accepting ContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) - encoded = [(fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags, strict=False)] + encoded = [ + (fn.address, af, fn._encode_transaction_data()) + for fn, af in zip(fns, flags, strict=False) + ] results = await self._multicall.functions.aggregate3(encoded).call() return [ RocketPool._decode_fn_output(fns[i], data) if success else None @@ -128,7 +141,9 @@ async def get_address_by_name(self, name): async def uncached_get_address_by_name(self, name, block="latest"): log.debug(f"Retrieving address for {name} Contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.address", name]) - storage = await self.get_contract_by_name("rocketStorage", historical=block != "latest") + storage = await self.get_contract_by_name( + "rocketStorage", historical=block != "latest" + ) address = await storage.functions.getAddress(sha3).call(block_identifier=block) if not w3.to_int(hexstr=address): raise NoAddressFound(f"No address found for {name} Contract") @@ -141,14 +156,14 @@ async def get_revert_reason(tnx): try: await w3.eth.call( { - "from" : tnx["from"], - "to" : tnx["to"], - "data" : tnx["input"], - "gas" : tnx["gas"], + "from": tnx["from"], + "to": tnx["to"], + "data": tnx["input"], + "gas": tnx["gas"], "gasPrice": tnx["gasPrice"], - "value" : tnx["value"] + "value": tnx["value"], }, - block_identifier=tnx.blockNumber + block_identifier=tnx.blockNumber, ) except ContractLogicError as err: log.debug(f"Transaction: {tnx.hash} ContractLogicError: {err}") @@ -193,7 +208,9 @@ async def uncached_get_abi_by_name(self, name): raise 
Exception(f"No abi found for {name} Contract") return decode_abi(compressed_string) - async def assemble_contract(self, name, address=None, historical=False, mainnet=False): + async def assemble_contract( + self, name, address=None, historical=False, mainnet=False + ): cache_key = (name, address, historical, mainnet) if cache_key in self.CONTRACT_CACHE: return self.CONTRACT_CACHE[cache_key] @@ -225,7 +242,9 @@ def get_name_by_address(self, address): async def get_contract_by_name(self, name, historical=False, mainnet=False): address = await self.get_address_by_name(name) - return await self.assemble_contract(name, address, historical=historical, mainnet=mainnet) + return await self.assemble_contract( + name, address, historical=historical, mainnet=mainnet + ) async def get_contract_by_address(self, address): """ @@ -238,29 +257,48 @@ async def estimate_gas_for_call(self, path, *args, block="latest"): log.debug(f"Estimating gas for {path} (block={block})") name, function = path.rsplit(".", 1) contract = await self.get_contract_by_name(name) - return await contract.functions[function](*args).estimate_gas({"gas": 2 ** 32}, - block_identifier=block) + return await contract.functions[function](*args).estimate_gas( + {"gas": 2**32}, block_identifier=block + ) - async def get_function(self, path, *args, historical=False, address=None, mainnet=False): + async def get_function( + self, path, *args, historical=False, address=None, mainnet=False + ): name, function = path.rsplit(".", 1) if not address: address = await self.get_address_by_name(name) contract = await self.assemble_contract(name, address, historical, mainnet) - args = tuple(w3.to_checksum_address(a) if isinstance(a, str) and w3.is_address(a) else a for a in args) + args = tuple( + w3.to_checksum_address(a) if isinstance(a, str) and w3.is_address(a) else a + for a in args + ) return contract.functions[function](*args) - async def call(self, path, *args, block: BlockIdentifier = "latest", address=None, 
mainnet=False): + async def call( + self, + path, + *args, + block: BlockIdentifier = "latest", + address=None, + mainnet=False, + ): log.debug(f"Calling {path} (block={block})") - fn = await self.get_function(path, *args, historical=block != "latest", address=address, mainnet=mainnet) + fn = await self.get_function( + path, *args, historical=block != "latest", address=address, mainnet=mainnet + ) return await fn.call(block_identifier=block) async def get_annual_rpl_inflation(self): - inflation_per_interval = solidity.to_float(await self.call("rocketTokenRPL.getInflationIntervalRate")) + inflation_per_interval = solidity.to_float( + await self.call("rocketTokenRPL.getInflationIntervalRate") + ) if not inflation_per_interval: return 0 - seconds_per_interval = await self.call("rocketTokenRPL.getInflationIntervalTime") + seconds_per_interval = await self.call( + "rocketTokenRPL.getInflationIntervalTime" + ) intervals_per_year = solidity.years / seconds_per_interval - return (inflation_per_interval ** intervals_per_year) - 1 + return (inflation_per_interval**intervals_per_year) - 1 async def get_percentage_rpl_swapped(self): value = solidity.to_float(await self.call("rocketTokenRPL.totalSwappedRPL")) @@ -280,12 +318,14 @@ async def is_megapool(self, address: ChecksumAddress) -> bool: async def get_eth_usdc_price(self) -> float: from utils.liquidity import UniswapV3 + pool_address = await self.get_address_by_name("UniV3_USDC_ETH") pool = await UniswapV3.Pool.create(pool_address) return 1 / await pool.get_normalized_price() async def get_reth_eth_price(self) -> float: from utils.liquidity import UniswapV3 + pool_address = await self.get_address_by_name("UniV3_rETH_ETH") pool = await UniswapV3.Pool.create(pool_address) return await pool.get_normalized_price() diff --git a/rocketwatch/utils/sea_creatures.py b/rocketwatch/utils/sea_creatures.py index ec170d82..8af8559b 100644 --- a/rocketwatch/utils/sea_creatures.py +++ b/rocketwatch/utils/sea_creatures.py @@ -4,33 +4,29 @@ 
from utils.rocketpool import rp from utils.shared_w3 import w3 -price_cache = { - "block" : 0, - "rpl_price" : 0, - "reth_price": 0 -} +price_cache = {"block": 0, "rpl_price": 0, "reth_price": 0} sea_creatures = { # 32 * 100: spouting whale emoji - 32 * 100: '🐳', + 32 * 100: "🐳", # 32 * 50: whale emoji - 32 * 50 : '🐋', + 32 * 50: "🐋", # 32 * 30: shark emoji - 32 * 30 : '🦈', + 32 * 30: "🦈", # 32 * 20: dolphin emoji - 32 * 20 : '🐬', + 32 * 20: "🐬", # 32 * 10: octopus emoji - 32 * 10 : '🐙', + 32 * 10: "🐙", # 32 * 5: fish emoji - 32 * 5 : '🐟', + 32 * 5: "🐟", # 32 * 2: crab emoji - 32 * 2 : '🦀', + 32 * 2: "🦀", # 32 * 1: fried shrimp emoji - 32 * 1 : '🍤', + 32 * 1: "🍤", # 5: snail emoji - 5 : '🐌', + 5: "🐌", # 1: microbe emoji - 1 : '🦠' + 1: "🦠", } @@ -44,14 +40,27 @@ def get_sea_creature_for_holdings(holdings): # return the highest sea creature with a multiplier next to it highest_possible_holdings = max(sea_creatures.keys()) if holdings >= 2 * highest_possible_holdings: - return sea_creatures[highest_possible_holdings] * int(holdings / highest_possible_holdings) - return next((sea_creature for holding_value, sea_creature in sea_creatures.items() if holdings >= holding_value), '') + return sea_creatures[highest_possible_holdings] * int( + holdings / highest_possible_holdings + ) + return next( + ( + sea_creature + for holding_value, sea_creature in sea_creatures.items() + if holdings >= holding_value + ), + "", + ) async def get_holding_for_address(address): if price_cache["block"] != (b := await w3.eth.get_block_number()): - price_cache["rpl_price"] = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) - price_cache["reth_price"] = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) + price_cache["rpl_price"] = solidity.to_float( + await rp.call("rocketNetworkPrices.getRPLPrice") + ) + price_cache["reth_price"] = solidity.to_float( + await rp.call("rocketTokenRETH.getExchangeRate") + ) price_cache["block"] = b # get their eth balance @@ 
-61,18 +70,24 @@ async def get_holding_for_address(address): rpl_contract = await rp.get_contract_by_name("rocketTokenRPL") rplfs_contract = await rp.get_contract_by_name("rocketTokenRPLFixedSupply") reth_contract = await rp.get_contract_by_name("rocketTokenRETH") - rpl_balance, rplfs_balance, reth_balance = await rp.multicall([ - rpl_contract.functions.balanceOf(address), - rplfs_contract.functions.balanceOf(address), - reth_contract.functions.balanceOf(address), - ]) + rpl_balance, rplfs_balance, reth_balance = await rp.multicall( + [ + rpl_contract.functions.balanceOf(address), + rplfs_contract.functions.balanceOf(address), + reth_contract.functions.balanceOf(address), + ] + ) eth_balance += solidity.to_float(rpl_balance) * price_cache["rpl_price"] eth_balance += solidity.to_float(rplfs_balance) * price_cache["rpl_price"] eth_balance += solidity.to_float(reth_balance) * price_cache["reth_price"] # add eth they provided for minipools - eth_balance += solidity.to_float(await rp.call("rocketNodeStaking.getNodeETHBonded", address)) + eth_balance += solidity.to_float( + await rp.call("rocketNodeStaking.getNodeETHBonded", address) + ) # add their staked RPL - staked_rpl = solidity.to_float(await rp.call("rocketNodeStaking.getNodeStakedRPL", address)) + staked_rpl = solidity.to_float( + await rp.call("rocketNodeStaking.getNodeStakedRPL", address) + ) eth_balance += staked_rpl * price_cache["rpl_price"] return eth_balance diff --git a/rocketwatch/utils/shared_w3.py b/rocketwatch/utils/shared_w3.py index 8ecd9e5e..9fe8dc35 100644 --- a/rocketwatch/utils/shared_w3.py +++ b/rocketwatch/utils/shared_w3.py @@ -8,8 +8,10 @@ class Bacon(AsyncBeacon): - async def get_validators_by_ids(self, state_id: str, ids: list[int]) -> dict[str, Any]: - id_str = ','.join(map(str, ids)) + async def get_validators_by_ids( + self, state_id: str, ids: list[int] + ) -> dict[str, Any]: + id_str = ",".join(map(str, ids)) return await self._async_make_get_request( 
f"/eth/v1/beacon/states/{state_id}/validators?id={id_str}" ) @@ -19,8 +21,9 @@ async def get_sync_committee(self, epoch: int) -> dict[str, Any]: f"/eth/v1/beacon/states/head/sync_committees?epoch={epoch}" ) + def _get_web3(endpoint: str): - provider = AsyncHTTPProvider(endpoint, request_kwargs={'timeout': 60}) + provider = AsyncHTTPProvider(endpoint, request_kwargs={"timeout": 60}) return AsyncWeb3(provider) @@ -32,6 +35,6 @@ def _get_web3(endpoint: str): w3_mainnet = _get_web3(cfg.execution_layer.endpoint.mainnet) if cfg.execution_layer.endpoint.archive is not None: - w3_archive =_get_web3(cfg.execution_layer.endpoint.archive) + w3_archive = _get_web3(cfg.execution_layer.endpoint.archive) bacon = Bacon(cfg.consensus_layer.endpoint) diff --git a/rocketwatch/utils/solidity.py b/rocketwatch/utils/solidity.py index a449c1d8..096e721b 100644 --- a/rocketwatch/utils/solidity.py +++ b/rocketwatch/utils/solidity.py @@ -12,11 +12,11 @@ def to_float(n, decimals=18): - return int(n) / 10 ** decimals + return int(n) / 10**decimals def to_int(n, decimals=18): - return int(n) // 10 ** decimals + return int(n) // 10**decimals def beacon_block_to_date(block_num: int) -> int: @@ -32,8 +32,18 @@ def slot_to_beacon_day_epoch_slot(slot: int) -> tuple[int, int, int]: SUBMISSION_KEYS = ( - "rewardIndex", "executionBlock", "consensusBlock", "merkleRoot", "merkleTreeCID", "intervalsPassed", "treasuryRPL", - "trustedNodeRPL", "nodeRPL", "nodeETH", "userETH") + "rewardIndex", + "executionBlock", + "consensusBlock", + "merkleRoot", + "merkleTreeCID", + "intervalsPassed", + "treasuryRPL", + "trustedNodeRPL", + "nodeRPL", + "nodeETH", + "userETH", +) def mp_state_to_str(state): diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index b86ded91..02c8607e 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -29,7 +29,7 @@ async def load(self) -> Embed: num_items, content = await self._load_content( (self.page_index * self.page_size), - ((self.page_index + 
1) * self.page_size - 1) + ((self.page_index + 1) * self.page_size - 1), ) embed = Embed(title=self._title) @@ -45,8 +45,8 @@ async def load(self) -> Embed: return await self.load() embed.description = content - self.prev_page.disabled = (self.page_index <= 0) - self.next_page.disabled = (self.page_index >= max_page_index) + self.prev_page.disabled = self.page_index <= 0 + self.next_page.disabled = self.page_index >= max_page_index return embed @ui.button(emoji="⬅", label="Prev", style=ButtonStyle.gray) @@ -62,13 +62,11 @@ async def next_page(self, interaction: Interaction, _) -> None: await interaction.response.edit_message(embed=embed, view=self) class JumpToModal(ui.Modal, title="Jump To Position"): - def __init__(self, view: 'PageView'): + def __init__(self, view: "PageView"): super().__init__() self.view = view self.position_field = ui.TextInput( - label="Position", - placeholder="Enter position to jump to", - required=True + label="Position", placeholder="Enter position to jump to", required=True ) self.add_item(self.position_field) diff --git a/tests/test_scam_detection.py b/tests/test_scam_detection.py index aab2f8be..bdb8eff3 100644 --- a/tests/test_scam_detection.py +++ b/tests/test_scam_detection.py @@ -21,6 +21,7 @@ def _get_test_cfg(): RocketPoolConfig, RocketPoolSupport, ) + return Config( discord=DiscordConfig( secret="test", @@ -29,7 +30,9 @@ def _get_test_cfg(): ), execution_layer=ExecutionLayerConfig( explorer="https://etherscan.io", - endpoint=ExecutionLayerEndpoint(current="http://localhost:8545", mainnet="http://localhost:8545"), + endpoint=ExecutionLayerEndpoint( + current="http://localhost:8545", mainnet="http://localhost:8545" + ), etherscan_secret="test", ), consensus_layer=ConsensusLayerConfig( @@ -41,7 +44,9 @@ def _get_test_cfg(): rocketpool=RocketPoolConfig( manual_addresses={"rocketStorage": "0x1234"}, dao_multisigs=["0xabcd"], - support=RocketPoolSupport(user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5), + 
support=RocketPoolSupport( + user_ids=[1], role_ids=[2], server_id=3, channel_id=4, moderator_id=5 + ), dm_warning=DmWarningConfig(channels=[100]), ), events=EventsConfig(lookback_distance=100, genesis=0, block_batch_size=50), @@ -78,6 +83,7 @@ def _make_detector(): bot.tree = MagicMock() with patch.object(bot.tree, "add_command"): from plugins.scam_detection.scam_detection import ScamDetection + return ScamDetection(bot) @@ -133,13 +139,17 @@ def test_safe_message_not_flagged(self, detector, case): reasons = _check_message(detector, case) assert not reasons, f"Safe message falsely flagged: {reasons}" - @pytest.mark.parametrize("case", TEST_CASES["messages"]["known_false_positives"], ids=_case_id) + @pytest.mark.parametrize( + "case", TEST_CASES["messages"]["known_false_positives"], ids=_case_id + ) @pytest.mark.xfail(reason="known false positive", strict=True) def test_known_false_positive(self, detector, case): reasons = _check_message(detector, case) assert not reasons, f"Falsely flagged: {reasons}" - @pytest.mark.parametrize("case", TEST_CASES["messages"]["known_false_negatives"], ids=_case_id) + @pytest.mark.parametrize( + "case", TEST_CASES["messages"]["known_false_negatives"], ids=_case_id + ) @pytest.mark.xfail(reason="known false negative", strict=True) def test_known_false_negative(self, detector, case): reasons = _check_message(detector, case) From ba018924f8400fd27a64d7f50e8163ea0f26ba93 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 12:40:28 +0000 Subject: [PATCH 208/279] add ruff format to lint action --- .github/workflows/lint.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 33bb8a15..eed06f94 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -15,3 +15,7 @@ jobs: with: args: "check" src: "rocketwatch" + - uses: astral-sh/ruff-action@v3 + with: + args: "format --check" + src: "rocketwatch" From 
951d906f2a03ef86c151a18a1944144c984ed6ed Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 15:41:57 +0000 Subject: [PATCH 209/279] skip command response if not enabled --- rocketwatch/utils/command_tree.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index a0989273..865b87b9 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -12,11 +12,16 @@ TransformerError, ) +from utils.config import cfg + log = logging.getLogger("rocketwatch.command_tree") class RWCommandTree(CommandTree): async def _call(self, interaction: Interaction) -> None: + if not cfg.modules.enable_commands: + return + cmd_name = interaction.command.name if interaction.command else "unknown" timestamp = datetime.utcnow() From 80a38a236796bd6de0c55ed19034d79f90914f0a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 21:17:24 +0000 Subject: [PATCH 210/279] fix contract claim event --- rocketwatch/plugins/transactions/transactions.py | 7 ++++--- rocketwatch/utils/dao.py | 2 +- tests/message_samples.json | 3 ++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index b4652671..3428b8a9 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -321,14 +321,15 @@ async def process_transaction( return [] log.debug(decoded) - function = decoded[0].function_identifier - if (event_name := self.function_map[contract_name].get(function)) is None: + function = decoded[0].abi_element_identifier + function_name = function.split("(")[0] + if (event_name := self.function_map[contract_name].get(function_name)) is None: return [] event = aDict(tnx) event.args = {arg.lstrip("_"): value for arg, value in decoded[1].items()} 
event.args["timestamp"] = block.timestamp - event.args["function_name"] = function + event.args["function_name"] = function_name if not receipt.status: event.args["reason"] = await rp.get_revert_reason(tnx) # if revert reason includes the phrase "insufficient for pre deposit" filter out diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 08418daf..40540d8f 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -73,7 +73,7 @@ async def build_proposal_body( try: contract = await self._get_contract() decoded = contract.decode_function_input(proposal.payload) - function_name = decoded[0].function_identifier + function_name = decoded[0].abi_element_identifier args = [f" {arg} = {value}" for arg, value in decoded[1].items()] payload_str = f"{function_name}(\n" + "\n".join(args) + "\n)" body_repr += f"\n\nPayload:\n{payload_str}" diff --git a/tests/message_samples.json b/tests/message_samples.json index 1f333146..436da1c2 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -904,7 +904,8 @@ ], "known_false_positives": [ "rocketpool.support", - "Reduce Express ticket RPIP" + "Reduce Express ticket RPIP", + "EIP-7002 - Fee Structure" ], "known_false_negatives": [] } From d4b24f312bb5e21ac754500fd2db86db39c4fc07 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 21:20:17 +0000 Subject: [PATCH 211/279] remove CodeQL analysis --- .github/workflows/codeql-analysis.yml | 55 --------------------------- 1 file changed, 55 deletions(-) delete mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index cc70e62a..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,55 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. 
-# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ main ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ main ] - schedule: - - cron: '26 9 * * 3' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] - # Learn more: - # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v4 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. 
- # queries: ./path/to/local/query, your-org/your-repo/queries@main - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v4 From 8909fe5db95c3b530dc822d17e57d3df7475c23b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 21:27:39 +0000 Subject: [PATCH 212/279] use 0x hex --- rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py | 2 +- rocketwatch/plugins/events/events.py | 2 +- rocketwatch/plugins/transactions/transactions.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index 871f329b..e41f24d8 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -37,7 +37,7 @@ def safe_to_float(num): def safe_to_hex(b): - return f"0x{b.hex()}" if b else None + return b.to_0x_hex() if b else None def safe_state_to_str(state): diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 9377ac6b..6b76f9a4 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -730,7 +730,7 @@ def share_repr(percentage: float) -> str: args.caller = receipt["from"] # add transaction hash and block number to args - args.transactionHash = "0x" + event.transactionHash.hex() + args.transactionHash = event.transactionHash.to_0x_hex() args.blockNumber = event.blockNumber # add proposal message manually if the event contains a proposal diff --git a/rocketwatch/plugins/transactions/transactions.py b/rocketwatch/plugins/transactions/transactions.py index 3428b8a9..c62de9e7 100644 --- a/rocketwatch/plugins/transactions/transactions.py +++ b/rocketwatch/plugins/transactions/transactions.py @@ -150,7 +150,7 @@ async def create_embeds(self, event_name: str, event: aDict) -> list[Embed]: args.event_name = event_name # add transaction hash and block number to args - 
args.transactionHash = event.hash.hex() + args.transactionHash = event.hash.to_0x_hex() args.blockNumber = event.blockNumber # oDAO bootstrap doesn't emit an event From a3daff37a709fada7ef8b5fb37211846bbcd59b3 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 13 Mar 2026 21:32:22 +0000 Subject: [PATCH 213/279] fix renovate config --- .github/renovate.json | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/renovate.json b/.github/renovate.json index 8c5841f2..abed150b 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -4,7 +4,6 @@ "config:recommended" ], "forkProcessing": "enabled", - "prCreation": "not-pending", "rollbackPrs": true, "stabilityDays": 3, "packageRules": [ From fa1c1e9c882567bd00b5a5696c8a6cef8bb2afee Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 21:32:51 +0000 Subject: [PATCH 214/279] Update dependency cachetools to v7.0.5 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f27b2d50..de9d148f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ dependencies = [ "anyascii==0.3.3", "beautifulsoup4==4.14.3", "bidict==0.23.1", - "cachetools==7.0.3", + "cachetools==7.0.5", "colorama==0.4.6", "cronitor==4.9.0", "dice==4.0.0", From bb6de590208640391498f5d942948dd4d6f6d995 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 21:32:55 +0000 Subject: [PATCH 215/279] Update dependency numpy to v2.4.3 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f27b2d50..1a3c1c02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "humanize==4.15.0", "inflect==7.5.0", "matplotlib==3.10.8", - "numpy==2.4.2", + "numpy==2.4.3", "pillow==12.1.1", "psutil==7.2.2", "pydantic>=2.0.0,<3.0.0", From 
77612517722b3cd0b95d722fa3330ced667148f5 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Mar 2026 00:11:02 +0000 Subject: [PATCH 216/279] fix user distribute instructions --- rocketwatch/plugins/user_distribute/user_distribute.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index b91b4939..9bea3f4b 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -31,10 +31,12 @@ def __init__( async def instructions(self, interaction: Interaction, _) -> None: mp_contract = await rp.assemble_contract("rocketMinipoolDelegate") bud_calldata = bytes.fromhex( - mp_contract.encodeABI(fn_name="beginUserDistribute")[2:] + mp_contract.encode_abi(abi_element_identifier="beginUserDistribute")[2:] ) dist_calldata = bytes.fromhex( - mp_contract.encodeABI(fn_name="distributeBalance", args=[False])[2:] + mp_contract.encode_abi( + abi_element_identifier="distributeBalance", args=[False] + )[2:] ) calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] From 758426e2ed10bedb87124c94cbf6828148c05d9d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Mar 2026 09:31:56 +0000 Subject: [PATCH 217/279] fix safe_to_hex --- rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index e41f24d8..e794c6c7 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -36,8 +36,8 @@ def safe_to_float(num): return None -def safe_to_hex(b): - return b.to_0x_hex() if b else None +def safe_to_hex(b: bytes) -> str | None: + return f"0x{b.hex()}" if b else None def 
safe_state_to_str(state): From 437e799e9f15a6135138e6c24dee11b399211457 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 14 Mar 2026 09:33:04 +0000 Subject: [PATCH 218/279] rename Docker stage --- .github/workflows/{docker-ci.yml => build.yml} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename .github/workflows/{docker-ci.yml => build.yml} (98%) diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/build.yml similarity index 98% rename from .github/workflows/docker-ci.yml rename to .github/workflows/build.yml index 0e6b6236..d9bec277 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/build.yml @@ -1,4 +1,4 @@ -name: CI +name: Build on: push: From 0b1588dc6606a003267ae64be460053e331a906c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 21:39:09 +0000 Subject: [PATCH 219/279] make event_logs util async --- rocketwatch/plugins/dao/dao.py | 4 +-- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- rocketwatch/utils/event_logs.py | 39 ++++++++++------------ 3 files changed, 21 insertions(+), 24 deletions(-) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 0181a0cd..0fee22b0 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -196,7 +196,7 @@ async def _get_voter_list( dao = ProtocolDAO() proposal_contract = await dao._get_proposal_contract() - for vote_log in get_logs( + for vote_log in await get_logs( proposal_contract.events.ProposalVoted, await ts_to_block(proposal.start) - 1, await ts_to_block(proposal.end_phase_2) + 1, @@ -210,7 +210,7 @@ async def _get_voter_list( ) voters[vote.voter] = vote - for override_log in get_logs( + for override_log in await get_logs( proposal_contract.events.ProposalVoteOverridden, await ts_to_block(proposal.end_phase_1) - 1, await ts_to_block(proposal.end_phase_2) + 1, diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py 
b/rocketwatch/plugins/rocksolid/rocksolid.py index 4152347f..8d2ab067 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -44,7 +44,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: updates.append((doc["time"], doc["assets"])) db_operations = [] - for event_log in get_logs( + for event_log in await get_logs( vault_contract.events.TotalAssetsUpdated, b_from, b_to ): ts = await block_to_ts(event_log.blockNumber) diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 95b55674..77a8e083 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -1,38 +1,35 @@ +import asyncio import logging from typing import Any from eth_typing import BlockNumber -from web3.contract.contract import ContractEvent +from web3.contract.async_contract import AsyncContractEvent from web3.types import LogReceipt log = logging.getLogger("rocketwatch.event_logs") -def get_logs( - event: ContractEvent, +async def get_logs( + event: AsyncContractEvent, from_block: BlockNumber, to_block: BlockNumber, arg_filters: dict[str, Any] | None = None, ) -> list[LogReceipt]: - start_block = from_block - end_block = to_block - - log.debug(f"Fetching event logs in [{start_block}, {end_block}]") + log.debug(f"Fetching event logs in [{from_block}, {to_block}]") chunk_size = 50_000 - from_block = start_block - to_block = from_block + chunk_size - - logs = [] - - while from_block <= end_block: - logs += event.get_logs( - from_block=from_block, - to_block=min(to_block, end_block), - argument_filters=arg_filters, + tasks = [] + chunk_start = from_block + while chunk_start <= to_block: + chunk_end = min(chunk_start + chunk_size, to_block) + tasks.append( + event.get_logs( + from_block=chunk_start, + to_block=chunk_end, + argument_filters=arg_filters, + ) ) + chunk_start = chunk_end + 1 - from_block = to_block + 1 - to_block = from_block + chunk_size - - return logs + results = await 
asyncio.gather(*tasks) + return [log_entry for chunk in results for log_entry in chunk] From 440f61f8eaab024b3ae02438f1fe3da2c35c91c8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 23:10:05 +0000 Subject: [PATCH 220/279] add deposit data to megapools --- .../plugins/db_upkeep_task/db_upkeep_task.py | 59 ++++++++++++++++++- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index e794c6c7..a5b77d38 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -131,6 +131,7 @@ async def loop(self): await self.update_dynamic_minipool_beacon_data() # megapool validator tasks await self.add_untracked_megapool_validators() + await self.add_static_megapool_deposit_data() await self.update_dynamic_megapool_validator_data() await self.update_dynamic_megapool_validator_beacon_data() log.debug("finished db upkeep task") @@ -155,6 +156,7 @@ async def check_indexes(self): ) await self.bot.db.megapool_validators.create_index("pubkey") await self.bot.db.megapool_validators.create_index("validator_index") + await self.bot.db.megapool_validators.create_index("status") await self.bot.db.megapool_validators.create_index("beacon.status") log.debug("indexes checked") @@ -578,9 +580,9 @@ async def add_static_minipool_deposit_data(self): log.debug(f"Processing deposit data for blocks {block_start}..{block_end}") addresses = {m["address"] for m in minipool_batch} - events = get_logs( + events = await get_logs( nd.events.DepositReceived, block_start, block_end - ) + get_logs(mm.events.MinipoolCreated, block_start, block_end) + ) + await get_logs(mm.events.MinipoolCreated, block_start, block_end) events.sort( key=lambda e: (e["blockNumber"], e["transactionIndex"], e["logIndex"]), reverse=True, @@ -821,6 +823,59 @@ async def 
add_untracked_megapool_validators(self): docs, ordered=False ) + @timerun_async + async def add_static_megapool_deposit_data(self): + validators = await self.bot.db.megapool_validators.find( + {"deposit_time": {"$exists": False}}, + {"megapool": 1, "validator_id": 1}, + ).to_list() + if not validators: + return + + dp = await rp.get_contract_by_name("rocketDepositPool") + saturn_upgrade_block = 24479994 + to_block = await w3.eth.get_block_number() + + by_megapool = defaultdict(list) + for v in validators: + by_megapool[v["megapool"]].append(v) + + for megapool_addr, megapool_validators in by_megapool.items(): + min_vid = min(v["validator_id"] for v in megapool_validators) + if min_vid > 0: + prev = await self.bot.db.megapool_validators.find_one( + {"megapool": megapool_addr, "validator_id": min_vid - 1}, + {"deposit_time": 1}, + ) + from_block = ( + await ts_to_block(prev["deposit_time"]) + if prev and prev.get("deposit_time") + else saturn_upgrade_block + ) + else: + from_block = saturn_upgrade_block + + events = await get_logs( + dp.events.FundsRequested, + from_block, + to_block, + arg_filters={"receiver": megapool_addr}, + ) + events_by_vid = {e["args"]["validatorId"]: e for e in events} + + ops = [] + for v in megapool_validators: + if not (event := events_by_vid.get(v["validator_id"])): + continue + ops.append( + UpdateOne( + {"_id": v["_id"]}, + {"$set": {"deposit_time": event["args"]["time"]}}, + ) + ) + if ops: + await self.bot.db.megapool_validators.bulk_write(ops, ordered=False) + @timerun_async async def update_dynamic_megapool_validator_data(self): validators = await self.bot.db.megapool_validators.find( From 0ebb6987719226d7aec7c5a917c41e294474383d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 23:10:34 +0000 Subject: [PATCH 221/279] add block_identifier to multitcall --- rocketwatch/utils/rocketpool.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git 
a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 4fbc00ad..1c689bfe 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -104,7 +104,7 @@ def _decode_fn_output(fn, data: bytes) -> Any: @staticmethod def _normalize_calls(calls, default_require_success): """Normalize calls to (fn, allow_failure) pairs. Each call may be a - plain ContractFunction or a (ContractFunction, require_success) tuple.""" + plain AsyncContractFunction or an (fn, require_success) tuple.""" fns, flags = [], [] for call in calls: if isinstance(call, tuple): @@ -115,14 +115,18 @@ def _normalize_calls(calls, default_require_success): flags.append(not req) return fns, flags - async def multicall(self, calls, require_success=True) -> list: - """Multicall accepting ContractFunction objects or (fn, require_success) tuples.""" + async def multicall( + self, calls, require_success=True, block: BlockIdentifier = "latest" + ) -> list: + """Multicall accepting AsyncContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) encoded = [ (fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags, strict=False) ] - results = await self._multicall.functions.aggregate3(encoded).call() + results = await self._multicall.functions.aggregate3(encoded).call( + block_identifier=block + ) return [ RocketPool._decode_fn_output(fns[i], data) if success else None for i, (success, data) in enumerate(results) From 994d3489dff3fa382a87aaeb442994ed8c52adf7 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 23:26:11 +0000 Subject: [PATCH 222/279] small optimizations to el_explorer_url --- rocketwatch/plugins/dao/dao.py | 2 +- rocketwatch/utils/embeds.py | 83 +++++++++++++++------------------- 2 files changed, 38 insertions(+), 47 deletions(-) diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 0fee22b0..09de2c87 100644 
--- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -233,7 +233,7 @@ async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: self._voter_list[from_idx : (to_idx + 1)], start=from_idx ): name = ( - (await el_explorer_url(voter.voter, prefix=-1)) + (await el_explorer_url(voter.voter, prefix=None)) .split("[")[1] .split("]")[0] ) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 6b87581b..78fb352e 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -3,7 +3,6 @@ import logging import math from collections.abc import Callable -from typing import Literal import aiohttp import discord @@ -11,6 +10,7 @@ from aiocache import cached from discord import Color from ens import InvalidName +from eth_typing import BlockIdentifier from etherscan_labels import Addresses from strings import _ @@ -97,10 +97,12 @@ async def get_pdao_delegates() -> dict[str, str]: async def el_explorer_url( target: str, name: str = "", - prefix: str | Literal[-1] = "", + prefix: str | None = "", name_fmt: Callable[[str], str] | None = None, - block="latest", + block: BlockIdentifier = "latest", ): + _prefix = "" + if w3.is_address(target): # sanitize address target = w3.to_checksum_address(target) @@ -109,50 +111,41 @@ async def el_explorer_url( chain = cfg.rocketpool.chain dashboard_network = "" if (chain == "mainnet") else f"?network={chain}" + n_key = f"addresses.{target}" + if not name and (n := _(n_key)) != n_key: + name = n + if await rp.is_node(target): megapool_address = await rp.call( "rocketNodeManager.getMegapoolAddress", target ) if megapool_address != "0x0000000000000000000000000000000000000000": url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" - - if await rp.is_megapool(target): + if await rp.call( + "rocketNodeManager.getSmoothingPoolRegistrationState", + target, + block=block, + ): + _prefix += ":cup_with_straw:" + if not name: + if member_id := await rp.call( 
+ "rocketDAONodeTrusted.getMemberID", target, block=block + ): + _prefix += "🔮" + name = member_id + elif member_id := await rp.call( + "rocketDAOSecurity.getMemberID", target, block=block + ): + _prefix += "🔒" + name = member_id + elif delegate_name := (await get_pdao_delegates()).get(target): + _prefix += "🏛️" + name = delegate_name + elif await rp.is_megapool(target): url = f"https://saturn-1.net/megapool/{target}{dashboard_network}" - - if await rp.is_minipool(target): - pass # TODO add explorer url once supported - - n_key = f"addresses.{target}" - if not name and (n := _(n_key)) != n_key: - name = n - - if prefix != -1 and await rp.call( - "rocketNodeManager.getSmoothingPoolRegistrationState", target, block=block - ): - prefix += ":cup_with_straw:" - - if not name and ( - member_id := await rp.call( - "rocketDAONodeTrusted.getMemberID", target, block=block - ) - ): - if prefix != -1: - prefix += "🔮" - name = member_id - - if not name and ( - member_id := await rp.call( - "rocketDAOSecurity.getMemberID", target, block=block - ) - ): - if prefix != -1: - prefix += "🔒" - name = member_id - - if not name and (delegate_name := (await get_pdao_delegates()).get(target)): - if prefix != -1: - prefix += "🏛️" - name = delegate_name + elif await rp.is_minipool(target): + if chain == "mainnet": + url = f"https://rocketexplorer.net/validator/{target}" if not name and cfg.rocketpool.chain != "mainnet": name = s_hex(target) @@ -172,12 +165,10 @@ async def el_explorer_url( ): name = a.name if not name: - # not an odao member, try to get their ens name = await ens.get_name(target) if code := await w3.eth.get_code(target): - if prefix != -1: - prefix += "📄" + _prefix += "📄" if (not name) and ( w3.keccak(text=code.hex()).hex() in cfg.other.mev_hashes ): @@ -233,8 +224,8 @@ async def el_explorer_url( name = s_hex(target) if name_fmt: name = name_fmt(name) - if prefix == -1: - prefix = "" + + prefix = "" if (prefix is None) else prefix + _prefix return f"{prefix}[{name}]({url})" 
@@ -284,7 +275,7 @@ async def prepare_args(args): elif arg_key == "cow_uid": args[arg_key] = f"[ORDER](https://explorer.cow.fi/orders/{arg_value})" else: - args[arg_key] = await el_explorer_url(arg_value, prefix=prefix) + args[arg_key] = await el_explorer_url(arg_value, _prefix=prefix) args[f"{arg_key}_clean"] = await el_explorer_url(arg_value) if len(arg_value) == 66: args[f"{arg_key}_small"] = await el_explorer_url( From dd467fe81bc78de4f225c9c8b5851c0f364e6ff4 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 23:30:48 +0000 Subject: [PATCH 223/279] fix typo --- rocketwatch/utils/embeds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 78fb352e..be2ff2d8 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -275,7 +275,7 @@ async def prepare_args(args): elif arg_key == "cow_uid": args[arg_key] = f"[ORDER](https://explorer.cow.fi/orders/{arg_value})" else: - args[arg_key] = await el_explorer_url(arg_value, _prefix=prefix) + args[arg_key] = await el_explorer_url(arg_value, prefix=prefix) args[f"{arg_key}_clean"] = await el_explorer_url(arg_value) if len(arg_value) == 66: args[f"{arg_key}_small"] = await el_explorer_url( From c934d33baafa1f00eb61dab49cf527bb10a6c60b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 15 Mar 2026 23:38:03 +0000 Subject: [PATCH 224/279] remove collateral percentage from withdrawal info --- .../plugins/deposit_pool/deposit_pool.py | 39 ++++++++----------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/rocketwatch/plugins/deposit_pool/deposit_pool.py b/rocketwatch/plugins/deposit_pool/deposit_pool.py index 7f2719f6..a4e5557b 100644 --- a/rocketwatch/plugins/deposit_pool/deposit_pool.py +++ b/rocketwatch/plugins/deposit_pool/deposit_pool.py @@ -21,17 +21,15 @@ def __init__(self, bot: RocketWatch): @staticmethod async def 
get_deposit_pool_stats() -> Embed: + dp_contract = await rp.get_contract_by_name("rocketDepositPool") + dp_settings_contract = await rp.get_contract_by_name( + "rocketDAOProtocolSettingsDeposit" + ) balance_raw, max_size_raw, max_amount_raw = await rp.multicall( [ - ( - await rp.get_contract_by_name("rocketDepositPool") - ).functions.getBalance(), - ( - await rp.get_contract_by_name("rocketDAOProtocolSettingsDeposit") - ).functions.getMaximumDepositPoolSize(), - ( - await rp.get_contract_by_name("rocketDepositPool") - ).functions.getMaximumDepositAmount(), + dp_contract.functions.getBalance(), + dp_settings_contract.functions.getMaximumDepositPoolSize(), + dp_contract.functions.getMaximumDepositAmount(), ] ) @@ -94,6 +92,10 @@ async def get_deposit_pool_stats() -> Embed: @staticmethod async def get_contract_collateral_stats() -> Embed: + reth_contract = await rp.get_contract_by_name("rocketTokenRETH") + network_setting_contract = await rp.get_contract_by_name( + "rocketDAOProtocolSettingsNetwork" + ) ( exchange_rate, total_supply, @@ -101,18 +103,10 @@ async def get_contract_collateral_stats() -> Embed: target_rate_raw, ) = await rp.multicall( [ - ( - await rp.get_contract_by_name("rocketTokenRETH") - ).functions.getExchangeRate(), - ( - await rp.get_contract_by_name("rocketTokenRETH") - ).functions.totalSupply(), - ( - await rp.get_contract_by_name("rocketTokenRETH") - ).functions.getCollateralRate(), - ( - await rp.get_contract_by_name("rocketDAOProtocolSettingsNetwork") - ).functions.getTargetRethCollateralRate(), + reth_contract.functions.getExchangeRate(), + reth_contract.functions.totalSupply(), + reth_contract.functions.getCollateralRate(), + network_setting_contract.functions.getTargetRethCollateralRate(), ] ) @@ -131,9 +125,8 @@ async def get_contract_collateral_stats() -> Embed: else: collateral_target_perc = collateral_eth / collateral_target_eth description = ( - f"**{collateral_eth:,.2f} ETH** of liquidity in the rETH contract.\n" + 
f"**{collateral_eth:,.2f} ETH** of liquidity in the rETH contract\n" f"**{collateral_target_perc:.2%}** of the {collateral_target_eth:,.0f} ETH target" - f" ({collateral_rate:.2%}/{collateral_rate_target:.0%})." ) return Embed(title="rETH Extra Collateral", description=description) From 343f1d858554f93fe10fc9e0c2f2de43de0f3393 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Mar 2026 01:43:18 +0000 Subject: [PATCH 225/279] new scam thread detection approach --- .../plugins/scam_detection/scam_detection.py | 58 +++----- tests/message_samples.json | 128 +----------------- tests/test_scam_detection.py | 87 +++++++----- 3 files changed, 74 insertions(+), 199 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index c29dd366..d60e2acb 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -21,7 +21,6 @@ RawBulkMessageDeleteEvent, RawMessageDeleteEvent, RawThreadDeleteEvent, - RawThreadUpdateEvent, Reaction, Thread, User, @@ -139,6 +138,7 @@ def __init__(self, bot: RocketWatch): self._thread_report_lock = asyncio.Lock() self._user_report_lock = asyncio.Lock() self._message_react_cache = TTLCache(maxsize=1000, ttl=300) + self._thread_creation_messages: set[int] = set() self.markdown_link_pattern = re.compile( r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)" ) @@ -228,7 +228,7 @@ async def _generate_message_report( log.info(f"Found existing report for message {message.id} in database") return None - warning = Embed(title="🚨 Possible Scam Detected") + warning = Embed(title="🚨 Likely Scam Detected") warning.color = self.Color.ALERT warning.description = f"**Reason**: {reason}\n" @@ -267,7 +267,7 @@ async def _generate_thread_report( log.info(f"Found existing report for thread {thread.id} in database") return None - warning = Embed(title="🚨 Possible Scam Detected") + warning = 
Embed(title="🚨 Likely Scam Detected") warning.color = self.Color.ALERT warning.description = f"**Reason**: {reason}\n" @@ -275,6 +275,7 @@ async def _generate_thread_report( warning.set_footer( text=( "There is no ticket system for support on this server.\n" + "Don't engage in conversation outside of the public #support channel.\n" "Ignore this thread and any invites or DMs you may receive." ) ) @@ -680,6 +681,7 @@ async def on_raw_bulk_message_delete( ) async def _on_message_delete(self, message_id: int) -> None: + await self._check_thread_starter_deleted(message_id) async with self._message_report_lock: db_filter = {"type": "message", "message_id": message_id, "removed": False} if not (report := await self.bot.db.scam_reports.find_one(db_filter)): @@ -697,6 +699,20 @@ async def _on_message_delete(self, message_id: int) -> None: db_filter, {"$set": {"warning_id": None, "removed": True}} ) + async def _check_thread_starter_deleted(self, message_id: int) -> None: + if message_id not in self._thread_creation_messages: + return + + self._thread_creation_messages.remove(message_id) + + try: + thread = await self.bot.get_or_fetch_channel(message_id) + except (errors.NotFound, errors.Forbidden): + return + + if isinstance(thread, Thread): + await self.report_thread(thread, "Attempt to hide thread from main channel") + @Cog.listener() async def on_member_ban(self, guild: Guild, user: User) -> None: async with ( @@ -763,39 +779,9 @@ async def report_thread(self, thread: Thread, reason: str) -> None: @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: - if thread.guild.id != cfg.rocketpool.support.server_id: - log.warning(f"Ignoring thread creation in {thread.guild.id}") - return - - lower = thread.name.strip().lower() - scam_thread = ( - # Ticket emoji or "assistance" — always scam - any(kw in lower for kw in ("🎫", "🎟️", "assistance")) - # "ticket"/"tick" — no real ticket system - or "tick" in lower - # "support" — only in short names (long ones are 
legit discussions) - or ("support" in lower and len(thread.name.strip()) < 25) - # Dash-digits near end of name (scam: "user-0816"; skip: "RIP-1559: ...") - or ( - (m := re.search(r"(-|–|—)\d{3,}", thread.name)) # noqa: RUF001 - and ( - m.end() >= len(thread.name.strip()) - 2 - or len(thread.name.strip()) < 30 - ) - ) - # Exact suspicious names - or lower in (".", "!", "///") - ) - if scam_thread: - await self.report_thread(thread, "Illegitimate support thread") - return - - log.debug(f"Ignoring thread creation (id: {thread.id}, name: {thread.name})") - - @Cog.listener() - async def on_raw_thread_update(self, event: RawThreadUpdateEvent) -> None: - thread: Thread = await self.bot.get_or_fetch_channel(event.thread_id) - await self.on_thread_create(thread) + if thread.guild.id == cfg.rocketpool.support.server_id: + # system message and thread share the same ID + self._thread_creation_messages.add(thread.id) @Cog.listener() async def on_raw_thread_delete(self, event: RawThreadDeleteEvent) -> None: diff --git a/tests/message_samples.json b/tests/message_samples.json index 436da1c2..aa971c24 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -782,131 +782,5 @@ "content": "Hello @everyone \n\nANYONE WHO CAN GET ME A WALLLET THAT HAVE PLENTY TRANSACTIONS I WILL PAY HIM 3SOL AN EMPTY WALLLET THAT HAVE REACH 3 MONTHS OR MORE THAN THAT I WILL PAY ANY AMOUNT AND SOME DEAD TOKENS, I AM GOING TO BUY DM" } ] - }, - "threads": { - "safe": [ - "rETH Referral Program", - "Rectify", - "that s the fallback", - "Thread", - "this a totally normal message", - "Yearn Strategy", - "Fixing Inflation Split", - "NodeSet Review of RPL Tokenomics", - "Hey guys can anyone help me understand", - "Tokenomics - Surplus Revenue", - "Tokenomics RPIPs (overall)", - "letter from the pDAO", - "Commission System", - "Ryedawg", - "NUC post unplug recovery", - "Protocols", - "ETH2 client testing", - "Hey all do you recommend I use `service", - "cant stop me from making threads", - 
"stuck transaction - command confirmation pls", - "goerliETH", - "Augustus", - "ETH Denver 2025", - "I m having some major frustrations with", - "alternative insurance ideas", - "Staging Pool (SP)", - "so your withdrawal address received it s", - "Self limiting early draft thread", - "rocketarb walkthrough", - "Rocket Pool voting eligibility", - "let s make a thread for this", - "Anyone know if the proposal related", - "dissolved and closed minipool", - "Yearn wstETH-rETH Integration", - "`sudo dmesg | grep i ext4`", - "Effective stake definition vote logistics", - "New thread for clarity I m trying to", - "Generalized Rewards Tree Distribution", - "Need some help Upgraded late a couple", - "Message me privately", - "lido risk bond chat", - "Tokenomics - UARS", - "NO Growth", - "Deposit", - "Can't SS since this machine is separate", - "IMC selection vote text", - "Rocket Split - Vote Text", - "testnet ETH", - "DevConnect Istanbul 2023", - "my `rp` alias is set to `rocketpool d", - "Creating a New Minipool (Validator) | Ro...", - "Allow bidding for block space in RPL", - "Hi If I try to recover my wallet should", - "Tokenomics - 1kx", - "ETHDenver 2024", - "Is there a way to look at past logs of", - "Hey all I just enabled the monitoring", - "ok now works and my ports are closed So", - "did you ever sort this error out? 
Im getting the same thing now", - "Error Grabbing Logs - Invalid Character \\x00", - "After updating to 1 9 4 I get this error", - "withdrawal error", - "RIP-1559: Burn RPL for higher priority in minipool queue", - "Get the wallets to support presigning", - "Error message: error: Failed to get remote head and new block ranges: EndpointError(FarBehind)", - "Error after updating smartnode stack", - "Smartnode Support for Allnodes Users", - "RP native mode error 127", - "Gas estimation error on deposit", - "Rpc error", - "Connection error", - "Team-supported troll thread to troll the other troll thread", - "```ERROR 09 14|200728 162 Dangling trie", - "Could Not Estimate Gas Limit Error", - "Error 126 running `node status`" - ], - "unsafe": [ - "circuitbuster.-0816", - "🎫 | Support ticket -7373", - "Support Ticket", - "🎫support-ticket #0168", - "Tick-0815", - "support-ticket #0733", - "ticket-0293", - "support", - "Ticket-0373", - "ticket-0202", - "Brzzrkr-0816", - "Tickets - 30", - "Ticket", - "///", - ".", - "Tick-819", - "Tick-0263", - "!", - "Spartacus-0816", - "ticket-0203", - "🎫 Help Request", - "Xebulon-0916", - "🎫 | support ticket -5556", - "jesseda-0816", - "Rell-0815", - "Tickets-0623", - "Ajix-0826", - "support-ticket-001", - "Tick-0175", - "Tick-0236", - "🎫 | support ticket -6363", - "FredTheNoob-0815", - "Rumseth-0816", - "ticket-12345", - "#🎬SUPPORT TICKET 🎫 277", - "Tickt 0364", - "error-5678", - "Support Ticket 🎫", - "Support—342" - ], - "known_false_positives": [ - "rocketpool.support", - "Reduce Express ticket RPIP", - "EIP-7002 - Fee Structure" - ], - "known_false_negatives": [] } -} \ No newline at end of file +} diff --git a/tests/test_scam_detection.py b/tests/test_scam_detection.py index bdb8eff3..2cb9619d 100644 --- a/tests/test_scam_detection.py +++ b/tests/test_scam_detection.py @@ -1,9 +1,9 @@ import json from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest 
-import regex as re +from discord import Thread from utils.config import Config, cfg @@ -111,23 +111,6 @@ def _case_id(case): return case["content"][:100] -THREAD_PATTERN = re.compile(r"(-|\u2013|\u2014)\d{3,}") - - -def _check_thread(name: str) -> bool: - lower = name.strip().lower() - return ( - any(kw in lower for kw in ("\U0001f3ab", "\U0001f39f\ufe0f", "assistance")) - or "tick" in lower - or ("support" in lower and len(name.strip()) < 25) - or ( - bool(m := THREAD_PATTERN.search(name)) - and (m.end() >= len(name.strip()) - 2 or len(name.strip()) < 30) - ) - or lower in (".", "!", "///") - ) - - class TestMessageDetection: @pytest.mark.parametrize("case", TEST_CASES["messages"]["unsafe"], ids=_case_id) def test_unsafe_message_detected(self, detector, case): @@ -156,21 +139,53 @@ def test_known_false_negative(self, detector, case): assert reasons, f"Scam not detected: {case['content'][:100]!r}" -class TestThreadDetection: - @pytest.mark.parametrize("name", TEST_CASES["threads"]["unsafe"]) - def test_unsafe_thread_detected(self, name): - assert _check_thread(name), f"Unsafe thread name not detected: {name!r}" - - @pytest.mark.parametrize("name", TEST_CASES["threads"]["safe"]) - def test_safe_thread_not_flagged(self, name): - assert not _check_thread(name), f"Safe thread name falsely flagged: {name!r}" - - @pytest.mark.parametrize("name", TEST_CASES["threads"]["known_false_positives"]) - @pytest.mark.xfail(reason="known false positive", strict=True) - def test_known_false_positive(self, name): - assert not _check_thread(name), f"Falsely flagged: {name!r}" +class TestThreadStarterDeleted: + @pytest.fixture() + def detector(self): + return _make_detector() + + def _make_thread(self, thread_id, owner_id, guild_id): + thread = MagicMock(spec=Thread) + thread.id = thread_id + thread.owner_id = owner_id + thread.guild.id = guild_id + thread.guild.get_member.return_value = MagicMock( + bot=False, + guild_permissions=MagicMock(moderate_members=False), + roles=[], + 
id=owner_id, + ) + return thread + + @pytest.mark.asyncio + async def test_on_thread_create_tracks_thread(self, detector): + thread = self._make_thread(123, 999, cfg.rocketpool.support.server_id) + await detector.on_thread_create(thread) + assert 123 in detector._thread_creation_messages + + @pytest.mark.asyncio + async def test_on_thread_create_ignores_other_guilds(self, detector): + thread = self._make_thread(123, 999, 0) + await detector.on_thread_create(thread) + assert 123 not in detector._thread_creation_messages + + @pytest.mark.asyncio + async def test_starter_deleted_reports_thread(self, detector): + thread_id = 123 + thread = self._make_thread(thread_id, 999, cfg.rocketpool.support.server_id) + detector._thread_creation_messages.add(thread_id) + detector.bot.get_or_fetch_channel = AsyncMock(return_value=thread) + detector.report_thread = AsyncMock() + + await detector._check_thread_starter_deleted(thread_id) + + detector.report_thread.assert_awaited_once_with( + thread, "Attempt to hide thread from main channel" + ) + assert thread_id not in detector._thread_creation_messages - @pytest.mark.parametrize("name", TEST_CASES["threads"]["known_false_negatives"]) - @pytest.mark.xfail(reason="known false negative", strict=True) - def test_known_false_negative(self, name): - assert _check_thread(name), f"Scam thread not detected: {name!r}" + @pytest.mark.asyncio + async def test_starter_deleted_ignores_untracked(self, detector): + detector.report_thread = AsyncMock() + await detector._check_thread_starter_deleted(456) + detector.report_thread.assert_not_awaited() From b697375616a03cc7b5d6864301fe803fd1205f93 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Mar 2026 02:06:36 +0000 Subject: [PATCH 226/279] new scam test cases --- .../plugins/scam_detection/scam_detection.py | 55 ++++++++++++++----- tests/message_samples.json | 27 +++++++++ 2 files changed, 68 insertions(+), 14 deletions(-) diff --git 
a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index d60e2acb..425ffc9d 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -1,6 +1,7 @@ import asyncio import contextlib import io +import json import logging from datetime import UTC, datetime, timedelta from urllib import parse @@ -191,25 +192,20 @@ async def cog_unload(self) -> None: ) @staticmethod - def _get_message_content( - message: Message, *, preserve_formatting: bool = False - ) -> str: + def _get_message_content(message: Message) -> str: text = "" if message.content: content = message.content - if not preserve_formatting: - content = content.replace("\n> ", "") - content = content.replace("\n", "") + content = content.replace("\n> ", "") + content = content.replace("\n", "") text += content + "\n" if message.embeds: for embed in message.embeds: text += f"---\n Embed: {embed.title}\n{embed.description}\n---\n" - if not preserve_formatting: - text = parse.unquote(text) - text = anyascii(text) - text = text.lower() - + text = parse.unquote(text) + text = anyascii(text) + text = text.lower() return text async def _generate_message_report( @@ -247,9 +243,27 @@ async def _generate_message_report( "Please review and take appropriate action." 
) - text = self._get_message_content(message, preserve_formatting=True) - with io.StringIO(text) as f: - attachment = File(f, filename="original_message.txt") + message_structure = json.dumps( + { + "content": message.content, + **( + { + "embeds": [ + { + "title": e.title, + "description": e.description, + } + for e in message.embeds + ] + } + if message.embeds + else {} + ), + }, + indent=2, + ) + with io.StringIO(message_structure) as f: + attachment = File(f, filename="message.json") return warning, report, attachment @@ -450,6 +464,7 @@ def _ticket_system(self, message: Message) -> str | None: ":tickets:", "m0d", "tlcket", + "relate your issue", ), [("relay"), ("query", "question", "inquiry")], [("instant", "live"), "chat"], @@ -470,6 +485,18 @@ def _ticket_system(self, message: Message) -> str | None: content_only_txt = content_txt.split("---")[0] # strip embed text if len(content_only_txt) > 500: return None + + ticket_keywords = [ + ("support", "open", "create", "raise", "raisse"), + "ticket", + ] + # For short messages, also check full text (including embeds) for ticket keywords. + # Scammers use embeds (via X posts, Discord invites) to carry ticket/support language. + # Only use the ticket pattern here; the contact+admin pattern is too broad for embed text + # (e.g. "administration" in news articles matches "admin"). 
+ if len(content_only_txt) <= 200 and self.__txt_contains(txt, ticket_keywords): + return default_reason + trusted_url_domains = ( "youtu.be", "youtube.com", diff --git a/tests/message_samples.json b/tests/message_samples.json index aa971c24..7df6f4ec 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -762,6 +762,33 @@ }, { "content": "**Please share your questions/issues here ⬇️\n \n[ ]**" + }, + { + "content": "[`Use The Url For Support`]👇🏻Here\nhttps://x.com/cheong_lak92763/status/2014614660222222714\\", + "embeds": [ + { + "title": null, + "description": "CREATE A TICKET 👉 https://t.co/CNCQkoT7fC" + } + ] + }, + { + "content": "https://x.com/celiacristina4/status/2018097760872829017", + "embeds": [ + { + "title": null, + "description": "Relate your issues here :https://t.co/TKcJYsG86G" + } + ] + }, + { + "content": "Use the URL below\nhttps://da.gd/supprts so team can assist", + "embeds": [ + { + "title": "Join the SUPPORT TICKET Discord Server!", + "description": "Need help? Open a ticket — an admin or mod will take it from there. | 3235 members" + } + ] } ], "known_false_positives": [], From 9d8ab1e23c6f43c8ac00348a567139b593eac810 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Mar 2026 17:47:57 +0000 Subject: [PATCH 227/279] strip URL before detection --- rocketwatch/plugins/scam_detection/scam_detection.py | 7 ++++--- tests/message_samples.json | 9 +++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 425ffc9d..26820533 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -233,7 +233,7 @@ async def _generate_message_report( text="This message will be deleted once the suspicious message is removed." 
) - report.description += ( + report.description = warning.description + ( "\n" f"User ID: `{message.author.id}` ({message.author.mention})\n" f"Message ID: `{message.id}` ({message.jump_url})\n" @@ -294,7 +294,7 @@ async def _generate_thread_report( ) ) thread_owner = await self.bot.get_or_fetch_user(thread.owner_id) - report.description += ( + report.description = warning.description + ( "\n" f"Thread Name: `{thread.name}`\n" f"User ID: `{thread_owner.id}` ({thread_owner.mention})\n" @@ -470,7 +470,8 @@ def _ticket_system(self, message: Message) -> str | None: [("instant", "live"), "chat"], [("submit"), ("question", "issue", "query")], ) - if self.__txt_contains(txt, strong_keywords): + txt_no_urls = re.sub(r"https?://\S+", "", txt) + if self.__txt_contains(txt_no_urls, strong_keywords): return default_reason # Short directive messages with a URL ("ask here", "get help here") diff --git a/tests/message_samples.json b/tests/message_samples.json index 7df6f4ec..8a21f62c 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -472,6 +472,15 @@ }, { "content": "so, there's a bunch of tibbir stuff happening rn it seems. crossmint and phala are active on their githubs around something called aac - agentic autonomous companies. there's this url that people have been looking at ribbit-aac.com but it seems like there's nothing there rn. there's a vercel link, but it's private - http://ribbit-aac-git-main-ribbita-projects.vercel.app. i'm missing some tweets because twitter messaging sucks so badly. and manu at crossmint tweeting about aac https://x.com/manuwritescode/status/2021104322277249209? \n\nnew article tease from altbro - to be released this week https://x.com/altcoinist/status/2021209833743978940?" + }, + { + "content": "https://fixvx.com/insiderwn/status/2032110843635089792?s=46&t=ZHOm0DA9s3h3Zztwyje1XQ", + "embeds": [ + { + "title": null, + "description": "#BREAKING: Iranian supreme leader confirmed in a coma, and had his leg amputated." 
+ } + ] } ], "unsafe": [ From dabc714206f671c0db9aa3d225f971e44984169c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Mar 2026 18:12:16 +0000 Subject: [PATCH 228/279] refactor: fee_distribution --- .../fee_distribution/fee_distribution.py | 93 +++++++++++-------- 1 file changed, 52 insertions(+), 41 deletions(-) diff --git a/rocketwatch/plugins/fee_distribution/fee_distribution.py b/rocketwatch/plugins/fee_distribution/fee_distribution.py index 562be60d..42ae9851 100644 --- a/rocketwatch/plugins/fee_distribution/fee_distribution.py +++ b/rocketwatch/plugins/fee_distribution/fee_distribution.py @@ -6,6 +6,7 @@ from discord.app_commands import command from discord.ext import commands from matplotlib import pyplot as plt +from matplotlib.figure import Figure from rocketwatch import RocketWatch from utils.embeds import Embed @@ -19,59 +20,54 @@ class FeeDistribution(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - @command() - async def fee_distribution( - self, interaction: Interaction, mode: Literal["tree", "pie"] - ): - """ - Show the distribution of minipool commission percentages. 
- """ - await interaction.response.defer(ephemeral=is_hidden(interaction)) - - e = Embed() - e.title = "Minipool Fee Distribution" - + async def _get_minipools(self, bond: int) -> list[dict]: + result = await self.bot.db.minipools.aggregate( + [ + { + "$match": { + "node_deposit_balance": bond, + "beacon.status": "active_ongoing", + } + }, + { + "$group": { + "_id": {"$round": ["$node_fee", 2]}, + "count": {"$sum": 1}, + } + }, + {"$sort": {"_id": 1}}, + ] + ) + return await result.to_list() + + async def _get_tree(self) -> dict: tree = {} - fig, axs = plt.subplots(1, 2) - - for i, bond in enumerate([8, 16]): - result = await self.bot.db.minipools.aggregate( - [ - { - "$match": { - "node_deposit_balance": bond, - "beacon.status": "active_ongoing", - } - }, - { - "$group": { - "_id": {"$round": ["$node_fee", 2]}, - "count": {"$sum": 1}, - } - }, - {"$sort": {"_id": 1}}, - ] - ) + for bond in (8, 16): + subtree = {} + for entry in await self._get_minipools(bond): + fee_percentage = entry["_id"] * 100 + subtree[f"{fee_percentage:.0f}%"] = entry["count"] + tree[f"{bond} ETH"] = subtree + return tree + async def _get_pie(self) -> Figure: + fig, axs = plt.subplots(1, 2) + for i, bond in enumerate((8, 16)): labels = [] sizes = [] - subtree = {} - for entry in await result.to_list(): + for entry in await self._get_minipools(bond): fee_percentage = entry["_id"] * 100 labels.append(f"{fee_percentage:.0f}%") sizes.append(entry["count"]) - subtree[labels[-1]] = sizes[-1] - ax = axs[i] total = sum(sizes) - tree[f"{bond} ETH"] = subtree - # avoid overlapping labels for small slices - for i in range(len(sizes)): - if sizes[i] < 0.05 * total: - labels[i] = "" + for j in range(len(sizes)): + if sizes[j] < 0.05 * total: + labels[j] = "" + ax = axs[i] ax.set_title(f"{bond} ETH") ax.pie( sizes, @@ -80,12 +76,27 @@ async def fee_distribution( f"{p * _total / 100:.0f}" if (p >= 5) else "" ), ) + return fig + + @command() + async def fee_distribution( + self, interaction: Interaction, 
mode: Literal["tree", "pie"] = "pie" + ): + """ + Show the distribution of minipool commission percentages. + """ + await interaction.response.defer(ephemeral=is_hidden(interaction)) + + e = Embed() + e.title = "Minipool Fee Distribution" if mode == "tree": + tree = await self._get_tree() e.description = f"```\n{render_tree_legacy(tree, 'Minipools')}\n```" await interaction.followup.send(embed=e) elif mode == "pie": img = BytesIO() + fig = await self._get_pie() fig.tight_layout() fig.savefig(img, format="png") img.seek(0) From ddcb2951db3f01afd315a19f82d032cbd5f9c652 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 16 Mar 2026 20:23:05 +0000 Subject: [PATCH 229/279] consolidate retry --- .../plugins/beacon_events/beacon_events.py | 4 +-- rocketwatch/plugins/forum/forum.py | 8 ++--- rocketwatch/plugins/rewards/rewards.py | 4 +-- rocketwatch/plugins/rpips/rpips.py | 6 ++-- rocketwatch/plugins/snapshot/snapshot.py | 4 +-- rocketwatch/rocketwatch.py | 6 ++-- rocketwatch/utils/embeds.py | 4 +-- rocketwatch/utils/liquidity.py | 4 +-- rocketwatch/utils/retry.py | 36 ++++++------------- 9 files changed, 30 insertions(+), 46 deletions(-) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 09b3b00d..64ca32ac 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -13,7 +13,7 @@ from utils.embeds import assemble, prepare_args from utils.event import Event, EventPlugin from utils.readable import cl_explorer_url -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 from utils.solidity import beacon_block_to_date, date_to_beacon_block @@ -144,7 +144,7 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: return events - @retry_async(tries=5, delay=10, backoff=2, max_delay=30) + @retry(tries=5, delay=10, 
backoff=2, max_delay=30) async def _get_proposal(self, beacon_block: dict) -> Event | None: if not (payload := beacon_block["body"].get("execution_payload")): # no proposed block diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 7b7a903a..8901877d 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -10,7 +10,7 @@ from rocketwatch import RocketWatch from utils.embeds import Embed -from utils.retry import retry_async +from utils.retry import retry from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.forum") @@ -81,7 +81,7 @@ def datetime_to_epoch(_dt: str) -> int: return topics @staticmethod - @retry_async(tries=3, delay=2, backoff=2) + @retry(tries=3, delay=2, backoff=2) async def get_popular_topics(period: Period) -> list[Topic]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/top.json?period={period}") @@ -90,7 +90,7 @@ async def get_popular_topics(period: Period) -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=2, backoff=2) + @retry(tries=3, delay=2, backoff=2) async def get_recent_topics() -> list[Topic]: async with aiohttp.ClientSession() as session: response = await session.get(f"{Forum.DOMAIN}/latest.json") @@ -99,7 +99,7 @@ async def get_recent_topics() -> list[Topic]: return Forum._parse_topics(data["topic_list"]["topics"]) @staticmethod - @retry_async(tries=3, delay=2, backoff=2) + @retry(tries=3, delay=2, backoff=2) async def get_top_users(period: Period, order_by: UserMetric) -> list[User]: async with aiohttp.ClientSession() as session: response = await session.get( diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 446e098b..0ab1659a 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -13,7 +13,7 @@ from utils import solidity from 
utils.block_time import ts_to_block from utils.embeds import Embed, resolve_ens -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp log = logging.getLogger("rocketwatch.rewards") @@ -35,7 +35,7 @@ class RewardEstimate: eth_rewards: float system_weight: float - @retry_async(tries=3, delay=1) + @retry(tries=3, delay=1) async def _make_request(self, address) -> dict: async with aiohttp.ClientSession() as session: response = await session.get(f"https://sprocketpool.net/api/node/{address}") diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 0ec29dea..290277df 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -9,7 +9,7 @@ from rocketwatch import RocketWatch from utils.embeds import Embed -from utils.retry import retry_async +from utils.retry import retry log = logging.getLogger("rocketwatch.rpips") @@ -65,7 +65,7 @@ def __str__(self) -> str: return self.full_title @cached(ttl=300, key_builder=lambda _, rpip: rpip.number) - @retry_async(tries=3, delay=1) + @retry(tries=3, delay=1) async def fetch_details(self) -> dict: async with ( aiohttp.ClientSession() as session, @@ -115,7 +115,7 @@ async def _get_rpip_names( @staticmethod @cached(ttl=60) - @retry_async(tries=3, delay=1) + @retry(tries=3, delay=1) async def get_all_rpips() -> list["RPIPs.RPIP"]: async with ( aiohttp.ClientSession() as session, diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index e6897956..032e07da 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -20,7 +20,7 @@ from utils.event import Event, EventPlugin from utils.image import Color, FontVariant, Image, ImageCanvas from utils.readable import pretty_time -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp from utils.visibility import is_hidden @@ -34,7 +34,7 @@ def 
__init__(self, bot: RocketWatch): self.vote_db = bot.db.snapshot_votes @staticmethod - @retry_async(tries=3, delay=1) + @retry(tries=3, delay=1) async def _query_api(query: Query) -> list[dict] | dict | None: query_json = {"query": Operation(type="query", queries=[query]).render()} log.debug(f"Snapshot query: {query_json}") diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index b8305f0f..e927503e 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -17,7 +17,7 @@ from utils.command_tree import RWCommandTree from utils.config import cfg -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp log = logging.getLogger("rocketwatch.bot") @@ -143,8 +143,6 @@ async def report_error( try: channel = await self.get_or_fetch_channel(cfg.discord.channels["errors"]) file = File(io.StringIO(err_trace), "exception.txt") - await retry_async(tries=5, delay=5)(channel.send)( - err_description, file=file - ) + await retry(tries=5, delay=5)(channel.send)(err_description, file=file) except Exception: log.exception("Failed to send message. 
Max retries reached.") diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index be2ff2d8..a111cffc 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -19,7 +19,7 @@ from utils.cached_ens import CachedEns from utils.config import cfg from utils.readable import advanced_tnx_url, cl_explorer_url, s_hex -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp from utils.sea_creatures import get_sea_creature_for_address from utils.shared_w3 import w3 @@ -80,7 +80,7 @@ async def resolve_ens(interaction, node_address): @cached(ttl=900) -@retry_async(tries=3, delay=1) +@retry(tries=3, delay=1) async def get_pdao_delegates() -> dict[str, str]: global _pdao_delegates try: diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index 00b8b279..f5b055a3 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -9,7 +9,7 @@ import numpy as np from eth_typing import ChecksumAddress, HexStr -from utils.retry import retry_async +from utils.retry import retry from utils.rocketpool import rp from utils.shared_w3 import w3 @@ -70,7 +70,7 @@ def _get_asks(self, api_response: dict) -> dict[float, float]: """Extract mapping of price to major-denominated ask liquidity from API response""" pass - @retry_async(tries=3, delay=1) + @retry(tries=3, delay=1) async def _get_order_book( self, market: Market, session: aiohttp.ClientSession ) -> tuple[dict[float, float], dict[float, float]]: diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 62f78941..0203918d 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -1,3 +1,4 @@ +import inspect from collections.abc import Callable from typing import Any @@ -13,29 +14,14 @@ def retry( max_delay: float | None = None, backoff: float = 1, ) -> Callable[..., Any]: - return __retry( - exceptions, - is_async=False, - tries=tries, - delay=delay, - max_delay=max_delay, - 
backoff=backoff, - ) + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + return __retry( + exceptions, + is_async=inspect.iscoroutinefunction(func), + tries=tries, + delay=delay, + max_delay=max_delay, + backoff=backoff, + )(func) - -def retry_async( - exceptions: EXCEPTIONS = Exception, - *, - tries: int = -1, - delay: float = 0, - max_delay: float | None = None, - backoff: float = 1, -) -> Callable[..., Any]: - return __retry( - exceptions, - is_async=True, - tries=tries, - delay=delay, - max_delay=max_delay, - backoff=backoff, - ) + return decorator From b1211985157ac14cc5c180063571743c39f7fbe0 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 19 Mar 2026 05:40:34 +0000 Subject: [PATCH 230/279] fix user distribution calldata --- rocketwatch/plugins/user_distribute/user_distribute.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 9bea3f4b..f3220390 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -39,8 +39,8 @@ async def instructions(self, interaction: Interaction, _) -> None: )[2:] ) - calls = [(mp["address"], True, dist_calldata) for mp in self.distributable] - calls += [(mp["address"], True, bud_calldata) for mp in self.eligible] + calls = [(mp["address"], False, dist_calldata) for mp in self.distributable] + calls += [(mp["address"], False, bud_calldata) for mp in self.eligible] multicall_contract = await rp.get_contract_by_name("multicall3") gas_used = await multicall_contract.functions.aggregate3(calls).estimate_gas() @@ -50,7 +50,7 @@ async def instructions(self, interaction: Interaction, _) -> None: tuple_strs = [] for address, allow_failure, calldata in calls: tuple_strs.append( - f'["{address}", {str(allow_failure).lower()}, 0x{calldata.hex()}]' + f'["{address}", 
{str(allow_failure).lower()}, "0x{calldata.hex()}"]' ) input_data = "[" + ",".join(tuple_strs) + "]" From f37f624ddc3c2e5178d1ec712a63d46946e71075 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 19 Mar 2026 12:10:18 +0000 Subject: [PATCH 231/279] add check for already distributed pools --- rocketwatch/plugins/user_distribute/user_distribute.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index f3220390..cb7e60c1 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -163,8 +163,11 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: mp["address"] = w3.to_checksum_address(mp["address"]) storage = await w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time: int = int.from_bytes(storage, "big") - elapsed_time = current_time - user_distribute_time + if user_distribute_time == 0: + continue # already distributed + + elapsed_time = current_time - user_distribute_time if elapsed_time >= ud_window_end: eligible.append(mp) elif elapsed_time < ud_window_start: From 18bc1283d725562b7ab51807157bb39e3e1ec8f6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Thu, 19 Mar 2026 12:37:15 +0000 Subject: [PATCH 232/279] fix user distribute checks --- .../user_distribute/user_distribute.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index cb7e60c1..1eb87a81 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -163,22 +163,25 @@ async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: mp["address"] = 
w3.to_checksum_address(mp["address"]) storage = await w3.eth.get_storage_at(mp["address"], 0x17) user_distribute_time: int = int.from_bytes(storage, "big") - - if user_distribute_time == 0: - continue # already distributed - elapsed_time = current_time - user_distribute_time - if elapsed_time >= ud_window_end: - eligible.append(mp) - elif elapsed_time < ud_window_start: + + if elapsed_time < ud_window_start: mp["ud_window_open"] = user_distribute_time + ud_window_start pending.append(mp) - # double check, DB may lag behind - elif not await rp.call( - "rocketMinipoolDelegate.getUserDistributed", address=mp["address"] - ): + elif elapsed_time < ud_window_end: mp["ud_window_close"] = user_distribute_time + ud_window_end distributable.append(mp) + else: + # double check, DB may lag behind + minipool_contract = await rp.assemble_contract( + "rocketMinipool", address=mp["address"] + ) + if await minipool_contract.functions.getUserDistributed().call(): + continue + if await minipool_contract.functions.getFinalised().call(): + continue + + eligible.append(mp) pending.sort(key=itemgetter("ud_window_open")) distributable.sort(key=itemgetter("ud_window_close")) From 99043876584968d0d26cf0fa6ea7d3120b27fb86 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 00:14:55 +0000 Subject: [PATCH 233/279] tweak wording on user distribute notification --- rocketwatch/plugins/user_distribute/user_distribute.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 1eb87a81..4df52cdc 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -110,7 +110,7 @@ async def task(self): next_window_close = distributable[0]["ud_window_close"] embed.description = ( f"There {'are' if count != 1 else 'is'} **{count}**" - f" minipool{'s' if count != 1 
else ''} eligible for distribution.\n" + f" minipool{'s' if count != 1 else ''} ready for distribution.\n" f"The next window closes !" ) @@ -234,7 +234,7 @@ async def user_distribute_status(self, interaction: Interaction): await interaction.followup.send( embed=embed, view=InstructionsView( - eligible[:50], distributable[:100], instruction_timeout=300 + eligible[:50], distributable[:100], instruction_timeout=1800 ), ) else: From 3af822966129360b1b4a307ba022014f60df0f79 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 11:09:39 +0000 Subject: [PATCH 234/279] add test coverage --- .github/workflows/test.yml | 5 ++++- README.md | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c00138c0..d526c1ce 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,4 +12,7 @@ jobs: steps: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v7 - - run: uv run --python 3.14 --extra test pytest + - run: uv run --python 3.14 --extra test pytest --cov=rocketwatch --cov-report=term-missing --cov-report=xml + - uses: codecov/codecov-action@v5 + with: + files: coverage.xml diff --git a/README.md b/README.md index 40c6dfec..e8c579f0 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,8 @@ # Rocket Watch +[![Test](https://github.com/haloooloolo/rocketwatch/actions/workflows/test.yml/badge.svg)](https://github.com/haloooloolo/rocketwatch/actions/workflows/test.yml) +[![codecov](https://codecov.io/gh/haloooloolo/rocketwatch/graph/badge.svg)](https://codecov.io/gh/haloooloolo/rocketwatch) + A Discord bot that monitors and reports on [Rocket Pool](https://rocketpool.net) protocol activity across the Ethereum execution and consensus layers. 
## Features From 90e539915ce046a0ff6c9769575c2d66e4ddd68d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 11:10:46 +0000 Subject: [PATCH 235/279] first step for type checking --- .github/workflows/lint.yml | 8 ++ pyproject.toml | 36 +++++++ rocketwatch/__init__.pyi | 1 + rocketwatch/plugins/call/call.py | 6 +- rocketwatch/plugins/debug/debug.py | 8 +- rocketwatch/plugins/events/events.py | 5 +- rocketwatch/plugins/random/random.py | 10 +- rocketwatch/rocketwatch.py | 16 +-- rocketwatch/utils/block_time.py | 4 +- rocketwatch/utils/command_tree.py | 35 +++++-- rocketwatch/utils/dao.py | 16 ++- rocketwatch/utils/embeds.py | 3 +- rocketwatch/utils/event.py | 6 +- rocketwatch/utils/event_logs.py | 2 +- rocketwatch/utils/file.py | 7 ++ rocketwatch/utils/liquidity.py | 23 +++-- rocketwatch/utils/readable.py | 7 +- rocketwatch/utils/retry.py | 2 +- rocketwatch/utils/rocketpool.py | 149 +++++++++++++++++---------- rocketwatch/utils/status.py | 6 +- rocketwatch/utils/views.py | 2 +- rocketwatch/utils/visibility.py | 2 +- 22 files changed, 237 insertions(+), 117 deletions(-) create mode 100644 rocketwatch/__init__.pyi create mode 100644 rocketwatch/utils/file.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index eed06f94..6182775f 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,3 +19,11 @@ jobs: with: args: "format --check" src: "rocketwatch" + typecheck: + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v6 + - uses: astral-sh/setup-uv@v6 + - run: uv sync --extra dev + - run: uv run mypy rocketwatch/ diff --git a/pyproject.toml b/pyproject.toml index 991c7bef..61c24083 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,12 +43,48 @@ dependencies = [ test = [ "pytest>=8.0", "pytest-asyncio>=1.0", + "pytest-cov>=6.0", +] +dev = [ + "mypy>=1.10", + "types-cachetools>=5.0", + "types-pytz>=2024.0", + "types-tabulate>=0.9", ] 
[tool.pytest.ini_options] asyncio_mode = "auto" testpaths = ["tests"] +[tool.mypy] +python_version = "3.12" +mypy_path = "." +explicit_package_bases = true +ignore_missing_imports = true +# Start lenient, tighten over time +check_untyped_defs = true +warn_return_any = false +warn_unused_ignores = true +# Disabled until the majority of code is annotated +disallow_untyped_defs = false +disallow_incomplete_defs = false + +# Heavy web3 MutableAttributeDict usage — almost all errors are false positives +[[tool.mypy.overrides]] +module = [ + "rocketwatch.plugins.events.*", + "rocketwatch.plugins.transactions.*", +] +disable_error_code = ["attr-defined", "union-attr", "index"] + +# Matplotlib/numpy stub gaps +[[tool.mypy.overrides]] +module = [ + "rocketwatch.plugins.apr.*", + "rocketwatch.plugins.proposals.*", +] +disable_error_code = ["attr-defined"] + [tool.ruff] target-version = "py312" diff --git a/rocketwatch/__init__.pyi b/rocketwatch/__init__.pyi new file mode 100644 index 00000000..38046386 --- /dev/null +++ b/rocketwatch/__init__.pyi @@ -0,0 +1 @@ +from rocketwatch.rocketwatch import RocketWatch as RocketWatch diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 59ef433a..6095bee0 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -1,16 +1,16 @@ import contextlib -import io import json import logging import humanize -from discord import File, Interaction +from discord import Interaction from discord.app_commands import Choice, command, describe from discord.ext.commands import Cog from discord.ui import Modal, TextInput from rocketwatch import RocketWatch from utils import solidity +from utils.file import TextFile from utils.rocketpool import rp from utils.shared_w3 import w3 from utils.visibility import is_hidden_role_controlled @@ -177,7 +177,7 @@ async def _execute_call( if len(text + str(v)) > 2000: text += "too long, attached as file`" await interaction.followup.send( - text, 
file=File(io.StringIO(str(v)), "exception.txt") + text, file=TextFile(str(v), "exception.txt") ) else: text += f"{v!s}`" diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index e5d6088a..270398ad 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -1,16 +1,16 @@ -import io import logging import random import time from datetime import UTC -from discord import File, Interaction +from discord import Interaction from discord.app_commands import command, guilds from discord.ext.commands import Cog, is_owner from rocketwatch import RocketWatch from utils.config import cfg from utils.embeds import Embed +from utils.file import TextFile from utils.rocketpool import rp from utils.shared_w3 import w3 @@ -54,7 +54,7 @@ async def get_members_of_role( f"Members of {role.name} ({role.id}) in {guild.name} ({guild.id})\n\n" + "\n".join(members) ) - file = File(io.StringIO(content), "members.txt") + file = TextFile(content, "members.txt") await interaction.followup.send(file=file) except Exception as err: await interaction.followup.send(content=f"```{err!r}```") @@ -73,7 +73,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): roles = [f"{role.name}, ({role.id})" for role in guild.roles] # generate a file with a header that mentions what role and guild the members are from content = f"Roles of {guild.name} ({guild.id})\n\n" + "\n".join(roles) - file = File(io.StringIO(content), filename="roles.txt") + file = TextFile(content, "roles.txt") await interaction.followup.send(file=file) except Exception as err: await interaction.followup.send(content=f"```{err!r}```") diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 6b76f9a4..3a70e8b5 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -10,6 +10,7 @@ from discord.ext.commands import is_owner from eth_typing.evm import BlockNumber, ChecksumAddress from 
hexbytes import HexBytes +from web3.constants import ADDRESS_ZERO from web3.datastructures import MutableAttributeDict as aDict from web3.logs import DISCARD from web3.types import EventData, LogReceipt @@ -932,7 +933,7 @@ def share_repr(percentage: float) -> str: args.upgradeProposalID, block=event.blockNumber, ) - if args.contractAddress == "0x0000000000000000000000000000000000000000": + if args.contractAddress == ADDRESS_ZERO: del args.contractAddress event_name = "upgrade_pending_abi_event" elif event_name == "sdao_upgrade_vetoed_event": @@ -974,7 +975,7 @@ def share_repr(percentage: float) -> str: megapool_address = await rp.call( "rocketNodeManager.getMegapoolAddress", args.node ) - if megapool_address != "0x0000000000000000000000000000000000000000": + if megapool_address != ADDRESS_ZERO: validator_count += await rp.call( "rocketMegapoolDelegate.getActiveValidatorCount", address=megapool_address, diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 02d45963..ba07cbf7 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -1,4 +1,3 @@ -import io import logging from datetime import datetime @@ -6,7 +5,7 @@ import dice import humanize import pytz -from discord import File, Interaction +from discord import Interaction from discord.app_commands import Choice, command from discord.ext import commands @@ -15,6 +14,7 @@ from utils.block_time import block_to_ts, ts_to_block from utils.config import cfg from utils.embeds import Embed, el_explorer_url, ens +from utils.file import TextFile from utils.readable import prettify_json_string, pretty_time, s_hex from utils.rocketpool import rp from utils.sea_creatures import ( @@ -46,7 +46,7 @@ async def dice(self, interaction: Interaction, dice_string: str = "1d6"): e.title = f"🎲 {dice_string}" if len(str(result)) >= 2000: e.description = "Result too long to display, attaching as file." 
- file = File(io.StringIO(str(result)), filename="dice_result.txt") + file = TextFile(str(result), "dice_result.txt") await interaction.followup.send(embed=e, file=file) else: e.description = f"Result: `{result}`" @@ -377,9 +377,7 @@ async def get_abi_of_contract(self, interaction: Interaction, contract: str): ) try: abi = prettify_json_string(await rp.uncached_get_abi_by_name(contract)) - file = File( - io.StringIO(abi), f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json" - ) + file = TextFile(abi, f"{contract}.{cfg.rocketpool.chain.lower()}.abi.json") await interaction.followup.send(file=file) except Exception as err: await interaction.followup.send(content=f"```Exception: {err!r}```") diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index e927503e..77b9fbac 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -1,22 +1,22 @@ -import io import logging import traceback from pathlib import Path from discord import ( - File, Guild, Intents, Interaction, Thread, User, ) -from discord.abc import GuildChannel, PrivateChannel +from discord.abc import GuildChannel, Messageable, PrivateChannel from discord.ext.commands import Bot from pymongo import AsyncMongoClient +from pymongo.asynchronous.database import AsyncDatabase from utils.command_tree import RWCommandTree from utils.config import cfg +from utils.file import TextFile from utils.retry import retry from utils.rocketpool import rp @@ -26,7 +26,7 @@ class RocketWatch(Bot): def __init__(self, intents: Intents) -> None: super().__init__(command_prefix=(), tree_cls=RWCommandTree, intents=intents) - self.db = AsyncMongoClient(cfg.mongodb.uri).rocketwatch + self.db: AsyncDatabase = AsyncMongoClient(cfg.mongodb.uri).rocketwatch async def _load_plugins(self): chain = cfg.rocketpool.chain @@ -84,6 +84,7 @@ def clear_commands(self) -> None: self.tree.clear_commands(guild=guild) async def on_ready(self): + assert self.user is not None log.info(f"Logged in as {self.user.name} 
({self.user.id})") commands_enabled = cfg.modules.enable_commands if not commands_enabled: @@ -142,7 +143,10 @@ async def report_error( try: channel = await self.get_or_fetch_channel(cfg.discord.channels["errors"]) - file = File(io.StringIO(err_trace), "exception.txt") + file = TextFile(err_trace, "exception.txt") + assert isinstance(channel, Messageable), ( + f"Error channel {channel} is not messageable" + ) await retry(tries=5, delay=5)(channel.send)(err_description, file=file) except Exception: - log.exception("Failed to send message. Max retries reached.") + log.exception("Failed to send message.") diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 84217647..5e9b7ccd 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -2,6 +2,7 @@ import math from aiocache import cached +from web3.types import BlockData from utils.shared_w3 import w3 @@ -10,7 +11,8 @@ @cached() async def block_to_ts(block_number: int) -> int: - return (await w3.eth.get_block(block_number)).timestamp + block: BlockData = await w3.eth.get_block(block_number) + return block.get("timestamp", 0) async def ts_to_block(target_ts: int) -> int: diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index 865b87b9..52a6fb2f 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -1,5 +1,6 @@ import logging from datetime import datetime +from typing import TYPE_CHECKING from discord import Interaction from discord.app_commands import AppCommandError, CommandTree @@ -14,19 +15,28 @@ from utils.config import cfg +if TYPE_CHECKING: + from rocketwatch.rocketwatch import RocketWatch + log = logging.getLogger("rocketwatch.command_tree") -class RWCommandTree(CommandTree): - async def _call(self, interaction: Interaction) -> None: +def _channel_name(interaction: Interaction) -> str: + return getattr(interaction.channel, "name", None) or "DM" + + +class 
RWCommandTree(CommandTree["RocketWatch"]): + async def _call(self, interaction: Interaction["RocketWatch"]) -> None: if not cfg.modules.enable_commands: return cmd_name = interaction.command.name if interaction.command else "unknown" timestamp = datetime.utcnow() + channel_name = _channel_name(interaction) + log.info( - f"/{cmd_name} triggered by {interaction.user} in #{interaction.channel.name} ({interaction.guild})" + f"/{cmd_name} triggered by {interaction.user} in #{channel_name} ({interaction.guild})" ) try: await self.client.db.command_metrics.insert_one( @@ -48,8 +58,10 @@ async def _call(self, interaction: Interaction) -> None: else None, "channel": { "id": interaction.channel.id, - "name": interaction.channel.name, - }, + "name": channel_name, + } + if interaction.channel + else None, "timestamp": timestamp, "status": "pending", } @@ -62,7 +74,7 @@ async def _call(self, interaction: Interaction) -> None: await super()._call(interaction) except Exception as error: log.info( - f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed" + f"/{cmd_name} called by {interaction.user} in #{channel_name} ({interaction.guild}) failed" ) try: await self.client.db.command_metrics.update_one( @@ -82,7 +94,7 @@ async def _call(self, interaction: Interaction) -> None: log.info( f"/{cmd_name} called by {interaction.user} in" - f" #{interaction.channel.name} ({interaction.guild}) completed successfully" + f" #{channel_name} ({interaction.guild}) completed successfully" ) try: await self.client.db.command_metrics.update_one( @@ -98,10 +110,13 @@ async def _call(self, interaction: Interaction) -> None: log.error(f"Failed to update command status to completed: {e}") await self.client.report_error(e) - async def on_error(self, interaction: Interaction, error: AppCommandError) -> None: + async def on_error( + self, interaction: Interaction["RocketWatch"], error: AppCommandError + ) -> None: cmd_name = interaction.command.name if 
interaction.command else "unknown" + channel_name = _channel_name(interaction) log.error( - f"/{cmd_name} called by {interaction.user} in #{interaction.channel.name} ({interaction.guild}) failed" + f"/{cmd_name} called by {interaction.user} in #{channel_name} ({interaction.guild}) failed" ) if isinstance(error, CommandOnCooldown): @@ -124,5 +139,3 @@ async def on_error(self, interaction: Interaction, error: AppCommandError) -> No await interaction.followup.send(content=msg, ephemeral=True) except Exception: log.exception("Failed to alert user") - - await self.client.on_app_command_error(interaction, error) diff --git a/rocketwatch/utils/dao.py b/rocketwatch/utils/dao.py index 40540d8f..61e3c490 100644 --- a/rocketwatch/utils/dao.py +++ b/rocketwatch/utils/dao.py @@ -145,7 +145,9 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: ] ) - proposals = {state: [] for state in DefaultDAO.ProposalState} + proposals: dict[DefaultDAO.ProposalState, list[int]] = { + state: [] for state in DefaultDAO.ProposalState + } for proposal_id, state in zip( relevant_proposals, proposal_states, strict=False ): @@ -194,7 +196,8 @@ async def fetch_proposal(self, proposal_id: int) -> Proposal: votes_required=solidity.to_float(votes_required_raw), ) - def _build_vote_graph(self, proposal: Proposal) -> str: + def _build_vote_graph(self, proposal: DAO.Proposal) -> str: + assert isinstance(proposal, DefaultDAO.Proposal) votes_for = proposal.votes_for votes_against = proposal.votes_against votes_required = math.ceil(proposal.votes_required) @@ -267,7 +270,9 @@ async def get_proposal_ids_by_state(self) -> dict[ProposalState, list[int]]: ] ) - proposals = {state: [] for state in ProtocolDAO.ProposalState} + proposals: dict[ProtocolDAO.ProposalState, list[int]] = { + state: [] for state in ProtocolDAO.ProposalState + } for proposal_id in range(1, num_proposals + 1): state = proposal_states[proposal_id - 1] proposals[state].append(proposal_id) @@ -327,7 +332,8 @@ async 
def fetch_proposal(self, proposal_id: int) -> Proposal: veto_quorum=solidity.to_float(veto_quorum_raw), ) - def _build_vote_graph(self, proposal: Proposal) -> str: + def _build_vote_graph(self, proposal: DAO.Proposal) -> str: + assert isinstance(proposal, ProtocolDAO.Proposal) graph = tpl.figure() graph.barh( [ @@ -341,7 +347,7 @@ def _build_vote_graph(self, proposal: Proposal) -> str: ) main_quorum_perc = proposal.votes_total / proposal.quorum - lines = graph.get_string().split("\n")[:-1] + lines = str(graph.get_string()).split("\n")[:-1] lines.append( f"Quorum: {main_quorum_perc:.2%}{' ✔' if (main_quorum_perc >= 1) else ''}" ) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index a111cffc..5124a6a7 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -12,6 +12,7 @@ from ens import InvalidName from eth_typing import BlockIdentifier from etherscan_labels import Addresses +from web3.constants import ADDRESS_ZERO from strings import _ from utils import solidity @@ -119,7 +120,7 @@ async def el_explorer_url( megapool_address = await rp.call( "rocketNodeManager.getMegapoolAddress", target ) - if megapool_address != "0x0000000000000000000000000000000000000000": + if megapool_address != ADDRESS_ZERO: url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" if await rp.call( "rocketNodeManager.getSmoothingPoolRegistrationState", diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index cd1e94e6..16e1986d 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -1,11 +1,15 @@ +from __future__ import annotations + from abc import abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta +from typing import TYPE_CHECKING from discord.ext import commands from eth_typing import BlockNumber -from rocketwatch import RocketWatch +if TYPE_CHECKING: + from rocketwatch.rocketwatch import RocketWatch from utils.config import cfg from utils.embeds 
import Embed from utils.image import Image diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 77a8e083..57f4471b 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -29,7 +29,7 @@ async def get_logs( argument_filters=arg_filters, ) ) - chunk_start = chunk_end + 1 + chunk_start = BlockNumber(chunk_end + 1) results = await asyncio.gather(*tasks) return [log_entry for chunk in results for log_entry in chunk] diff --git a/rocketwatch/utils/file.py b/rocketwatch/utils/file.py new file mode 100644 index 00000000..67acc704 --- /dev/null +++ b/rocketwatch/utils/file.py @@ -0,0 +1,7 @@ +import io + +from discord import File + + +def TextFile(content: str, filename: str) -> File: + return File(io.BytesIO(content.encode()), filename) diff --git a/rocketwatch/utils/liquidity.py b/rocketwatch/utils/liquidity.py index f5b055a3..134278f8 100644 --- a/rocketwatch/utils/liquidity.py +++ b/rocketwatch/utils/liquidity.py @@ -2,7 +2,7 @@ import math from abc import ABC, abstractmethod from collections import OrderedDict -from collections.abc import Callable +from collections.abc import Callable, Sequence from dataclasses import dataclass import aiohttp @@ -696,7 +696,7 @@ async def get_normalized_price(self) -> float: async def get_liquidity(self) -> Liquidity | None: pass - def __init__(self, pools: list[LiquidityPool]): + def __init__(self, pools: Sequence[LiquidityPool]): self.pools = pools async def get_liquidity(self) -> dict[LiquidityPool, Liquidity]: @@ -771,7 +771,7 @@ class UniswapV3(DEX): MAX_TICK = 887_272 @staticmethod - def tick_to_price(tick: int) -> float: + def tick_to_price(tick: float) -> float: return 1.0001**tick @staticmethod @@ -845,7 +845,7 @@ async def get_initialized_ticks(self, current_tick: int) -> list[int]: return ticks def liquidity_to_tokens( - self, liquidity: int, tick_lower: int, tick_upper: int + self, liquidity: float, tick_lower: float, tick_upper: float ) -> tuple[float, float]: 
sqrtp_lower = math.sqrt(UniswapV3.tick_to_price(tick_lower)) sqrtp_upper = math.sqrt(UniswapV3.tick_to_price(tick_upper)) @@ -882,7 +882,7 @@ async def get_liquidity(self) -> Liquidity | None: log.debug(f"Found {len(ticks)} initialized ticks!") async def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: - cumulative_liquidity = 0 + cumulative_liquidity: float = 0 last_tick = calculated_tick active_liquidity = initial_liquidity @@ -910,19 +910,20 @@ async def get_cumulative_liquidity(_ticks: list[int]) -> list[float]: return liquidity - ask_ticks = [t for t in reversed(ticks) if t <= current_tick] + [ + _ask_ticks = [t for t in reversed(ticks) if t <= current_tick] + [ UniswapV3.MIN_TICK ] - ask_liquidity = [0] + await get_cumulative_liquidity(ask_ticks) - ask_ticks.insert(0, calculated_tick) + ask_liquidity = [0.0] + await get_cumulative_liquidity(_ask_ticks) + ask_ticks: list[int | float] = [calculated_tick, *_ask_ticks] - bid_ticks = [t for t in ticks if t > current_tick] + [UniswapV3.MAX_TICK] - bid_liquidity = [0] + await get_cumulative_liquidity(bid_ticks) - bid_ticks.insert(0, calculated_tick) + _bid_ticks = [t for t in ticks if t > current_tick] + [UniswapV3.MAX_TICK] + bid_liquidity = [0.0] + await get_cumulative_liquidity(_bid_ticks) + bid_ticks: list[int | float] = [calculated_tick, *_bid_ticks] balance_norm = 10 ** (self.token_1.decimals - self.token_0.decimals) def depth_at(_price: float) -> float: + tick: float if _price <= 0: tick = UniswapV3.MAX_TICK else: diff --git a/rocketwatch/utils/readable.py b/rocketwatch/utils/readable.py index 9e86e1ed..09663c15 100644 --- a/rocketwatch/utils/readable.py +++ b/rocketwatch/utils/readable.py @@ -14,7 +14,7 @@ def prettify_json_string(data): return json.dumps(json.loads(data), indent=4) -def decode_abi(compressed_string): +def decode_abi(compressed_string: str) -> str: decompress = zlib.decompressobj(15) data = base64.b64decode(compressed_string) inflated = decompress.decompress(data) @@ -168,15 
+168,14 @@ def render_tree(data: dict, name: str, max_depth: int = 0) -> str: strict=False, ), ) - max_right_len, max_left_len = [], [] # longest string offset per depth - max_left_len = max( + max_left_len: int = max( max(len(s) for s, d in zip(lines, depths, strict=False) if d == depth) for depth in set(depths) ) # same for right - max_right_len = max( + max_right_len: int = max( max(len(str(v)) for v, d in zip(values, depths, strict=False) if d == depth) for depth in set(depths) ) diff --git a/rocketwatch/utils/retry.py b/rocketwatch/utils/retry.py index 0203918d..a39339e1 100644 --- a/rocketwatch/utils/retry.py +++ b/rocketwatch/utils/retry.py @@ -20,7 +20,7 @@ def decorator(func: Callable[..., Any]) -> Callable[..., Any]: is_async=inspect.iscoroutinefunction(func), tries=tries, delay=delay, - max_delay=max_delay, + max_delay=max_delay, # pyright: ignore[reportArgumentType] backoff=backoff, )(func) diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index 1c689bfe..e33d750e 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -1,12 +1,15 @@ import logging import os from pathlib import Path -from typing import Any +from typing import Any, cast -import eth_abi from bidict import bidict from cachetools import FIFOCache +from eth_abi import abi from eth_typing import BlockIdentifier, ChecksumAddress +from web3.constants import ADDRESS_ZERO +from web3.contract import AsyncContract +from web3.contract.async_contract import AsyncContractFunction from web3.exceptions import ContractLogicError from utils import solidity @@ -22,12 +25,12 @@ class NoAddressFound(Exception): class RocketPool: - ADDRESS_CACHE = FIFOCache(maxsize=2048) - ABI_CACHE = FIFOCache(maxsize=2048) - CONTRACT_CACHE = FIFOCache(maxsize=2048) + ADDRESS_CACHE: FIFOCache[str, ChecksumAddress] = FIFOCache(maxsize=2048) + ABI_CACHE: FIFOCache[str, str] = FIFOCache(maxsize=2048) + CONTRACT_CACHE: FIFOCache[tuple, AsyncContract] = 
FIFOCache(maxsize=2048) def __init__(self): - self.addresses = bidict() + self.addresses: bidict[str, ChecksumAddress] = bidict() self._multicall = None async def async_init(self): @@ -44,7 +47,7 @@ async def flush(self): async def _init_contract_addresses(self) -> None: manual_addresses = cfg.rocketpool.manual_addresses for name, address in manual_addresses.items(): - self.addresses[name] = address + self.addresses[name] = w3.to_checksum_address(address) self._multicall = await self.get_contract_by_name("multicall3") @@ -60,24 +63,26 @@ async def _init_contract_addresses(self) -> None: try: cs_dir, cs_prefix = "ConstellationDirectory", "Constellation" - self.addresses |= { - f"{cs_prefix}.SuperNodeAccount": await self.call( - f"{cs_dir}.getSuperNodeAddress" - ), - f"{cs_prefix}.OperatorDistributor": await self.call( - f"{cs_dir}.getOperatorDistributorAddress" - ), - f"{cs_prefix}.Whitelist": await self.call( - f"{cs_dir}.getWhitelistAddress" - ), - f"{cs_prefix}.ETHVault": await self.call( - f"{cs_dir}.getWETHVaultAddress" - ), - f"{cs_prefix}.RPLVault": await self.call( - f"{cs_dir}.getRPLVaultAddress" - ), - "WETH": await self.call(f"{cs_dir}.getWETHAddress"), - } + self.addresses.update( + { + f"{cs_prefix}.SuperNodeAccount": await self.call( + f"{cs_dir}.getSuperNodeAddress" + ), + f"{cs_prefix}.OperatorDistributor": await self.call( + f"{cs_dir}.getOperatorDistributorAddress" + ), + f"{cs_prefix}.Whitelist": await self.call( + f"{cs_dir}.getWhitelistAddress" + ), + f"{cs_prefix}.ETHVault": await self.call( + f"{cs_dir}.getWETHVaultAddress" + ), + f"{cs_prefix}.RPLVault": await self.call( + f"{cs_dir}.getRPLVaultAddress" + ), + "WETH": await self.call(f"{cs_dir}.getWETHAddress"), + } + ) except NoAddressFound: log.warning("Failed to find address for Constellation contracts") @@ -98,14 +103,19 @@ def _decode_fn_output(fn, data: bytes) -> Any: if not outputs: return None types = [RocketPool._abi_type_str(o) for o in outputs] - decoded = eth_abi.decode(types, 
data) + decoded = abi.decode(types, data) return decoded[0] if len(decoded) == 1 else decoded + CallInput = AsyncContractFunction | tuple[AsyncContractFunction, bool] + @staticmethod - def _normalize_calls(calls, default_require_success): + def _normalize_calls( + calls: list[CallInput], default_require_success: bool + ) -> tuple[list[AsyncContractFunction], list[bool]]: """Normalize calls to (fn, allow_failure) pairs. Each call may be a plain AsyncContractFunction or an (fn, require_success) tuple.""" - fns, flags = [], [] + fns: list[AsyncContractFunction] = [] + flags: list[bool] = [] for call in calls: if isinstance(call, tuple): fn, req = call @@ -116,14 +126,18 @@ def _normalize_calls(calls, default_require_success): return fns, flags async def multicall( - self, calls, require_success=True, block: BlockIdentifier = "latest" - ) -> list: + self, + calls: list[CallInput], + require_success: bool = True, + block: BlockIdentifier = "latest", + ) -> list[Any]: """Multicall accepting AsyncContractFunction objects or (fn, require_success) tuples.""" fns, flags = self._normalize_calls(calls, require_success) encoded = [ (fn.address, af, fn._encode_transaction_data()) for fn, af in zip(fns, flags, strict=False) ] + assert self._multicall is not None results = await self._multicall.functions.aggregate3(encoded).call( block_identifier=block ) @@ -132,7 +146,7 @@ async def multicall( for i, (success, data) in enumerate(results) ] - async def get_address_by_name(self, name): + async def get_address_by_name(self, name: str) -> ChecksumAddress: if name in self.ADDRESS_CACHE: return self.ADDRESS_CACHE[name] if name in self.addresses: @@ -142,14 +156,16 @@ async def get_address_by_name(self, name): self.ADDRESS_CACHE[name] = address return address - async def uncached_get_address_by_name(self, name, block="latest"): + async def uncached_get_address_by_name( + self, name: str, block: BlockIdentifier = "latest" + ) -> ChecksumAddress: log.debug(f"Retrieving address for {name} 
Contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.address", name]) storage = await self.get_contract_by_name( "rocketStorage", historical=block != "latest" ) address = await storage.functions.getAddress(sha3).call(block_identifier=block) - if not w3.to_int(hexstr=address): + if address == ADDRESS_ZERO: raise NoAddressFound(f"No address found for {name} Contract") self.addresses[name] = address log.debug(f"Retrieved address for {name} Contract: {address}") @@ -196,25 +212,29 @@ async def get_protocol_version(self) -> tuple: version_string = await self.get_string("protocol.version") return tuple(map(int, version_string.split("."))) - async def get_abi_by_name(self, name): + async def get_abi_by_name(self, name) -> str: if name in self.ABI_CACHE: return self.ABI_CACHE[name] abi = await self.uncached_get_abi_by_name(name) self.ABI_CACHE[name] = abi return abi - async def uncached_get_abi_by_name(self, name): - log.debug(f"Retrieving abi for {name} Contract") + async def uncached_get_abi_by_name(self, name) -> str: + log.debug(f"Retrieving abi for {name} contract") sha3 = w3.solidity_keccak(["string", "string"], ["contract.abi", name]) storage = await self.get_contract_by_name("rocketStorage") compressed_string = await storage.functions.getString(sha3).call() if not compressed_string: - raise Exception(f"No abi found for {name} Contract") + raise Exception(f"No abi found for {name} contract") return decode_abi(compressed_string) async def assemble_contract( - self, name, address=None, historical=False, mainnet=False - ): + self, + name: str, + address: ChecksumAddress | None = None, + historical: bool = False, + mainnet: bool = False, + ) -> AsyncContract: cache_key = (name, address, historical, mainnet) if cache_key in self.CONTRACT_CACHE: return self.CONTRACT_CACHE[cache_key] @@ -238,27 +258,35 @@ async def assemble_contract( else: contract = w3.eth.contract(address=address, abi=abi) + contract = cast(AsyncContract, contract) 
self.CONTRACT_CACHE[cache_key] = contract return contract - def get_name_by_address(self, address): + def get_name_by_address(self, address: ChecksumAddress) -> str | None: return self.addresses.inverse.get(address, None) - async def get_contract_by_name(self, name, historical=False, mainnet=False): + async def get_contract_by_name( + self, name: str, historical: bool = False, mainnet: bool = False + ) -> AsyncContract: address = await self.get_address_by_name(name) return await self.assemble_contract( name, address, historical=historical, mainnet=mainnet ) - async def get_contract_by_address(self, address): + async def get_contract_by_address( + self, address: ChecksumAddress + ) -> AsyncContract | None: """ **WARNING**: only call after contract has been previously retrieved using its name """ - name = self.get_name_by_address(address) + if not (name := self.get_name_by_address(address)): + return None return await self.assemble_contract(name, address) - async def estimate_gas_for_call(self, path, *args, block="latest"): - log.debug(f"Estimating gas for {path} (block={block})") + async def estimate_gas_for_call( + self, path: str, *args, block: BlockIdentifier = "latest" + ) -> int: + log.debug(f"Estimating gas for {path} (block={block!r})") name, function = path.rsplit(".", 1) contract = await self.get_contract_by_name(name) return await contract.functions[function](*args).estimate_gas( @@ -266,8 +294,13 @@ async def estimate_gas_for_call(self, path, *args, block="latest"): ) async def get_function( - self, path, *args, historical=False, address=None, mainnet=False - ): + self, + path: str, + *args, + historical: bool = False, + address: ChecksumAddress | None = None, + mainnet: bool = False, + ) -> AsyncContractFunction: name, function = path.rsplit(".", 1) if not address: address = await self.get_address_by_name(name) @@ -280,32 +313,34 @@ async def get_function( async def call( self, - path, + path: str, *args, block: BlockIdentifier = "latest", - 
address=None, - mainnet=False, - ): - log.debug(f"Calling {path} (block={block})") + address: ChecksumAddress | None = None, + mainnet: bool = False, + ) -> Any: + log.debug(f"Calling {path} (block={block!r})") fn = await self.get_function( path, *args, historical=block != "latest", address=address, mainnet=mainnet ) return await fn.call(block_identifier=block) - async def get_annual_rpl_inflation(self): - inflation_per_interval = solidity.to_float( + async def get_annual_rpl_inflation(self) -> float: + inflation_per_interval: float = solidity.to_float( await self.call("rocketTokenRPL.getInflationIntervalRate") ) if not inflation_per_interval: return 0 - seconds_per_interval = await self.call( + seconds_per_interval: int = await self.call( "rocketTokenRPL.getInflationIntervalTime" ) intervals_per_year = solidity.years / seconds_per_interval return (inflation_per_interval**intervals_per_year) - 1 - async def get_percentage_rpl_swapped(self): - value = solidity.to_float(await self.call("rocketTokenRPL.totalSwappedRPL")) + async def get_percentage_rpl_swapped(self) -> float: + value: float = solidity.to_float( + await self.call("rocketTokenRPL.totalSwappedRPL") + ) percentage = (value / 18_000_000) * 100 return round(percentage, 2) diff --git a/rocketwatch/utils/status.py b/rocketwatch/utils/status.py index 9072a27a..9530d298 100644 --- a/rocketwatch/utils/status.py +++ b/rocketwatch/utils/status.py @@ -1,8 +1,12 @@ +from __future__ import annotations + from abc import abstractmethod +from typing import TYPE_CHECKING from discord.ext import commands -from rocketwatch import RocketWatch +if TYPE_CHECKING: + from rocketwatch.rocketwatch import RocketWatch from utils.embeds import Embed diff --git a/rocketwatch/utils/views.py b/rocketwatch/utils/views.py index 02c8607e..2d5b6730 100644 --- a/rocketwatch/utils/views.py +++ b/rocketwatch/utils/views.py @@ -65,7 +65,7 @@ class JumpToModal(ui.Modal, title="Jump To Position"): def __init__(self, view: "PageView"): 
super().__init__() self.view = view - self.position_field = ui.TextInput( + self.position_field: ui.TextInput[PageView.JumpToModal] = ui.TextInput( label="Position", placeholder="Enter position to jump to", required=True ) self.add_item(self.position_field) diff --git a/rocketwatch/utils/visibility.py b/rocketwatch/utils/visibility.py index 2e3e6595..a891bdc1 100644 --- a/rocketwatch/utils/visibility.py +++ b/rocketwatch/utils/visibility.py @@ -4,7 +4,7 @@ def is_hidden(interaction: Interaction): - channel_name: str = interaction.channel.name + channel_name = getattr(interaction.channel, "name", None) or "" for allowed_channel in ["random", "rocket-watch", "trading"]: if allowed_channel in channel_name: return False From 80a6e60424eb0ae3d3368ed3ece596f976404594 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 11:15:51 +0000 Subject: [PATCH 236/279] update UV action version --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 6182775f..58f757ea 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -24,6 +24,6 @@ jobs: continue-on-error: true steps: - uses: actions/checkout@v6 - - uses: astral-sh/setup-uv@v6 + - uses: astral-sh/setup-uv@v7 - run: uv sync --extra dev - run: uv run mypy rocketwatch/ From 07c4ccfbf05a0313bfd78d9fddd3fc1df4ce0c6e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 12:22:04 +0000 Subject: [PATCH 237/279] remove chat_summary --- pyproject.toml | 1 - rocketwatch/config.toml.sample | 1 - .../plugins/chat_summary/chat_summary.py | 209 ------------------ tests/test_cfg.py | 1 - 4 files changed, 212 deletions(-) delete mode 100644 rocketwatch/plugins/chat_summary/chat_summary.py diff --git a/pyproject.toml b/pyproject.toml index 61c24083..d8592199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,6 @@ requires-python = 
">=3.12" dependencies = [ "aiohttp==3.13.3", "aiocache==0.12.3", - "anthropic==0.84.0", "anyascii==0.3.3", "beautifulsoup4==4.14.3", "bidict==0.23.1", diff --git a/rocketwatch/config.toml.sample b/rocketwatch/config.toml.sample index d8a2800b..a70271cc 100644 --- a/rocketwatch/config.toml.sample +++ b/rocketwatch/config.toml.sample @@ -85,4 +85,3 @@ mev_hashes = [] [other.secrets] wakatime = "" cronitor = "" -anthropic = "" diff --git a/rocketwatch/plugins/chat_summary/chat_summary.py b/rocketwatch/plugins/chat_summary/chat_summary.py deleted file mode 100644 index 0d2a33f7..00000000 --- a/rocketwatch/plugins/chat_summary/chat_summary.py +++ /dev/null @@ -1,209 +0,0 @@ -import logging -import re -from datetime import UTC, datetime, timedelta -from io import BytesIO - -import anthropic -import pytz -import tiktoken -from discord import DeletedReferencedMessage, File, Interaction -from discord.app_commands import command -from discord.channel import TextChannel -from discord.ext import commands -from discord.ext.commands import is_owner - -from rocketwatch import RocketWatch -from utils.config import cfg -from utils.embeds import Embed - -log = logging.getLogger("rocketwatch.chat_summary") - - -class ChatSummary(commands.Cog): - def __init__(self, bot: RocketWatch): - self.bot = bot - self.client = anthropic.AsyncAnthropic(api_key=cfg.other.secrets.anthropic) - # log all possible engines - self.tokenizer = tiktoken.encoding_for_model("gpt-4-turbo") - - @classmethod - def message_to_text(cls, message, index): - text = ( - f"{message.author.global_name or message.author.name}" - f" on {message.created_at.strftime('%a at %H:%M')}:\n {message.content}" - ) - - # if there is an image attached, add it to the text as a note - metadata = [] - if message.attachments: - metadata.append(f"{len(message.attachments)} attachments") - if message.embeds: - metadata.append(f"{len(message.embeds)} embeds") - # replies and make sure the reference is not deleted - if ( - 
message.reference - and not isinstance(message.reference.resolved, DeletedReferencedMessage) - and message.reference.resolved - ): - # show name of referenced message author - # and the first 10 characters of the referenced message - metadata.append( - f'reply to "{message.reference.resolved.content[:32]}…" from {message.reference.resolved.author.name}' - ) - if metadata: - text += f" <{', '.join(metadata)}>\n" - # replace all <@[0-9]+> with the name of the user - for mention in message.mentions: - text = text.replace(f"<@{mention.id}>", f"@{mention.name}") - # remove all emote ids, i.e change <:emote_name:emote_id> to <:emote_name> using regex - text = re.sub(r":[0-9]+>", ":>", text) - return text - - @command() - @is_owner() - async def summarize_chat(self, interaction: Interaction): - await interaction.response.defer(ephemeral=True) - last_ts = await self.bot.db["last_summary"].find_one( - {"channel_id": interaction.channel.id} - ) - # ratelimit - if last_ts and ( - datetime.now(UTC) - last_ts["timestamp"].replace(tzinfo=pytz.utc) - ) < timedelta(hours=6): - await interaction.followup.send( - "You can only summarize once every 6 hours.", ephemeral=True - ) - return - if interaction.channel.id not in [405163713063288832]: - await interaction.followup.send("You can't summarize here.", ephemeral=True) - return - msg = await interaction.channel.send("Summarizing chat…") - last_ts = ( - last_ts["timestamp"].replace(tzinfo=pytz.utc) - if last_ts and "timestamp" in last_ts - else datetime.now(UTC) - timedelta(days=365) - ) - prompt = ( - "Task Description:\n" - "I need a summary of the entire chat log. This summary should be presented in the form of a bullet list.\n\n" - "Format and Length Requirements:\n" - "- The bullet list must be kept short and concise, but the list has to cover the entire chat log." 
- " Make at most around 5 bullet points.\n" - "- Each bullet point should represent a distinct topic discussed in the chat log.\n\n" - "Content Constraints:\n" - "- Limit each topic to a single bullet point in the list.\n" - "- Omit any topics that are uninteresting or not crucial to the overall understanding of the chat log.\n" - "- If any content in the chat log goes against guidelines, refer to it in a safe and compliant manner," - " without detailing the specific content.\n\n" - "Response Instruction:\n" - "- Respond only with the bullet list summary as specified." - " Do not include any additional commentary or response outside of this list.\n\n" - "Truncated Example Output:\n" - "----------------\n" - "- Discussions between invis, langers, knoshua and more about the meaning of life.\n" - "- The current status of the war in europe was discussed.\n" - "- Patches announced that he has been taking a vacation in Switzerland and shared some images of his skiing.}\n" - "----------------\n\n" - "Please begin the task now." - ) - response, prompt, msgs = await self.prompt_model( - interaction.channel, prompt, last_ts - ) - if not response: - await msg.delete() - await interaction.followup.send(content="Not enough messages to summarize.") - return - es = [Embed()] - es[ - 0 - ].title = f"Chat Summarization of {msgs} messages since {last_ts.strftime('%Y-%m-%d %H:%M')}" - res = response.content[-1].text - # split content in multiple embeds if it is too long. 
limit for description is 4096 - while len(res): - if len(res) > 4096: - # find last newline before 4096 characters - idx = res[:4096].rfind("\n") - # if there is no newline, just split at 4096 - if idx == -1: - idx = 4096 - # add embed - es[-1].description = res[:idx] - es[-1].set_footer(text="") - # create new embed - es.append(Embed()) - res = res[idx:] - else: - es[-1].description = res - res = "" - # completion tokens are 3x more expensive - token_usage = response.usage.input_tokens + (response.usage.output_tokens * 5) - es[-1].set_footer( - text=( - f"Request cost: ${token_usage / 1000000 * 3:.2f}" - f" | Tokens: {response.usage.input_tokens + response.usage.output_tokens}" - " | /donate if you like this command" - ) - ) - # attach the prompt as a file - f = BytesIO(prompt.encode("utf-8")) - f.name = "prompt._log" - f = File( - f, - filename=f"prompt_log_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}._log", - ) - # send message in the channel - await interaction.followup.send("done", ephemeral=True) - await msg.edit(embeds=es, attachments=[f]) - # save the timestamp of the last summary - await self.bot.db["last_summary"].update_one( - {"channel_id": interaction.channel.id}, - {"$set": {"timestamp": datetime.now(UTC)}}, - upsert=True, - ) - - # a function that generates the prompt for the model by taking an array of messages, a prefix and a suffix - def generate_prompt(self, messages, prefix, suffix): - messages.sort(key=lambda x: x.created_at) - prompt = "\n".join( - [self.message_to_text(message, i) for i, message in enumerate(messages)] - ).replace("\n\n", "\n") - return f"{prefix}\n\n{prompt}\n\n{suffix}" - - async def prompt_model( - self, channel: TextChannel, prompt: str, cut_off_ts: int - ) -> tuple[anthropic.types.Message, str, int]: - messages = [ - message - async for message in channel.history(limit=4096) - if message.content != "" - ] - messages = [ - message for message in messages if message.author.id != self.bot.user.id - ] - messages = [message 
for message in messages if message.created_at > cut_off_ts] - if len(messages) < 320: - return None, None, None - prefix = "The following is a chat log. Everything prefixed with `>` is a quote." - log.info( - f"Prompt len: {len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt)))}" - ) - while ( - len(self.tokenizer.encode(self.generate_prompt(messages, prefix, prompt))) - > 100000 - 4096 - ): - # remove the oldest message - messages.pop(0) - prompt = self.generate_prompt(messages, prefix, prompt) - # get all models - response = await self.client.messages.create( - model="claude-3-sonnet-20240229", # Update this to the desired model - max_tokens=4096, - messages=[{"role": "user", "content": prompt}], - ) - # find all {message:index} in response["choices"][0]["message"]["content"] - log.debug(response.content[-1].text) - return response, prompt, len(messages) - - -async def setup(bot): - await bot.add_cog(ChatSummary(bot)) diff --git a/tests/test_cfg.py b/tests/test_cfg.py index be4d6a56..b8cecd45 100644 --- a/tests/test_cfg.py +++ b/tests/test_cfg.py @@ -137,7 +137,6 @@ def test_all_default_empty(self): s = SecretsConfig() assert s.wakatime == "" assert s.cronitor == "" - assert s.anthropic == "" def test_partial_override(self): s = SecretsConfig(wakatime="my-key") From 9aede3bddd016c9b18adb44215d66b4b90b8c034 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 12:24:07 +0000 Subject: [PATCH 238/279] fix typing in milestones.py --- rocketwatch/plugins/metrics/metrics.py | 8 ++--- .../plugins/milestones/milestones.json | 20 ------------ rocketwatch/plugins/milestones/milestones.py | 31 ++++++++++++------- .../pinned_messages/pinned_messages.py | 8 ++--- rocketwatch/utils/command_tree.py | 8 ++--- rocketwatch/utils/config.py | 1 - rocketwatch/utils/event.py | 6 ++-- 7 files changed, 35 insertions(+), 47 deletions(-) diff --git a/rocketwatch/plugins/metrics/metrics.py 
b/rocketwatch/plugins/metrics/metrics.py index f2e2d1f9..5c6d3558 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from io import BytesIO from bson import SON @@ -30,7 +30,7 @@ async def metrics(self, interaction: Interaction): e = Embed(title="Metrics from the last 7 days") desc = "```\n" # last 7 days - start = datetime.utcnow() - timedelta(days=7) + start = datetime.now(UTC) - timedelta(days=7) # get the total number of processed events from the event_queue in the last 7 days total_events_processed = await self.bot.db.event_queue.count_documents( @@ -84,7 +84,7 @@ async def metrics(self, interaction: Interaction): ] ) ).to_list(length=5) - desc += "Top 5 Commands based on usage:\n" + desc += "Command Usage:\n" for command in most_used_commands: desc += f" - {command['_id']}: {command['count']}\n" @@ -98,7 +98,7 @@ async def metrics(self, interaction: Interaction): ] ) ).to_list(length=5) - desc += "\nTop 5 Channels based on commands handled:\n" + desc += "\nCommand Usage By Channel:\n" for channel in top_channels: desc += f" - {channel['_id']['name']}: {channel['count']}\n" e.description = desc + "```" diff --git a/rocketwatch/plugins/milestones/milestones.json b/rocketwatch/plugins/milestones/milestones.json index f605cf77..3b3a3c55 100644 --- a/rocketwatch/plugins/milestones/milestones.json +++ b/rocketwatch/plugins/milestones/milestones.json @@ -9,16 +9,6 @@ "min": 10000, "step_size": 100000 }, - { - "id": "milestone_max_deposit_size", - "function": "call", - "args": [ - "rocketDepositPool.getMaximumDepositAmount" - ], - "formatter": "to_float", - "min": 60000, - "step_size": 1000 - }, { "id": "milestone_reth_supply", "function": "call", @@ -29,16 +19,6 @@ "min": 1000, "step_size": 5000 }, - { - "id": "milestone_staking_minipools", - "function": "call", - "args": [ - 
"rocketMinipoolManager.getMinipoolCount" - ], - "formatter": "", - "min": 15, - "step_size": 250 - }, { "id": "milestone_rpl_swapped", "function": "get_percentage_rpl_swapped", diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index cc94325b..4ffef41c 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -1,7 +1,7 @@ import json import logging -from web3.datastructures import MutableAttributeDict as aDict +from pydantic import BaseModel from rocketwatch import RocketWatch from utils import solidity @@ -12,36 +12,45 @@ log = logging.getLogger("rocketwatch.milestones") +class MilestoneConfig(BaseModel): + id: str + function: str + args: list[str] + formatter: str + min: int + step_size: int + + class Milestones(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - self.collection = bot.db.milestones + self._reset() + + def _reset(self) -> None: + self.collection = self.bot.db.milestones self.state = "OK" with open("./plugins/milestones/milestones.json") as f: - self.milestones = json.load(f) + self.milestones = [MilestoneConfig(**m) for m in json.load(f)] async def _get_new_events(self) -> list[Event]: if self.state == "RUNNING": log.error( "Milestones plugin was interrupted while running. Re-initializing..." 
) - self.__init__(self.bot) + self._reset() self.state = "RUNNING" result = await self.check_for_new_events() self.state = "OK" return result - # noinspection PyTypeChecker async def check_for_new_events(self): log.info("Checking Milestones") payload = [] for milestone in self.milestones: - milestone = aDict(milestone) - - state = await self.collection.find_one({"_id": milestone["id"]}) + state = await self.collection.find_one({"_id": milestone.id}) value = await getattr(rp, milestone.function)(*milestone.args) if milestone.formatter: @@ -60,7 +69,7 @@ async def check_for_new_events(self): f"First time we have processed Milestones for milestone {milestone.id}. Adding it to the Database." ) await self.collection.insert_one( - {"_id": milestone["id"], "current_goal": latest_goal} + {"_id": milestone.id, "current_goal": latest_goal} ) previous_milestone = milestone.min if previous_milestone < latest_goal: @@ -68,7 +77,7 @@ async def check_for_new_events(self): f"Goal for milestone {milestone.id} has increased. Triggering Milestone!" 
) embed = await assemble( - aDict({"event_name": milestone.id, "result_value": value}) + {"event_name": milestone.id, "result_value": value} ) payload.append( Event( @@ -81,7 +90,7 @@ async def check_for_new_events(self): ) # update the current goal in collection await self.collection.update_one( - {"_id": milestone["id"]}, {"$set": {"current_goal": latest_goal}} + {"_id": milestone.id}, {"$set": {"current_goal": latest_goal}} ) log.debug("Finished Checking Milestones") diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 205037a8..6e6d25f8 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from discord import Interaction from discord.app_commands import command, guilds @@ -33,7 +33,7 @@ async def run_loop(self): for message in messages: # if it's older than 6 hours and not disabled, mark as disabled if ( - message["created_at"] + timedelta(hours=6) < datetime.utcnow() + message["created_at"] + timedelta(hours=6) < datetime.now(UTC) and not message["disabled"] ): await self.bot.db.pinned_messages.update_one( @@ -107,7 +107,7 @@ async def pin( "disabled": False, "cleaned_up": False, "message_id": None, - "created_at": datetime.utcnow(), + "created_at": datetime.now(UTC), } }, ) @@ -123,7 +123,7 @@ async def pin( "content": description, "disabled": False, "cleaned_up": False, - "created_at": datetime.utcnow(), + "created_at": datetime.now(UTC), } ) # rest is done by the run_loop diff --git a/rocketwatch/utils/command_tree.py b/rocketwatch/utils/command_tree.py index 52a6fb2f..305ee6b0 100644 --- a/rocketwatch/utils/command_tree.py +++ b/rocketwatch/utils/command_tree.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import UTC, datetime from typing import 
TYPE_CHECKING from discord import Interaction @@ -31,7 +31,7 @@ async def _call(self, interaction: Interaction["RocketWatch"]) -> None: return cmd_name = interaction.command.name if interaction.command else "unknown" - timestamp = datetime.utcnow() + timestamp = datetime.now(UTC) channel_name = _channel_name(interaction) @@ -82,7 +82,7 @@ async def _call(self, interaction: Interaction["RocketWatch"]) -> None: { "$set": { "status": "error", - "took": (datetime.utcnow() - timestamp).total_seconds(), + "took": (datetime.now(UTC) - timestamp).total_seconds(), "error": str(error), } }, @@ -102,7 +102,7 @@ async def _call(self, interaction: Interaction["RocketWatch"]) -> None: { "$set": { "status": "completed", - "took": (datetime.utcnow() - timestamp).total_seconds(), + "took": (datetime.now(UTC) - timestamp).total_seconds(), } }, ) diff --git a/rocketwatch/utils/config.py b/rocketwatch/utils/config.py index fa3f76b1..1246a859 100644 --- a/rocketwatch/utils/config.py +++ b/rocketwatch/utils/config.py @@ -78,7 +78,6 @@ class EventsConfig(BaseModel): class SecretsConfig(BaseModel): wakatime: str = "" cronitor: str = "" - anthropic: str = "" class OtherConfig(BaseModel): diff --git a/rocketwatch/utils/event.py b/rocketwatch/utils/event.py index 16e1986d..2b441df7 100644 --- a/rocketwatch/utils/event.py +++ b/rocketwatch/utils/event.py @@ -41,12 +41,12 @@ def __init__(self, bot: RocketWatch, rate_limit=timedelta(seconds=5)): self.bot = bot self.rate_limit = rate_limit self.lookback_distance: int = cfg.events.lookback_distance - self.last_served_block: int = cfg.events.genesis - 1 - self._pending_block: int = self.last_served_block + self.last_served_block = BlockNumber(cfg.events.genesis - 1) + self._pending_block = self.last_served_block self._last_run = datetime.now() - rate_limit def start_tracking(self, block: BlockNumber) -> None: - self.last_served_block = block - 1 + self.last_served_block = BlockNumber(block - 1) async def get_new_events(self) -> list[Event]: now = 
datetime.now() From c2ef88e711b4c55b91661c16f28c4ed3928cef1f Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 12:31:27 +0000 Subject: [PATCH 239/279] fix typing in snapshot.py --- rocketwatch/plugins/snapshot/snapshot.py | 26 +++++++++++++----------- rocketwatch/utils/block_time.py | 9 ++++---- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/rocketwatch/plugins/snapshot/snapshot.py b/rocketwatch/plugins/snapshot/snapshot.py index 032e07da..2e78c60c 100644 --- a/rocketwatch/plugins/snapshot/snapshot.py +++ b/rocketwatch/plugins/snapshot/snapshot.py @@ -289,7 +289,7 @@ def create_reached_quorum_event(self, block_number: BlockNumber) -> Event: ) async def create_end_event(self) -> Event: - max_for, max_against = 0, 0 + max_for, max_against = 0.0, 0.0 for choice, score in zip(self.choices, self.scores, strict=False): if "against" in choice.lower(): max_against = max(max_against, score) @@ -311,20 +311,20 @@ async def create_end_event(self) -> Event: image=self.create_image(include_title=True), ) + type SingleChoice = int + type MultiChoice = list[int] + # weighted votes use strings as keys for some reason + type WeightedChoice = dict[str, int] + type Choice = SingleChoice | MultiChoice | WeightedChoice + @dataclass(frozen=True, slots=True) class Vote: - SingleChoice = int - MultiChoice = list[SingleChoice] - # weighted votes use strings as keys for some reason - WeightedChoice = dict[str, int] - Choice = SingleChoice | MultiChoice | WeightedChoice - proposal: "Snapshot.Proposal" id: str voter: ChecksumAddress created: int vp: float - choice: Choice + choice: "Snapshot.Choice" reason: str def pretty_print(self) -> str | None: @@ -339,11 +339,11 @@ def pretty_print(self) -> str | None: log.error(f"Unknown vote type: {raw_choice}") return None - def _label_choice(self, raw_vote: SingleChoice) -> str: + def _label_choice(self, raw_vote: "Snapshot.SingleChoice") -> str: # vote choice represented as 1-based index 
return self.proposal.choices[raw_vote - 1] - def _format_single_choice(self, choice: SingleChoice): + def _format_single_choice(self, choice: "Snapshot.SingleChoice"): label = self._label_choice(choice) match label.lower(): case "for": @@ -354,13 +354,13 @@ def _format_single_choice(self, choice: SingleChoice): label = "⚪ Abstain" return f"`{label}`" - def _format_multiple_choice(self, choice: MultiChoice) -> str: + def _format_multiple_choice(self, choice: "Snapshot.MultiChoice") -> str: labels = [self._label_choice(c) for c in choice] if len(labels) == 1: return f"`{labels[0]}`" return "**" + "\n".join([f"- {c}" for c in labels]) + "**" - def _format_weighted_choice(self, choice: WeightedChoice) -> str: + def _format_weighted_choice(self, choice: "Snapshot.WeightedChoice") -> str: labels = {self._label_choice(int(c)): w for c, w in choice.items()} total_weight = sum(labels.values()) choice_perc = [ @@ -395,6 +395,8 @@ async def create_event( embed.description = separator.join([f"{voter} voted", vote_fmt]) elif self.choice != prev_vote.choice: prev_vote_fmt = prev_vote.pretty_print() + if prev_vote_fmt is None: + return None parts = [ f"{voter} changed their vote from", prev_vote_fmt, diff --git a/rocketwatch/utils/block_time.py b/rocketwatch/utils/block_time.py index 5e9b7ccd..6e78c38b 100644 --- a/rocketwatch/utils/block_time.py +++ b/rocketwatch/utils/block_time.py @@ -2,6 +2,7 @@ import math from aiocache import cached +from eth_typing import BlockNumber from web3.types import BlockData from utils.shared_w3 import w3 @@ -15,11 +16,11 @@ async def block_to_ts(block_number: int) -> int: return block.get("timestamp", 0) -async def ts_to_block(target_ts: int) -> int: +async def ts_to_block(target_ts: int) -> BlockNumber: log.debug(f"Looking for block at timestamp {target_ts}") if target_ts < await block_to_ts(1): # genesis block doesn't have a timestamp - return 0 + return BlockNumber(0) lo = 1 hi = await w3.eth.get_block_number() - 1 @@ -35,7 +36,7 @@ async def 
ts_to_block(target_ts: int) -> int: hi = mid - 1 elif ts == target_ts: log.debug(f"Exact match: block {mid} @ {ts}") - return mid + return BlockNumber(mid) # l == r, highest block number below target block = hi @@ -45,4 +46,4 @@ async def ts_to_block(target_ts: int) -> int: block += 1 log.debug(f"Closest match: block {block} @ {await block_to_ts(block)}") - return block + return BlockNumber(block) From 82afd1216c0d9c19e8242748e2d91a3fe34ec886 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 12:34:44 +0000 Subject: [PATCH 240/279] fix typing in pinned_messages --- rocketwatch/plugins/pinned_messages/pinned_messages.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 6e6d25f8..6436e100 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -2,6 +2,7 @@ from datetime import UTC, datetime, timedelta from discord import Interaction +from discord.abc import Messageable from discord.app_commands import command, guilds from discord.ext import commands, tasks from discord.ext.commands import is_owner @@ -44,7 +45,9 @@ async def run_loop(self): # check if it's marked as disabled but not cleaned_up if message["disabled"] and not message["cleaned_up"]: # get channel - channel = self.bot.get_channel(message["channel_id"]) + channel = await self.bot.get_or_fetch_channel(message["channel_id"]) + if not isinstance(channel, Messageable): + continue # get message msg = await channel.fetch_message(message["message_id"]) # delete message @@ -56,6 +59,8 @@ async def run_loop(self): elif not message["disabled"]: # delete and resend message channel = self.bot.get_channel(message["channel_id"]) + if not isinstance(channel, Messageable): + continue # check if we have message sent already and if its the latest message in the 
channel if message.get("message_id"): messages = [ From ebe878f54a7cdae1b22dc39a1c4145296c12cfaa Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 12:51:31 +0000 Subject: [PATCH 241/279] fix typing in debug.py --- rocketwatch/plugins/debug/debug.py | 43 ++++++++++++++++++++---------- rocketwatch/rocketwatch.py | 12 ++++----- 2 files changed, 34 insertions(+), 21 deletions(-) diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 270398ad..3c4f1e66 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -2,10 +2,13 @@ import random import time from datetime import UTC +from typing import cast from discord import Interaction +from discord.abc import Messageable from discord.app_commands import command, guilds from discord.ext.commands import Cog, is_owner +from eth_typing import HexStr from rocketwatch import RocketWatch from utils.config import cfg @@ -40,10 +43,8 @@ async def get_members_of_role( """Get members of a role""" await interaction.response.defer(ephemeral=True) try: - guild = self.bot.get_guild(int(guild_id)) - log.debug(guild) - role = guild.get_role(int(role_id)) - log.debug(role) + guild = await self.bot.get_or_fetch_guild(int(guild_id)) + role = await self.bot.get_or_fetch_role(int(guild_id), int(role_id)) # print name + identifier and id of each member members = [ f"{member.name}#{member.discriminator}, ({member.id})" @@ -68,6 +69,7 @@ async def get_roles(self, interaction: Interaction, guild_id: str): await interaction.response.defer(ephemeral=True) try: guild = self.bot.get_guild(int(guild_id)) + assert guild is not None log.debug(guild) # print name + identifier and id of each member roles = [f"{role.name}, ({role.id})" for role in guild.roles] @@ -88,6 +90,7 @@ async def delete_msg(self, interaction: Interaction, message_url: str): await interaction.response.defer(ephemeral=True) channel_id, message_id = 
message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) + assert isinstance(channel, Messageable) msg = await channel.fetch_message(int(message_id)) await msg.delete() await interaction.followup.send(content="Done") @@ -101,6 +104,7 @@ async def edit_embed( await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) + assert isinstance(channel, Messageable) msg = await channel.fetch_message(int(message_id)) embed = msg.embeds[0] embed.description = new_description @@ -115,7 +119,7 @@ async def debug_transaction(self, interaction: Interaction, tnx_hash: str): Try to return the revert reason of a transaction. """ await interaction.response.defer(ephemeral=True) - transaction_receipt = await w3.eth.get_transaction(tnx_hash) + transaction_receipt = await w3.eth.get_transaction(HexStr(tnx_hash)) if revert_reason := await rp.get_revert_reason(transaction_receipt): await interaction.followup.send( content=f"```Revert reason: {revert_reason}```" @@ -153,24 +157,26 @@ async def sync_commands(self, interaction: Interaction): @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def talk(self, interaction: Interaction, channel: str, message: str): + async def talk(self, interaction: Interaction, channel_id: str, message: str): """ Send a message to a channel. """ await interaction.response.defer(ephemeral=True) - channel = await self.bot.get_or_fetch_channel(int(channel)) + channel = await self.bot.get_or_fetch_channel(int(channel_id)) + assert isinstance(channel, Messageable) await channel.send(message) await interaction.followup.send(content="Done") @command() @guilds(cfg.discord.owner.server_id) @is_owner() - async def announce(self, interaction: Interaction, channel: str, message: str): + async def announce(self, interaction: Interaction, channel_id: str, message: str): """ Send a message to a channel. 
""" await interaction.response.defer(ephemeral=True) - channel = await self.bot.get_or_fetch_channel(int(channel)) + channel = await self.bot.get_or_fetch_channel(int(channel_id)) + assert isinstance(channel, Messageable) e = Embed(title="Announcement", description=message) e.add_field( name="Timestamp", @@ -188,10 +194,12 @@ async def restore_support_template( await interaction.response.defer(ephemeral=True) channel_id, message_id = message_url.split("/")[-2:] channel = await self.bot.get_or_fetch_channel(int(channel_id)) + assert isinstance(channel, Messageable) msg = await channel.fetch_message(int(message_id)) template_embed = msg.embeds[0] template_title = template_embed.title + assert template_embed.description is not None template_description = "\n".join(template_embed.description.splitlines()[:-2]) import re @@ -201,6 +209,12 @@ async def restore_support_template( match = re.search( r"Last Edited by <@(?P[0-9]+)> [0-9]+):R>", edit_line ) + if match is None: + await interaction.followup.send( + "Failed to restore support template. The provided message doesn't match the expected format." 
+ ) + return + user_id = int(match.group("user")) ts = int(match.group("ts")) @@ -236,17 +250,17 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): await interaction.response.defer(ephemeral=True) - events_plugin: Events = self.bot.cogs["Events"] + events_plugin = cast(Events, self.bot.cogs["Events"]) filtered_events = [] - for event_log in (await w3.eth.get_transaction_receipt(tx_hash)).logs: + for event_log in (await w3.eth.get_transaction_receipt(HexStr(tx_hash))).logs: if ("topics" in event_log) and ( event_log["topics"][0].hex() in events_plugin.topic_map ): filtered_events.append(event_log) channels = cfg.discord.channels - events, _ = events_plugin.process_events(filtered_events) + events, _ = await events_plugin.process_events(filtered_events) for event in events: channel_candidates = [ value @@ -265,8 +279,9 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): "block_number": event.block_number, "score": event.get_score(), "time_seen": datetime.now(), - "attachment": pickle.dumps(event.attachment) - if event.attachment + "image": pickle.dumps(event.image) if event.image else None, + "thumbnail": pickle.dumps(event.thumbnail) + if event.thumbnail else None, "channel_id": channel_id, "message_id": None, diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 77b9fbac..0ba9c72e 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -2,13 +2,7 @@ import traceback from pathlib import Path -from discord import ( - Guild, - Intents, - Interaction, - Thread, - User, -) +from discord import Guild, Intents, Interaction, Role, Thread, User from discord.abc import GuildChannel, Messageable, PrivateChannel from discord.ext.commands import Bot from pymongo import AsyncMongoClient @@ -107,6 +101,10 @@ async def get_or_fetch_channel( async def get_or_fetch_user(self, user_id: int) -> User: return self.get_user(user_id) or await self.fetch_user(user_id) + async def 
get_or_fetch_role(self, guild_id: int, role_id: int) -> Role: + guild = await self.get_or_fetch_guild(guild_id) + return guild.get_role(role_id) or await guild.fetch_role(role_id) + async def report_error( self, exception: Exception, interaction: Interaction | None = None, *args ) -> None: From 77797fd46168c1efd15261594860c44c70790054 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 13:30:46 +0000 Subject: [PATCH 242/279] add command count by user --- rocketwatch/plugins/metrics/metrics.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 5c6d3558..0cf3dd67 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -88,6 +88,19 @@ async def metrics(self, interaction: Interaction): for command in most_used_commands: desc += f" - {command['_id']}: {command['count']}\n" + top_users = await ( + await self.collection.aggregate( + [ + {"$match": {"timestamp": {"$gte": start}}}, + {"$group": {"_id": "$user", "count": {"$sum": 1}}}, + {"$sort": {"count": -1}}, + ] + ) + ).to_list(length=5) + desc += "\nCommand Count By User:\n" + for user in top_users: + desc += f" - {user['_id']['name']}: {user['count']}\n" + # get the top 5 channels of the last 7 days top_channels = await ( await self.collection.aggregate( @@ -98,7 +111,7 @@ async def metrics(self, interaction: Interaction): ] ) ).to_list(length=5) - desc += "\nCommand Usage By Channel:\n" + desc += "\nCommand Count By Channel:\n" for channel in top_channels: desc += f" - {channel['_id']['name']}: {channel['count']}\n" e.description = desc + "```" From 1513883eae183e573e098d0fdd8f57d587dc7016 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 15:46:32 +0000 Subject: [PATCH 243/279] fix typing in smaller files --- .gitignore | 8 +-- rocketwatch/plugins/about/about.py | 2 +- 
rocketwatch/plugins/activity/activity.py | 3 +- .../plugins/beacon_events/beacon_events.py | 72 +++++++++++-------- rocketwatch/plugins/call/call.py | 23 +++--- .../plugins/chicken_soup/chicken_soup.py | 5 +- rocketwatch/plugins/collateral/collateral.py | 21 +++--- .../plugins/commissions/commissions.py | 8 ++- rocketwatch/plugins/dao/dao.py | 3 +- .../plugins/db_upkeep_task/db_upkeep_task.py | 4 +- rocketwatch/plugins/debug/debug.py | 4 +- rocketwatch/plugins/forum/forum.py | 2 +- .../minipool_distribution.py | 23 +++--- .../pinned_messages/pinned_messages.py | 2 +- rocketwatch/plugins/proposals/proposals.py | 18 ++--- rocketwatch/plugins/random/random.py | 40 +++++++---- rocketwatch/plugins/rewards/rewards.py | 15 ++-- rocketwatch/plugins/rocksolid/rocksolid.py | 12 ++-- .../plugins/scam_warning/scam_warning.py | 4 ++ .../user_distribute/user_distribute.py | 10 +-- .../validator_states/validator_states.py | 8 ++- rocketwatch/plugins/wall/wall.py | 24 ++++--- rocketwatch/utils/event_logs.py | 4 +- 23 files changed, 184 insertions(+), 131 deletions(-) diff --git a/.gitignore b/.gitignore index bb4570d1..b3e96b42 100644 --- a/.gitignore +++ b/.gitignore @@ -109,11 +109,9 @@ venv.bak/ .dmypy.json dmypy.json -# Pyre type checker -.pyre/ - -# Pycharm project stuff +# IDE files .idea/ +.vscode # state state.db @@ -124,4 +122,4 @@ mongodb/ *.sh .claude -uv.lock \ No newline at end of file +uv.lock diff --git a/rocketwatch/plugins/about/about.py b/rocketwatch/plugins/about/about.py index 95fcbd8d..793ea759 100644 --- a/rocketwatch/plugins/about/about.py +++ b/rocketwatch/plugins/about/about.py @@ -58,7 +58,7 @@ async def about(self, interaction: Interaction): e.add_field( name="Bot Statistics", value=f"{len(g)} guilds joined and " - f"{humanize.intcomma(sum(guild.member_count for guild in g))} members reached!", + f"{humanize.intcomma(sum(guild.member_count or 0 for guild in g))} members reached!", inline=False, ) diff --git a/rocketwatch/plugins/activity/activity.py 
b/rocketwatch/plugins/activity/activity.py index 3fca0a15..c8eea94a 100644 --- a/rocketwatch/plugins/activity/activity.py +++ b/rocketwatch/plugins/activity/activity.py @@ -43,7 +43,8 @@ async def before_loop(self): await self.bot.wait_until_ready() @task.error - async def on_error(self, err: Exception): + async def on_error(self, err: BaseException): + assert isinstance(err, Exception) await self.bot.report_error(err) diff --git a/rocketwatch/plugins/beacon_events/beacon_events.py b/rocketwatch/plugins/beacon_events/beacon_events.py index 64ca32ac..f02ddca4 100644 --- a/rocketwatch/plugins/beacon_events/beacon_events.py +++ b/rocketwatch/plugins/beacon_events/beacon_events.py @@ -1,5 +1,6 @@ import logging -from typing import cast +from collections.abc import Mapping +from typing import Any, cast import aiohttp import eth_utils @@ -27,7 +28,7 @@ def __init__(self, bot: RocketWatch): self.finality_delay_threshold = 3 async def _get_new_events(self) -> list[Event]: - from_block = self.last_served_block + 1 - self.lookback_distance + from_block = BlockNumber(self.last_served_block + 1 - self.lookback_distance) return await self.get_past_events(from_block, self._pending_block) async def get_past_events( @@ -35,10 +36,14 @@ async def get_past_events( ) -> list[Event]: from_slot = max( 0, - date_to_beacon_block((await w3.eth.get_block(from_block - 1)).timestamp) + date_to_beacon_block( + (await w3.eth.get_block(from_block - 1)).get("timestamp", 0) + ) + 1, ) - to_slot = date_to_beacon_block((await w3.eth.get_block(to_block)).timestamp) + to_slot = date_to_beacon_block( + (await w3.eth.get_block(to_block)).get("timestamp", 0) + ) log.info( f"Checking for new beacon chain events in slot range [{from_slot}, {to_slot}]" ) @@ -82,7 +87,7 @@ async def _get_events_for_slot( async def _get_slashings(self, beacon_block: dict) -> list[Event]: slot = int(beacon_block["slot"]) timestamp = beacon_block_to_date(slot) - slashings = [] + slashings: list[dict[str, str | int]] = [] for 
slash in beacon_block["body"]["attester_slashings"]: att_1 = set(slash["attestation_1"]["attesting_indices"]) @@ -92,7 +97,6 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: "slashing_type": "Attestation", "validator": index, "slasher": beacon_block["proposer_index"], - "timestamp": timestamp, } for index in att_1.intersection(att_2) ) @@ -102,41 +106,50 @@ async def _get_slashings(self, beacon_block: dict) -> list[Event]: "slashing_type": "Proposal", "validator": slash["signed_header_1"]["message"]["proposer_index"], "slasher": beacon_block["proposer_index"], - "timestamp": timestamp, } for slash in beacon_block["body"]["proposer_slashings"] ) - events = [] + events: list[Event] = [] for slash in slashings: - minipool = await self.bot.db.minipools.find_one( - {"validator_index": int(slash["validator"])} + validator = int(slash["validator"]) + slasher = slash["slasher"] + minipool: Mapping[str, Any] | None = await self.bot.db.minipools.find_one( + {"validator_index": validator} ) - megapool = await self.bot.db.megapool_validators.find_one( - {"validator_index": int(slash["validator"])} + megapool: ( + Mapping[str, Any] | None + ) = await self.bot.db.megapool_validators.find_one( + {"validator_index": validator} ) - if not (minipool or megapool): - log.info(f"Skipping slashing of unknown validator {slash['validator']}") + rp_pool = minipool or megapool + if rp_pool is None: + log.info(f"Skipping slashing of unknown validator {validator}") continue unique_id = ( - f"slash-{slash['validator']}" - f":slasher-{slash['slasher']}" + f"slash-{validator}" + f":slasher-{slasher}" f":slashing-type-{slash['slashing_type']}" f":{timestamp}" ) - slash["validator"] = await cl_explorer_url(slash["validator"]) - slash["slasher"] = await cl_explorer_url(slash["slasher"]) - slash["node_operator"] = (minipool or megapool)["node_operator"] - slash["event_name"] = "validator_slash_event" - - args = await prepare_args(aDict(slash)) + args = aDict( + { + "event_name": 
"validator_slash_event", + "slashing_type": slash["slashing_type"], + "validator": await cl_explorer_url(validator), + "slasher": await cl_explorer_url(slasher), + "node_operator": rp_pool["node_operator"], + "timestamp": timestamp, + } + ) + args = await prepare_args(args) if embed := await assemble(args): events.append( Event( topic="beacon_events", embed=embed, - event_name=slash["event_name"], + event_name="validator_slash_event", unique_id=unique_id, block_number=await ts_to_block(timestamp), ) @@ -155,13 +168,16 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: return None validator_index = int(beacon_block["proposer_index"]) - minipool = await self.bot.db.minipools.find_one( + minipool: Mapping[str, Any] | None = await self.bot.db.minipools.find_one( {"validator_index": validator_index} ) - megapool = await self.bot.db.megapool_validators.find_one( + megapool: ( + Mapping[str, Any] | None + ) = await self.bot.db.megapool_validators.find_one( {"validator_index": validator_index} ) - if not (minipool or megapool): + rp_pool = minipool or megapool + if not rp_pool: # not proposed by RP validator return None @@ -195,14 +211,14 @@ async def _get_proposal(self, beacon_block: dict) -> Event | None: fee_recipient = proposal_data["feeRecipient"] args = { - "node_operator": (minipool or megapool)["node_operator"], + "node_operator": rp_pool["node_operator"], "validator": await cl_explorer_url(validator_index), "slot": int(beacon_block["slot"]), "reward_amount": block_reward_eth, "timestamp": timestamp, } - if eth_utils.is_same_address( + if eth_utils.address.is_same_address( fee_recipient, await rp.get_address_by_name("rocketSmoothingPool") ): args["event_name"] = "mev_proposal_smoothie_event" diff --git a/rocketwatch/plugins/call/call.py b/rocketwatch/plugins/call/call.py index 6095bee0..7349fb3c 100644 --- a/rocketwatch/plugins/call/call.py +++ b/rocketwatch/plugins/call/call.py @@ -28,9 +28,9 @@ def __init__(self, cog, function, block, address, 
raw_output, abi_inputs): self.address = address self.raw_output = raw_output self.abi_inputs = abi_inputs - self.param_inputs = [] + self.param_inputs: list[TextInput] = [] for inp in abi_inputs: - text_input = TextInput( + text_input: TextInput = TextInput( label=f"{inp['name']} ({inp['type']})"[:45], required=True ) self.add_item(text_input) @@ -86,7 +86,7 @@ def _validate(value, abi_type): class Call(Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.function_names = [] + self.function_names: list[str] = [] @Cog.listener() async def on_ready(self): @@ -97,9 +97,11 @@ async def on_ready(self): try: c = await rp.get_contract_by_name(contract) for entry in c.abi: - if entry.get("type") == "function" and entry.get( - "stateMutability" - ) in ("view", "pure"): + if ( + entry.get("type") == "function" + and "name" in entry + and entry.get("stateMutability") in ("view", "pure") + ): func_id = f"{entry['name']}({','.join(inp['type'] for inp in entry.get('inputs', []))})" self.function_names.append(f"{contract}.{func_id}") except Exception: @@ -116,8 +118,7 @@ async def call( raw_output: bool = False, ): """Manually call a function on a protocol contract""" - if block.isnumeric(): - block = int(block) + block_id: int | str = int(block) if block.isnumeric() else block # Look up ABI inputs for the function abi_inputs = [] @@ -125,7 +126,7 @@ async def call( contract_name, func_id = function.rsplit(".", 1) contract = await rp.get_contract_by_name(contract_name) for entry in contract.abi: - if entry.get("type") == "function": + if entry.get("type") == "function" and "name" in entry: entry_id = f"{entry['name']}({','.join(inp['type'] for inp in entry.get('inputs', []))})" if entry_id == func_id: abi_inputs = entry.get("inputs", []) @@ -134,14 +135,14 @@ async def call( pass if abi_inputs: - modal = CallModal(self, function, block, address, raw_output, abi_inputs) + modal = CallModal(self, function, block_id, address, raw_output, abi_inputs) await 
interaction.response.send_modal(modal) else: await interaction.response.defer( ephemeral=is_hidden_role_controlled(interaction) ) await self._execute_call( - interaction, function, [], block, address, raw_output + interaction, function, [], block_id, address, raw_output ) async def _execute_call( diff --git a/rocketwatch/plugins/chicken_soup/chicken_soup.py b/rocketwatch/plugins/chicken_soup/chicken_soup.py index e4f801fd..8b70f18e 100644 --- a/rocketwatch/plugins/chicken_soup/chicken_soup.py +++ b/rocketwatch/plugins/chicken_soup/chicken_soup.py @@ -11,11 +11,12 @@ class ChickenSoup(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.duration = timedelta(minutes=5) - self.dispense_end = {} + self.dispense_end: dict[int, datetime] = {} @command() async def chicken_soup(self, interaction: Interaction): - self.dispense_end[interaction.channel_id] = datetime.now() + self.duration + if interaction.channel_id is not None: + self.dispense_end[interaction.channel_id] = datetime.now() + self.duration await interaction.response.send_message( "https://tenor.com/view/muppets-muppet-show-swedish-chef-chicken-pot-gif-9362214582988742217" ) diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index dfabcdb8..3a13d77e 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -3,8 +3,8 @@ import operator from io import BytesIO -import inflect import matplotlib as mpl +import matplotlib.colors as mcolors import matplotlib.pyplot as plt import numpy as np from discord import File, Interaction @@ -22,8 +22,6 @@ log = logging.getLogger("rocketwatch.collateral") -p = inflect.engine() - def get_percentiles(percentiles, counts): for p in percentiles: @@ -35,7 +33,9 @@ async def collateral_distribution_raw(interaction: Interaction, distribution): e.title = "Collateral Distribution" description = "```\n" for collateral, nodes in distribution: - description += 
f"{collateral:>5}%: {nodes:>4} {p.plural('node', nodes)}\n" + description += ( + f"{collateral:>5}%: {nodes:>4} {'node' if nodes == 1 else 'nodes'}\n" + ) description += "```" e.description = description await interaction.followup.send(embed=e) @@ -106,7 +106,7 @@ async def get_average_collateral_percentage_per_node( np.argmin([abs(effective_bound / 30 - s) for s in possible_step_sizes]) ] - result = {} + result: dict[float, list[float]] = {} for rpl_stake, percentage in node_collaterals: percentage = step_size * (percentage * 10 // (step_size * 10)) if percentage not in result: @@ -180,7 +180,7 @@ def node_minipools(node): paths = ax.scatter(x, y, c=c, alpha=0.25, norm="log") polys = ax2.hexbin(x, y, gridsize=20, bins="log", xscale="log", cmap="viridis") # fill the background in with the default color. - ax2.set_facecolor(mpl.colors.to_rgba(mpl.colormaps["viridis"](0), 0.9)) + ax2.set_facecolor(mcolors.to_rgba(mpl.colormaps["viridis"](0), 0.9)) max_nodes = max(polys.get_array()) # log-scale the X-axis to account for thomas @@ -261,7 +261,7 @@ async def collateral_distribution( (collateral, len(nodes)) for collateral, nodes in sorted(data.items(), key=lambda x: x[0]) ] - counts = functools.reduce( + counts: list[float] = functools.reduce( operator.iadd, ([collateral] * num_nodes for collateral, num_nodes in distribution), [], @@ -278,9 +278,10 @@ async def collateral_distribution( fig, ax = plt.subplots() ax2 = ax.twinx() - bars = dict(distribution) - x_keys = [str(x) for x in bars] - rects = ax.bar(x_keys, bars.values(), color=str(e.color), align="edge") + x_keys = [str(x) for x, _ in distribution] + rects = ax.bar( + x_keys, [y for _, y in distribution], color=str(e.color), align="edge" + ) ax.bar_label(rects) ax.set_xticklabels(x_keys, rotation="vertical") diff --git a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index b62273d0..f38dbab4 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ 
b/rocketwatch/plugins/commissions/commissions.py @@ -37,7 +37,7 @@ async def commission_history(self, interaction: Interaction): ygrid = list(reversed(range(5, 21))) step_size = int(len(minipools) / len(ygrid) / 2) - data = [[0] * len(ygrid)] + data: list[list[int]] = [[0] * len(ygrid)] for pool in minipools: if sum(data[-1]) > step_size: # normalize data @@ -49,8 +49,10 @@ async def commission_history(self, interaction: Interaction): # normalize data # data[-1] = [x / max(data[-1]) for x in data[-1]] # heatmap distribution over time - data = np.array(data).T - ax = sns.heatmap(data, cmap="viridis", yticklabels=ygrid, xticklabels=False) + data_array = np.array(data).T + ax = sns.heatmap( + data_array, cmap="viridis", yticklabels=ygrid, xticklabels=False + ) ax.set_yticklabels(ax.get_yticklabels(), rotation=0, fontsize=8) # set y ticks ax.set_ylabel("Node Fee") diff --git a/rocketwatch/plugins/dao/dao.py b/rocketwatch/plugins/dao/dao.py index 09de2c87..89341330 100644 --- a/rocketwatch/plugins/dao/dao.py +++ b/rocketwatch/plugins/dao/dao.py @@ -182,7 +182,7 @@ class VoterPageView(PageView): def __init__(self, proposal: ProtocolDAO.Proposal): super().__init__(page_size=25) self.proposal = proposal - self._voter_list = None + self._voter_list: list[OnchainDAO.Vote] | None = None async def _ensure_voter_list(self): if self._voter_list is not None: @@ -227,6 +227,7 @@ def _title(self) -> str: async def _load_content(self, from_idx: int, to_idx: int) -> tuple[int, str]: await self._ensure_voter_list() + assert self._voter_list is not None headers = ["#", "Voter", "Choice", "Weight"] data = [] for i, voter in enumerate( diff --git a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py index a5b77d38..5482f036 100644 --- a/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py +++ b/rocketwatch/plugins/db_upkeep_task/db_upkeep_task.py @@ -196,7 +196,7 @@ async def _batch_multicall_update( ] calls = [(e[1], e[2]) for e in 
expanded] results = await rp.multicall(calls) - updates = defaultdict(dict) + updates: dict[Any, dict[str, Any]] = defaultdict(dict) for e, value in zip(expanded, results, strict=False): addr, transform, field = e[0], e[3], e[4] if transform is not None and value is not None: @@ -224,7 +224,7 @@ async def add_untracked_node_operators(self): if latest_db >= latest_rp: log.debug("No new nodes") return - data = {} + data: dict[int, Any] = {} for index_batch in as_chunks( range(latest_db + 1, latest_rp + 1), self.batch_size ): diff --git a/rocketwatch/plugins/debug/debug.py b/rocketwatch/plugins/debug/debug.py index 3c4f1e66..a8595987 100644 --- a/rocketwatch/plugins/debug/debug.py +++ b/rocketwatch/plugins/debug/debug.py @@ -253,7 +253,9 @@ async def restore_missed_events(self, interaction: Interaction, tx_hash: str): events_plugin = cast(Events, self.bot.cogs["Events"]) filtered_events = [] - for event_log in (await w3.eth.get_transaction_receipt(HexStr(tx_hash))).logs: + for event_log in (await w3.eth.get_transaction_receipt(HexStr(tx_hash)))[ + "logs" + ]: if ("topics" in event_log) and ( event_log["topics"][0].hex() in events_plugin.topic_map ): diff --git a/rocketwatch/plugins/forum/forum.py b/rocketwatch/plugins/forum/forum.py index 8901877d..e29c966b 100644 --- a/rocketwatch/plugins/forum/forum.py +++ b/rocketwatch/plugins/forum/forum.py @@ -131,7 +131,7 @@ async def top_forum_posts( await interaction.response.defer(ephemeral=is_hidden(interaction)) if isinstance(period, Choice): - period: Forum.Period = cast(Forum.Period, period.value) + period = cast(Forum.Period, period.value) embed = Embed(title=f"Top Forum Posts ({period})") embed.description = "" diff --git a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py index 2df4f839..d0fd4df5 100644 --- a/rocketwatch/plugins/minipool_distribution/minipool_distribution.py +++ 
b/rocketwatch/plugins/minipool_distribution/minipool_distribution.py @@ -1,8 +1,8 @@ import logging import re from io import BytesIO +from typing import Any -import inflect import matplotlib.pyplot as plt import numpy as np from discord import File, Interaction @@ -14,7 +14,6 @@ from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.minipool_distribution") -p = inflect.engine() def get_percentiles(percentiles, counts): @@ -27,8 +26,9 @@ async def minipool_distribution_raw(interaction: Interaction, distribution): e.title = "Minipool Distribution" description = "```\n" for minipools, nodes in distribution: + minipool_str = f"{minipools} {'minipool' if minipools == 1 else 'minipools'}" description += ( - f"{p.no('minipool', minipools):>14}: {nodes:>4} {p.plural('node', nodes)}\n" + f"{minipool_str:>14}: {nodes:>4} {'node' if nodes == 1 else 'nodes'}\n" ) description += "```" e.description = description @@ -46,7 +46,7 @@ async def get_minipool_counts_per_node(self): # 1 node has 1 minipool # 1 node has 2 minipools # 3 nodes have 3 minipools - pipeline = [ + pipeline: list[dict[str, Any]] = [ { "$match": { "beacon.status": {"$not": re.compile(r"(?:withdraw|exit|init)")}, @@ -56,7 +56,9 @@ async def get_minipool_counts_per_node(self): {"$group": {"_id": "$node_operator", "count": {"$sum": 1}}}, {"$sort": {"count": 1}}, ] - return [x["count"] async for x in self.bot.db.minipools.aggregate(pipeline)] + return [ + x["count"] async for x in await self.bot.db.minipools.aggregate(pipeline) + ] @command() @describe(raw="Show the raw Distribution Data") @@ -82,10 +84,9 @@ async def minipool_distribution(self, interaction: Interaction, raw: bool = Fals fig, ax = plt.subplots(1, 1) # First chart is sorted bars showing total minipools provided by nodes with x minipools per node - bars = {x: x * y for x, y in distribution} # Remove the 0,0 value, since it doesn't provide any insight - x_keys = [str(x) for x in bars] - rects = ax.bar(x_keys, bars.values(), 
color=str(e.color)) + x_keys = [str(x) for x, _ in distribution] + rects = ax.bar(x_keys, [x * y for x, y in distribution], color=str(e.color)) ax.bar_label(rects, rotation=90, padding=3, fontsize=7) ax.set_ylabel("Total Minipools") # tilt the x axis labels @@ -104,12 +105,12 @@ async def minipool_distribution(self, interaction: Interaction, raw: bool = Fals e.set_image(url="attachment://graph.png") f = File(img, filename="graph.png") percentile_strings = [ - f"{x[0]}th percentile: {p.no('minipool', int(x[1]))} per node" + f"{x[0]}th percentile: {x[1]} minipools per node" for x in get_percentiles([50, 75, 90, 99], counts) if x[1] ] percentile_strings.append(f"Max: {distribution[-1][0]} minipools per node") - percentile_strings.append(f"Total: {p.no('minipool', sum(counts))}") + percentile_strings.append(f"Total: {sum(counts)} minipools") e.set_footer(text="\n".join(percentile_strings)) await interaction.followup.send(embed=e, files=[f]) img.close() @@ -173,7 +174,7 @@ def draw_threshold(threshold: float, color: str) -> None: x_pos = x[index] percentage = round(100 * threshold) x_ticks.append(x_pos) - ax.axvline(x=x_pos, linestyle="--", c=color, label=f"{percentage}%") + ax.axvline(x=float(x_pos), linestyle="--", c=color, label=f"{percentage}%") draw_threshold(1 / 3, "tab:green") draw_threshold(0.5, "tab:olive") diff --git a/rocketwatch/plugins/pinned_messages/pinned_messages.py b/rocketwatch/plugins/pinned_messages/pinned_messages.py index 6436e100..6a387941 100644 --- a/rocketwatch/plugins/pinned_messages/pinned_messages.py +++ b/rocketwatch/plugins/pinned_messages/pinned_messages.py @@ -58,7 +58,7 @@ async def run_loop(self): ) elif not message["disabled"]: # delete and resend message - channel = self.bot.get_channel(message["channel_id"]) + channel = await self.bot.get_or_fetch_channel(message["channel_id"]) if not isinstance(channel, Messageable): continue # check if we have message sent already and if its the latest message in the channel diff --git 
a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 1140c8b0..febdefd3 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -254,11 +254,11 @@ async def create_latest_proposal_view(self): @timerun_async async def gather_attribute(self, attribute, remove_allnodes=False): # Build the match stage to filter out Allnodes if needed - match_stage = {} + match_stage: dict = {} if remove_allnodes: match_stage["$match"] = {"latest_proposal.type": {"$ne": "Allnodes"}} - pipeline = [ + pipeline: list[dict] = [ { "$project": { "attribute": f"$latest_proposal.{attribute}", @@ -300,6 +300,8 @@ async def gather_attribute(self, attribute, remove_allnodes=False): d[key] = entry return d + type Color = str | tuple[float, float, float, float] + @command() @describe(days="how many days to show history for") async def version_chart(self, interaction: Interaction, days: int = 90): @@ -323,7 +325,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): "version": {"$exists": 1}, "slot": { "$gt": date_to_beacon_block( - (datetime.now() - timedelta(days=days)).timestamp() + int((datetime.now() - timedelta(days=days)).timestamp()) ) }, } @@ -352,7 +354,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): data = {} versions = [] proposal_buffer = [] - tmp_data = {} + tmp_data: dict[str, float] = {} for proposal in proposals: proposal_buffer.append(proposal) if proposal["version"] not in versions: @@ -373,19 +375,19 @@ async def version_chart(self, interaction: Interaction, days: int = 90): # use plt.stackplot to stack the data x = list(data.keys()) - y = {v: [] for v in versions} + y: dict[str, list[float]] = {v: [] for v in versions} for _date, value_ in data.items(): for version in versions: y[version].append(value_.get(version, 0)) # generate enough distinct colors for all recent versions - cmap = plt.cm.tab20 + cmap = plt.colormaps["tab20"] 
recent_colors = [ cmap(i / max(len(recent_versions) - 1, 1)) for i in range(len(recent_versions)) ] # generate color mapping - colors = ["darkgray"] * len(versions) + colors: list[Proposals.Color] = ["darkgray"] * len(versions) for i, version in enumerate(versions): if version in recent_versions: colors[i] = recent_colors[recent_versions.index(version)] @@ -407,7 +409,7 @@ async def version_chart(self, interaction: Interaction, days: int = 90): handles, legend_labels = ax.get_legend_handles_labels() ax.legend(reversed(handles), reversed(legend_labels), loc="upper left") # add a thin line at current time from y=0 to y=1 with a width of 0.5 - plt.plot([max(x), max(x)], [0, 1], color="white", alpha=0.25) + plt.plot([max(x), max(x)], [0, 1], color="white", alpha=0.25) # type: ignore[arg-type] # calculate future point to make latest data more visible future_point = x[-1] + timedelta(days=window_length) last_y_values = [[yy[-1]] * 2 for yy in y.values()] diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index ba07cbf7..b14ce075 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -8,6 +8,9 @@ from discord import Interaction from discord.app_commands import Choice, command from discord.ext import commands +from eth_typing import HexStr +from web3.contract import AsyncContract +from web3.types import TxData from rocketwatch import RocketWatch from utils import solidity @@ -31,7 +34,7 @@ class Random(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.contract_names = [] + self.contract_names: list[str] = [] @commands.Cog.listener() async def on_ready(self): @@ -145,9 +148,12 @@ async def sea_creatures(self, interaction: Interaction, address: str | None = No await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() if address is not None: + address = address.strip() try: - if ".eth" in address: + if address.endswith(".eth"): address = await 
ens.resolve_name(address) + if address is None: + raise ValueError("unresolved ENS") address = w3.to_checksum_address(address) except (ValueError, TypeError): e.description = "Invalid address" @@ -267,13 +273,15 @@ async def smoothie(self, interaction: Interaction): if not data: await interaction.followup.send("no minipools found", ephemeral=True) return - data = {d["_id"]: d for d in data} + data_by_id = {d["_id"]: d for d in data} # node counts - total_node_count = data[True]["node_count"] + data[False]["node_count"] - smoothie_node_count = data[True]["node_count"] + total_node_count = ( + data_by_id[True]["node_count"] + data_by_id[False]["node_count"] + ) + smoothie_node_count = data_by_id[True]["node_count"] # minipool counts - total_minipool_count = data[True]["count"] + data[False]["count"] - smoothie_minipool_count = data[True]["count"] + total_minipool_count = data_by_id[True]["count"] + data_by_id[False]["count"] + smoothie_minipool_count = data_by_id[True]["count"] d = datetime.now().timestamp() - await rp.call( "rocketRewardsPool.getClaimIntervalTimeStart" ) @@ -288,7 +296,7 @@ async def smoothie(self, interaction: Interaction): ) lines = [ f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" - for d in data[True]["counts"][: min(smoothie_node_count, 5)] + for d in data_by_id[True]["counts"][: min(smoothie_node_count, 5)] ] e.description += "\n".join(lines) await interaction.followup.send(embed=e) @@ -301,7 +309,7 @@ async def odao_challenges(self, interaction: Interaction): # get challenges made events = list( c.events["ActionChallengeMade"].get_logs( - from_block=(await w3.eth.get_block("latest")).number + from_block=(await w3.eth.get_block("latest")).get("number", 0) - 7 * 24 * 60 * 60 // 12 ) ) @@ -325,7 +333,9 @@ async def odao_challenges(self, interaction: Interaction): ) for event in events: latest_block = await w3.eth.get_block("latest") - time_left = challenge_period - (latest_block.timestamp - event.args.time) + time_left = 
challenge_period - ( + latest_block.get("timestamp", 0) - event.args.time + ) time_left = pretty_time(time_left) challenged = await el_explorer_url(event.args.nodeChallengedAddress) challenger = await el_explorer_url(event.args.nodeChallengerAddress) @@ -416,12 +426,14 @@ async def decode_tnx( await interaction.response.defer( ephemeral=is_hidden_role_controlled(interaction) ) - tnx = await w3.eth.get_transaction(tnx_hash) + tnx: TxData = await w3.eth.get_transaction(HexStr(tnx_hash)) + contract: AsyncContract | None = None if contract_name: contract = await rp.get_contract_by_name(contract_name) - else: - contract = await rp.get_contract_by_address(tnx.to) - data = contract.decode_function_input(tnx.input) + elif "to" in tnx: + contract = await rp.get_contract_by_address(tnx["to"]) + assert contract is not None + data = contract.decode_function_input(tnx.get("input")) await interaction.followup.send(content=f"```Input:\n{data}```") # --------- AUTOCOMPLETE --------- # diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index 0ab1659a..f829ab54 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -1,5 +1,5 @@ import logging -from dataclasses import dataclass +from dataclasses import dataclass, replace from io import BytesIO import aiohttp @@ -100,7 +100,7 @@ async def upcoming_rewards( """ await interaction.response.defer(ephemeral=True) display_name, address = await resolve_ens(interaction, node_address) - if display_name is None: + if (display_name is None) or (address is None): return rewards = await self.get_estimated_rewards(interaction, address) @@ -115,8 +115,11 @@ async def upcoming_rewards( proj_factor = (rewards.end_time - reward_start_time) / ( rewards.data_time - reward_start_time ) - rewards.rpl_rewards *= proj_factor - rewards.eth_rewards *= proj_factor + rewards = replace( + rewards, + rpl_rewards=rewards.rpl_rewards * proj_factor, + 
eth_rewards=rewards.eth_rewards * proj_factor, + ) modifier = "Projected" if extrapolate else "Estimated Ongoing" title = f"{modifier} Rewards for {display_name}" @@ -145,7 +148,7 @@ async def simulate_rewards( """ await interaction.response.defer(ephemeral=True) display_name, address = await resolve_ens(interaction, node_address) - if display_name is None: + if (display_name is None) or (address is None): return rewards = await self.get_estimated_rewards(interaction, address) @@ -227,7 +230,7 @@ def draw_reward_curve( ) -> None: step_size = max(1, (x_max - x_min) // 1000) x = np.arange(x_min, x_max, step_size, dtype=int) - y = np.array([rewards_at(x, _borrowed_eth) for x in x]) + y = np.array([rewards_at(int(x), _borrowed_eth) for x in x]) ax.plot(x, y, color=_color, linestyle=_line_style, label=_label) def plot_point(_pt_color: str, _pt_label: str, _x: int) -> None: diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 8d2ab067..9d0cfa17 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -6,6 +6,7 @@ from discord import File, Interaction from discord.app_commands import command from discord.ext.commands import Cog +from eth_typing import BlockNumber from matplotlib.dates import DateFormatter from pymongo import InsertOne @@ -35,7 +36,7 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: else: last_checked_block = self.deployment_block - b_from = last_checked_block + 1 + b_from = BlockNumber(last_checked_block + 1) b_to = await w3.eth.get_block_number() updates = [] @@ -47,8 +48,8 @@ async def _fetch_asset_updates(self) -> list[tuple[int, float]]: for event_log in await get_logs( vault_contract.events.TotalAssetsUpdated, b_from, b_to ): - ts = await block_to_ts(event_log.blockNumber) - assets = solidity.to_float(event_log.args.totalAssets) + ts = await block_to_ts(event_log["blockNumber"]) + assets = 
solidity.to_float(event_log["args"]["totalAssets"]) updates.append((ts, assets)) db_operations.append(InsertOne({"time": ts, "assets": assets})) @@ -125,10 +126,11 @@ async def get_apy(days: int) -> float | None: fig, ax = plt.subplots(figsize=(6, 2)) ax.grid() - ax.plot(x, y, color="#50b1f7") + # matplotlib stubs don't allow dates + ax.plot(x, y, color="#50b1f7") # type: ignore[arg-type] ax.xaxis.set_major_formatter(DateFormatter("%b %d")) ax.set_ylabel("AUM (rETH)") - ax.set_xlim((x[0], x[-1])) + ax.set_xlim((x[0], x[-1])) # type: ignore[arg-type] ax.set_ylim((y[0], y[-1] * 1.01)) img = BytesIO() diff --git a/rocketwatch/plugins/scam_warning/scam_warning.py b/rocketwatch/plugins/scam_warning/scam_warning.py index a5c7f5a4..b46153df 100644 --- a/rocketwatch/plugins/scam_warning/scam_warning.py +++ b/rocketwatch/plugins/scam_warning/scam_warning.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta from discord import errors +from discord.abc import Messageable from discord.ext import commands from rocketwatch import RocketWatch @@ -28,6 +29,9 @@ async def send_warning(self, user) -> None: resource_channel = await self.bot.get_or_fetch_channel( cfg.discord.channels["resources"] ) + assert isinstance(support_channel, Messageable) + assert isinstance(report_channel, Messageable) + assert isinstance(resource_channel, Messageable) embed = Embed() embed.title = "**Stay Safe on Rocket Pool Discord**" diff --git a/rocketwatch/plugins/user_distribute/user_distribute.py b/rocketwatch/plugins/user_distribute/user_distribute.py index 4df52cdc..a798579b 100644 --- a/rocketwatch/plugins/user_distribute/user_distribute.py +++ b/rocketwatch/plugins/user_distribute/user_distribute.py @@ -1,10 +1,9 @@ import logging import time -from io import StringIO from operator import itemgetter -import discord from discord import ButtonStyle, Interaction, ui +from discord.abc import Messageable from discord.app_commands import command from discord.ext import commands, tasks from pymongo 
import ASCENDING @@ -12,6 +11,7 @@ from rocketwatch import RocketWatch from utils.config import cfg from utils.embeds import Embed +from utils.file import TextFile from utils.rocketpool import rp from utils.shared_w3 import bacon, w3 from utils.visibility import is_hidden @@ -80,7 +80,7 @@ async def instructions(self, interaction: Interaction, _) -> None: await interaction.response.send_message( embed=embed, - file=discord.File(StringIO(input_data), filename="input_data.txt"), + file=TextFile(input_data, "input_data.txt"), ephemeral=True, ) @@ -100,6 +100,7 @@ async def task(self): return channel = await self.bot.get_or_fetch_channel(channel_id) + assert isinstance(channel, Messageable) _, _, distributable = await self._fetch_minipools() if not distributable: @@ -126,7 +127,8 @@ async def before_task(self): await self.bot.wait_until_ready() @task.error - async def on_task_error(self, err: Exception): + async def on_task_error(self, err: BaseException): + assert isinstance(err, Exception) await self.bot.report_error(err) async def _fetch_minipools(self) -> tuple[list[dict], list[dict], list[dict]]: diff --git a/rocketwatch/plugins/validator_states/validator_states.py b/rocketwatch/plugins/validator_states/validator_states.py index 7c26a527..49e84e5f 100644 --- a/rocketwatch/plugins/validator_states/validator_states.py +++ b/rocketwatch/plugins/validator_states/validator_states.py @@ -172,12 +172,14 @@ async def validator_states(self, interaction: Interaction): node_operators = [] for valis in (exiting_valis, withdrawn_valis): - valis_no = {} + valis_no: dict[str, int] = {} for v in valis: no = v["node_operator"] valis_no[no] = valis_no.get(no, 0) + 1 - valis_no = sorted(valis_no.items(), key=lambda x: x[1], reverse=True) - node_operators.append(valis_no) + valis_no_sorted = sorted( + valis_no.items(), key=lambda x: x[1], reverse=True + ) + node_operators.append(valis_no_sorted) exiting_node_operators, withdrawn_node_operators = node_operators max_total_list_length = 
16 diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index 77d0e6f7..dbe30dcb 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -13,6 +13,7 @@ from matplotlib import figure, ticker from matplotlib import font_manager as fm from matplotlib import pyplot as plt +from matplotlib.patches import Rectangle from rocketwatch import RocketWatch from utils.embeds import Embed @@ -101,8 +102,8 @@ async def _get_dex(self) -> set[DEX]: return self.dex @staticmethod - def _get_market_depth_and_liquidity( - markets: dict[Market | DEX.LiquidityPool, Liquidity], + def _get_market_depth_and_liquidity[K]( + markets: dict[K, Liquidity], x: np.ndarray, rpl_usd: float, ) -> tuple[np.ndarray, float]: @@ -131,14 +132,14 @@ async def _get_cex_data( await asyncio.gather(*requests, return_exceptions=True), strict=False, ): - if not isinstance(result, Exception): - cex, markets = result + cex, maybe_markets = result + if not isinstance(maybe_markets, BaseException): + markets: dict[Market, Liquidity] = maybe_markets depth[cex], liquidity[cex] = self._get_market_depth_and_liquidity( markets, x, rpl_usd ) - else: - log.error(f"Failed to get liquidity data for {cex}") - await self.bot.report_error(result) + elif isinstance(maybe_markets, Exception): + await self.bot.report_error(maybe_markets) return OrderedDict( sorted(depth.items(), key=lambda e: liquidity[e[0]], reverse=True) @@ -161,8 +162,8 @@ async def _get_dex_data( ) @staticmethod - def _label_exchange_data( - data: OrderedDict[Exchange, np.ndarray], max_unique: int, color_other: str + def _label_exchange_data[E: Exchange]( + data: OrderedDict[E, np.ndarray], max_unique: int, color_other: str ) -> list[tuple[np.ndarray, str, str]]: ret = [] for exchange, depth in list(data.items())[:max_unique]: @@ -214,7 +215,7 @@ def add_data( y.append(y_values) labels.append(f"{label:\u00a0<{max_label_length}}") colors.append(color) - handles.append(plt.Rectangle((0, 0), 1, 1, 
color=color)) + handles.append(Rectangle((0, 0), 1, 1, color=color)) nonlocal y_offset legend = ax.legend( @@ -341,7 +342,8 @@ async def on_fail() -> None: x = np.arange(min_price, max_price + step_size, step_size) source_desc = [] - cex_data, dex_data = {}, {} + cex_data: OrderedDict[CEX, np.ndarray] = OrderedDict() + dex_data: OrderedDict[DEX, np.ndarray] = OrderedDict() try: if sources != "CEX": diff --git a/rocketwatch/utils/event_logs.py b/rocketwatch/utils/event_logs.py index 57f4471b..ff9ba3c4 100644 --- a/rocketwatch/utils/event_logs.py +++ b/rocketwatch/utils/event_logs.py @@ -4,7 +4,7 @@ from eth_typing import BlockNumber from web3.contract.async_contract import AsyncContractEvent -from web3.types import LogReceipt +from web3.types import EventData log = logging.getLogger("rocketwatch.event_logs") @@ -14,7 +14,7 @@ async def get_logs( from_block: BlockNumber, to_block: BlockNumber, arg_filters: dict[str, Any] | None = None, -) -> list[LogReceipt]: +) -> list[EventData]: log.debug(f"Fetching event logs in [{from_block}, {to_block}]") chunk_size = 50_000 From dddd9e63dbab1c69f3d9369ef27a5374b52a772e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 17:34:33 +0000 Subject: [PATCH 244/279] remove unused images --- .github/readme-images/ens.png | Bin 15914 -> 0 bytes .github/readme-images/minipools.png | Bin 52587 -> 0 bytes .github/readme-images/odao_members.png | Bin 31952 -> 0 bytes .github/readme-images/pool.png | Bin 31041 -> 0 bytes .github/readme-images/proposals.png | Bin 76344 -> 0 bytes 5 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 .github/readme-images/ens.png delete mode 100644 .github/readme-images/minipools.png delete mode 100644 .github/readme-images/odao_members.png delete mode 100644 .github/readme-images/pool.png delete mode 100644 .github/readme-images/proposals.png diff --git a/.github/readme-images/ens.png b/.github/readme-images/ens.png deleted file mode 100644 index 
b32d5344b02224bf33f2dd9d0bc8f3b54c94d7ce..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 15914 zcmbumQ*{K9NXigY-#DO3X8g+O$%mm{(vYjw~$9Gm7^E$~W6+ zSC`r%GVoX6C0Id-JZ#QEyvNDGP4l23o9 zq`1~{WNWKRHl3LQ*-*Xvy9)BR9xzTlbw<682fR08|3_hn;dqP9ts$iaQYT2C(q8!_ z#kxDfAjzYY(aZ~f?R)a_=Scpp_QCYhp`_w`LR$qF zZx3(K)dZ-K+Y81HwzuTvn9J8ikI ziFd?V+=Ka}U_oiHo*Plx(vL4ck|?shcXAcEYsF3wZLZOKd&k!=O#Fv}ou-5j2qdKT zpKefDJnmuM7o>AjZj{`NIYHgu_+I+T<=%5J5cx&mEo@lAgVRF9S<<_gV{ZEh!bl90 z?3cRrAu_sE`cH8JRohq*t&@5SL56pikm!9>h}@0^tE@M)wc29?6|cbUGvCp=9-&D& ziMq1K!S01)zSGT3BWiA+ARSWSEaL28x7#+Oeu6TjyMv2zCMfd?qI*Zgvf9f;mRSE5 zjT>*lA>SY>ckYCY?IRsaryVEygKdb=Z-?sEQ>epFBSYGyi!bQ^MYd0nF0@t$J@QDY zd22;_pXdkq9)kH5`GZoG(gr;$#QCzI&@d{0lA$o9SXke1Su=ZUs>i#Z<3oI92Vp`r zB$N(z?!66J=IP{0(_&0-a9zNY5P1XxH_>G5FXbjzbz4-HVhz5Pp3&6Wr{A zyaT&Y?XDA(+DGpT2~NuMwKLN}=X`_@uC}!N+cVht*1+clv9~1&f?;nZHzzm_rRWVl z*YiOCmY{4v9m@A-plMY)>Bh!n2a!!VC-C1&rR2uU65FJ!oMyL&4#rDT@_WMoF z6rJ3M&(u>#pK{W_JZB>ZZDsx+k7@;MIlF}X2pBDXtYiai=NaLgx-g@0&7i^32J9*g z4cLw_SKM`}6c$%$^vU}1Bz#=(8ZLdm|B?8G1p^zSqe+=*y@jTD9Z0roRI|W-(p2Rc zFSXcid&K!zgU^**koR}S5NiF5vFcpqTSp761O%4{jJ zZh4cWV+c?Jap&_=N?aV};0V2iuAR!8Q-U0=GA<6as;>4EBnP+7&#yJUfXM>&G8PBA zUa65DU-{~z!bK*Z#<$lcC5Ht_CG_^29uDx#w4))gkH-P8@Aq3+pPV9xJZcGM;ub>s zR3m(LbRZ$KMurte3OhlA`m!oGS%Tn!F7^3yVy7{~`w^B$4PxY`=q!BP+CcYwLNM2t zdyBsBx3L$)Y;l9v1GY3T3lRu#kw<0r|G|qZmmu#Si?-elMF9wtPj;>&ka^HidMfn6 z8FTQkMIM3j{6UfvCuc3US`z?dz~d*qJR*_v4=e#2ri{L~*oK(UIl6x૊zijwv zPYhi9c)gQbV%5+@>Ud*tc1j(e-6*%bwByp7?|ysm_%B zSnTbDgR4E`T4q%13nK9K|9%7ZSQJ}fv{s+e{YYV9icqM>y_!;vvDOfNjaj@0%rK4h zs>Qx3D{|;tFPNh4{pPPviR9uXrY`Z=7{MPXA0hAmlr%wPS=yQO$U*u)^f_HHiE zlgfiH5*#S9sK>C=-B4k^Jbt58nkFCGnVZ82bVgA*~p zxa{lOwTpHLH{d!2{(KRfJ^ddqx^hZE`l%RNM5)7t8t~(s0=A+B6EoU*@IsUKYBk^{H|uV)4)Ngf*buY%zH!N8&X2CxXnC=r=5mTXree(8BJu zWtZ~9Ix5@e<3@B`13So4#BYSKnUF>(-+Rmnx`2dwE2F{vgCj+(b|VN}3Zb+w=f7aE zM*UpVUYTk4@0azPj+D9GYkmWTyhd!2AE6!zt=4p^F;!7&5wwuEDCf_Uwv#F)d#X~u5h#Y&Rg;l9S(^H(;rCi+|jn9dB 
zJ1H@ncVm%7Ou20EzS3Ox7f*p!*oe~bSTl$c+2;4b*cy7%hc8B${iqC6efd{c4W%`7 zb#yv>m^!JbD=7P3ejXIetgmiHGnDSw*uyY=gKx$2&zVpGS*@ziy)>L7U05I*1X2KyxV(YD;m5qqM); zjS-V_9~y)m*&vV4keR?yExwDQjb7lZh(5ut6Aow!Q$zY7U~nP|Ex51n=hcS-*-+Bd?^wBS*r=V)Z$U&%G*~w zqxU^bNPj&fa}d1UFp*caRd-ed;$jl&cw1Inmh!n70fhEAh%71G#@9hyH- z--$R@KqHcTsGB?`ssXsuU^AsYf{((s5Mzv7bDB<$$D1kYJR*_RwdOA^hcEKsi6*fp zP2NKr6?f%GNwA5+KAOx=uLOb`2hMGnC(OLiFx}K@SYxq)?{rQElv4V3?k}T4C-_Nv zgDLiwS9vWYXEB+Qn_W*u6|nPWVVIjlh~T@C3fEIH@xl}*R+AhK->p@IhIYFE`;v`v z1j{^7!uC;QD8wo|)!m$L_`w9E_JWG4@Y2Gh`@QCVr$10PiVwQ|kVXyf43 z^?1J%e^E8JSBc?`71(C4VY_9YF%eCSD}CVHOhz$2K6>_y9LLSM4OSDjANB-Rrfo9^ zOvQMn+hir;KyAvYPrcGD08#wdmLwKk9BcI?A6n=ZvhpuNwRJhcUt$o%6jb`hPzN8M~eZ^UPO0$HoM2(2@whF&I+Bjwr#SABB$ znL}w`8637WJc@JnX1B==rp+p-g^B^H`i{GI9Mw)f z^152tzKaN3zMpEuoxULf7ZgP&n7CCHKPyCr7{e=5`FVEJSz+L$x@t4N|a29xx4~jZt0l^(!NaI;TI#xnWBF{RQZ=RJBCtp&8)qHN>XBzYO zXWW^tigNBA0$d--Zx3h(xNgGm@_ng`K3i}GjxGb_P&CSqPmnV%a|~RrZG*W!jl*w~ z8;w*k1BGdJ>I+`DZ*FM?8|bkU+=dcl-DH=`2^0GZ^@8u}vi?tat@gNoXmdGWa(?S< z!OPcfY13h?E+XMEnSM=MX2-F1gnNadU~@Ia_5nKRQwSDl-P;Z{nZ_OyvA@rSUCzFM zT-cI7^C!f?2iUd;#1^xN&H6|}+E0WN>{^J*GYp*OTJbAU!Dm_a_!ppMs z2`aD*A=Q9Na!pyWGT|hqY4JS@F1w+C*(e+zpg5+L!Xlo!uBt^g~Gs4hU5Y zo!oe5qYu^Gdkq&PIBc5sTW9u8U_#b;gE0szHTP_;Wq9UMK#bgSWgkP3D-no!mM@M> zJt~AITs@vDGil((8vJXmrf6mnPf%?w@i)!iRV|z2J}RS-YcxjCGgWOO1|$sEos0 z7T+$hB1vTjxHF6FHOHD5&UwizW@;-hE{l2qhZ1aUE&HYv({%0c6kM$wMoOkTVjmuOALp^5^hbA8^F^}>!8g5P2`N+;eh!j zOK8C}G?~$p){P}gSWk<-1IKEeDRVgR{AOr`#M=semRrjy9ic9e*_bOAy3}L~w<$6H zvl`4QN0k!A;N@~-V3-3gTE(UO{)bg)GzlySuH2X-$5}zS)HKeABk*ye9Tf!CQbCs$ zyR$f%$8hLzz*i6GtA}>DH}RE%B>7QE0{e^VIiWO|v7iyfC~Nm$ParmLGVl<0U{9Jb z9m0VLsEsz{!zaw5j142iGtQwJCpi}a(FalOPW$)nNEe#GM{@a-sspPujK0tr3N7SP zwsHZuoL-{@o4N=%#Jn~nmYLlCcr`KW1ECu~(6&q!f2QQ)b2YIJQ~PrYmj179d9#Et z4V(dy1A5oN3juDr!2gq2=3Qb7+xekOQjPXv4;;%#6zj&QxZnx6FiC8p1ZtH3 z!~7>Gy7Q$izT+A&xLSQU+O)}tA&YfSA?$5|UTLTpJ@6aBD8q<$f8?iES)A&Wec{F_ z{#&Jye4505PP6d0TAxnS$H&bdR%{s>S6#bR{H~++qic6;(Kb$C55@0K*M2@WHdK~% 
zLq)NsD?wefqML~$cgN=@I6|vD^oB`IaNSJ!^Z-GqvrAC6iSs2ZFXg5KZMt=}e?y1L z>uzmxysI#>q&%0I;OU3TlZ#b6#!v2QE5WzuPr_9(6jG&7q|Um3FaQ44jSXkbzD4*g zumN@%3HwiF&&21C#bbH1IZkqt{ULpT=2)BQ{*8nE>JAu&#{5fmnccLXcdWb$#1P`| zhuc|I%6rN>O_E69pn03 zt0THkQslz}2ml(%O`pOi1z8^eXkLu|VA>80rVTxgh0bL5n6 zN_gf=(-bwhdS0O}@mV}Fl?yC<)wY0OsPI-q7iwoME2iFW%Y6% zpOCk%bZHYu1ZCHRwg!!m7k~MWE0}iYiHM%h_uUYNSVaoi!D=$?=cF^lg8SF|N$4RH=^mx#E*dh!QV@=16Feqdt0RFbA?#ZpMv(1w zd$yc?RN92^TDXCD(98&>+Jh}H0?bhK>v@Xj-%Q|dhc@R&A#j%q*^xcRSHj=y7y}{h zM5n9mKUimOLK&EPCT$RNrhu9+dKI$-HQuh!2(aVo$EQHZGqaYEC@c;a7>ZAIS9;$K z{D5W+iM=sBZ;$WEKNhHGAAw106LwpY1&7N|Jx*FF!AQ{J%G|YmjdrJe+i$=5+~--B@vjI1?{|jVOyw%Ax7%km$2$e!9r-TpJo+p4%32zN zJH|Y_fgSdS3oh$F+YUVFY|(j!WOzc{7+C7X&q<3)R0x-~!}a=4s#+Q7LB1&Lz4oBp zZy|`ulmbhZ%!ZWV-4+)Y_U6Z?Y2TJ4ft$*-I+bqQe-2jQk&idBzo-&t$2TEySzwPk z^Fdn^@_3#oKDhw1-=EdkvJ7iYEmN3;0a~-U8d9rWC?<_7rZfhh6f~pVe>C-Dx;U%O zcMQJY`yO|aBngm>h9rbx*_NXR!&ynj)KML-ACR1|Z_!R}{JP~3^~;w zDTq=2ectfXn*$C{59Dn|Ax;$@EqM*Uw?$Q}9&oK63_4k}lBKa*QGe7|iPSFOmI=cT z#2DcKEd2y3b}Fbk-FU#-ktp}DI>?v4^zo!lf}d<#0TH?WKcxEd;zoI0j?e3myEOvs z)faVYjp#jn<*%rZdc=p*BNc6egh$LXl!`Pd1fl3`_X8y1GqGc(nrJ3Lqx$kr@GxVZ zglY;-B-x%e-DRH6((%DSQjbs< zEDV**J7PCe#I*xYUfu&$AuP}OF?3tvczrTjXwBbMrT!`Qeu&Q_ zJvG$YCJ@X~c@<>Ozbh*!KUh~xw%6(Rm%yR}ooEnUi7iJirUCcoBz6q&xeVE>1Wdy) z?Q5$O2&2S@1(gLq&O60~)PQb?t1q2V2C+))hQ`pN9BJl=w@3lFQMMS;1X(=*N zX82pmZ=N{!W1fP^B>{6y-HA0za9=81B9$-Z0n?~Rx>m`cK38DfAPBu&`?w*Z6Z}lC zu=4CmDf|#~Z>2L<&M<L72Zg`&aFEsk25R4_b>B=_YJL8fHk@}Hn zEhGavFcMeeo^+|d-p5-q9(*kQ?^+eCzQ~iF--ubIu?Nn-zIUzS?B~dic|5eQ`_uld zoX!mdK7X1bDtPrzR+7jCDi`o#2d>-jRelnWMhVUJ;~A#4K6t6$5O86k{5FQUSu2qBxlmoZLXhzc8YwvQigcKqjr@Y=Qx|D-L(cv=!4j zQEIEfWeBp$@$V|zI(tL;?%MJ!OzAN0(4$p;`+`l)G0A+Ia&t*3&{n|KZ}vV3@P&a5 z;8crKr1c&L`0H9R1XkA?$hIJio*fiMekoKVO7%Er{tW^W+NAM55SQLU>Nu=nJ%y(A z?n6ZlsvM{`L9y_ZFXO=f{3Fw6|3&vqh|al=!V-3H_GQB&xAK?T=KPuA(2?4p=IJg|_ea zJ*nJLxQWkf@QL9eSCw2`p^V_&d5<6khZ21QCNpYgqo$7L7{R=dKXb@C}!k3vs> z9!kc)fP=a^{dH*n9T^o^U{U#YbSkr=!qfFpwpRxEI}H2dM45QI&C2g 
zqc@y+_=u^lYG;AIk=1$iZNl)irbM?*A$~&JyV!UbTBUWu;t90j*B=&Gf1ntdMi}U< z#3InoC$}ArGfa@#f0qig=g!sFme;Kw|_{D%y zU4MCXBwP08r2q79Hi3aX(-k|2w=h@Z@2bVq>DA`x@=kS=n6Lc^a=#pDUxRkpZ}@)S zd&~jP$RUmCo3e@ucw{7u!4i79j%Zx4b!FOZ4<(@>DVEa%2t&yu2uf~`C8FU~Y-gAo z9~6B`u_gNOk58OU?3wu$A$85u_aSF+)oFe)OkO-8kQnnA1+(Rlb>PoYmH1S(4{TT;cBehda3qqS-V@sI&S0hi>h7az9r?c z!3{=v?fAhM)Xg*_vK&t^yUj+hO_tmK)2rv;I1}Z6)$m&@B3?*>|5N`yogZ`q0>U}^ zj|=cW)xvKF<6KJB_5g0?qwfkPRYU_`8)-AV1t7Bq|*}L`blc}xVZ7<`* z4Vo0>6_uqv>pmG@jx^2{=8!`@52KfN*dBhKy}M0@xKE9Zu%+53_KmG{u9*L9PtDz& zNS?{?zUFs6|7m~hdxf7YhhV|Xp?cRs+PF9g>#j(5EZMo5QKwInLfp^%nRkIaT1kDk zmm=`*kbil`NYV`hg z;o8_UX#U+Qh&pA@X9mvU{4McN9M5n0_=~6w*D2G4fs3lQe8_3B2aKKe!Vocqb)xNU z_?^8bEv$$=NmHP@er+|Bv=unA?@-1o^do9ub(>zs5JP>&e{lWqTa#;A=*j^qwb&L@ zc+D9@RTnGNVXAB1R#us0v6p3Z$bOB!h{X0=^N%MTD?SH(bV_Cu3mfLBj>T z>lP-#21CT~nmV{KZ=DYT=;oE3yf1*j@i_IAG@E@l23v_uW%nR^=aqY4ax$$Ha&k`=qG(gWQ>L8B3BehZ%sDl5H?w=c zNCBoLh%lUen7UbEA_N+~WiER+#CaA(^%ycXY+v!iF~knOFN^NmfSF0?CVbd z)+klixrLK0vFt3HoBx&8xA{z&HBgN-g5vLetx#lGl1UZOw;e z1-+i1ydi)21U?iKYW8tMecg2r9}IU#2LN( zENLGb;wb}%_Jfsd5Gt_WblrWQx%qQ&c|BxoWPlVprOPiqIm;?W6R%mYcf#=Ou*0uM zXtwkGYoKdqb2#-owNh2y;M=@XK3Aocf=L^xdPB`NS2P$y-nSA~ak=D|R1wkNFJ9bx zbWQeRS0eTBA1cc7`d_!O&Jp7telqJ-`zE!(%vQ&dM}6Uobl?e!daq{pO#Bu8`@d`L zCYHb>6Zt^Y#S0PaOqF$OLg02~yQwooyp>IgE}R@t$?V@uL1w$Vcv@5UZ$CMG+!>N| zJvZdNF34#+U6yy#tRF9+RMM1=saz9cx%kh}tJDCRtYWg%8LO2X><_=Gv4;n0KfS!K2 z+AqwYlnSB5^Vw#!l&%pRZGG|7v2(|Wnl}3R(3}4OYZ<6pE9U2`#!H~F#SEFDdGlX_ zyHt^;(k1<(E>LrxO{Q7L(;C9moSEn=0|L`D0W>Ug^ z$+3*+Q6OK*^66t0irrqq$vy^}5ve(B?eI9?EVmoL@rEnO4IpOXzm@E?&HG)^~G5U)XvnDHiNS^&Bf>1M=4&f zEN1kweW5;WqZ>supSq-BhT^6z8b&SpJfnlP`t}xK+xi#yIEz$t%iqN9MnBR@E9iqS zqD@y%3V74j`v*O{kE$YC?siM>a%IE`xT~y;dwzY9X{=%GQY-6O(`NALMKf{t83Dn3 zF;I@-TE{aIMojjAHM-8_7U>z#zk^XexNKa<^KQ|A&@N&V`SxshYYT`VRLLHT;{5DI&jRQ>bd(|8hB7*~D*V)`tEXalAFRp*irA2; zxt-4F;6g4Bm~jx5ZsbmDlI~t(t!mc{6IDOloXt;wW=7Jac@bT~Sb4Qk6JAu)7M0PO zcpWZ^CaYOpiu|n&HQYB-#M>3--0i!F8Yn_NdUhBQQdf};L0*Yrr}cU^RgU#>D?S- 
zpVfyr;D~VN*<@}1Cjo6pYu{*vaEE(5Ex|DCtqJmc+5`Z!ZV+>=}1DygL zDdbD++w6T}Laq?=eD+?u+?!r*n#h(XGIX{K={?Px;;MY)Q8hFS$o#eO%@78|%GVzP9?eC_XFJ|=eItZGB-2>yh2~#*0lrH7b}sJ9+2QJU*h7ZU;*&ds zHjy7*J*`Fb^wO#7#U3hV@4hx2QDkRL;l%l3KVG*b3zu~2@`~)a>Da6bOSx+3-YL#E zgr)%+gVJ{;y1P%k#N`clAoB$x4c|{V)Io-T$wI^Ta=Hv@ZqfySSRB$FeaSFY#4Dso zGF9M%J0~f*&D*kjBymxL61`v_{Mq|SsQT?4ArFfox6;~1{+o8waz5-$3|&7Fq3O?bv!0!f3%E{3pEIF7;6&D` zcDexniC)GA;aFpqn%ZVdGyo3lUhpuKx7dea^M*acGMm(}xF_29Q}mE+wBP)J@SHw7 zGmwX3_F_(s>Y`gxj;@cPQMdT)bFHqUoQ%qiDmZoDe$zFxYddDNRi;pufthz-LiE-N z)2lrxr2b0_>lMw_=ICB?fUsz77-2GZOMF3vDasl8C1bpfx1-^Fd*9Lc$rz^-Z%p2> zdp^wTf;!7Z7$miB+T~Dmb!F6NGuC@Jw~K@mp~iT!tq=przvL906_95;AI6#bvcO!Q z(2A3AJU8h>fuYo5kf^ELlYJO^ko3rcvEZV>SLs(!5+g1lZF zQvaKQ8^-ZJ6LbH6v3CEra^FqRP$%&91JDnNxc;5Ydg|tqK1*Fm*YuxM_vxpi9A{G7vqQmQVXZdl{o19?;mxOiWo%r+2-Xcl zixBSJiKB&&(gpu;@AY_X>HMhoX%Xs9tKgYsdjHvp;M4}Q6Yz$vYD1^ui1?kiC8<)# zWATd=-E76HsjRVCZdPqYRZ~QZGqIVx)V)tvNRuh#SG1ydH!yj}{drq_7usgzw9Jzh zyKl7Tl?T)PQI4DI{h=)h-#A2;Y^Q&dw(W2XmXP>2x|wnML~N=fd4OcBD!M9Brr5*Q z8Z;TI!{zmM;o*dlVlejDmM3?5_eBTe^T3My=j$&%v@~oR+{vh3*FCo8ggi;Z0Lt^V zKUC)hVznzvOlLJ*@{JzVpgR<~jJ0CC3?sG4Ts7Ft$>!RTfEjb=E5mZ|qgEyHHrJQ2 z8(OvZoPte^))W`iB5gtznqS(LdM4gIDHuL-Cg?g+R8rKr<1rj5;uo4J8@Omv6s*W& zg`NCsm~6!#b2b*_Bl+>znUbW%%>_!_Te)j-+>Y2IZI?y)P70BQ9Q) zJVPU!DWtKFbZiY!e$khBFBC{Z!*dii9HsFEj~6n0Cxl;}zI3IIeLsw|ijMHC9#HY* zww8oAN7`&|5wU6fNFyt67x2fyQ1i6WGVWMUvVDbj14%%H#-w&S`?DDwi#Pt2Zw(TKBxG-=c2sD z6^||j!E~Rjj#95 z0oRB%z6(Rg8LXx$=J$UCLa=#FPI1?ooy2M+1c+4oX57m_e% z?Vf|Dy(Bq~D!J#R7|M=s3^|xv1TJq*D8}nt_%{b1E(EI7v{&Wpi_DqLaA$z-(%BkV z8E^H@IGyGKiG<}uZ2Jzce6wdq7p>iiEI}GCwC&Ti-H+ptHHAl3Pa4a_nV!-fjS7Wa zgrlXPGc8&`&kPd^PkA|AEF^`tIlC}cWaA7p+}1qN`7ggscyXDM8}p*w0F!7c^uYxk zryfr@H8wK`1>C5t=Jr0ju}R@^~x+yJcZ z0hYKdF{|s49iB`jYA(WFMf*LWz^X9F(!9(W|8GZuuuO)A>EupkZ}Ysqc?p4Y?jPyM z#po#Uijpm!#p9_~0vx@S`B?OCx!@_8LT_j^_!J;`x2B}TkH%qX58pKM6f&B7>ho02 z{SE9Qp%jMgBhEVtJRucOMT(CQhLH#Dk;AEz@mfAM~z9z z+-PYXY*WU;KhyKUKFn=d;u%xIyq;(5Qlz$rj}x{J3Um-o?Q3S#*ry94iD-r?1Dyyb 
zofJ%Q?zzFcS)Mp>F672``2}|kF|csf(k-%*xjx&Mm=6ppwNb-tJkg04OXQ)l#yynr z3^!uw6z*c%Sv;6*-bk3yG2m05w+)_wpEV}qoh#{Uw*7Akk#jQPdLfpqcjokZ0njD1 zBOH&vxD`X_dA71-99OL|{f@`qgrJGYU`-!JBwMV*iJt@DmHWDQTYytWe%7xo=?q4g z&0q300+`#@6@|ELh#3FZ-U@--+z}j0Q=Lollc%*cza&{9A#(>Iu|OZ-t~#!y+d;ht zXGIHjXa+7qia(smIjw9BXK7Tmvhi`<6bZjpC{TopK-<~#CSs{}HgL|PEnNr`6M*(5 za5>}eCUyo6NFM` z#oYpYm|3rBGDpY!S{UCNmNl0-Ms3p<(er6xz5r|9dq@7#Dbc`R0(50*=p@D;ryiqX zKH^o(X}xrIWbVrBe+-Rn`apO+*AjvTb?@9Pv0W$}SCuQEB}IL@(s=K!7BO$V`fvbg z=j*DJ-lj1jwg2%hPalV+ha2LNB4>~v`%1g6s|2cn+Z8o>2%2$6E^N*&p}BO{Etxm2 zJ!FlA%D6IugrRlRCjD#_x*F+ctgxVGvd2FVNjfSyc6p+CdpzBEw$pQFBxv zbryA5iV9g!MajA3iN9jr{YcdEgTR?mw7(VKnw9eoT#G}=7DKQ<0l!8*4n_~*q-#; z9w%~TLVj7yf-crL%L|`-n+tqRgRJBTB3~)LHx3(JQJj}-giL+L!il2z49!KiTNg{_ zN;X}+Il%P@mtLs*kocQxd^K}gAiGdN?$;*wUvXb53=>tCO{Z-u7$hOQ8nUOp2F49> z7AuV;an?2B$&a`vvL3^sRt(gwiQUZfi8b$m-O{YWg*0ZStUIh3Lh~Cjc`8#(jng)F z0IV(@B*}**ju*~!WpRMoGS+4fZFN8D2)WnIaJ^40z%1bq*VI9ceWABKsHO)M5U-XH z{Y2(Hg>!V&c*)q_9NdWA#G%E}9Nc~&4Tf||kC=OW_F;L>Tr!h2{K|lr;#s#`L@NW< zB&X?Yu8~QbW>REyxUs#-agW9@JdoVnsrrk1Z;`U3R&AsU!&9O4YHIZOcmq)^g~|Lw zu!lX4e|~0db`&Evu=4e|A*W!%Gp&a^gLKJk6ptzbn3f&Y*zqU`RZ0qKUWND712B8K zZXc7qkiRSiarNi^$Iu++l3OkF5cCe%n0jxwDIHX*9Nm*S+ zlSMumk33M<*E8kX!f-7xQ;OM|F`iz7{Lb5m7q8gN_Q?TuTP<;Puw{YzP}#F(I$|+? 
zX=uitemQ(XSZVOwQvN7j+A!iIX1p-;+dQbs#TielE+_YdBMob`;;JqTcC~$TtDMc+ zkQzg6>scKvAd(kHyh*7IA4(uaTUf366|Jtf_B2RDYIlT57rmRKH47^Y+IgJ`l>+CR@T!LV<=9S#f2r*+5V zIcC87Mu4U5fQm;Gei(+TwG*Z1GDGJc*dwp$ndwP&!??MzOo8cdZe-cs` zv7Mzq`8{MB+vdA+Hb8;e2DTtmPHHX&U{0nKS7QpjBMQA$&-k^admT1U7gX<$L%@R- zy$FR+C@;CuexPCfKA+b~)u(8J_+sKRe1sGo19Ab*oQN6IY9E(WQzPg8SLB@Sg8m9c zb@|%n_`=Q(iW%27R54@fsYvp-vd(GPlpGcYlP6jy<0O@*Gv+~9qd3k1XC4i#_Ji5WUC-Se<4=I8-n zXB7i4pzDe0Fu2JV57k*uzKhrVqni*~&oG4BJvbXqOKkg5qk>zZw(U^+2r6g%OhrI{;Hf_hdWJ*FP`G8k1|2qy8Wz;LOUA-bf<~a96VKx2cwT6!<(p`O4SiQqx^+QU9{H^Cnt1!Ardm`^z-bzz0nA3|>1dmUmMB+i{FZ47 z!&fCuSJg`4^f;Baw(C1dhHH#+*r(*W_GO9L7b{f|Ts(nZzF7CSA?q0?(?r&8d1Jj7 zN77HF_0-{kd_QK7z+OH2Xji_)x5!|U@BuSHMe48Tcqc$Yfs#g{l4VJ^LcXA@lveYu z>iQ!S>C#1RZwVuwWqL)HfQ(M{!HfhlYwCnL zZ>?Uw-}N*n>TGFgEisC?n2M&V5soUFh5d^dqunS}|L(%l(S;(uZ0Sg4r?dB^*j|6% z4ZzV#0&^>#KJZwZJ6w~#G$g=cZ&E@#l{{rvjoqmgz329@N3DG-dk?XP_B*k{)-}20 zdp6kNayG8?-Fv@&djZZE=R9NCw+8d+AgflOCzdj7tF*>Af-dXBYh4LhSg2OA+Gm_M z1)j`62%lE&*co#y)-G#jswPy|cP=Y(zp-!xucDMntl(9@_6@

z}$4ELcqhJOUH_@s+wuwbRvfdGE#J#gpkcb^P_!IraYeiZjIs z_+n`!*6+s7{MB8PI)}vz}n@wo^54Na#9f-M@8-hd5sE=4Y1G0CJ9G`$Hi55n?i{ea(qRNCj(|j^W12 zV`$AqR{F}B1-H5I^wt|fLsN@V_U-Q8#U$4If8jONcCNcF+QDw_w@pV0kiz_$# zXKhvQ=WxT*rKnIFgbDhN*Na3qS>IF_0qqXkTH4rZSI^_tAHnq0HY{Xp+@ZBsK`o~7 zdl->}E8U5n?g`=4jA1rS+_+j4&usRDpl>i@C8x?VmTxyt#9Ux@lB>IX@z!zTv!4b1 zqdzWdke{D7QGj4lMfFkZJsBG~rt`q}Dwu~7({)R=I2A6w_k>`0v1ex-Vd9mkfr4Xo)Z+I?17k>%G zf-t>(JB@T`5TQy4Pzm(d#@hSpPWt=Y!f09AH`CJqAB&hCuKU2f!xA?zfC2PQE&p@V zB}z!9)g+rM%_GZbw-;jQC`hziT|fu{v+yc@&CtU9DYMG?A`V!jA;`XlaEn%{F?FB`zMzPsx3y&0xj+hDo zm@t_fMS*gn_6Wx)=OyGhzz{Hx4R81+QAQ?E#Yp{x!;9KbVrJ!G;*tvJ=JV|y^d&wN zya4;Q4HxePTgAp+PHz~;NTxt!w8w^rnx_6dZxhpvV2;Juzmzc9!Rd{~FG{tkQ=AvF zG~Rx6n+RIfNJsNcgv0lW5cVsaLdaK$XJYvk9Eqxs7R5&}=Q*Y@0Rx!6agI}PXAN$3 zsVAPIwgwHxB7i9yA0}cxN-0+U_EwJ|=cAEHmplQWeAVsgFUMzHfr}v8h0Q5$CJkyU zm?@L57%xL|6ebY9MO#jPfQ9MwYC3&><(IX1^Zl;jH+dPCO}IbER52g(5$hJO#v0>) zi`!TmLRiD#T0W}(Cf)~Wav-Tsz!`h$8u?Qf7v$}XL#t=LcuSQ3v|v)qhb2C+l2n)i zEOQkEWFPac7ZEZML+obrMAH`!rw|fA=o<}c5OyOUKo%i8G)ZJ*0&@MO2B(r8>G8Gn zCgaL_PNph0duU6l-v>d~Q%!a?olP-wGhgkEVi8?Os|kA)h=~ec8&4S%Nl0V@yZ&ry zU#SHy7N^lWwQO2TDhq7(!buwcXO)`sh#ztLOM!x8B!!$6<%(C#hPE_B1W^JdZ%d+d z(pXZ81TleVHJ-Rr*dHRt7P4v}$oHX1u-Nu*$FFhYZ+=4!or&6(V?B4el+IyOht>Mp0j*O;FK301|?8DHYZsgcga2 zE0aAXMHx7NvP26s|K2#EvREJsD0XamDO#Zh5G&LWXp|!X|McHs+bY%ij1nlk-1PJ% z3cwV0PGO)+P7cO^p4ncZS&fx3DBGq{*;e~e;(!POeQzn4*G0m`^KvTGppCeuo3j3- zevS}fHNhw>8Cn(oI9*4hWX0pv?J zV~!X-sqwll;e;>Bs5|@Z1(mZI;N>lB)!m(kn@Yk+K`p7$1g!rJiY@+g_RX^$@&$wb zab_C8!Z@nYlRcwTlKR4xZ5HjIJG31qic zcvJg1CzUKG@+*DOnL`J?eQ78u@6dx5o@k>u#j?Ns^T;XH>LfgG1|6f# zjpfsPP^ff~=?DP^x*(?#pt(Yd(AkU;82f7;hJ_F1aB_EYCmAFf_$2y^d=5Gg1a>#V z@#u-ShvKkQ*W`b0lX1&jL{KKih7fFeF?(Dcg4wfe%|idzUwc0e!1R)Z4emVUO6A-rKJDXztkrKgD-930#E&0*|8i6=`# zdi%P2vi5;GIaT9Jq6;1lC>RtC_cujpPiI|1rU+ni;QU$XBF>*@|9)yrMn^}}DiQ`M@ztlmp{8Lr7zl!e=FF^j z$Ln0A=odaNj3Ol=AJNwqO?=alZRxi%wqbnb*l%x@(2lioeLe~}({L zvEx721yf4iz7>y15W+8!9o~Vv{HRzwLsS6BPC zw$}#_Wr5Nege@`Y6W5Ebayc7MF}~aGV}1SVmpJDqv$@=583H%l=pv$$`@U!Ad94B8 
zu5eplTQ|?v8ex#=hYB(p8TRsYo;MoWWk<*T?<4%F)HFZfZX@GnwL}(}uS-xHRQ*^$ z+kEbPx2zR<`{!j4XZFODWpeq3tbt|Onb8(1y*oPxcp^UVip>&2LA$`I}C3G7;Pk%kW-hd>v zyGnNqWsUVYN6*$QN%z@g@bmubO^N5`LfwQ%H~O|ocIPcAM7c=f@5tOWoa$xmzONj} z5$Fi6$;-7DB?oXZ#x9f2rUljc&u{PO%bp%qo!zk}e%kzz<*ggAoAbj5v8>R;Q=(W9 z#b-5`l#Rrb4uGa(yA2lP2B^Pu$>aOOa{8pfbOgLT2|Bu!J4e3w2Yx;$IAcIvWo)4B zCk))}ki*)PlPN2cMHgjq)neswmW2x|VsY7DU?nxncGg~0?sy7?%TeaCg*z{sWB}EC zP0-FYXd=qS^QO|jEcb8S(|aPjM)EG^pKV62M6l4N}2w>Hk?zDM9I-(jW81um+@-6NA%|13G+#y+U|`n+w;Nm zHzpmkM&V<`=i=F9yO2*eu&m)UC1IIl zgDb`)W|qvGg?AoMoI!bds${|vxmgsq)kY`UTLHzP>-JYzU@B)z6!TXM-0_}ugn{H; zTP3Dg6v6;FyBFSCe|UV3jKLN1=!}X5;wZl7q2msSS_cN);)g@x{_oAktK$Gx0@XG~ zw-EuY#3)E;P9LI}xuM#;9MlozR#?oD9Um3;AQc-SFs2ou;7;qoL^>E7dTH(x;E&nE zVghHwZGqxEHDc zc3%>nCu1<(n%=R-ponyxR_>Hq=OgNmhM@>^U)=G*Q%EZ#ng*Sx-yTia?e`XE+FGY^ zJz;IOR|P|?>4)^-M4y})4QN{v*Y@dA^j(q~up@yw27@f@5)BPySC``-KRf7aS=tH3 z?-Yk77JqB@EVVHA4;Q~}tia{V@vO+?nu~Pr6x?X=yD66 zF|(frvh+Lh?0AyKr2EAhWZ1dKs-Z@_YX z#4fiQ9peIr-^Bu8yNZh(zNBq(&3&*Vsn|PQ{<*B}UCMI|Ql}oQY&o4{-3b&L9WdiJ zXIM|CZz3XnmA=JxL?#EY_hw~VEoru^k6A*AH35G2q4a3m_{yVm-cx8x>hr3(U6je| zFWvHnXoc1Fl(s|7)c59Dwl(gndzSm06Pc>AkC3+}q=Mw9{hcUJC$9UW{z~*q4o&9R zykhr@&lBnHnHJq>FAbt`?;@0JdXe>U)aUZ+)Vdp%0zCM_lrmHoSrJ2T1LSc&dV&{Q zaj7Q0QUCkR+iY6${VEW)qftm7!nm@)GfK*3|1w;<1IVJcsy_zw6*fvKXm^9YP_HL3 zhWgL0e(199LQff@oY)$HNm=wWPny#Tg(cetLnS4EoYUPMNbvZ%mq84{`?eFi2|RxM z4Mv6_({Q7OBuszavK&*Nlk^E42e$bRM@_-wpM2QAm{y}U;0@f#p?A28yBiWuk=f&F zn?BjJuJii~nWGh5r8X3HBW#-H_`+b1h+e0(NN2#3K5;mC?Q3H554w z!}h*_jhlF{4y4FuIYFO(h`oV1#>PhXp00yE6t8ZoA!IV3M{KTkmyXpJo@QN#AkHgt<{6jm4eVz|9)!9KawXK z*AT+eQv&*ht%4J0J6&;9^fPo@hraPcyAG$L;3vK#t@vX2&cnul&D*G%QEA}Z7MQGg z(WO)lpUPp|CrItxInFMa(heP2Wkhbow<1&vwArj(!p`NTJ#d4F4cQ;Qz9u?^Th;m7vXM)DnwR+WoowH`7SJ%BfV^mLs2^#j`jZ0cHQFNGPj{JR0 z-YrGnO&Dexn+J6)Lvddf* z4-NnwektmBdX*hznSDMwYAv1{YB|wo%LBPT3&YAf1d^WM^}c#acI$Lnn(2Vri=0jN z5HqDFeyHYH|2c)X0Nhp`8o-T}bB6@FWUs_|<~95tW=AaSi^IIka(H z6ZG6R0zXunJg0GXy_gVN9S5U=7T<6!>Hu|l6YXM&G;W8FPBnc2T;Jxln1pDEt$MYA 
z!LBwnn8U=~O>LB2s4vIDCElQ>3>UATV2;Ar;-0vcoxgm>>vZdxjgBlhc*-`yFmzuhX&BFk zq+}+%Hz6ZOKN0?oxmSpb%csKG+LC|gXl=_e9ye)UioOHsYx~R_Wy#MgmKAr4yM9rr z;{}n%b}JVPmkCEEG5>tqYlCZ`ZBU~jIszT))Gbu(M)wItyEa)%-*mz z@xr1V^=xLe3KIsWCxHk>=fk*PBAcRYc%vJeoKJEm2O#YH^nGeuy=9N6sn{ho`PN@h%sd?)o06ZBS-k-EHis~7LIckn?8yMLX3 z-SAO_hs@*THjo_@@t)z^sg1A^3!$IhhH=QQGW|+DXMe7>;-cqIry$$NnZb*(|1H+o zX&g>)+qKqIDNU_hw}i96J}eo$@*G~HET|&cygaX=YD83yt(}az(BytZ>6CPV=^KYc zHg?e99L#}tej5T{SNxMA_7_YM8Wzp?d70{(W0euPOjOLB5>3gm33PY?n_Y2r=2a7u zg;k*wcMrbjJ~I)je@FTmF)f$hJbK1o-Z1Fhh}cABg24a_YFZUf6@%iQMZt*!H*o|- zRg$})f%NJ@^r+o!f#Pnij67`#idnxT+)OdPwIR$7sUjhWQG5?Y1$t3Z`rYp>4OzmQ z>8N!Kp6lz%`%W>^rP2e^3y*JItBakND*HJ-e)<~y%>wkYOOsILLp~(5EjQ!$!=Ays zoE($0H4?Dp(YiV9!n-?G!*C$F%FCo@8}j_`UkuRo&jt-RPcKhi}FpylolZ1YlfuI4bsw zGns_#De#1<_cnc>QVOz@1+ z|Is17Ohsl#Lw82&I$0LM0b=y|aJ~9wv;Nke4q(0}c#^JMuuIJgvTY5X3Jxt&kbVq%8da;L__lonP)$X5B+ zPd4p`>4PY|Jn@J7Hg7NZQz>O)-2iW52A45gV@2*<3}3L3Wl|%68ZDT5bo!eWt(mz- zlB3$>=K9ug&;}FjOj*cLt8`#%u+U_L|G-mWDrJe2x*MVe0|P;@GHV%IVE?+Q-7myd z2~2=j%!WB83{LLr*HlkRw?w#&BW#uXJLP4%N465*X26`*2lq4Ao8d%Odf%rQJY5^N z-b0AVrMIy7kx&#oX%b`zE%vy~V3M;-XR&FLkhs`E$Ix+|Yix^BrG!0A64FsW$1gT~LNI=LW9-r96hRi?^rCSAyNmY2~_?w8EUaSrl%Cs(lu_pVf z)iS2RdJ(D_0{ch5dLB{)QFxAH=JpdX#oy~gb>jkT8y4@@-I5zg40XOb~hd&6-O_*x@HfPFfy^ zA>7QCLOVUVvkBb?@UsxS4=9952gDYC z3Xv&p*-bo0Et2r;Lh^U_ALN0_)+v%O(L{YKi?4Y#D%9jjLHsj?&-oB(>(Po3D^E(l ztm)qQEqM0-^x-s~X+15x#Lnot-%qd|E0lt3UQ?hA55^}V1x4H@hAIG>al9MSB`bm# zwC}FRK`9B9ATzFoW^r0DxRSG&bLAx*Cva>aT0oC%(123=6gzThK@sA4Jg zvi!OIQ8-;kH@gsk_|Jx>^9p<7-{`m|WUl}2X0WDih^xfwIrSz`;KHzpGc)!lS^B1n z-r~Q>kps4$oOHWlHm6};lHiGs8i6N@o3q=-YSXCVVGE@xKxyYnim;W-BFCO_%R0J$ zU{F=Um=_+q&Yee~x-QB5P-8vR;mdu;iff119`xHFt@#}DLLRT^340n;rOdDpIcV?f04uP6a;K&@@*^SbX$pcqQTnz;jw_rTI5IU}y7sg(U|YIy z+tqJUOQM8a;qg$I=ABE?*GKd?gE%WsS0<(wP;UNWgX}MK*QcL^(W{ z*)D2uep9)C%hXGQr>&DSs@Mg1J~Na^pfAxe5A;csJKGYg$O2Eo+Fk;1mRw!H)oZWT zmE=5j({(lxNk>Cm&(0Vtlb=IncWOu{ub`u3;SD1x+j%izjhVexM(E`6Z67lV#FuF6 zeH7ZK2@U;Yov<^v|APx$mCyM_t|q8mGelSfK9{(m*!pm78*s>E$`EW!@g%M))&=aNRV}oQ_Jq*W 
zf;m-eY=C+k(B#e0lOB516ke^S@U0=7nzb46A0tDRV~Hru2oDcoN9mQMAEyOOP6Wfd z19H21jlzySBqy5|<7C0}8S8KVOd(?tsPcIcI}hGX(?Pq{OMs=4yP06y!}y3~4`bdt z2kH0e_cTw(!!p%21z*Srs+q^e-h%-$99h81Lenkq%$V(Pk)lMdPtF5Xn9*upuAB&~ zzSj&C3p`z6XB#Zu`C<9H4FZ~E6N3Bj-M*Q&5h&^t6BD5%cf{z?aRxvs*Gqx7g^JAQ z)*wtar8KnNm!uHt{{y2HA4x#BcQWgL7|P#vr=v|MDk=nVt8%KoNW=F6RSb0gJ1rw! z?xU5;3?5U&WfL7u=cHRFsEaKwU0xs_piY#$Gaqg;5nfnQ^!f^lP8p~!MZMkZ%jaCB z4PGFyZ@{Swn=<&3xq|aDxIdujTJ9fL$c`__?JtP`E&^)0)Z_H)=mq!X$L;>ilHh=$)VC0Ui|Fh#6?~1_ug99UHdY?&JFwfyJ&R!0^l(MwYc?=OD z84GSZrytvVX$&;S-JRUv*O#XVYmKqVKz8hVtVYfHMSDEw)r$QoEuS~qU|WO^pe4gH zyh1PCW<=^3di*|^K@V~$**n8^Yw~U)ch%B<-}RVOD*oW$Zn-Ao#aHUj7jx@B zR(-+`P`mTt=rO%|A#FI{BUWE_3WXjn`avfNCUF~m(k^*^S+7$A(zT?HgCZy`&3{$xeNA2(w1eCw%nydK5;Bx<8tSX(S zgreBDS+`I=`oWd-D+x&vAS_^}7T~T0pTu$5L;@9SOe1JM#Gw%dZ)>c4LCbK32}9)2 z-T}fuqLsUm@_fmjoPjKp0UgwA(a5JO#JXf{0Ufr-D`%|zv#n}QWI%dgxx7<3y+c1d zgE*J?`TfVeQQ9T9pq7JXIN%5HOPj#glM}v4H3x8aK>9{7kFLMbdISYe@_<2j=2sjDMh9LLtQYe?xk=)Z06p zkdv`l$|QTGLiq)RW^0Dl^zG0F^IgEjbdNN5;A9*qTA08(4l|p;ufN%nzRsK2 z-$%)kQI6r7yK{0;?_)@3iQE;ok%UlFMkMI3H#mDUl~f;9wDC2D!;eJ)Wi|$?3raW* z9*S8DVcd`C?UB%Kg6X4858XD6q=21UUM1cyv~ zX813SGfA^m&-=3k&D0{qe{%)9!h_aw9MTfKvHLJmfanf;^*Pbb(~(kcAUdDvhOKPL zfVgo$kL&Y+bl0X+mNnR9&X3V-eey*Zks*C+aL|F4U2N#iT=78q{EDb5ut&jj7Hzi- z-~B$yp3pcq%q$=T@uV|U@;cLA83RV*+eYHuv|q+ET`xzqB8;%cr#HNly}8nd zRck}$SaqxVHMS~SvcpQ3n~>QYmze{&%NiSD3&Lg8Nn!TslC>Xap?mBG{~ zNxhSM3ckj^CbrX+U6$9XH6%c(Gvs%yU7j;5F`DqMWf7sH2#C)Yef@m^%6hIH8aL+D zI11Jr`tO~j#9%N=s+L@r3~EO>0v4LW(;U+XczIKwSGegY=0;wt4y?i{x2S-fJUR*r za%*uCpD!ONo!0CuTINah>4Q8(D{6qm=*!BqO+f0ep?Keed_jN787DN`jeb>VAU_I| zppF8$YvQk2#z?nI0%H|EVZLpQKFZiETg@BcbvLy(#i;iJobD+ zQB0E_|60HQjonk5!SRh4R5kLez>eI&1En0aPWT}upR+T*HqR%Bc0+1ybe~qMYmp>` zyMVhhq?HtWomNM+F;W)J^6yxv>3U_{HcXm2CO6t|gl0x`S_PCh8l-|KLt^KF0!Hqz zk31l(zf1WzP|g%wEawqVIC9EQ08D7F+F&$7EzieW!6J}LdT{p^ zYcu{iL`{JsyusyD@E4B&ZUT`!i)}s)B(Bxr+PXc4bZbgY4E-YDL>O%T&7IjNm7jF^ zlzJ_sI%}aMZqJ)U>J7W1d&(d-5Gi&KuPHe4fj06QK9yR|SoW|=pa0JY{{D28NqE^@Xjuh8dmE0+SU)Byp+`7_+bY`en;CYCEu3 
zHg2$afmsRV8B?%3B%QO+!+0hIz&ym)I}#Uo3zH$6|E0$rle_`%uzL3f16Ac(OC=WG zhY@ZeTU1YSsMhc-=YZ44rR$X<&VAL;hrg}AsAD~D%pAbGz9LH92Z>z@(glYS&C&cP z(AcIw%uvWM57$Sdx#7)DS;zyMP|VjA*0XVvy}%1Dem?rLNcKdpGr9EobOhyUtcZaS zB&jU`?=mwb{S`p{vMKXvQdekd*Fh`fmEo`np!wrdkG#nCsO{N%rGW$hy#&pY@EI@| zX2x=Mqps>>iDxkRLD2xx?9TVV)Ez}gsijuvk|k96oP5uoEU9(Sa)bs9r5S_@)i0!h z|Hy#TnSttlsBVI&2hHl!P#u*$ZYEWPq=&!cRUWR08`p>5QOR?^7C+Sq$nM1%dP?xY#PeB9z23|)+mppi0 zfRS&;i$`^B*GU3;V2SeJc>qZivRxP;PY5F)@bu#G6XsbyHJz2fj{+gJcEpZe9b#LN zKLly)>1zxn6!U)ech}=$B~T;B9`_XT{09&V6#8#oJQ+Rk=I5JFJo;ehuo_y@vbwFL z!z$i_AwFC*$-V;w6B22oKR{59F4*h&Mk|iji0qs}m_H{}_PSqDzSQlF2Er|`E%M@7 zye0b@iKyJ#j>bsmW+I_WE=(lQx-!^QLPh&^5a1l?05*kVI-a#QW;>;5HTo6!!7~ZI zom=F^svtCl$!dqJ|IjG1NFVg~xj&5ABYU`eZQ`u{^abvb@FmJ9fVJ%ah25mdKS-A6 z^<@FMr4gZw;ZxAijiYf_cCOb@!6V!hLy`{=?WyI zjotl$Q@2Z~FONoSEqvS(88W+L?Peoir@yX-3%h)i#0JXLFcaRNab7S%Ql@V11t;B} zCmWu?L7XWLpvht`+DWGev-ig)8k^Rfw|MLPJ$k5K)7$DMYH!tR!UR6m>-yqka>9C$ zNjbdqY4T_uDyyUa_;+yvi!*L8gA9wu_>s@&?C%qu=A^A}=N0N$Z%_yb7Wj~2dMUhv#TM7yM<0&v*@)D7 z*JS;)GT`Q}8!rFtF40!)wN7t6OFT&%Fgr(JZ%X(gGHhih-&~BEDFERG`R`$m_PTV$4DPsF#5ZD4*+wFTT03pFM{c zOI-4<7WV{{*}RuXW|WbnHVqlI92E1ffflm+?dPQ?s3Mok(B!cg11ua{bzC9eeaQ&u ztqfg@_e-;HSd5QWAJkhehB%EmW9Cdn^&RwBn%O&3#xp8rYx29ZO;Sr*9WG_>DZYg8 z&qzY*uKqV)`U|Bdc)FxsG zwf_oBD^;Oq?7S#7ga72y-lCC<;g#*RQqBBf^Bx4rPp`2$L{MaJ3nZ7wJiKG)cur{r z+L^O`&UgYTVss<8h_H^UGIDK^&_#buvlhfHVGdpVazcWm-f!bGK~!FTGSoN^-#S>v zKnAbQNHNSo3D(;7(3$zSS+3QbHF9Hu9vOZ1dfzzp|F+`A<>6<`6poY6Ggx-6dXm`d zSMs8L>%=%NML%8c-o~9Ya*#|W5IKfz7b?-0Eg6wL_pv^n0E#Vo)j%VnGc$MABPzW^+g-EHwu-e|cB<#? z?069$Jo~->;YqVj3f=MnfsMz8Ef>U$+}gU1j=VK7f6OB;9e+h@_-&XJBVfDDHOVTg zS}2;#4z2gYohG(A488;6IKPW4QOFo{Y`*{kd1KvWkc(HPPooht|nv2CWo*aiyswq*FwtRNg{0FT)uM*gb-(UfL_68)g z-^O7a*1KbUB>yo%a&{KAyx8v?)28$;T#&-;M4|Ez8C_-3*>77!V1S~^OD@my&TNss zlv8LnR4Kx1!lR0pCHs|0;?%;EXG3buqPo$fcFm5Zv}1}QE~>qa$g=V4twN~9L4VGz8f3_UuKN(fRi?kcS8Gx` z(0LS~e{yWBDUkKRto(O75q}z3q>!m*w7QIZ#alXBlH$DOoUf@?qn{)UDhr? 
z!nd$vOTeS506U)DBPUoXHCx#J>AAc3KgfA$BSVF4ujR|rTfo7Bq1nyR9GebPY#v%t zr`dJI%@&~3rme)m-}A9uNbgLxusbY1XkZ+ah0g!s~ZOH-hj3bIm zz6z{_t|Tnac|qrO*aI)oRJJ(>1aU=CTU#i0`3X zvEBc|*0$t0!IB=% Ndg^ax8fsRYobotO`|M_J>Pr29?Kht)cN0FlNIpc*TQ!Uy= zU8Mfd%}#UMxY!jEH?63i9dPD^_+V}NYIUJ91zWp=b;$8ycv$l?Rt4@}FevO2=(@}k zAR_in54w8MhEl=4>(%EbO7|lzEKX9=%pEDb?Qv9JZThS43$~EZ1yRdYXPA`g>4I0M zyA)iUPDhT%vz?fA?w_7$#LsFKw+C=(s_v)=|HPJIyTNV1iffbRc4nRXh@<)zYSj+$ zvRx7h0B4}`NyoO2n|UR%MyJn1fMitf(w-MTn?@%z;*+1~bvXQL`rF|4apFiC@_U+t zodLe2Fiq^CTI+qDh;B+Ee|hXB8g^7Gyt^1@RC*3rj9N43k14(2OjXy@t4E4OyKPRG zbiwj-4WrocfWl$@k4)j`!^yj)BLj~!6Z7g9K;HDQt_0}B-1*vN+&Evf?rSc7)@&$M%AoZ_ zhoM(pvx~|Eb@+c|e!XMEy%DvuxjA-;BOp_HNrg?en~zXyY=}<#O2*V=fR~DfCVbLG zwp*_O=4n-r=T5ICW24kK6xkv3E)N*y=ZM;SMc5VEfiCH?K)=6Mi`sc02the>!eHq^ zm}AGN!|hNVO4mqR)hf<4DTqpcBd%nM0pgV68VNmjjYoEjx;uTk0`b1ph|Hr`a;pK& z!kz@4uG%v6`lrjxQD5~jb(j^M&}ztB;5t>MiB=(v*y2t}x|kCC_9j` z=%KmkzNL%|M|GNpKXTChA{V}$T1I)ysOww^4UhAo-MG}N*5WeS;Jq&}3E-dWYOv?o z4BZ=!JZE9EW@Fj-m@$ipjFF|ue_;f#WNSndo?fgKRiuT z!3GiQoTcu{U>9H09=1wbV#urc>P3B>P2P>5c4kAR_%hZqYHrvWDne6z;KR2g)CyxV zh9raEE6tJaFFu!}+Qdm8v|NzGk}1Y)95F#0)Hz(K){%8xB%%8$*cEXqOUS=mH25IN zGwZNCQCa%Zm~Y~zFPY(s!812+N!tRzOlP+uVKl4y z3Hh+FF4jv<`1K_+ivB@F2mD$4;otd3PHYjSC*E-5-yi6mo{~LF8Io-GrOkAG4>&_5 z!1--&^YtnL5HqazB(Q<`8OI?;NJ)W4D@Mft<|ESrgCl74G|iZQmBSgj8Z)yc@$)BV65{)q@*b%0ba?0x%4Wimce7 zsWbrWr$@%7?>14Nx9FfvP>p6%V2eB*8gYi25*sgTT7+%)Lt28|9B`OZY2|Qs|2`S{ z0N}1u9A(HmBzP*I;!z}3+x+%(1W;TH<2?Mp3M=}$%Iwx_D{u&orUygdHWF5=v{jg0 z(X)E#qWAS2cPJx`z20~7cRBs9c59GeK^qJ8PAFJ*@zb~@fwH6kCD$v7WMi{s_{*09}QjMd^-x9^2$1tFh!Za{EOC?Ou z&PF!=8n4YzOHbB33Lw*o$Vky@$I=y%j*XxuYZW;2s~;I*fC?<=;WzOE3>BF_J96j4 zmfjHAfnKQ?$lOFs&bF%Rr_ou}Gn zdf_$yL{ben)=%;UhI~R9myEmClraR}fXwa%E#iF&2Ss_N$w|x<#pH!uzktY8WQ#@Z z4jkefOl-0UI~aLyWSCs3eDeAg5qCe5xrY9pv9w_Wc$d`>_mNz`=I-ct0A!P_-QNxI zK$||h6S=zbihn%oWu$`V=RdAyKDEsJi~5R^%VGJ3z+Q?OG?*6_D*C&witMtVSBMB! 
zqC9xL{nzO0cit8IqtK{&r4uLo(suKpnzp2eo98R-tz};`I9OXyhJA=bhyFb4BG;a{OV(%|PnQ#vCBFcvU1Nr*KR96BVRz(=^Nv_?f00PCJG@a3dS<%uEjtTsDe_p@79`9&a9Jfr z9}LZ*9Mjz2+VbCvye~90$~#Bs>!ep@9M*jkgEFx;jA}xS0kkZPs;cmjoze*JI_T|T zAXjbQt4`!sD=c=%Odqx60UHoMid-m5W)iQATe!2gKYV$eaKrNX;tSavwjnhC{Ig!p zR-RII%nahH8VB9VzZp@o4&2Ik-v#;5FiFfS6y@DxPLsZh8a`GI5xX&;!?z_fH zg%>hUZ*R4cB9ei=W%6ZbKDql?Tk(!L8NcWVN%y^aZQZSf^t(m$e{uGXQIb5-`)9kS z?P=S#ZB}#Iwry+Lw(V)#wr$(Cx4*x$|2MmPRbNoVjl2uAZznDqV8?#bk{-0Agk zb+)o|!!*jTp?VYO(GLq=Y+$wNs?qTiqPIbxSc%Pu1M1quCxg2J^PvLG4M z^7NOlz!ta)YL%y(dElC}x7jvh6Hu%frG_?{&Lc|(;Y7rWE%A8Uew@LsneN9C*_xwLUztZidk zljSLx>i)}<1Xk>jkCG<%M(U!b%gNb&%ptyKJHhxA6?1C8 zKv%@x%>&eISv_9|XY5KFQ|BmpN+|?Vtx{TL>`8KYeWO@CP}NTkvbqau64wYQga3>MG6g=Z zR6o`NoJ3fZw5sA$$@ItSbV%qzcjjOc(u`^fYA2;8Uq{c7*kuIuStY8&9~X;d2V$h1DDK z?4R+*6SFxH0>N=cJ^n2;P5`1alS@x^kZf(4eM)tF_}c21XhYm@W)BG2-0|o6w8lUh zB}{%V%r`Q>*!V4cJEH#3UYi{ivf-v*{C6XY@uEwrh=7Q6We`eRL1=OHNi=#(>NO>wH7FF|Cj+dn9ho{6IRr3>}FCJ5GHF3zYuUvU(WD=9H z5b?o2pHU3UUv_j4hqt6(469@BgD|gqoR;zNwtJ6fMmTd@hK4~@6qwJq1`wl;3YIJE~xlK{qi4WhFIsAbWCj<7{u5;NH1izl&m#pXoSo(TnZbQPKJn3ZBW`vNa@) zkvL#gX*t2-5FikH=#^9exlE1X>I0oy)TuIJ#(OZNsYbNzVA{S)&}4S7?O!jN0%cvw0>T<=|E>*`pCw8hBDY zdbR5Dy~FvvmDZbK66AygK4-}{ie)9k7NE=jKTO5iX`h3>S`vv zScR)fmt!h8NMFQirZ@PPPATHR*y;{@IrBeS`;LI^OR%LO%_cI*UVRgpD7`2C=>kg{ zQ6D{o62K?9_bkd`&=0L4Vy&tXr@>u|t10c7JOhSk#u$Tl&E)P{eRm=odnz#uU&Xy< zsf2zK+(};CSyTgiI@Pq$Xv4B=gX@Ns8_>%d)s7C~(O)~A_PJ_7bUaXS#o=dfO&dc9 zPh)VgZ_}H{Sv(@y>F%v68uq{&oKjDQl~|S8)slmE4Y>i z>BGls=5ojoB5OefalHK*cw;wq@X+l%VqK(n{x?Ss;iK(@qm#aM4A{y+lhRmJ6=2{H zut8*bfeS!~dmFnE`guijygt>-6P3=G-IOopid_S;*<>_#m<{`c7pj^d6FsyX*RyHS zO}nt1XD*620_6wvG%Gjvk@O%_bm2@5_wUk7bh79w#*GUo zfc}(nmF7xosp7GRA`&?%dQ>H9NW%H6gA&#xgq+FivMwe*pISPckF3EXYqO7HbX8d1 z(DjDuiZdtqN{)^gx2#rCGQ)=@{n<|WduNqfkY@c*!e0RhP%d6YL*64g z<^RRhD?_G0v-kyr{&!UMd^OGk?H;a88XT*zl6%*Li6SW>y6=qD1#YlBDg6O7`F=6+ zq{e0ri_)*0o7T#5ec`U8u^@U6$ImTVCwAUYQS~4te)Ybm(!tle95zD*%Y_EsbZ@-6 
zPW)JUXXY)j3}@U~`R%E1TuTd1Okeq$&%an0vG%oz(ZNAp)K@t|SaEFaA{PlC}<#`ytu6`_zQcCQ}o|vArsSnMTtP#T7 zxWCNSVn$QiOjWogAq}S6UI8rU(z_wz#)g>3yoztStBEeQBR!O|`mQhK*xrLH)MiUi z2J1AYE_pTAev&qs9`A-CXW`QvwLN4;vy9AGbCD&9NCjyrIjb|CH$d@7^ zpiRhx4y%k=hb%AAb6MGRWqP*g5t^=M`oQE3wN-|r6CThpEr{Pt^J%?S3d4* zdNFl6%o59WIl_sf6)dffJ1_Cbfwd?YE~%v5;CtMIZ0b7`S3NnYlMe1RYZ+A;?0Usy zqjCN)-rkPVv5SchxasO@@?%-$cCvh^#O1Awd{Co)2#_ z0K<3RNHk$$tt{zUx^&sbWO~k=CiK)gvlCZ$Iug_mS|0osFd|@rH-Eq}9&jGX`N%Xz^V0Fey7$ zoyKk5NX?l1<;c_Srm8?@f86lftTyVo0p57(!u;gXSc1fZsq1}(;wZ)N)%FSqB5AO; zaqAj4DQ{l-l-ZnlTL#2wSe^Y`e4%Y4{e6G~XN0C&vA98|?RhaXQ?;*lb*B({O1%+$cN zklU5;;~Zwy5d;!WJx)pE{3{75?qi{R@c>QX=E1Yfd2GgOF~1%KiMcS1Mab;L=ZW?H z>%NM|ozGk50$tw3xod*swlvfU`QeH9qDkQ5@&?}Dgl&5*r{lHDCfv(mbS+kM#l@2F3ddQ^Ez|v z%L9)tUcAY~YWDj~ofxi?8UxqO&t(oBe)-8KOAJ>}wxQc+_q>V-C~#`tGj05Oh2>25 zWH7m`h(||#FBFvEs@zy68%%s;^aZtU)FF2KvecH5x?m%o!hgI+f#ro1NlT@R>20>k z6bi|u)UG_|ZG3#;w!HIh`m!>e_s;Solr%FuYH$HZ_lJ(nFIIPo^-+Jt!*Gh~U>h}_ zQ`c9bD5}#t>Sgu&-NMAo|ol&;^qh$!`Lq#Hm^74zW zXZLsN`JsW`f0i~mGy~L^ye778IFJL2#*oe{X+d2Lf7tgs9dMvohHjb))bISsOSYp* zo6`O~x0Ew}zS|`3`vnVaUK_?l;Z;+f%P%*7!z`5gjz4-Fyyph2m@ZM7y!Fn~_v0rJ zV3{ggJQ~^?r~bQ36CBb)l7>mpvDYu{a30|Bkb_i=#LhyJEIohMVF3L~jNR zj2A5)p=2qA0D(Ce-M#g{tKTwJPry-Jy zq!5#HCFOKs3BwMVoo}x+bN=axQAECLv(AR{H)Pr_s$uXe4-#Ct)jmyybVJY&gVFd7 z8oJeI6c0<4oR`uI_dmzj| zcsi$BDttZYL|G;)oUGT3x~n>lCOZ)_rCcZ94FDBzITqb$mV&dLV3EVn>MEvb5)9rZ zBom_p%KMUB$+PVw7gI7ySw^VGshokf3bNGz2Fgdw?1?=o&-zld&l2?Bby99UJ@K6` zxkU^?ISwD&rFSZH1yD~~Se7=8$cnU^-fukIc}~}+t2-K8+_9?Yh4Zc9sU+S9jHZzn z8jRZ5(q+i}Gp_KP5n`Nh86*`-#E(Uja7~?f6s%WNwn{WusxINJqIKt?TWcENhSKVj z-YMMe<#Fv})sr)jBGtNCkh-HJ5KN@{=qO7zBw)nautH(`3O8>q%f06~rypYAiyXDq z`OLIRo&BVgN+d&@Ge3@$(YpB2bCpqYn&l;`{CWPPu zFpQ>He0Y-h7JCm-O!BHN(YZXe{Eu_0426T{%PsM_JP~AFXK}Kj)E!Cm!Bbc40*=+c zVwD;M`1V;4Y_4fu!^M3`RZs}6_AjOLbCjY1?YgO>&R3Dpyr<@?&)ol9kgYDPVu4mx z^bC(>t|bdzu)z;fq+&UrNu2)K8x9xO!eVn{B2yP=E)U2C4Cg91)lw30ccZ4&BN+Ch z@4+B`P*UqWiyJ3Rj~bSYG&|%yw**x%RyqfJJy9xsYUl2t4$B?|O3Mw2RT*9Mv#qV6 
z&e;P0D7L-|RFGM#$fwr&D?+yV2S62_VkwIl{{zmLQv`4u&l@Y^9p&4~M zhKs>NWFD{CiFiQbK5}A6OZTRPNOmh<31w&WI+rwg=fy2O}dlWD0`|V7r3&m(joMgeD)b5_Yg%!Uc=q6XIiPL;MBS|NBBA zkkPl6XUpL1@Jkv2-LHfBQ`dJTd0x@LW#ef9I4dsi@E{v(VNGcOcN&75{w+k6j(J7R zyw%MP5+?Z3bReW>tr<#^iI1Z$K?n--6R$QY2H_!%I}~Fn1PR|{pL-D>0m|qqZ6ws( zmdT(`b_$cE^doqGe0czu9*El2tKVONuk?a|-x|T*Innw8ksfE(@hT(iZwIQ>2>Yqo zAO;FgS-u)wC~^>Sb6p)pr&q|uU{NN2iK(RIrV@m*BW7;N!Pqj+1Vif#vrLuakp7Z3 zr6_`)Xl2B+#wF4|f4I!^a)6F%4=0dIyeXRp8WWVfpGO3xo++P3k61Af>d1v(fE8OB z-7uONFjU=dgjF{m9SUPVww#IZ591eE4T%Lv829b%lkBw-ziJl;3eA`bLk1;&ol%64 zJvGLvDpj4jmFGM1i!K8`>g9OXEtdDLc)%>J09ik1=(`-(t6#{p)6kQ%xER}T(}Z7AbHDi=Hs`$OMu&#!^x|lHC8Sl2wIUu- z{}MEV0A*>d3qYlNaBi*SbP&|D{Xox5MR*X*&n)#K?5BBlR37LI~*|0=s zz?z+_naLC#=Mo42ZOP9f3otA{$nn!C&i}6QA3+v!1Jp+2uNfmhTSKGA6vu8Zm){ZU z|Dnn3R7FO!|JzbZ`@4SoQ;BQW+jH)9&{+ia%D-pVsVVRyv6C!?J7`5|6%5uvtjuqs zzg&WCu(HzXoX!Ykt~c#UX&LFVq)1yAKhjare+LR_(6?*PZ%>ueIOU*Q5RtYvNcdQ% zx49kGS|dQAlma|(753_cxYQdaCp;>3?t-|&%aYT=@1$9M)r8dc;nDOA^2-nJu~+%8 zIQC{=k!@xEW{Up969V(q$XPw-!{Itjr{c8NC2A=bg$!Ru@~5bXUYI&uCz`4rW)^Jg7u$bOOs zz5nQX5W3b-`9%;!-1Q-ab-oV1c+My?q*Cm3TLf-WvMHZz z*Kn*mcwMFC=0B9F&GZJl5mG*0!EgN~j%N@WwfH>$ZpQHXjt7+UB@0Bw)YMa=UD;e3 zC4A(ZWe|aeefjWpW|98fyCkE(3xmDz>a0L|b(K+7lgxjO&*f`6r+6AKnephoPJa^z zl#c%V$AU7#lSKb*hwsUm#un}Z4o>pj5?*SZFACP@UBymS@zSFw?hwfBTZ{R#v_oVb z3?V=PEvOvW`g!+qPSDjs`~Q8#i%;tCrz@L6G>4zBQ0k*&^UL9rO!;kYQhDV)q0jbx z*3oamQpJ=7D+{Jpwiw&R{_w`y%!v*ARLTeh&;$MqDu$ZZ{J}W|db2MhU>t%nZ zpvoG`u46m-JZk>z+bhZ$(-F&uy574PZ`ts{22#dVlH6bt`>jI2ULM58cTYOiZbbo$ zP|zb~%k%Ht1~~dQj)m71&n&2yKK`Oh4l_oI;uY=KI*w(E_3#i$^a;_{MiV#~1jm;m zMis%scIV*RGR*rG6Bh1qC1*8g|4LacZOXUx{zG=kQzCYQRmaxLgnR0iaS<77V)C?} zvZDh!#n;-mhU^KmKQXr)hd2B7{bBuDq?s`R(}Ol`;T{1?NwP=67P@&T;jKMotz%QH z-YA4V#RPC1F)s9gS56^SNpj-k@WrP%I@G8PV0$q~TQW}SRze`X;pJb--Z?aa|A-wg zO6WF(VM&{OM5T347%0e=?s)b-7I{z4G*X3V} zvrk+*h8y%5<*`I+ihtRyVerwc3`9EJDZ9j?y5@yvGl5{{VunZyk6DclNEh~5#=x13 zIk>_Y0bIIUtv?G+yO0<}en4p5cs8M{ej_TdkS7RLl5-J;#%z0EEosSKjj~*L37x8l 
z(h9q`Ig^E%_6Qoo&S~@XnkU$BHK^_aNhI00Ip2yDQN)xUJ*3?qB*}fRj|=`AsEi=j zxjcMswXo6VxxJIe8XU$6f~J?Yr_nn(4dL_Vp1WJsxFW)tzigdl2C_TM?rXm@p|*uR z>K?bxmhI}dPbbw()jrBDs_lgUwJWKoj$a@}0~u^3Wtnz9EM}i+MQNtL6w;vcYmKnu zvFrzI`-Pn2^!r$Sv%(#F$=!i~(CAJt8rHBQSPfowx6i#ekpLXETTj>jX%wn#D1qsj ztrplUPYP`(J~ykP8-Azk&urCSef=ASkIjE*;RlTGl4bm3j1J;*QrI-r9lC4FOVk*f zMqw{ncvy$BB$1%D+Tii6@1R{R7cyp}ld+#%n|HvZT3-QB;+gu6$S4>ni>)52#b;|s z+z3c)S~_5fz0Eybn7Bw5&sKdL9qSnSjJpld@yXb&cy(Nz#jYPYD6eC=rf9jy_{7*c zYh&`Whn821)Wi{NrCvuJG_k&ouO^mhnP+46M-D=rXo=Sw`Ph9eI9U(tAO4rVtKl3 zPBqKN6I@UMP}hreW!jKpjI0Rinr}i+ijX1lDW$n~gy(;SyJNgD3LEm9xu<;hu;K~_ zPIw`coDw!!#~#;E9AxxNSu2~Mo8|X28!|c=qMM?~h! zVPfRy%!lu@YJ;?k7n&8Un^&6(G==P2KmNijOs%L^?$eD_`SpcC)wv>Y&_3HWe;F5!7`2B*_ zr}|`y9Axj7TP`(m=kCiMiWc+&q3d1Hdlx9JVEip3O7|F0p^Qcw&JPXoO|K?s9v@(O z>Fx(CVgswMm}2B0{7Jdao3*+gS-f$4S?%T?PyWtdZ!-3aeU2YtKLORxZOFSWRyBAS zv9{j<_lj*{KKBi(qWMA0pZP)@>lko*;yQEwsO8Efe9o#2j#`en;WV0Op4lqjB*JV0 z+H7?}X+zfgb>fZ%6s0()_W%}NC-4tgxczw4wSM^QM4iJWvDh2Zv7bDoST&;ccp;<@ z&pOWN>dl(hOFAa$k65eA=2@LT588=l!+;$xQdk<+#`*DN)&s3r%BvL-05l}u668tL z?;7mRsoie1cC5t*t0D0GYm4KWTc^_UeudYW@!i?)d)*3a1ByfND)`rs4Mn#(i*%Pu z3AW8o2042{j{hJ+W7w9E-y5fQ>E4eebOcQ>(ki28DZ2n_X7Kh6^7h2KG1u-J-(G0k!xoV?W#@$DCe&E!`#;a(5n86`>P z8blxX62~inxe6F9BEHVMPcv?XwWrcKNt_(QEN(LW>A#INl?3J+5pIysHC{7?RGixB zjJbg84r0pEqe&r4cBQW;L=ssVXNsWeKjFEVwYiVTbM&!e8?J|7tU8L(0*9PkaVE#% zo!vDw^Kz}JP9<~iFv;UImX(vGrdXp}Z>b{guKmnldhQ308Wo0bZ$l_q@+P}mQnOdl z7V{;eqacVUL05h5$?}C=th>n}*1@9(0d{-5SJCZ#HzJV3WK7g2?0&d%MJ#QZ!qKP_ ziuQESU}Y9vRhe04!i^VfHEsUT&+5UANx+9SuN8T3E_IJn0Hq80@cc)tB-pzRAvFu^ z;*xTYUN~1{e#HUl-B5=SmQ*Fh6*tEFi}r@S%^+_OoV%gSaTi;yc+NibAC$q5@?t@# zjmonx2v}_4oq4(uQbimIP^J}U`F_o7-$C@Vdy>o5iD)p^#per(_R%B}{NC@ZHnJP989(fzN%yWxGy^;M@61j)hH9I`14~EP>8F zSmcERb=Q&n`zqD>zHjzVgTr!bX))?{2 zRjU7Sw@q-@%0La}^FfF6A1Wa>^Qo!hnLO8qC*9xD9NDHHzPZgDEOkz*PmQ#`X1v{& z`uKokIvETSY4nUa)h#=(2^`Z+|MnTVsQyPKnA0q-2^%uoy$U_fw4c^VvDnBZ(g*(%tif{?65%F|J-%C30DC=nYq-#BN%@_rSl_kaSBW2cj5{GGC9bB{JP?{vV2 
z^X!P8@>-p4MqqbLh`KTcsCE8b0#7%xs;(*3X$_t1M469-`P7hAeQ&I!xrsLN|ccYe(+ktR^yY8OvLT8_Ei#9g(OHeMVl&BoC*3h zINTe~^yv8EK!VH1j(*|h)RT#Z2$#PdD2H@j0gnDDscxOoklycvt&Q7}A$W(3*@gJi zaiqDMs8*M@`id*wgqns^;Z)c-)O?sq(MTHRpWdFog_C0$dpsyYcKFRBBm`AZD|P8i zMYI~PG&USf1pn#+57m%sm)kPltSjHBbMB+N7XPYMfh&Ezmou!&k#d30awniut%Wm4V&*}R^$ z*g(5s)VqFee zsyn>+27YI>1$_Qwi6KRcb|hr zIlB}_peixG`PuQws*12f$9S>!@f@se(l^C9%W;;ymh4$IVc*?>HzS(XXbgQE)1xv{ zOxEElGn(}AKwv8pI^TU(`6sC*3t+i;#UO|%ZN{)1@&j6${YvP1=C=JV} zC^J_E{B!CrBMF=cc(3McaP7a+fUwZ!t*^fun%T(yQK_;p@C;e3wh2kXwL_~^^}JFs z-|*wAY+)D;s+r??nbseD#TQs0q@lASwASShR2D@?p50nPSChkED%&y6_i1F}B!why zI4&;n7fJnbfJ=L1Yiqk~T(3BcT=toGXgnL%f#}s0Km`xzrlIw`?SuIOa|-6I=c{Nd zr+uk!n~up~zF7D9K={f50_nvcV8t~SC0Bcv4;-GaB=kV(M?Rv6z16rq;EoJ5_x*C0c>jfgG&G>64|s5m6pdOP1!ep_hUis+T2cl;WdBG3WBm~8-oX-BTGoHM zse^GExT0_HA!YK&ssoeN8e2yEar(gk_3Rxq(yXH|(Y$7pF0N&POb%aZXn|e7GomJ2 zcx5o15*P6S=FRstEa8|<1AT?Q_;UEE3`uemdY zi0uaC$1*k(D=C4m8L;6@&xp-mzm9>7pY?ncTIZ`&SxIseEpmk zV&T4s@9H7<%Oq3^qG*-Gw>i9}esL@!IpTYDv@P&dc`rx?H@dnbR|?0UQ-iMS9S_Ei z{Olb7HiqW5u!Fq4J)CT1f@Z(S1GjD3DKGJWnkhx_I`%2K-qJ1M^a3v?V!C}D?5ZN4 z-kKt8D!96tGbJl;N99kHWZX{95jk@;bu!~N(_(WRczctSP! z=?{o#=h4jf5_!c6CCy=NxPkWHDJ=8cnKjy@(IYUGC*d9S% z^|uM^QWH875e0#kC=hX|9)UL}zb|Ivid7C=-L&jE#TrZ|#O)^DU#s=0thz4+EGLw# z5a~`U$r>zl=rZ^dnpx!%ugq zTlYW<2qh3!BQlyHcCPz9oglFZE*eK*y|2ViayZUjJp4LraTl4s@i!dDybT1GG0KYBrK;cRP4t{c2%*c8L$M*R1L%Nwd$e9{i9S8rMl~K9a zAe1?@Z<~J158}QmDhbo9jZ{+%)vOOi+Vq$yaK%r?n-AE?+9H;D9wX6uv!KxqxgtTa zvpNU=(rHK#{^~`*Txv(q7VjQT=k1xzs))dmUcT}^*=^1=q8;J!KwdL}R|;v-E0EMb zoxyX$eZ$M}SsNem8S)%ilU%kT6#3&h7t+BBt=pP>U`+UMH+ULTauW2(%tZcW)GA5bea{vIHNiy$`QDzg`{ig>;|G+|)`tP>smF zCg-YSelfomn1%ywNRdA^hSAXG9Idz9f4RP&%!vg@CCy%zaR};UOyxD63jB80kOwBg z**$Rf1^Ga~>#yZJD8bji6!-L}A<4cE%~-`u(o(z<)Sq7lFG4Gg@t!QE2J%#IgUx+H zyPlE)H&fzhV8k9F+jINvZ;eU;fO#^?Au@=$)$rlqSUMQU+UgTur{2u^s-`9BA*mQ` zMimjiBx(x;Nh_>-e8%Sx_0jbO;S1jv5dl7iuOfGlE7E)!@2%#9G&(Zx6~(A9t_wZ? 
zJr=+Q8m3Vj+hczp*2R#T|MqYjie+VzcuRntD%_a3BY!}CarQzgqNXh1>qHtRI zlXw%CXh9s{pO%@;CGGZ7Ickn&Me0B8L;4+auAfP{G%DK%I?Uue2oImNWH4_}fP_C~ zcq2|7=2pv$^Ys^LQ>I?v9y23quXo(SSUADKbyVuk?va*K?-{|9`YW7jO2a(}fFFdk zlPP)8fY^35!BoDt{(%2XM%{$Fo2U{n9Lp&p$^|p-g;8~Jm}03R5FqoV=dG16SsGK6 z+vt$@9jV}KRajU<@R5@_FD>6nfoIEWEGFK)VDwyM>WYl{NEoMpk9>RODN?RGG(A+se9||w)hstD#0pz4zLQ`#or6PLec-Fsbv8}YCV0=RR6CWWij$_&w z$8{Xewz@KLQ%uU5=1hfz!Y2iaVSZ6&1h+cUeF2D63Nf@PT2pE!AeErjd~0G$W0o14 zB4*j3>6YqaOc!mK-!erjc#B1scq7r`c+U|J5C48MZ@#dJvEpz~=%0Pu-SNV*v6yqm z_8>W~EG&b=o@Q=zr8uhg40V!!;=+tfEEYWY#Lf8c<-K(ihC{~ONCR`c(HpI!SLZI6 zEaR5c6OTRh%6wX(kay`y_!T58c{bJDNpQVFfCJ-O8%%L|)P_!30S`RWUSltG89r6vU*$X#H$Z2KOxB(pn#>J81hg^7nsfW@n;aP^xc)+o1W(yugW0BJNJHE6Jv~k|)FWrAUYcrX@29BA#X|>=p*bs?l zgvt_QMi=8AlAHq&76NKM5Vw!k2}%OCiy)(Z!L>?}5mr=cm2-F4EJ-Zp)Uc7V`YZam zwX#Oc`vy&maP)`gp$4SF;#As-VMDTDlE1I)h32m^bpgQDVepNAw_%zG;lBtJ@Ny2P z#CT5ha%zgt4Nv$L&cIVLbua|$O2v+(T~0rR6Lrt+y8|P4%)z^e)h0*_9KnybPP2af z5O`=iMP-)by@Qega&+~6rolMr9hMf9fubX-+m<$ORdrXQe)5a|wDEC8hePMKzIXaK zZY*8DG$z*7MKr#x(80vR25R!Z6apNE%Itw|u^?D%oXET<(^zTI1}0L&3Ral4LRw8R zzaRIc5lc&k;qQ3RV6M0Lrjra6W}iUDC^7yZ!yNUkX-SD`?SjHE%Sv}im#E>};YXG> z@kusnHxLnp!?DXf9IMHs5xdx-!Ac5q$OiZ9azo-)$QdiPWnJ&E__H=be1L+kYN7d1 zg5pCjcgx)zhRqw?(NHPHLLkJlHvFlcP4^B!-9z!SxYM;)Z@D&HU-N%r^;g}^>f$!6 zGY;GJT^g<6z9b%I9EtJS^^^*arLRqA-s%vpye9G&%vga9%2Cp^q?MWmnoeK6cgQ?> zSB{`9nkqj8242~~x-m>On3(C-Enf=2`ip+2D^}Dv_<{wS`|c;D#jm9lmjWJ>%tuKV z_>w*O2A5f*S648XMR~uJme(P$J7eS#{t>0d){7%FS;@dz6pAsBG6>kqZ1U|%yz+uY zc>Y@GTsJj#EL@N6iNQK8n_m3k&33!vvaXZrjh(G|#}Rir$irZBiSVCU0?xwHP}s5w zN8-8`kI$CwIh-KOas$;~!zV#mu3 zk-}_Z6QU0_`Ca?>sG*PH#j#F@uE=|Wss>j^a$CTrg?A7a8;T1cOPgj}ayoX&0)F*c zakLz!(4dZ_a7%cNH5x6@Y77zdIb_%WFHsB~#oS?^a;z%-coB5-g%kCts^rJ;~FXZ7m7nv*3t!{v$USK%KY(9!DR9Kd=$eXGB+wA{n*MJmYh&f&NwyF++-`};T1=lSqQN}p1&k_we42Ic7dJ#r=oKI?{qECaBOet zWtFQ(xSgZM0tvWc@&^Wf5`I8G=RRc>|8x8;UWtXJtFNVG*b{_GL(#ySYInKdmyRQwvt9X|KiHmyA=nQhXlScPTMjNs-yz zRzel`fTY`E1!*6rkI}yD--s}?K?7Dm%8m%b+jCH0=iLSj)?2;fMB(Tq#V1lk+fQu>u6o2 
zX$In+ceB~zAe_5&=fge2qc;vARKO8hcJVRe%&<@quO8bJyTF!O-a-3kj~k2i%#@~Z9Zn{O5M)L zf5~b60HSn{a?g;|hL+?>QvH^w5|nXYjmj8iqG-9!N9|Va|J5K%{8+{?Zj99$YpbJj zXB`DcJb1m>I)Q^U;%Ayu9Jos~T-Oj&RcUeAaAtfib#wECn%x=Dpi2QfIVaH#LE&-R zcVORvHJ5W$`H;!!9274Lit(()@73;p|9r=P$g9X)MR>{LCz(Kw%ynSSrG{?zB!p)y zZ|J7&!dhjG!=GL1B1kyA>moTA5?%z&8NW`-lfFOV!Fbw_u8{SarW-yPupZ2CJej?r zObt~vOIBKx18B3;ASj&$(e-EMe&jWHYwti}mX8MuPQL>p-dR~!Q*s_fD`~)~ahIv#`dOuk@K+C=# z4BZY>02RX@2}8ukcLE3$HlTRZhWC1Te0kh`igWtSoA3DDJG-9~S&0PU#iDpG=~c!T z+Gi7GvJ1sEj)VWG%L9@1d}-f2pp9}AVk7%(RLL4xE3H-!$9uvyF8*KNzv(}}r%ym$ zix$Boj&@^mQa~H92K!ot?Ov?eGvJHo?}Az<@Tn@fJAqx4GZbhvxkGZpbG(o4PV3~` z_{bt0vrzlyqvls~l)V)pqhLhJ_(pF%tODMw4i$!!sYLHQP2*cpB>_L~VTl+fR%m?v6ezFphn zqc}Cz=Ye(hAmsEQir`h3KSd+6#IieJ1VP&kJobU!_y3q#Y@(uq!JdRT-bxxgXA>a^ zREbEkt;tR^$yXh{d)t??a2b}X+lb;-@pjlPry4+u@dA~p5*TvztzJJnFo}ov4TmJF z+v9W>cXyGN(qWY0E23H(m)7UL`}vGGI9l_$hGST;096@YH6Oja3=3NVE{c%$9K6yqSF!UF#&o@Mv-nu&UNvcq7o@TE}sS zFclDFI@VTbcdqN$h?IWV_Zp zJIVF8PRLje+y&+O8Jk5Cu>_YF;d_c~$gMUS98M|*o?LKiRD{yfs)4nOKqr?=Y}YuD z`^fTX)VEf}hv9iqKht3V59_JBv%PKeA#H4C30)oC9=fpkpOw(h7-OBO_=71&a!a)} zGab4q{H8nUOzD~?)fsDC?3K@DmLsVd2z^ZTmD3xZP3O%b3;Qk&oO2NjFO=fs4$i~sZm`Xw?W9IB^KviZq7FsfqS?+ zX7zsmBs+QR)|m>G*rZNAt>>lLAhMwYrg{QDB%=wnQ6tcp+PA}6_sCO~_;T}n;zLz$ zbEU}4M1*ObHKV_b;l-JBLMz(wWZjJn!B8c=mWx&|B@YSF|mrIl-A)zm)*_UyZqDRYtkN3rTtSl0Zu#0{dp(rCuJML ztsziMMXqSKusL2R2~SQ#3wUoau>Q$IwII6u?!aJ8b$1qfuVmm&K9dg%NFsjd0TxF8 zj_CbVgB%$Sl&}-W5H!OShn6|)hvB=GcdLI*5PM?>=GH5S%H`*MTbTcHt_gi`rD+C zUXbo~FM3RCE_@H-WAX^5ir@7aY{FjhS`mgyT0HN~BzDd9{GV{km%%T&@}E5yuFG`> zQ^T1o&*T*H_?5=^#%~XHCVs97#Ft8uv9=&G(U=MG@-*=(Pc{Q}cn`NYb={z`D`bTR z--cSXB;o$*SMik2be`vh$)DLj+HW3Q2l^kW14Vj~nzbROHLb$?3WaDiXWNLUCn6aE zWXHMd+T(se~p!~MRepL zVBtAt$eo`X(JGkHDHSLal2wkn{&aKa>`h~?HGYO#aik(|Gf#hG6}%MN=d~A!N08g; zNf*A*^k25m!>>#=BXQ+FbCx0eZd#KrH(S^Ts~@BAo%X8ac?P9zLVoK|;EZlIA_ zQE2gnHeSzkps?o*E=&**y2`Y)%HKOKB4ekAA@?qTU=Mi^E+SI%2-L5ZtEwAhO1>6z ztlY5POef=#NaH8Z)YR7%H6|8&e(;?>A5nPEhYXh~S+#Vq#5Ko8+uR=yqw8cDQ$+eZ zEpls{G~B{6kjh3GId-s9;bv?*s(}Mz@qmYu 
z$sv$&y+=o3aHUw2g)!p=W4zatU${;2mWxwpv_!u8h@)(xpo$8T~R3!Rx%AS7Xv zM~skW=iGxJI&bVP#G)w7g#t{aJf-TI$&;!QR`0iOWdv-rU-;ZX)nup@m~cZuQoJji z(Pu1}jMn!Roz6PLum=)LX?6~w6bug#98s4#zpb4?%Q`8TaGToDHWqMSU7VTSKq>3) zBW7GV2qQfL37uPy)3phyM+72@r(ZVJ*ImI&I1`nnaT)v|I=Ewf&Fq^F%vY*R7wnne zdkBV(<>DeO?K%GzYM}2NdE(1e`_aPR>^Hj}lf&c5_=|z$*j8Gp>OM@5FQU0t`yn1dcO%r!JCS?Vs7 zf%X}WT01XZOuMqbDwjWUfg2HgN~Q6u#eZqItOTcKa_kMZPHiu0z*2}>Ffvt%wf@~1#lVEbu0T8hc#d3%Af66Is9gelLe*$?*|s(XQ$72IDpqi ztge*7kI0Mk)w5a#JKf@?4$koFUQNCJ!GM1se3=UP@J9LVSy)M4Yw}r;YSRtcx!Yf` zCMD=~DGz(8D$= zPRbO8KhMEs30NW(l<{x9s$pS%kSM_1{x;eyDpT@42~%I4{PO`K@r<53zd83_}VbzIy?wrlXe*}6iN;k7IkC?xC+oy4%; z#~Oc9A|g}I``6$Fc$E>CZ0wsPJOj+@9jdo$r4{au2x_0oOXO>Q>wE>y@o(SY^zZp!m>qX) zvU#|v9(RX4Eo^URi<9WDV(TS=Y{$rC9q~oWyfnYvmkatP0?!pib4dI0E;+OJNjA&=JYwv7w8t4U}>hYZcPnQ9TgSZbWyx313Z^gh}`u2z_C z0Qkbx&lIeoj|)_)g_6$?e+KJRs=4g#@3mBZ%<+`DJPW6Ls-x;_3P-8nGnqW^Hqo)y zp7weN-)$gbb>&tg4c!MdqV*G`X_#gb9IZOqZIPXLPXkRv z4*WU%?kVUA$+$LTyz(YgXho&J($Or1HtjD7VfJ^J=#& ztolEWjHM#BT(I}B<5Z-8gh{K~zHHfFpS(wf6#!^$ew`rmdk`~QZxq6R6*CCubu6UD zmX)^c&Zeu!rLJdH|JfKLPIu$$8MBmqd#Ia#H_0a&Mx^sj5gNzHWmi&LE3&zKI{z;9 zowWIez;M362_@NS|R%!m-l zXOZpZe6nXb?EF3WfoZ+q^}oUZv53B(HUA$h)?RPI_D!GofYMp5$UH>=;eu6`i%>W> z@^f7|la*me0Q>P3!ny`Gws zlYw@WG<+IdS<|_}B-(ViiR}-5@QV}*y7@<0sLS2a5mKu-iP*OjE3s|S=3Qkckl(gh zegE@9I|;ZL9kwAp9^9AXM0%=?He^OaWECy7A`a`bfch+0n6-jMWe|#(L&$~VtZE%f zO3KVcdY5yq#9Mf#&V{cRw>G@sKsK{y`A-q0nKERiOzyU{(Ufn_z*tt~1YtnrWn68t zR=oFJ_?)qN^-px~#C$@EKx)YT%q$OhSmpImsC~nre4%}w*ZgIQLdkWeLtSzOlW=#u=sDjy!a48UV8z$u9%m`{LRaN07_k(E|*rlW;A|(VN;PVc#r@z#) zohmH5ZY%EY&@X-rK?cH?`gZ3M(9vcPl^kI0SP7#oIO%%e${TMcM_IB_iNl{lKX>7= zO&(?M8ATCKFsZ|nu{$lVQBZVvZ7c8Bk{T9oIr!oZEV%Z=c()x!i|8Gg3Ymo4o%SlU zocZcP10E=HmehjUBW11F+4`^4kxy}5dteJr-tUVTRe-zLT5H00(%TuYPfuQwN{Hq- zNbpa1%=A`1igaURnGfp`~%`2wL zr!LV69HsmYL<&J70En4w!a6 zxh?}Z-Y{5D^XY-&^*+Ubt|P_R(e}O?7E@)$iI`*mN>m?x(4h)G$CTcyR&Y2^>Y!a> z38qKj5?%nh1H;)RfS}rk?M7+pUqtCMaIQsqYx39;@z2|tDyy=o@aq+M@51HjbYl!> z(R}qq1>$Y2j?BbAtwewY_yxm>C@ZLzVYS7O9vZuM3(aVl&P>0G*(F2Oi~?Ne+msSE 
z^wD(HtGB%kY~;c!2boa#wiJADQN^w(F{KdsL5;C{0J8wHa>#v0tdTouJ(!I8|nd2*PqH_9i8aL>|+$Bm<#% zQ_%L0D2i5Igdfk;ztKPuoP@bb!Gx+^EeaIp*Mt^v9PB(*A8TrXN>raH&QF2&$UQ*y zln&OAC+bM=#nv(N#~l50r|RMcV>a3%Gsjz_bMbshH3I5&fuX?RBwc;oy7R$jPbxK7I&zuag z7XsoJjbiW1nZADns}pcyb^h_pf<{q@u|TgHk}jdwc|0L(?9uNF+UFvR;T1AK%$%%T zskIQcPKgd)OGe8cc4lTATM)C?do)U_iw9p$RabY+>{LU=_Kn<% zdrGt}eE#s2Ca!cxJVr?317_6qZ=v& z499*hIGId`UON9^f@-NjFWz&k%pNZ&dcK9CoFQ7UaNeKkB}F`3ICnfa@O2(Vyk(1G#~HyZWnds?zr#@IDN|y=%Gv zM$-uw18 zOi&i2y5`5nBid=pA_^G{sN9-zr(xIazKUd-#dOw{F1b*ha{2(7X ztpl>;4wYYQvfdT15u%TE^Cvwa6yZyZecocC2V#u6q7={PY(wdh@&vvFe9DO$ zfMnQ>jI%-8`aC0}*`16DYXYwhc zfPP5xUrS8Ma=uvM2`BL8<#YCgo3UXS6u7zif~%adF_Ne%<%wQ-f@h*0YhXYv9$UOs zSJfz@r3@c$iM%(r9NcLHlU)3dU|Z_ng8poa5xs7Bghv4>@HkV2I-eU|N2pW5fCcn4 zlxQGij-lVk0^DhSGXBi~_HH16hVOq2YW}lp``=pUeCOw{1~_>D9UU7HeXqp*@;+Vv z_G<%YPUI(udF^ARhH*zsbTWkW1NS}AZ+un${A8`)7^AkW93px0((-Ny5r5KKA+R+7 zE01slIIVHbrKm!b07X z6Z~cVWvK_3+9|rdsC1~e@psFkYnPueZ2az5bjLvL=|*4KTh zYk%=#RwIsMo}!>rc@p;S!IEXydo`ozryiExI@!$W)6dv~-lcLMi~MqMV<^H`u5-@{ zkpo@<*{+KBI?lB_s8v5c$Ya?9sT~lCU_?(91Q-*KhV@gNPFpOF>lQ@+fY#m!pA z!p6oJ(F9E6KPJh>{V7WL`wRDHDkj{Bx*&?&+H;P4NL%Rzq9@WN&_q!g#_>R@oF5Bx zevIxY1cD8d)Z)~6=V3L`HcpMjI-r;X4p(il!m5~rSYMWoC~XZJIX9Z6ZGg-Amxw;+ zgN?JZTu0!He(Ugf?VeQ&!bqaS*Z?oJnJ7D1XD+Ov2RD*1K+47e`cFy<*vyHxrtiiO zXHMkVS$qylrW$;Qmz5JD@42;&4WV%tG~*n}_F^hJcb7JWgm6@R_ydj~NXLjt{8Y^~ z%l5tRQ=Qvchr{snImKV+G^|S zt2PUj_o{A#kx0Xg|ovgTM&|L$`O%q;5kr~kjh~>gI{vusDm9+7*e8rdHK(yS)Qy(!F}j)$54YE zCU+8SzGZGms4kccTG-sT)|qi6Mop6 z6LRMhEFMPZHp@GL!JPgi)*EdC<5e-$CETWie)5_q?^cVs&Y55Q;T{jdylcENN4(BP zh@mx6HaiJ+d{`>dk0z{};jw>BCuHEC(rR{u*y~GH0`>YqVCQmkNq0iS|wcvKnD!phuuAF!gQ!|Ts(FH1-dZibMvG`cxrE^b| z@sQ9?{CIxwKR_{@H<@u(Rwo{dmPE$nB6^13A_VOwq0JXS$2l7@93Jzhw%xINn9xuAFF7(f z+e~nBbifLUMXNo-a-(hxxsNpqd%9;{XHM`jSUin~YLsX{`lG7-=a%*&a5c~|$3DvnUR|5332Auikd42nw>Y9S zH`tO~vH1G}MI03#gh$}mRYTeeG@gU6agXK=3(ijoLCKkb<@DA;g%`plfv4%yhT9{6 zW>D&=Q=zjCG+ttq;PF6rg)Np+h(0Dwoed;7@u&9rrfm>9`zpES+MDUi3aV)0t=F*m 
z8cW%%^890b3X0-1=?u%1$7IQNu6u3HUF^(651ix~q+fDJo0mw@%*5KHfaAk4JqU9| zHxB&A46ZxbGqf{HH0d{IqDD-=_QxyI-z$%lXW~{1=_@Gx1j6y_Yd4LEvQ6277TLlt zgqBBFdh%~LS*|la`KxdL`z*lWtj&{T<>OxG)ZeqsbiUa9uM?`~ctw7Tf^Y4hVzjis zMbMJt-3D2?>`=P=~soQ^|Lvk6;J3*n=0wl`wj z)-;7sG0iEbf;L*8Vm+EX$EU=7w}!Y=WRcX&=&(aBtCodYR2bu!A1nle@GnOt*H^8Y zG*Ps)*qbVQ+yGsh;7e#h>_(VLx&wv~hp3y_IC8wC+?Br}F6kSI4w5vN1k`qsq#bb& z3~fC+pz0x-d!I*|Qg_ADL_VxWnxdCePXMVc0~!Z?S<$M7_8zzrqteUJMnh(ZSJdv0 zRqGmYUcfxHqlwNVJ1YiJbL#36p&pyw)!_9EaNx+hNb@qW8(x`xZ1_|ps1KE7wBZ|o zG99XKOF{`S`+!(d9_tabtHl+=dwO#NOy!`S7=Nz|N7#uf|LR7nm#z~+7wgUytxMCr z+geOLMOYr0PUcuJ8hrh&D)Zd@l|2-rQfNAUi+8N$(bW-%e5TGW5|1%bO~ytv&?l|j zmS#W$=`_#T_+ZR0vuL1gThtG39%}R5z>SauQHXC_XEPgZ#d_miv1jR5#J@*{`5WO+ zlv5Q7Euiw1adf!GSa<6NlcZTc3K$r8oN2;(&UD*t`=_LO0$d%qLsU8>_H4saZ?VD; zrqC7+>j-%CrPF2Rxf&nPJp8LRQB(?|`~YX06r==E*f-Xly&koy z-Die6>+pl45|kiPH(OWuFJ5HR!@6);-Zy0{ggrK|iFzOX_P&rfXxU z6}cT|P%L;hk)|U9Z?Z2ab9RrPFiZVFq~KdKuKf^CNlcR3O50anrz$VvnbbAJ#)ci@ z3e<@u+q*Qs2YrUuF~X>H!kXJVWU~*l#LAm}L$?aM7OqNt4G#2BG(`g6kEvR>KoEtK zO1=H*jN|10dXvdVj(Xdb%c4o0R@w$i!LN=T3L55LGN*dgn0k zRGXB3zf_OHH@@B4F8jFzb{r#UY`_0STrTr(@_y4sI}p$uGiU-!dxlu^KqLtKVq^b} zK&Ko1c$G%`S&!LLee`z|Tho!5Ck2}~+ITB?OuzDQ=u(*$UyiGVDN`4o9+c>B!`gS` z8&y1}oQX(?d(OT_V)C0EbR z?8W4TkgJ8ok0g3)b3)&hi*m(L0qn;MujC&b4RPJCpTZBhfBW=x3O^^J97pDqB}#Q{LD)x|n1*x&9TJlKIF ze?`6`HPE0s2n?cwQnDes@y4GiOBnva*O|LfSeB&}E@mJ(pNf0m*%qzVnlhhb>U`j= z2WZLzlm}d$ZiR9Do`cMK>GFq(d5=~!k&lL#x3}9%lb2OI7|h1VDj{zRXG+Z1e98Yf zMwm-40_D?3t`qJux$Cp3>EPo@jW}KJiE^U6WuwrVZvhZgd85lbeNloNwZu#NS3St@ z6c@`D*vcQ1sYgl!Ndb`dj#EF%du=BeOG*^$P9?3|9w`L#Zl2!mg0tPjsnc4q*Uz{i zZO{9A$OXlJe_v&8NA1Tn2{HQL$2&}s#yhA7J0H0-;Ds^C&n{)04abqZQ25uSg=Opm zAYTSb+OW|IYdV zLal`~C16WoS(V{(z?rum!>HCzn)JIfr$ij3N@@d+1uLyY5Y$z9|2fG@?T2^NyK?Ul zp|R$VjC9jQKndKCM6${i&R=419bYeCSVvN)88e8{!`2mKlobMFWA=uSEjAMP9CO|{ zna0D+Fw#BGtIDZOJ+famlmq$9R-t=Yhlz#wyQ3b8E)OLMp#S-azUAgMGz@uR2I~4@ zm8cy@Z7{`lq%X@qKSeh8@}(^U`Lpq+;$2z1H$L-)7eGitIC}EEs8}7puo*4MDoOC3 znVm~~5$O+AaIAtl>Mj_$dsC|Bv$5b00j=M=Ya$BSMw)V=ZIn=|rYDqaD 
z2sp4)$pzk!7-;#^c;C~z(zWJ}aCQVf+oNDO2;CnnL&zBVW+z9FE^ikQ;v({78o@oX znmWbl=pnIG6wdVUFZ~*xQjPhNhcw=sKW^iy)6wrvztg$V=n}5~meJmBgkV<^3@kEd zWYR7c2|e^Yc-?KM#g7VwvaOmvu(OTU}tXqn(%@l2cux7u4<-FGPuc_<&# z%>SG+66WtN0)2KC;HrtxQ?&T~Xpv=nX*_AB%&Fp-_r$B#bsR;;mwYRw9VG#kZY+AG zP$IhD-^+0_cabM-@8Tvv8ol8U802EI2fwfZo$^BYT$db% zY%|P4mRMy0z4Lm(YBAjo>O9uZ&GD zMMGH3=|ZOM1^A0&dJS#RWWF%&e6N-kY(=7)a5cLTIwDS4sQsJ~6UALN{1 z#3Ggv4env}6Y6CdT0>BA7Skp9=uEoaN)dtnos5xQ@(O6bI)HOZtfB6cUT)rq8HL|# zdwQy&3Nn3~Btccz5I~K;7Ch?t9JjBJG<}Fy1j(HLmxd#ovg#5@FW}k*!E}@2;5TQO^!Q$IvnCs zu_bf39A3n+2W=wJeL2Ff8=vgrJEfX|DfnjH4HzvIJd zXTO2E;^cC#tPV`mMtomC ziA}B&+j}A*{u-2f@)JSx;~KW-GPnVhusM-pO0h~mY+08|jqpa};ZvFG4gY{XK&CHu z#?OtZH|Y}q4^ImlFkK3lXE~l-yfH^82Ygm-@-w(RT3nt)~$oyb^j#rVGaStGR|VF?mz}C8^^eCkLvE z{DU{UFo9=q^EclQV8b-T;7s|zccxuzh02>^=hKC@AVT|7tQAuhbdy?-c}%iWa;gu7 zlPphyTs5&N(+N#m*ADeS*U-F3!b?$c>EQtmh8q6>X6S1M`uQc@_`bLq3{(%kxZBb! z9V$l>gLkXu8r|{4KKfKl=!jRo&%PZC$Nm#+%zM8@KD7k{?neWE;fXXtc!FTdGdcfy ze~~2u{W9UXDclRxFhq5EK96s`O|K8*1hdaLgqQ6vMYOD=*)TZ6B!zg$$UD+CS&^&m z7SwWmf+MBski6657#`|=-|v_I;;#PF4*%b`V1lT=YYg|r(h_R0Gyy|ElAqI$t&Rts0uQf04+hFL$Fh;5FQmLz0gW^54T{*Y03TJHAgL>JG|vFz z3^;($=R7pE4a76ZMSLkK$*SEQ#ic3;%p zeId2{=n@K0AzKLz2vSQtBJ8&uLcmD^c57(m^2G_p6gNuR3HV6n;uMb()gor5pv%@N zS=1bXGysqHF>IV_g=!T1_|#$!kl3qd(b8B;7~F(8yQ|~;j@P7iNd=++-b}(Dp|*mj zF!i@aI}@@u?6xVl!;=GeySv?4pv!D(Z1|7F*Oc?z(XCwEXz8!9$cD z60q`trIwvsnXhaUgDfQ_L%Nza#)7D^gHK5`n(iBcWiVt)7uLsv+2$QIke0K8mj9vC z(A!<=v2a?_wYm}_;$#fN0yIf)u<7cFN9+l_iEZzvva%^tlyBF+#dQQ^$*VXwqW`3PL;^n}vrO3-VGX$KIl=2lZ?$~_HPAa)DzkUW*^v}9IXVO-EEscREg?uH;CEfbAkCDwwp44Kx z4r5P)Ul?q55vE;TRB~&IbhKTVtqB$Qm8}v8blqBIHMwBdnvN~bG_;Fmr4n7DO3MhD z?smCKDUK$Z!w2=!gdvS#`KvvDZCg7c&6HFH$ZLj`H*sjId2^#B^!keKZ3`Nx;H_u6 z!lPN!hN^124>=$fXg(A(B~dV*gWCKEubZx;o;~T06s9O1+GNpiOGuc!2P&n2f<@Zr z4@Y_09WQ6Iy`6c&DVzI`=f!Zwc=Fvw`|cv7G%`8m7q&sio~8@WqJ+osKSv*IgUPPE zbir<~FfN7l4?Hd0!;7@ieHySLynz79bmBZ&&oC=BJ$Oa9K7-@QljCM{a_^PC#h1S+ zMc>b9I!V-|RdBj0AXxEm13y(RYkDfuLT7sE5uzmFLhs*}`Hp(c>~>z`7VN6r$?~e? 
zeGgu^oH%Q8jr9ZtnCy zjx+9P9(q^Q%t^V#Y2$Qh^`@*J2G~;;LTkM-QiOWK8>ejjpCZ5FyEZAi@}NM6BWs7;!~s?<|0p1J^Lu*Q&m>F>NW*CofA>RW+ybP z8}~47VRWqr-sl_pEPYEUyi)*7(JjAW#o*&WESwj_e?W54N%~ufwADS)*zv=`){o>4 zNp4C@kv8O#fh^oqyKc-%&hDoCDfNCEcyo-qdWZN~jHQq#KBss;aXmd_2D`cfj;yBu zyLlwPq6$8ltFiQ1`vR!A%|D9aC?wyCBSLsXcCtn|AyPJ6XXI>|-cm@D1~jjFbJJ46 zW2ik2pgB1)=%V6X)p8Q%Xs{l3ZZ<5bp( zrRnsi497fjTe1~DFbicpU~DfTHgoP^Oi$ifgi=X}uXVaFn63V0G|Rnan1HNL8vc*S zIujFPhhdISE2(Da_j3L^e0OIO>E;*+b6j@!c-Hm9V>8t16lu$ns*UdySO?I!YA@lr=$6x@O7e@VG`o{u^THlX4Ha5?tqLAVtVMD?L!D*xc6?BJCM2!xttNEjtFg_!L$QW)#d zzHvBPZZ0C&UK4A57b4c+pEx=f60Tn!u~I6FPI*(8x6i6)+AwTW@K3dQ_Y!J>}?4g&Y9{-hV=$3wM9?m}y{hBdt?Q z8NR=9q`s&cLWWxq)cLZFHKL{HDY(Sh87Fni2f-?Sz>&HhKjd?zKze%FkfDFQ&=DQ|h7H^zJXEHYGr0 zA=K6h`cgI!0`5nw1Q@&t=z-b~M*9DO+uxfB#6ST7X0zSC)MjsKcw&sUC->ldiy3D4 z7eei%snaXUOfdMe!=wJ#!;+$PaJd{ia;$}7=X6Ht@99OWcEYuc{N_*FiTLd5ak?UQ zy7%^=N98_oe4VTc(O=|`9UIVXb0$g3)PgK>@ncH_E*{IY!MEy^C&josascl*zr>>6 z($5-*Gz9(goK|9??v{pn1|CyWL%?YPpu2-}^K8d>{O~PvkU%curN%%G`8*8gvGm}t zWm+(_rnB1Q9Y@TuA9M)4+dc36(0|a5GG(WP*Y7kbWN{Qmb4x7D(+A#UTBw4#7cLqq zn5x_!2Fh?C51DNu!fEv1e;fNDwxFw?4a8iObG=$Vv)Sle0G$GW;|!TLt}^cAD7db9O$-L=zxvsFw$8e(Nx)g!6Oy`r~Kw;!VTxkLHv~MAHme(Qp6t! 
z29p6Jzt*|sFn?BlVp+J^zFn&2%GBsb2I^yaj~uBiIKdf6ClRhgP^DU0-!jA<6=r?Yu$UyYGqI(1UV5=_Hau{o82*p(u=g zIKj?3-%8%%ab$AzCwtmr4I5geSK!nk+){dvTl|(Ap`u5=;g3?7WjRYy?6gyLXe~vd z=>-CXpk4J9NTUWNi^ii9O1zq0cHQLh!P18_D6A9)r)L2o?iFoh_)SQipD!jzh&7sP zO`h3t{>K;T-%e{RBJh_Y{5%_!Cpns2gf3zejSoX&>;eple7&d0fW8c;hi3j`gH1U( zwzuxsq0uINyQe@!V6iaF^fE|Orpx3~p`S;1Ldn%xoLea(P8o)jvHye`90jWHqC@K` z$(L~ZH0AD|!YgjKqRQajgYpZzWhmktO~I{~eCTeT`GpzbJ70xZm5k{f;#gr%2b89p zi?0_HIDfYTTSlf2d2~0avx#~iF(WjNY=!{MaWE#pCL7=OgG~AiuJ4Jw(#YETE z0yU2m_T^j?a6I_QRc>SdRhL0U(Rx%6lvCf!xusPUMkI2@elJrT4wcceXi-&# zs_!rfD&y$g-R!CJL=;6;n`nvsZ2)bbf11$B-wE4KnDTV0ucxk#>tl>stNT*SsxC|& zBdbo2?j$9U8PuZSqg$}5>u;c-n2N34s&!fzh)DTlo0P8u0x2Lp@HkGF?V-+#r1?j= z#yiy{w@zMhbo%3i)*Fd1|Hd(Z`Lsrn5FMwdYjwt2d~8GjhqD9yYt0JngL#;XC`v-ZsCH z4;L@TCud_*#i)%rINw|b2Xb*8ku(zHG^AW)$7}gP z(vh>_4=;OWz??lTWis6M=5MK2FGA)G8R1010!(hm`CCr27z;ALzO?7cInzyW$~um1 z$g)YuFfE$_5NMHIHbRE;9{NEqPonn< za+BmlrA*V~<5Hyl%$KWT$b8J02AeH{W`-T1U?Rr;x!xlVtkQ+h*mQ+EqTf}~qTUpwijp}_K7cp*Jkfd3T@N^lFj98R@ z-|$MXZ$+-s8Ws?eg1|xjbT&+0S8sXj&_*ndc3hDWT+FVi#Exzn3T--eKd|DK;BJUWnQDOyN3#je<$m@VF2$8Ut$~kFIi6o5Q#O`Bfx+pGENWJl~5r z#R@QF_;8#Kf{IL^UhRwWPHqe;nn;O7YUo~wXrgR(p_r}R3A^$!UFJkAr1bP9;!yCJ zG?Z|5cjO>t1?pb!o4@kX=&FReb595(xa1DuOd8G)a`kp;jJ%(~IK2tpsl)g7924It z3Z#{zImG6`yPt?RIz@7^#lV?{DgA8~WM~YH!;<{HlfCDXv`COS6Pk)uW?lHsf*lnK zM8F*|97%4GDEkD>5;YBrkNCG4R}O)>Y|u$0_+%kEqQw^fN)0_*YvsVxI_u+<;CVq! 
z^pNl`KM^6LO39s}v>=tQ0=rZ7s2t?a<$VjRPnJ@xEeEAN7^Byh9n3QslavEP!bAM% z5guq`CNUp*cIr>1$j|*nkcyOiaHRXLNk0g@q^j8F8c55QTYQYyrtq^(^+J3@&H2r?8q=v zlRI>nRE0)5W6aBpWhyf2l5NKVF3yF!oQ}Iv+i9UQ;(rTR2B-Pa{!FFglq2Pt=%-qJxnZES@2k^Ln#)S^|;*ejA6tw}{g6r}kx#zAuCPiYA;3bPX*Mf|oBw zheH{3GS;pd50Aj_Igz?kuE&*b$*7+6vL9$Qd<4&U+^eV+g4feBt=|C@aqg0av9bPW z8q5^m_pIElZ>O+%P9lHP5@Kf0qgW2)l~;Qr(;c|hRUbGWKWB2iZ6{XoXeH7x6XD(x z)yE#>*a?A2dL8l&OAl;vjE}aIAl|4Uf6FE|erZ9j&ST@2Y)%PaoRdmQa2ua(UeDo9 zN7?9!oM@!EJgztPGnH!SXe(d3H!W(D#Fi^b6iNQm_ zyocn3dCb^#5#2ZwqXSIDMuwojM>c5RbIW7reIDUcak7@y5!0BU7b!o`xlg?T%Koy7 zgy>o9tk*Gdsz3UW2x3kC&}cyteuJNbCIpl!57IiyBa%$3M?VG$ z_V$rFb$fMv@eBiXpYFjqC+;rpuHTT+4KX>|ebEaRn##K=sEqT+^FjtODLcr&;+omr zamdfSr)VHN|A4(}CtIlhB8_GH>~DaaqZ3G(znk27_P2K3f32gUMyC2;Ev;!2`2O=k zyEEq$QMNgXHQ6EdWJNNQzM$jx5@LE5#c2y!lRd$ny`9e>@f2VH3?_23L!pliYgslq zS4+^l7%A=?Bd#%7Tl3&`i$n{*sewsdb3dpOW&RngCSg@USDf-xg6_<*4+U z3gqX~BRXY4g;b=?WpCzWnq`53f=t=|JW9=t1i5Ip zJdl-YXtJ0H&;Afy`XaWJ*CED6kdVC=@3>WteaI|wUj}YlDchfisEuOz#uW?%=wqY# z^Bq)-ieP2%_k43mU{bKaxs6@2!LEC5_k*f~)X3YIc%!7#@)w|g@Laa8aJ&v~TljoO zCEhPDBy50+^rAd+PWq6zA&ZGN;C3DP(Isx;tCrDYWfs4gMeZz_D_0$5w5kjW&Tk{( z<1D7yY>3}k+pFs%{-=<)Pj&JAZsM*uvykUkMzA+K0{Ykxk8CDW9>_{pgeO{1O^qiv zJ07{kN*g!rFNsScs@LPm-Dn_YgKG|{e?V4y#0OUi==CZ@QF4Fl06qF!Vn@Z(-Ss)C z6!_FKV_9~O$JGwdU~Q4^PXkj5yQbb`dt91RA>m;{gI?c;D9Vp`j*TSz`8@`mjwktm z{BT5fXS0XtNqQsR9u|A0u?OuB6VY1^*o$BLUCE_KEsCA0?-N(-a}*}dbl!8@vKGJE zRV>~AC=Q5o=Lc^5%!Mso>1MxIgSJJow-1hD>pCB*-%IXNOmZK0-S&UijT*E_SmmF` zANxL9SC$cbYX24y@uytx!MOKJh!c}`vp5LO?OO7P2+^KZiQ*%kVFciE}oR32XziN8>+x$!d>w!B-nJNHT* zL?$=#MUI)n_gH;&7kAzEf7gwYd*XbE{-_*rsz8AP1x`@@_gR4g1qu{WRG>hC0)-S6 zC{Un4Aw>lW6ev(gQGo&l3KUXQpg@5Fg%lMiP@q5|MFk2JC{RdIfdT~z6jD^6K!E~< z6cs2?px~Dy#mmw^0O;GRmogg)6e##rqgOBJU07%TKG|b^6;f27K*6s(OYGg3-kbA| z_1ao?gYQ~zGf?QG%!&d93VxLtVxK;JsVG0&>;C~Fr7=caaR;OT0000Z&9rKB8+qP}nw#}ROtNWclrw*#_{jq9Q&+4xB z>e*wCvBKnJMB$(@p@D#a;Kap*6o7z$g?}C-Nbnzv#rH505D-3)xR8L7+uB8@x7OtI z_xE*yA}2iA?1>%@+9L5myhW(&_(7gAhC{67(8S__0RUsDE*8TC7QflhUAI%G`uP0Sqa%BI`TX*;3XGm@ 
zruoD2$zC0HycN`$xKTz^0kf7gI;o&(=^a1?%hzW#3^7x35 zf#lGjzx)t|{eDb&_in!QYM7_ zc-&ISi1s%G+Uv-F8$+pftb(^r-V zf@}vP88qstiXqEAcm@S9-!HLSS7Dowx<(#}LL3(tmlcXj`?HJjm!NfaBwm0`(ZKm* z6?p}*+KAQ-rYYeXdRHXTZvE-}EPU$j6252e?`)`u)ixV9lGzkGob~nd^Sx8)9Me>o zx#4x;?&(6F%j^IQVrL%+O{#=9R%TnJfBX^{HT5?ofDO-9&snvm>v+E*MD=)NLR-FVoOm^MX^GnXU{Mv$EQI* zW!^enQFTcVp8XKWD|K}l9}{6 z)dMjatiSx6Q&M*}1J30u1YvbLVcQTYWu%_`_u!=Tug_gCY$fXADm2nb=`yPBcvdAU$>d@)Fe1*3qrtTGA->G8lz}0uc?U%7{G_@fF6o4<1x_15w zaeJ<$>6QO45j^@BXWuiHME*%ksG7|6cS39cD)Qt0?=G+MBA&~Uml`pH=;5>T`fEiK z7dDAgBG%f+aqB>(*{r!YRehKq^AQ6VJnp{J{n~*jMFxS(i(2Q07(Wb(%0da{6Li1$U$S}6Vj*w`tGKo}aPK;E!Z1B})&3KQ_ ztR<&L44t@YsOjk&i#9{`yN6L<9Z1HkI_r=1)pdi2BsMBHZ?SH4`#@x-!WIk6!^u}- z;RQ+;%W6WEz9-c`3Bdz65y%dRyqT$LL2t?W2jTlsvuEUP{;fXUk`=N;(Fh=;?mUZ;HZYEhXVgJ% z`|9pmp1>$;X%I%$B-TuUAJ~soV_o9bVm^k-g63zv&8(@6t)4%R>5MpH@D#%Kw`fl(2L!m5+f^Wz?&rLr&>x3=8KzYMMd zOxD)-_LKS1qb{%19m|B*rs8AJm8RbKYsjKSGA?vrvr{`ZF@(jSIUruJ73V--lsOzYe($ z^rD3~MMzi&wvuNM0BhRhx%Q*c%u%q~Q@xnaHMw8rr&df>VPj*vAXYJ*a>8&C=e~S+ z+OL+%1~BS|7%@Y1*@Y4E6ffr@;XpsV+&_PDT3eBL1R=cs4b{Cx#MT`Xc>a82Vc1_g zu0EjCB{_Tq*P@_<2+aHj_B+hDRB54^xnLzIF>xa(=2TlAh$C4B6TFg-h-)2~qok@J zR^uZ%3q;K9dyM(BR96dMxj5UBx;yeo=m*Cpz&lUGdL#c0gi!d;yv6H@1@uSjikirW{B#KN!fM#9 z8Wmei$&s2QA^>d>TS_V3o-W#UZ=S=2qkM0$D|&Y>lGe!)W2A}0^(AWyv`q}3Dl4Gf zZ%-Izy-LU%rxXo`%M()_IL(cfmVtZt=o%0Y7lYcL9*~?Q@^A9^_hTmC*pf{kbI1%< zxw>ZwmHyY0ljk*1yDorCnMm;TG&mmapjf#62^sl!gh1ex*~9d8B~Q9Y_R*7?n%|jP z!&mBH$lJqcTiK%X-r@a4QZvzq|R^21?(a%U}beQrHLnrh(%(8bsi-)Jla&jo+7 z)Ws56rR!o7yBkU7za1J_S3z*XAk+_MA%=Y&+}+{oCPCngNpce5=X=!5EytB1f-Xt_tS&1d7g|)BY?%(VpA!klr#N*`WiR zRnEGLzpR@_VlMQZuuBTGFxq2Yox_xeraMK5?OHt011v>)1@g*+9&ye3)iv^ryR{W6RLv|MBpW z$bIJLhOFfY~lk;rL^ZaV&6!Fg-D zV4(4b)IJykDEC%y>AKJAFsU*LVD^t7WKXJQe3D5QtbVa0Rb$yI?=VdyUSz(D1NB5V zZ(9tx+KW$L(XatI8E++(Ix*6byA!$eekvKbRvGQvOAq#Mn$(?Yn<^|v8lK7?x^nJ( zUkQOX#mAQ~{R7ZEI>fEquQKM*@d+=EJ&KZb^H)E5e)cDVK=Tt8ltQgwgC;?HEM-Of06`bl8 zzTe3QVw3py1}K)vm&FHGrCi9Uk%Q%7D2_}tWhr*NE7&1TM$c-ffb;N5C4cuVX`c@V zD6D-cjNCK_2}pxMTs{ThE@Ho}BX 
zTyv@Px?PNDzi!F5k8=vEDBRvT#M~pNOBtO)2)4sqLqVhV;hD4etp*KT0qx`34QtJ- znsfV)hfn>ejOwAz z>X@3XnXghIzOzA1!_05~}Rl-DyIEgn4GkG8Y{M~IbHLQD8 z-y|AycqT^=KIN6cRAxCY{h|a6*i@aPtwBvw>A1SW!ytO-4KX1QFtLSN~bO$*l{O19$u`k+J&W?drsiGVZE^ zi83#`cQ^L>r2+j#rck!A3_P*uC5x|SW_aau8)KXnVf;_7lGo`zyL?DV$-N_z57A4h zZ~brw7lx)Vl%RNf&ja#I6fwYCe_JAx0#wR+LbE-|6ZyOqgchfgw-g0t)YgogEM@hO zjYC4^fpS8%tgkjFd3l3#?Kb*zx^wy18!jhHQa;r}QfsbxBM|U|v&#K`8QutM!~RSe zI%B5yqxXtF5bhnZi;c{jXz&^_s|YE9uR9R!zi}YfgRoeuj*{-CCj)jaS-d?VnJ0l5OW*Fx^(baIpcnid)-<}uh@3DcY|(L zP^Pzh+|8d)56FS1XRM`uz-l>>o6{f6EN%_yLvFVjA%Ay?L9U0BR-X-Q z8;FTZ^afg3Mw>kAqfqZ9-l1-^peAWVsx3z{wlC~7)GMj!aHBUeh6Hi1HPBiEA2#Cd zj6)3`RTDND{qu=L>`H3k$n9a_Nk**?y=$KUR@k%9JqcKXHM|*}RjF-#Xcrsi4IGaK z0ehaX%(Za&lPO)BI}tA19A=z5bk^i7wjzdeDjG2mH`6e-)j^8ZYZTN+sr|~;Qj$;m zc3Z^D10>>2e>cV7{S_?AEBz%->$ABPcF8$yS>itRtn6Fcy1OZizs|QJpso~Y z@rvynMN~O~nG+qDp0rjY$qz}Fc(nWq<^YR{_lnAr@O~{TCG5s$gVzeSD6E)DrT^}1 z$-|>@`dVfs|L$8O*LJ|PM3X3b0=@H`1DBjPhpcZ^t>6G#Y0rMW0MkTevpTf~t|ceW zeJq&wgm5aFZ)x=12GA3nl5{&8g(=hP%k zr2TkiuDgrY(_Ku@*d%3aP=+oity%f|ShgG8gOQ&v8uYS1BMhpBKZy6iGPh6+?v2r!u zH57ZVN{37pF;+;@;X|KV!i!Z|y7ew%_(=4PH)5O>$j;8yKD@9Hbkuk;O+JO7A29|{ z);U5(cPX;+sJ`!Z7qy0aF2{QK4& zjnU0fc#!c$s&FtWh%KhW6n1zE^p4T!#b5|7f>>vQ_kCux@mwf|8<5yf#VjdLbjp?N zmv2*NdWw`cNg{?@HeQ%S*?=%BT4TbmzKC4QEOp0I zXDJa2081r;#j~o?MbHi=s9Yk|H$)WT;XEsmO@_=`?LSR1&$3JE`c#|y`po;CZ4 z4kbgX3B2U>rf=-!ExTj5yqwMiORniTs|%_Ypl^#<={?L##kKss1i$HjzaQLJaa)DL z8|@bixwon=443MVgL4kGc|A=jOXHEtYBY@xT?fT;2^g3Q)5eZ5!Z0dw5s88S@EOg{gZW=(;apcK5R)g71;(5t!A60=KUm3Zt`R)sX24-;^3 zGP2qOH=>v3kKcS$ypt77%J%lAAPp#Xt`bp{7M)+kS(@8TzrloS@xw?}bq|(i)K_dN z{$#a<5is*6$#cp8r966RAhN}(*#BWeYqi~3mQg3?MG#R?3ph`(C*(Ma`{e{Cs~5iT zIiZ26s4slAn!kIohsdgOs)R&5lto<$5?wCg{9#5WdwBA-2Zm~j`{4>a&zdv@84jf% z_;d<(%hw~DEdS`Nmy+N>(SY*p0?k-$MP40F{Bb5q#>ZEq^cx4SzguKtpnRB$-7G%k zQ%c3>Pozdoim!I`&i?ASoOys+ZK3*O1T6#GY{#V(8B*4UjjR|3Lb+kol|8VY&i*~} zm5E|?81|lR9L3v4D|=Xc7;bD7M8kz{c$XMu z+M=o^?0Fb>&*b3!2X%a=j(rJ6;+b5YY#{?_taakVfq8S^Q1Y*%Ak+xk;=GZkgQN3+ 
zN6Pf1Ucr#s!fCp+*Br;2S8vTjlp*P9tqTjvP}BKQ>&-vDrLkgSwsCHC!H1dBWU8n7 z4QG_a&9Nni&i2LZ3XJDq2EWT__TMJI@kkXnqACNDWiC(WdXsT&o``jkia=*D= z*g7X%xu~izM25Eqnr!K;7K_FC2mP{hwe_%Y3kPnjvVv=iQ!CTH-?_mPQ420BYRg8Q zUd&8U%EqzX|F)lUh*|{8`-)}S*CnPnAlmR+{QyL@0qir|!ef*7_R8TMf>`^;SP&u{ z>v3Fu2o{&T}r5q`SkiH!k6Uv6{Eg^dr_v_ z1I4REi3>Gu?obdXoO*Am_NYjHzg$&R+y+!mdZhJ187Rn7IrnPUl&L;(;9P#{TmFx; z$pM!{B+l*y^xTE)b~U|R{W%RhtT8IxLh+v1d_!q{ubu$5_{BTWs-mN$Q#0Q#^^rGy zZCfaS1-dj%uzZY{v4M>_BWZlwbl#NBUC%s1`3S+QP9AIpM1fO!WvU7ae7ikc;^Jv1 z?ii&|ux*xcAA5Y9>+?}erm$+rIy9lPy%Afx3F-8mNxh*QD|NZPw+Q2|gZQaJ&BQ~% z^qXTr@Yha-&rT_PQBOHMF)r)p%wS{WFOO(=!^UCzzr3a5WEuMlbVTTl1hC266b_rx z%&XB-bhS=t1685oZ%?KqwU*ZGWt5jH?n(`glCYUa-P1)+7EX_o^4}Jehmr=1#S9g0 zwk+p5^7kcOG|sdl^zU}cm-Z$z8^77l7D^m@%EGXH!VD7iyBx3QAT_GIvaK&|^M+7^ zjFHk!Q0XW$4Ht`t)fs$Q(sdN?`7I)}ko)5bg9j5|SSmPi$3C3TPkkjLl#w~oQlix@ zLcc!9I-@1$5KX)bD~mG9oa`|)xvEX?{PDkRZCD_>PsXbtx8qQ_UEJxnr*)ZtjmtyN^`8InN|=sh>xWA${4}7;PnQ&m7GIi=E=~x(YnTP73Sp3&}M`XSI}X zD|lB)l-c#tlmA*Q*>W9WD1nA7Ikd!!gr7x!mcYSPyYbEJ(*%ZfP#DJeYCavmU`3Fw*cgSK!QBT|+I=XMb2*Ozo0=$bk zlJ(xMM<}=X)R`_JGrCf0IQrqbHzBp(IdnyY?t1H6K5nMNq}in}f_F)+9ukaUvt&*Z z{G2RFG9JV~=v}^#CoZbC(bx?c6ocL7zszYjykTLEBXf0&%iz;QmHN!5aJB~$jzrSY zL>y>$A$8$>MF1#WH_M2^$`+WVs&itUal*X(`tW5`Z`Q63M!t0>9{Wr^Xn3-o{nu_j z;u55fQrx0J2p@1GdwtI#9Bzbg7aQmp9q7;7^=?#XB3_JeK_{WzV6vZoL#2EDtSDPG z-}1d;qz-nPS4|Sy$i?^d%6JsPnF+7zaRyjiiE_H>uISYGWfe>fYC zsQY4PuA62>Z2}ZYDC`G$Sr3`c!zh!@Te{Eu0YainLFjunN*xVB`}lqS&-BF9od%d& zTfrX-{n+0aD4Ct+wlL~w1})Xhm(MS%QPZ)$>@HG45&RgWr z&If1`KQ>9-=tVc_ErI5s*ud9yL;Vub>?!RR^(!$=4E%f22SHJI9Y}oJA%kBR^Pz8F z#1Ux-%gIC`UN~wG=ac6=OY=&J>gRbVwml}-*OF4`I(?o zimf&W+|a^uFOl9}+~<8n@FO(5r>^waj5u*##ZKm@;e?C%QGxOI3)$FqY3IB5x?~YV z-5xU?KvaN`&YrOZ#jdxUEa5qdW$NI@WHgo5hKsz0qRK_+oKz#zL|2^H*z3~>w$oz{ zviDo(*Wii;vhRAs_PUrP`U5swkiAp(VR}tJxJ_JwTWZjiY?U6~vpUHLYU^j03$w8R z4sS?SH$m{W-cjwlExcQ_)Mxj!Wjoe^aIQjei9s?p=!*GUCYsL-)UYDc#Dvc(qGrbCIdO6Zaq3*sSgH#^kaNg)oRoUjboRY-T&j$)lI z@T4hcN`heA+GiBtU8899`Pt|=!D)8J1$#=(SZT1uho^Mh+^R@vA&g|SA+31RDLyZ1 
zX9nu)x*>4h>Gam4XKpjyfc{@DKnTW?pf9ZXtkVSwzgR1mcTdJ{%eZ6cJ2eo(v@5%t z;9nwJUtanNWc3jT_AebEoc_I00sp?t}CH zv5V6cWG_yN*x13@AU`)Z5Bh^nJ9tdp2L*OPPB6qo@Rb#w&5xCBTrMx(3?V&LPop69 zk2tN{@}R`0+IEU;@uR=DVRh~m-s5oM8q5U9t|?Q94P;PU2_jtsSzV4tvzK8Cwhumh zM8RDDCMYvUo_{^?`oqz1CF7QHb!QP|{#r>W2|#N@x>>RtdgmgPj3#(l>~iyVi{o-& z`?c*r)v8XeZ6y}H+BZf+7L@)m8%qd;5Etg<$W0Z|?*${uyW$!MKWVI%aypM2&X=4L zKXD5f<;P6r2(~pE&N%q6w)N^jyZ=mG-4N#;9g2nrF&E1?{>_rlBhgyG9?+N9&I8RP zm$};4gas6${))lB$h39&=RXm$+sadZP6E^UllcwsApO3q2be(Yx$Aah*hFtnN-}#0 zJa6r^s|PPf23~M|fTy0I+6i!<#yWm{g2wlWELzL_CRTQI9y?mxV4>kIHMde4>X(BR zO!B{l3G7cY6;M(!qB2GNh_rX4HWvnOkx8-(3EivZg6I&XJ*NK0?HFZ`cR_tj`THM~ zKO7TVwo0AWqtzeyAM*P_|Tj3qo7YEv;X!Q?aS2K6=WcWr6va^39OjPcysp7(X^ zFQ2<{-R~s2NZ9>N(WJYE$r1+(K%64Q7%aEi9#I}4jqS1y1fu?m-K&AL4aixq%p2?z zbj=PxP>iqfkM_pA`JBkIQK>vU#R%0V1SDC)z1w!;WCG^4_g?E1^JH(Y;N8>T9BDzh zl4|H}ficfNj0(k5G)PB>K>V*(q1hA_)&&R$1=*VjCw3eZ7PVX(UO?k6h+(XC7MgANP;TzhjiVS zd87lg1y4G~eZ=`QvzlCY5S@4Mgo&oBC%M$o$8t!k69_eS`%Jw&YN-Vd^s)fxTX_1J zt{_Tu=0P&G*+FN@=J`<0hk)17dB$Erri)Xz* zUh8tOLCdQTLx3fl=V2t;Tw4#7xfg91A<|$9F-&LVHxb)I5syAsZN*@JO+$8f?1i!w z51Gjn^Q3Z$z1`AN)AmIjy1VfQ2h}>Hp38uBbA(d7W^MdZ?kRg2-Qv22%%Nj{Ui({a0qOA)4ww)nIOw)gHzk6KZU0{xv5JqC{E$x{gO) zb?KS~-;yil_VZFqR>t@4$oM!$k-XP@#=v`%so4ru%;RqzYxt{R4_>DS;3R54un~=ou2!SI}o4hJbs-(;M?_ zi4|pGNuKQxNV;2Fw$fC-9xb3{H`DL)X#QfHln0i0=4{h}!de-y-O*rD50_X;WEb7~rh{Gv0s zK?Q^POPA-2%!`csBznjjMD_bKZ#I}3)0ZjZt<>7>;o9A(g-rnBhCO0vw!S1gPX6jw z6*sJg(A$1~1LFh3n7UE-TxJMHdM(!tPReV}J%&^f3^Q!H*kY4aq@az4*lvSn~pW_yUTOd z6hEM&BSF%0M6}f3w@0%ISsl7~V4m1*$S;Dt6083H4OnUz0M8-1vtxSPGLqRBCz~j3 z!N`tR^DrD~HGj5?*|0VK=6Xa(=GNX@0=<(-u#=G*bEyDP%ljFm&9jq;WlKesIZuH( z#2AWgQj$Pc-=Y!cM@|^%eLIU~@PQT*SQb^w_SzrncgJo|tOD_(?hS<%eR_^NkApVA zj!sC^TT|p}4`BH{@A84Onam1*U7UUKIs)F}nXG!8`Yu6vLV(R`JV1jZ1?x48wFh}L zH#&yf<4z%az!vm7;h`0kn|6eT&DSkSV~}k{lFF^u9In~(saU{Kb^<}R(1u`#H%aOe z_~N+&BByE_*T%Emz}1wxxkARU+okj?ji!HusblBO-WsP7H-YNguj=5lp^&-oW_x~Q zjL+X&y4yIe&c6t4Ifhm5w|bmMigsAeYiT_x0t*+%pI6D*69W-LZ)v{lLLVoTLzmAE 
z`A-mO26>8J$Aeo#w+91PC$=kvhdV1qopJu#MJ3`P+Z2|20#|;rGwkmiVrSa3FVwV( zYWV^q{_$e+7pM=B8`MYPCoL}1JzIX`(&^&Qps|oU_nj5}|e@%FecwW-@ z;LOLMH7of7L|vq7B{J0*oRL3l#PT4L++Qtux_LIv_5${1X6Hif9<1sXcMJ0t?FgE= zOlDv1i?iRqXncrX+Qav%F%}ZLL*XMytKMVix+8>dQKSP@5DKjnbLDuJD8)J zq@dAohaWbxGDBXCgYatvP@yc^l%WasX8&T6rUe67Nr2w!&v0Lv1SP9~c&>DFt*US~ z^afw^E+oaM4yTqoYW*-GrnhZDeRZ;YWNPgo9&I~EHm?fr*@ZW~TU^gk=Kl8Mc>E)a z!z(sOz9Veb#i~%%!BoJ?3_x(612nT0)&%X!L+8k7Aqijl!#YO)Th(K} zq&7o0wR0+_xA8bKP!&L2ms9FUF+7sU1?a;ooEfX3jP_A?6L5wU86Rcg8gM@=4dRf{ zfLO1GiCEv}lyupZZ6m0LV`9?TxaT-qg);M?e$FG-x5i@$+}nqAUa|YDT9~2RR?EIC;pOMFM-VZ9MVxOc|P?xkEq!qZw82LzjD-9 z)ZLs}*}{6h)~idu1!d3n9(Zstr5OjguAE+%J5|bQZ%(o2SmQ5}NJh-y`i%=J~ znPR}7gA6};+4C_l!i#oLAFl$gf`S*?9R3yAW9+>CNnU3eAhe}cJ&vULT|cxQtoXfT zOY?kUdymzdoo0F;r{0PXq%|z%_zHE0tqN`2`e(ZKAt!WxVXXWhW9YFY6GO7nl+$%N zfiS&^`-s*=U6w&zM_m)N8zBi7WsGFHmoDo+qd4?m>|kHy0T_axgZRHumj5e)@c)$g z#t;*$x38A3KUHg_TQU?*U07#~`hEP`3xj(J$n#V| zjIw?)%Cvp_iD8ydUqrmrviXXE3!HLIvqHbvEjC*+hTz-G`C_uUwp${TImh&rin5Ew zYn$_cuO3yMZC@SVg;E5$54);O2mt#$#2uwOL)?N$^p3uI z?R+t^N=l6p@^JclKUph(lIB;+rwkfyIm(Ob8M4ysG`Oz(A8z;Vs?3PwC%wtHMkH?2 z3KRY3ccJVSd6Ra79V!2I)VpsaCE0697PIa~Kww7**t8Z(Hm^bgTYqgLODjdPxQw{q zI_E!k-3rsD=X&?+n${qWqv=CWvLmtcv}Et=%+>^tI5J=4EUj5^(V&k}84LX3rrZpV z#EYiHRf5LKJQ*ZMV&#reB?b@PdEMY|Z?xWCCWIJf&!MKW@H^-8JLs>>#1i1)A;!YkSxX1JiF{&2D4ks$B0+)PC2!*76iiD!-XHWb1NIsyegQYWU-M^+oQ_k|WW9_!uag4Kqe7oCd1fS{jxgNsFND)`&4LDH zo!tX9OlV5zi;W~&@4gJ!on-`1e7}h;8fK=HGNlqUt14;ISNbN&pR%NW#j#t|Bu-1h zv3qFY;i@+~h*XLHB~tBMv`X;tX|anv`An|8qqUXosU!L#TI(JB6v|#y+9D%iXR^>i zq?NXg)u2?ntz}Y?u83U;W+WmD1YAoCma}Me%w%;=UG8{*a@4cHHCrsvXR64a-jUJfiCxQLMLvvP^Y;tQfo+Hh6oQrZ0fSL0C}{}3UVW4|qBprCo;)$icDyV8B z{$5zSW`UqY#Du!SgV5CW%V$ zP*i_DgG!P`Alq$Zdo3xZS)*pbWYY9_b}?RjNu)0}Xw{tmRBNG)F;Qom)dTzmM;lkw zS&G$<$IM@nINe;%&Lt8CZU#hb#-*OA`#dF7rhr0n1~BqN8!7!NO^3)nV#E2 zY^4wgbh7vduu=RGDtK96&+EifWL?8KiVEP2GQS0?C5BHQJ4n3VyOYG>#$Fhp4GS1Q zDr#N^GTt_hlwE9I6>QC%f6R;}D%`B;Me#xlY0zYvpvfJs0cz=t^N2^CUssVlsp8ys|qgORd;0OFh1J>-U 
zubDsXZHOei2%moDXJj4KItml6>7~2ENWUOq)bJc1uQ5brI?1<-AXo%Ii4Xr@F89hU|pNkPH>b z=Ul{7h$xzAn#V9&~gOvHeJm_|nw!zH#BnICa zm0wpC2P#&605|$z=Z_}$8wau{F_gBYs|S&Bjw6%87hQM~$cDUDw7%tpIlOiBz5s-+ z2iEjDoSkUyo8Ky)jo#u03$dEV{OhN2NcB52!Yh3=~(x_YUY17@j#87u*@X9Y$5gN7W@L71oZ=z9m z32tv-U002x3}e#{-ojaI1e3cBM3>LJAj|jvw9WjxJ!~M4{m)|DO=6ce-bc3;HYbGA zDj5mKz#?%;`Qg(bG19bTscH$~t`}0Ndf{xuc*`rSwM%Lxl`pwt0LBq1*==`huo)+^ zauGO=3VR!6EHAB0J=MAmw{27AjQ*c@ap#tX$;SfBCR%_XAZ^|;x#so&q2i4e;NQXA zmD(QH+5F%##*I4B&`KZ$&lghA$@W#5KHDQTeo1BeYH>^aw`;CXm7qhOFtl)XrwWeZ zu!wzV=aoJf@Gk8cPkatKUmQq(o<44NE)o0f6e=Y6Ee^qv-{KI(BU0RCMH-SJRh79q z1%V6z`+Y(W6pdQ?VQNBOe+16t{t?H>wv_fzSo`nErkt{MX(?g|i~{|!hzr4I?Qes= zqw)R~oUST+eXx2tYmMELkCL?~Ls`%%!+Ep`}?j@y2PGu!w5~` zPq4N)9U=>-ffzTe|7BgkL6QRO*%ou)0Q>CyXu;BGMZJV-Gs{|r$8!F#se!di+@YXaADpLl>1`a)s%2aO#U#d3C(K zgwfm(kUH54yD!qdD{=k_2fFW6Et7zx&^<}<>7I53#AgL9U#tFp#}kZ z6lBsnjZ5^RX|0}$(fya5gbcPJ2<5iT6L@KtA!(a*wN~=^BTNMcnGQ@qA`t0OEOnOSIlJidKZt{*fK)74KxB6r5d@V7Hk)_QIc_Git6zI~qw4vlnwmb}Y7#da_;8AsDCS zI%cu-#{S+NISEag)kDn|acKB_ z!PM>if8SmmfhUaFjq>yp<9-t_BXLR{xeM4%g~5uO^Eph(Gj(y7O(GN@ONBrDz7H$1 zU7TR{c{S%$BAN>V8xVr{;oBj!DfA;iUbvYY-!bZF8`(#OY~L)g0}jYZ zk=*a@_6wFj66~L^zeuAo``S=6J9N%N`?zADd~gVq0U{P55d!3eml^gYE?(v{ya{@= z^AVnomi>33B9c{we_Dx(#OXa!8DO2G!GSV@u;IwzjFF9=}ZLPP79I|6~|0yXswPNN0+mWq$^UW8qn# zj$EiK&YnWYh&dHpysOb2{V;6!oasE!glg-Drg8q{EQ;Tz{p>_!pi6bu$q(Q}6>azI zlMs;7)gv|I;@W2~q^vH51-7Snx2>AF^I{rw0VD_K2V5Q%ALoQ^%06$R)c}`6=G4)$A}je<4TQQ`j+G5#&hhF?>Il`v#lqqZma9}-)*4>vd%_rLoOG2QK z)|)CxvFuZY$>3_3L~cw~5opX+VI|E@h15S=p~pA>^d}QuS;S@ZtZlzqzT;7mW4qdD z17{6$u=@zCj)D+z!l?w_YeL1^@F*3DI}!7KnY5P50BNnl3fe8FTU<+CpVtkkomYX; zr_>xy++9N1a|*92E>A8cU%|YLZ$ZS5P*%BZPkr<2f6((;XqVSSWL%PtMm2WWvgvd@ ze;6rL092$b5(kvv45! 
zc2e?w<#FJRwi%L9KFl{}6TWWyEaEXQmEAHSd#l%44-Q*M1WBcj)n?iYsm6LMv0hPj z`UHWnEW#W!J&4|hDgFqX>HmIgoBn@MKSv`EmZymVEtD!tstNvZS@KJjVJt*R&bMVv zh4Qo7?d*2`FZ&bkUes`-Q?a3~cvHU2<&5}gD8^EGZ zyWfLrqJHWhHZV~MSp$wz0XpELL_+=jqXRd?;-*CU53=*Mit;Dync_zb zE`ag>hbDZQKXcpR4iv)jY^hZGJjo?xxlCBVgcOL<8rajfb2{;zQ(yl_)jl1DqK&CB z*VY9}Rlk;!zDT)R6_e)3rrXi`-a4}ZKgd^AItMapQ#O(or_)N#rG*A%WeU66?QA1zs8%Lz`! z#~yu|L>VJ5b)3(n9~T*9W=9%(zBe!o8$(?sEUu2;dP$)Wk70YTmv>svlDUvGw9#z+ zRf7T6HoFl24eBTBw7C~YJr#hntZV4Mxd5ZPYtPxgGcG2ZqQUiD&3W=kuS*t0=wlkc zGo{(Yxv4xmCY!e9DtlNUAWta?Xe-EUuMk00ZpflhsP=8Pn4*vlUC1@p^Oe^0O-(~G z=WvUsx*~O307Sd`ORFXGIx(*q|W zr*H>F>p-p2;1&(iSZcweEQZMq>69VJ`j!!^6?7&x1}#?2=@R=q8W-j zPEMRNVjtfO#mqC}$W86+9r_Y7GUC_DO&iX3quCKsX74aB+ zUxZDL=%)r>_)ALj8*G~19DuWv{#S*7TeHhh6@J#Abj6+}V2D;wt{_bu+#C#~(Rzxc z)p`jr{1+M_dH3dpliP~+=WJQ?YP_D}bUfZnZ}hw90Hc!Ls(pIGypA$$bQxDwsEtL1 zUoCFI``U{DqenB}3t|n%u53|VT|Te2*V*UJ03);BDOdDZf+1)(kH#nN7+h?-f2%RIFGKb^f*P#s;=wTlHpfKBk=?(XjH?(S^d-QC^Y z-Q5Wg+}$CtaSiTpc>m)!=c_um-CfmHy}DPgHOG9$01%J<%{EfQSZ9YiXlThEhs^H& zwYKs%kTAfx{@YFB>W!~yc{LA1+;LtbxHi`vt{I8?*B(Q_l}$=u&eb5~KVUsH+B$g+ ztd)Yw_G;ixm0Oa-A(5FJSz^^xp>C8mo2N5u$xe3nwkM3kgD4=l2cMCy`1C44bzM*h zB+5;_4iq;rC|NmVi9cTTdFcjRzjsQ}b1|_6Bzf^0?dOM_KVSdM>r?gGaE*EiZ{wbz z7>g#pxi`hB;b2a}&9gIbI+uC3V;?}jAlbEmFqb2>zZuDyWWQlhjMf?~D1yy?vxO~i zNmfofL9!GGt^K`CS&P8DR1f$oR543*Qt9aK`43 z#ux>;MnGil9;sT^7Ab<%iG#c)MvT*F|_;{ERlm0~m5u&u3mphhxMJyC}nVN)-V zclEpd0JlXR<@sH{d&K~J;^>uyY+BY^I=rYXDjmKnvE$g<^gp9K=LC=Nnpc2uy@15j zyil@f=rl`=i+szOj0Zc_raYINs)aAOFKA+7jcsaM^p75<>tyIP_rffnZulIr{-F$# zn1p6V^ee?C@#6l#eG42s#?AxYxh);k^8m!}im(1EgnH*ypuD9)_PY^m*O110WY;Mz zQTXx5t?Mc{v)vVvS}?n!aQ==X#5dvZlsB<;_FCIT!hk0pT|mDqZ7_AGCXL>5UW}O_u<&exg zLizJel1DTO`d*P`(IeQ+?$Gg3%r?cz&yLFH?TQ6lfl=nyVu6TV$u9&4pTB6!Rb{>RGSSY$J3yn zpJuNq^&&X2?g3VE6@q&*d74hqwU_FPQ_BURFnS)6$glDh{bPCI<#ehqF9`?9YpNN< z=-)q3bN6<#q#7&~x=NurwG#D}hR3ws&g9Nt-RWe`gxKQ(tTSA)u1;jr!=S#Vv_41* z{pR|s@myVQ7tnA&W>=^`Rm461vAzM^^~e_bX!Oxke-t&ZKpE%xGaXdgvYrT+;N88v zhEF~EO@`oSXL7gk*frA03?LiPTxc_5JthzD!_tHlx+|abkx~ 
zv^B3zNl|7vZUgp+H0a&S<&rU^yrCQL?yPp?Q*SEiz_yZH7( zSXhxmU}EZmnQnr{t9+mTN?Wtb_j8*N=INi0Wo_)?D^7FJ=^*wGjYWmWNFUca6lj}c z5(ruJ=lxc;HvOodwhC>r6p1f7FfK+PDHiSO$WNW~9hD&pb0<(PYjB$5R>cs)T0h+` zvAT%Xg=EDXX+`2ZwP`&Tu5qaIG=wq9er-Gj>-`{&0D{pu|GHGzMW}h=LurYiz0`$- zJ>B@`rVV>44Wp@O9MI1(PCtDz_#lJKFI~w*UUBQ$q{(PSE8Zlyi!XOq7$OpTKXPgN zK&OJQ#Y}+~>*&Cz(@ZOOO<>IK!krVWB zL@H7YR_5{u^f56h*Nl>X_$r@nX)zuX3S4fz!oR;UUn1aO8GZt&> z4)8!@+K;Di@)%!@!<&+ze-%5H3><&oBVk&RX+4hiB)#>VH0fOlN53)k@=u2x=@5Ti zw?46y{pvQa9C3vL+86bY${ZA*&sF5a{pUNBG2qAl*^2xs$%ZRq^Ysbev9VJk`0w8NJ!v%^Vg*2_5jFSFbxYmXeIa~Zr95h5BMtPS zML&79c(~9y<-N(p+{|7HD>SN+OT5w>G89Ke@8F!ZK{5Z_MxBVX6WyPC${Va7XK3HD zgl-#tEY=gUJ4g1D<%GA7BkQUof0XfWw+E1V07>U09kQG~Vi?`;xR14vf34d|TO zmuBLGobr3oqnH~wa8~cLT+PamP>%hG6BaNK;;4Sz?+SgC>hveGRg zD*LIDlXyeKdZ9>msF@buu7_0}+a-?f;!BTlqTw!k6)gj5iPw$D` zG)=p4Ot`@mQPgfuDJ5l1e#D$Ty>yqBZX;-ph6a@4{6ZT$JT2RRVm4QzxY)4+cZ!*bL_3_uYa{jBNL(3^@l5E!+>92(_5#HtuFmWp z9#hCTh8bocHd~7UU+7TdP}|Kmotgw&jp1XQoPL`yd|Mb!^s~WWREpu42#@OzM)W7F0n-VrZ zoA%|;BK@?X@GWZvo5!{IW72~i{_K;^a1*1#0<72u6Q4Cpvx!wk7db8z{>P%t+;_n@ z=HQuK)b>NeN8Rd=M^Ta;QiDsZ-@HDf&$F8hkw3~`y0Vk4(z7PqpMG)jowKEEWZD6< z@1YLLqaH5W-AwW4z2C1khgRRMhjFjK7_N(}3n(S5D??{)z{WFQu4jwi1OVtTL<}=i zbMG`8A?J;$N?E6brPM!wB^V|4?C@#(?d{a@Si#wNxa)hB@oI1l>w9zk3_0S#e_qa# zkCY`R*f~tk0>#5x-4~H-CDCf#n-j5#`|*6e3uAd2 zYKYg=U5)n1IGJj@8t3C^<0>0i#G*vc-G$~{uj)qsy<&{!wi@(>;XbAOIJgowL zK1kdpq#^XZf zekz00qpL`IX)=sOT9n!wm3Ja4r*9=ucJfec6KSB?9UA|y!<}cAfPIEk`XW4wDD77L zfP$t`F_mj!nk>Z>Mctv3r)l{E{`4@sVl!7!xJJ@2Vmhkn#^f2=u01oV*pwS9TX*13 zih2CY%poWthQp5qrQ^tiCP|IKSa}IhHLo_+lc9+<19LU3q|H#fmLsuV)$`9vZippN zUtax^jGl-TRxVaULV_TB`NB+aX8A|*k((0n-{tfDfffZGPcZEeiFXf?20R`L+DbON zG?|_oU@F`MaIZHqi}!V5bPQu5D5-%0gRY5f6R!al1rGaCqXOuCTq#Jho;1{D zGfG*9=5G+$BF0!BIWc8bD;!Yw>x)t*w^-~!eAeS+8K>GS=)wQSH(vQQXC*23s=m!57kGfvjC-~_qP(XcC-6Nyh*mg+3L=^7V)tHcR zhFqn)kq{xQBdLux&Gdc=t5ks+V;kj3^6@qJ62g0_)5MoVe^~=oaFd=5=DFtYos9Dk z;LMy51GJJEDcz8&?xfrttv2;DbP5J*V zcX=8fGiK2m8efO&f0Ouot~6{T53|m6v9QL%$(|9hQx4{L=)DGqSdKc>)Ijqwoj}{K 
zIq9zcS$_;JVcXz0j|{K^`&_d=SpfKd?YBAJcfV%u?-GOIdc5iJviLvMt+X#p7aK#O zF@H&$)Ocm0DQHm*t$%BWo{jQf?#R|Cfy-zkKe}0y?zdOyL$vlm_|_6Ur2d1&Jsm8$ zkakglp!sx$u}p%u3CqbU-~_Vzhtujs?q+5JJfo`rdXz_(+uu0Z@huGiW)KavmlG^xH}(a!Uvd_1Cfx4gTmiTd&|Sin%jCOXU= z)5r#H;$%t`G%_r0NR3^>QSn)A-6Lga8e3KpvKM&=lwjL_c|J`?EiYB{vX;hf!>3%D zc0s!n40WddT-%l^-0@;h*vw7~q4=N_xwZtL-=JH#%sBH3CXe1SQoFrjv5ISF$4M@< zTF43*S$m~6>Pjr`qP9STc0jp;wrOU`sBs53)eSF7(zARV0*YNGTGf8hj*FtcG-_S< z+aXgpw$a^@Q+OZN6bbnbk1BLiuba)?=@Crsoq5}@6RO?O3md)0b>7ZTWd%^&2VR_| z;B*!jKf)PGRG>rz`k$_8`NvCgC=mlQ(3@Pgk1Ecvv$c0ytk_gb*DAwf8$ z3X3p5Ad2`}bOM{S(0wzn>s?UUhdqrNu7p8_u9PSXhkO>)=o;(!E-Nb&yrDEZL^zgc zBDfBcnyZf|AwHLqaz!RPGqZHn2#*UO7s$a9#>8m%UO6>z$8*~I!FjbU1>itrgyJR|ARP*)W`qm#zJ_0!19z zMCtgTB&acHJYihFO*<)XKIn`6~ z>jCE>J6~>|DIa)BjniaUNy4ZfDB*bf665^ri4e0qKM_wv0r=HyCGqJ2n@(VO{*JGD z(;F~N2F+F}Cvou^{Tv^SX%{QbdJfAjq!J+-@2~?$I5fSHqOT7SOj~vYL!sP<7{Fe$ z8ol(={PCR-B3_tz_W9$B2Gn*kgB*YaZwHt36)5?@@OF?~1D!9=an^`qMYx}l3{x*d z61{=LuM2L<%85dXTPCSUV1aWJ=P-=z5lWT~Ad;vrt;W7QclYL4+tY*{l_i?IwxK|8 zg8{%Wvv;Z|7uBL&50~1J_4LG)MZ~Q|5;_9FhU^=2&Di>Jb?-@y3%*xjjVO)fa0kk# z0`dcDC_6Mee~!8XVeWXH78I)H0+kWoDmy4jBW^FQ25hC+;hw`#RQ4-LT{YuHr>GSVb z-lfC=h@F3T!Iz3;S*y`eResxLhsIR+%mUpyTu_r@XqzexQktFb9T1GExxfC}WEetC z-j_A$9sTAa}pyxwrLIjfYVfW|dLF@Y`AEvEh_i2g4{hcd)Y<6xNnL zJs{<;i~PIyHS-2G4tM9P>;~$j1Ss8}Io8`ANDX42#AV3Y93=eLp68-DR!0@95@4l_ zaFV9l*D=N;$IYXu&1-|wK0gn*q37N4hU!cC&p5aL^DX;7T>1Z#P5%EpS@@L9;j#O^ zhu`k~wI8;y<{Z|_?(iQ&Sv{?voQUO zU!4gEmerM*tTuev(G#iD8$4>@&iJHB#BU7^ULcp$V4sx0<~79&DnsT45^QO~68j)? 
z%dB>Vo@}`s4V2QF%>VRT=a(JW8u`g;1@FVnds@hO&gqlLEY8j_Hul$kQEob$@6XX- zt|Dm&?Hi|vVn%}GaES~P-GL&6LaCUCq}zF`)&opcDL*(Q1nfVTJ^FrSKNDABkzd$Uxv7Nho3h47 zU#POt&(8Mvu@882GK6~KYmf~Imj}{1|IaHjvwS(@8TAc`Zd>~c8+I~;e*w2F8C?*d4-9y5@+P4JJ%xtWBF>l8zG4=pJs4Cf0o1=s7s zHr~zjkn*w7<|QmFd36)y`l5}<6+?Xpv|e;J+5ntNYz~zv`!D5AZ`1ObA>~yUGTtjX zW0&murmj_uZei_ok8Wsdj{(dcbQ)mFx{pCPp+Rm`)x{#2~ zZ8$^IkQ~I%aDA&|PTAq3tg^qf3rb25WzNuZ;h4AsuC_SSw??*zExvlXEM=}rIe%asT2A!Wr3q;_| ze7tTg<67>b{T-ajW(BjtmY2~pw$IKDFw_>Fz$K083o$xW0W6SV9`lC?(YNu(XLN)V zt9cI#0~)iI2;4;7@xYd30>bh-j7T{{8Zd!jI6K?l+NB$b1m$7a-8XVA;M8ESYCa=B z*SgrZvITthIJEOmn?-B>d+s>xIvogzk%~^?8K>XM_zXuM#YmoyNNn3427G44C;8R3 z-JlKKgE87b07sS|5W`c3p5_wK5#rBUvRzke+&B?IHMKN8J@v|ughF<3xZ7GmZ6BRq z!WTN!lo)11NV=h|#Ldm@_`R{^;)eAJUub#w!=~KWaQ`6YMFhOs$OU@o4bR}wNMQuF zEh*)><~t*|q|wY#6z2RGOrEW8HAe|_2liIzab1dJ`^}I9*FzjJWWCiT_y=Bd^56wA zK0|&MK5p0y=oa#K1P7Z)OtG;!zqdC(p&|m)BV91(l!=nQ`J{?u_3@SP$j9yK4M;Hb zVOCez3)Fh9^7HT~ZnjB!#_<5&ID3FnnS-+rR{R((YdhE<{G)SOKZrMsoA-*0O*!uA z7eqve42kt=!;p2}(w` zWPB3skN05SwWDyJd~P>x2+NHk$(+U*L^st?Zr&Xo34c8el&f`Ql%BZs<lp4RM zGN;YUOZnenUK}VHBO%1t5nKwGUu5#WG#wab+u~mu_+^fa^Aprzh%$jfb$SwuMI9r$ z`+|QCFYXewY|PbgV00gZ3aMkc9Z{Sw;m9~KTEo2<{6x(zc0>yN{lCU$H+ryG4O|&> z(O??;)wPWy^Ec7yvL_6(!>}94rbkT#25bM|EL}hwLzml77o}`m-+qtz8Td4EjXJTo zN%uJJjdA0#FTiD=Oy7sP~6$&r;? 
zv~74F3(8iOw!h*|I;Maz%#a_v4|;PHMv#af%d3vFYQj9IAXy}vzRTvSjtxmBK^J*T zg42Sfn{J(H%VpBd*Dq{)P0Ry@M}GL1A0vi)`{5~KQ?(VVR)r9JG>FW_tF>x?pGF(M z5#fr#3>jA|aLXFr4+pgCM;gz+wUBH>nYcPPrGQ)3;z$o#Cc{LAzMQp@COBZ6dk<=U z{VfHJkLpcJ`YZEmuoH}hErZ!#=}m-%*#nJ+f3ld6&FSL)wIz$#$klcCtid1x1H*Cp zFC&25>O1Kg5NF;UX|2<7mV$`SXabC#?%XGAbV;)Fr_?1O&Fz>+zfm&0L)cgZy=K z?M`lCO)PUa-2DL;2B$M2`7RzN#}*167$kHGyh=ZSB9CLX{>|@cKSFLk+#sPG-ACT9 zzO&{ZhV$dJ+0V9w;I5X_xU0jNAmAol6Y1YUT5rB(@cXG7>|PSbcZcLh++#LZvbiK7 zdn;RUuU3aU9lX)a?^&qR@x7`|lHPWh+ht%49g)K2ljW>E}=J?dKCX(yE z$yYmNHa=!H5lkQi1ebw z;Y*>dzVWyT=kuk?^3dtJy7a%1K%TJ*rsR4B$z|5(@#p=o0EO>)|6^*xP#?*Zy9KsD zMsr3zSb%^+i2k$Scl=2G93fjPZK*S>28kW^e4-RfwBCk;3OtG&e2-l^2*(Q?ash%XO}Kf7=GVaMM1w2mT7+#e8zYS4FdJetx+b zbZ4Eu+zaBxfXob1atGh}Hrm%~Up~q8hj#Hjy;LT5)ynZj5q1Ty5i)&oBUCaVWey&y z2`*K-7WUrEInG}@mfz|TkL3FJHd|8!e9H6kqW7XCNK6Y~Kf>QC)7q}W&dJL9+cHA>2s1J}Qeal8lrDu7Q zKVbnAqtrg~U&)%ShiT)z5jJ~e8YmhF-RV>>38;+@_)zoeh5O9yOCgKOolT3Eyy9gzoA!K208K} zJ7FS*$iA0uwk0TD1r)gDLYfM!X;t?w!J!JgNMi9OD_WXR9q9R!ACmL+Ls6XVMX}kW z$^8U(#4t`Xlk`pV&&=SK*===UVvFF6o4y*?>LEBCCx3n>VzH#+x}qnKq_b&PPY&uR zAV|UOFD%C#q6IHgAH%{dL4nP}-1IhP7%FKgKEN8hZ`&yx-HPKTUUq28LIhin-_pV7 z3FPke8O2ABX>`NS8CfRw_X1iqO#3L3r{A!}>7F+Nf)pocDQ~d)a{%-MkQ7!=Mk#`( zmCJ=K%uL_8T6b%?@KN%&PfTdE5Xp9`;^-!Qr;_HIlV+YFQvx$B3Vi({yC(Bjhsqu% zto8P7d09)B;oM%$h*CPN5?XSLcgjX>YI=5gD`w=g{mth#d{#SbC(liq@h6C@ zwijnQM=v#bi!bnYr7KwUeVE4`>`~iU+>19ajDCuwOdk*_o0RY?w37@R72#MUsOZ)z zc-nM}mr`~^!{JTPhxYzbt?Rh9we(+x%t5RZAm)Jtn~{n5Ib-s#nuz?5v!IidcM3Vu zsOi^<2qJ;6T55qr3Dp6?^9tQ0nf!s7E6wzgmA1cof{`>V;;ZevU)XfAm)Yzr6iKNM z#U-9duNB;7mgH=?<6Wq>NYNF!fs|S>)K-g@7>m)rB)1w$P9nU3|l4svx{Fi{1HF5_w2$+ zYzIP+`V-8Sz<)%$4H1kjhZTSc7SIdcJ4PJo0*~{5qL!B;aRBbDe7+ z4(d&P8&6$$!k+-jFps}f6W!h;YYo++;Xb7y4^?q$jJi;_rpWIQNbNtT>n?p-FXGrZ z@R7-G6tWgPEVU*^ty;&FKcM1I=Cr{myLuA?svb{xMSUjqi3%p?6-*xI1ek-mS_Ouc z7e${Lmp7<|pREw&K+YvfKj8+$iguYM?7khS{MrSrGoUgA85$#wqUV+wsRWlFza}IMg%G7_X4<5=AUKRJ} z40eFj0=HgtS)r~KpHsNu$Y7EZKmap7 zA~=6zjKw86NZ9pH`36}@QeZ~yv03OKplfV~IprOqX6X%4Nq(ZHX)D!Mpkvf39iyps 
zlp8XoB6|SPPyD}XiiOdOb&k7YcnU7NWzaShw^~E@KeeshL{D(^lE7Gs!kn(ojUWXV zFxVH=UPMHxA~xTehkvj$wv zH1O*>AvOsZQ^GDni&!hXDJs`943jM4L)-A(98nr*sxA3j4jyNF*Wd2oWGRy*?D0l}xVKR(`b}!|@yZGZS3(K7kSEx{y9bRQIw8K( zw%xse5-NNYX6#65Wry|$+SF&KQC}yN#1}T6_8jGBJR>DkIoFA?>{f({Zl%bqRRaf; zhG_|Hz>y~f$1uqtHS-t|`jMSmfrkx16c`3HvVJVUFmA`&gifLkb9g!iMWX$A^(k}w zQn*SkAR@>6MD$&eL?=yX9xlY09l=*N?F<~^DouaBi8YI|GGVEnAke@GZu|-LOnaE4 zVTAE!cWB5#FdH)N;IXRyc;Uga!wg+Q_+7={7L^3d&~@S(A_SsjAaym2QWiFADwB*H zEs$&o$n<2*9go1OUft#>Tlpev#2t8+RisniGPcwg)Efc-+UP5spR`Xt?riuODgS4w zOc%KC%cDZB;xjb7+Tb|pLy;t1?nk{lrZ-WMAcsxS`*i2RPCu|eENLj)hcPn_sQj~= zo6Oqk9SUsSHC#C4q40$UiToq~UlCGIQ3QP73IBhkNc}$`@j!kEvL=WPF>nhh{D1fK zrz3q$gZ76qemNg?cf~8zyvOH6qE4(i*~6@2qx{Geu4>CP~SA=R5f*OUeT6 zKuj9&Sg6-iw|}LDEgE%)ecN=1nK z6R}+C@#cMkUZQYz-Fa@Qa?AT>U%C#f&AI#-Nl9lXs?C|`HRMwI=idHvFY?p@|1r1B`V7pq(6sKaQ_ zg`_+2Gy^y6%}br;C)oq8!I{urifZd=Z_WXP#>Ym`t4j3rCtcEh6D*ej3botKI^p4;qeEIY329r`bRAeknn6EW8C zdJG+R$oRm1yz0`t%5RD8JwS%FNhZp!RZ0C+KiKUizYlznivOUIIIfLK{Zw#}K!|J_PV=)+~0=@JilEyco$_u~gdAcO-A zpX}x~Gb-3>=U*YZ^@pBg<0_+_5BFrOoCejIHXei(y4-dv8OCo#c>asb>53NwraO9a zN0N%}ULa@UT$Gi&NA7@|z=Rt@(K)HuQ|Kq?Z$vW-OTtFgy?uY0dcG(BO8%n5pXGVN z=&K~2)M+TG5Sp3Q$cUfSs;nkTAL3iW>f*{EDsYzDRDZ+8aCoa6ebM1xhN3BvQkM{E zaQ3i(;h@_;7d;ZfpQjhF>24$@BHSE(Y3(Me7hl+gw%8PAtT?k*@!^kDdK>A%(q)PB zqKwj5lXoxWS&O@fG|pR>4Oe^G)C>JTXA1yg&!Euc3x59z>+`6EwI1#SRAkQmiqWU1RAch5hI!l)Y$K6j}wd;g2BY>8!k?#~`o`l&jNcW{dw_f z_KS7YFeR_tP=+dtM{)(9rNq6nf}ADbQF59jBvKfzLA4|_;rruOD8KBe%=ZiN;{ll> z&KoLX=+36kVIT3%(OG)ZPpjs{%4vVFgMSTb_KxO-rXfqVWlxsX;TW$i#$;bUlHorz zizlT;jsMBr?%~zswEZ?&nQ?V^ZcxaFFvv&`hqg>%V&ChzYl@4BwVkdLlf3QIy~b-x zX;v3I#7>-pa+X37DrX)bO$6HB02UwOV)DAt*sd$7P$3{>y@E`61AS`6PP!UYg0L@NO3U7 zMiFIn-RTA3ewdX#fl?9-wirHV@()_m^-~ErUu_48cl>t@*%*93c%2s-L!HptX+An_ zPT&$cr_wqCuFQ#BghUsoktKjPG<0AJO(T@PxUDBtoBfByY%_*njyj`l4DTF1jB#eN zgy0t--Y89)$R2U(5SC(`8voUa`LgiuNr_E81Jc2CFP}~4*)#Hzs}2c`?aOdfmhY}j zyl7f9gqM)M<7bQV_@4%Z__!B;$a#fr&DGRo#)SN~lxI=6=jJ?0a+IsT*TYcN%0~?y zLTISbTAjh$H#*5w|GX<`Zya1=>R1A|apAJI4X)+Ck{!CD&54QAvI+O!<=wXwLzKQb 
zhBTiFSTYB!Y%#6!o@j)uSU7Lqr%#9eYb}=}Q@Wc+esS9v(~kNP9a}KkeM0kHIlsdd zuABJy;uWEeoMXNy1WLJ+v6BDIxGh;z?EfOfn=KLuT;NOM%2F4&ALH$v$zSQ$KnrcO zuwhDbtq-N_O3ClC@2qI$a22V{LF*U;SS?#l&3NZpQtb>*gbq!QJR)& zpiy>wv8KZGNZF1W!FKZ4x+cm}e^WRBXI_++8}D;F?K#n=u&bDz|H&?oQH(e~c)lKs zJ)LDk>S;0MtAtlw-BhA7bT|~TpExn*#&7U`jj!>nOI9+J@pij~=ItZABG(ox!f>!9 zL1xPnRNTW)Sc3~RDcn6QF=3;HCwEMOW)r>93L#MQ4AQTWu45Zwgx>MGuHLcQ%p~y` zN#~+W#Qf|I9~O#yD}yXPewF7cr;U_0U$wEfk{0JIw{tjHR6CSsKo!X!1oO@`|5tZ{ zH0jPB7y;rdzcKL1?qxxp-KM9ao?rLgW+47nw0(DYDX))Q#;>!{?nOI`B3ENHJnkYg zwHb9Yq!(>z>kWUs@S{{l=g-lre4;ZahBeMLM|-57-n^3M3AS1Fs&En)%l;$UkbwEQ zv{Q$pPGm&|6wnIayT`k}!#BFfToo5>U0OqgSX9_1c>>DZ0lNDVDK zG7#bzd|1HJ@_a08YlEw7@}kUxSV_f9u23=Vj=8&6O0~%M#r1&@_%+v{jk&s>>k4YS zQMD$ZQ_Gc!(Us^B>xj*r@S+DkS)L#RuuZ6NIsD~{k;^dPhn`^Xs_s-QYr>LI@qO!N`M(RzSSc=wv z0SG8lhuk{!q*Kl=r3cH}U21@mK_fp65=M`*sH{{s+M8HdMN{hZPv%V&RcMCROHvsZ zjhXUnV-2VCT~{|@Xo^Q;7Izh~Zn`20Tg)jGgVq(DzlyzNX+$xUCF-t?A5k&YI}V9( zZ7RPp0dIxejTbX=GM7~J}{~)$oGud z2;QBOkzAJ_zEwrDVrX#``7t{jLl(EbB!3!LloMKrh3l)sGu|9JW#pm2d#Q$Af#0(!TH3pAtwQ= zuv$bp=@ZSzIeIj)r-+_w{@6(B{j{UQLdpGXuSvr%+3(`a;^dWlQ&;lUK4#j5{7v{> zrz54gi3qhVAv3VGaW~mb#X^>}x%!~JNAP=}hXV;UBHxCj`wF2Au)2G+;BY#=J$5w0 z?%%pOMuQ(^x?JuT!hRaJhqRq^+ht zT(`C~mFik~uc?q`zuLiB@>-kCEt*AxE1YWnZ9oW1T6M7&x{BUu_yIjOCc7J27jq1? 
zmoC-~_6%j`&ql8>+a6Zkw$v2@#sMX#+Y^NQa33a>U+;!j%-QJxC0_9GzQgb1; zo9|{3^WlXTXNNNfoRLcwS6U^qCV$0^5A4B94UPt5#o$N)9s$5zah zuMv_Hy?p2?&F`su`spO3rBB%XsNcaH<8F5ET#DU8c8&9I(?p5&wi5Gl#|st*y=@|?Vor+o)KWZ`{7 zu2l#0!Lsm-HTkqZIVdI$MpVr$)|{AnY-Bb2+{iZ&+e}aQYMy`rjV!Klk#)x2CG%tA zgUt!r&z9k#lU6UL%}v~V7pHjJFBO>=D@>OF>+Qjv!{7!& zHh7t4ibq=0rcy?yXxLmPj0NA_uW^edtMga7JjFv!OHKy>j;auKf3>Z)hZggfRPr9h zR25U8mQtitcG9<~=2NJt)w(ac_&T%bx5*x~WHiL()V0)P2l6Z>EfKiW<=uBpjKfmH z{~w^ZQnJ*PldZ8jS*eJQQi1*$^HuWKJcfZ@(UD4-%H?%!_3MZfp&0N6U22jE2fWGl69-WIpayWbpP7= z=`W@xKN45bk}dIGTFUkfhd5R%D&{}ioHz8}vYH}{k6e86kdEs+eK6kq2a0F)rUHBI z_nN}LKyj`*sS59D8yZ}>yHJDdx(m~fSo3lN`9<=+`NZUtVxE2n-zLW;?{`rbSMoeO zPPL-E+PCJU&r8)0IpZ1tH}iZN>zSHwO{LN(3ntE5uEZvBVchz#Zq3O`yMk()J)f`_ zs&@8&A=%)2e*d$|^FIY?{|oy$G*S5q#!{{>-yHe{XXlmb@s{_I%RbJDH9`LuzTN-X zo~F?UexG*9qha?vCgd17Zb8F}RP!I{P0lNVk_4SC$EA1m34WzqJUu#|efX7B4kjro LCsHe95ct0Vp|Nku diff --git a/.github/readme-images/pool.png b/.github/readme-images/pool.png deleted file mode 100644 index 8e373c04417ce95ffac2278690a9faaaa3c8f546..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 31041 zcmbrlb8sh5`0pEM<7{l(w)Kf^+jcg#ZQHiBu{XADTQ}cx>->4|uTGt=nwsk8>7ME8 zX-s#&U*Yny061tYXdoaUI0yaZGK6+>0ktOWvK_K7xzN3-% zFQvB*Nv(D&baJY47p?_~w@(LImvPSIy49|$R;^a8<42!fz1rjRN3R~dW0zaLYd2cI zbU~jXJC5bU{S#*{P!3d|lpCRVB{sK3D z4NX-;*Zk=ofhgSNX*5L$5WD{Ic)j?o(H^=0{a8d4ek>~}=)yL$ups^d@*#+#{sIJu z1r&(Cz=Tl1e@tNoKk;7*C_k?wjQEKY{Fmid5Ct?Ci2wgj;(+5QE?;HA6q$AXPe=Bm zY97W`hn>BGt%67JpJJ0o%%Oev)f{usU~#3CpdJ-{L#en3%+kOmySKFo5WFypC>U1u zwTy_N*H$XiIw5BY>T zCBw9~65U!=bm__zh5(I+?>2*{v>nTdS4O&9X+1y|8B^b%&lA=h$St~XUp1QzbAIy=QETY zM)bTx4E>9aXv}bF!)wbJ&p(3b$rQ4ZORUa1RB4LZ;Sh(qe|&QfCvYUYZ9#g7Ktkh% z&q!{>`IF6aS$0^ z+$Z0jjHz?gM$1tF)k~;h;gUv+y?Tt6(1;z>Upd?g)`{M^f@+QR6jWsAhragJYVxOy zuZLi|>FgmJsaaBhC~$WAyml0|=qS)%mxlqc6r&QLIB9RSt6(lk3ig~iN~z7GYcGs0 zuQWVBAbduPo_rKjSN2y69#?6cd%TXo=OK$trne$wr%Ozz(*r-(p}*hCxq%Y?AkpA-{W`Oex_V?;7?ppMBecd*S=%yO!+7J+h;uz7c=wF}1cPae 
zHTBlOM*d+UF|}l3zRIHmiCEZ>klLT-Rr}rj)41}6v1!pnhnl8(^xj_o)bg6W{`pA-2nP>8|n1wlg!^~REn4rS-j z?cA1dyOxD(p_i?TL%l=^-*=t-qsC~dE?4;6yRw?#bH)q{O$1XceC1Ps3nv$U!V!O; z9yk&93a`ica;KSxFUo(>hw%(;%X7y5rL#fZ{Hd+8`KL69W%>(6=450{tcr8=gVB{D zMtV5VmYx{vA!o|vY34&@p7*I z;bStlS1{9~Izqd-2q%_|64Fr#9=^OQu(vS3adh!l|Xn2$=U=-M{{i2 zaRGPXzE9RvYd-`^;fzhgk+~qh7<~yS#wUpOfttHqTxehp!P4=O`;aPGll_ss(Jv)= zy~ku-@T~<;{Jf?R>XIwtUzRb^Q8d9t*)LiD9eUD;kiE1`!4Mlm99z8*qOoo%U1oAR z_&>D|IQQFzV_%2ci(}D~iB7+&?bl&BHKBphi>p}1*5*Jc{D|byPlh#@bo>$p&@qwx zAXzID4gZ;S?Na#{DDOR$_*d6>N&F?_0q}9%Woa?y*TZ7w(NUGnxyI0$h{qN;UGZh~ z33i{ZF9zquX$0ZwkQ68pyi|r4ND;qqanEe~A zI0CzC}tO|K^l!P`*saeB0)ab3vfFz9IlZ%I&|wS5Rg*5D6vE% z*gx$5E%F3;wp!pEcPdux*{lC0;dgiFn_DKf%NEx6WHE`)1IP`frKg7_qZmpN@I@zx zbb@sKt7}u-6043Wn8%iJ$zN*!y@;HFDLN{I-a`M=&JK24c+jC(B6PFl>y5PHXe_5A1SQ!kqYr>E`OLt*yxFzJsn@~jxm$; zibP$gda~+Tu6Q%#ne)VD=APt!g;%US)sNbr@MXy7CZGnfJcVLolj0e_-!eMG+ZR1r z`HY4cswj7b>HF_cDp5NP*db+oc&wyvL?s&{Xd7=u?;5?A`$ zO%kj|;ASJdAh7u>_pgw6@5HxGpzo(K#0i_8tt2VQ+QMPU!k*oBSVa<84C%0w?qH(8 z&7N%TrXJOBH)qkI&DHTG={osUhAcF43!%hlo!8K1e)X}T)S&fdb4`(Ag*>QPRFb~RZlk#J_B z7da+^t2l|C{3L^yiN>lTF5EgFnQCj)Hs+Xc{bTQPX57<3$5pk%i6*-JY&O?;!=c7n z9hZU4nLnb;cLKw0)-&KwQ+N9wS3F;u!0Q7Z+w;g73_2jZs!ix36#g#;+DK}j_b}LY zL9Z;icErKh4nl1Pm}Bm+xn`rvp8~5iHl;5nm__~kZ zNwuV}U06$7i`u!Cnu49xCejMh3}vn%zOJFT>mnoN4p8b|O|5H`wuN>)2TFxRr%i-R z8L{+~Ml86NPb;}TK3= zg!kTeml9g_D#u4Q>m+Qqi+TEk6+EuWA`N9WzGu$D!R$CD-I1=h z_we|~Im1%!u614=i`MKQA@)>^3ccZc_Ak_z-31QYe7pklyMvQ6N7t#0k`(8YaHx?J zVV0%_M^FTYrTk*~{dBlAQB@5c= zkio&6ojLRlUruXA&Te8{vo=_fFxt&yosV@6q`J`G(nJkks66z0Hb|x#VU}|}ycoOL z*e-Tvp;ptSec=|c=zi8WvT7SUBvunf1+mJE(Ie4Li;i-9cC)ion2pj53jUw}iR1%v z+=jj&8)^xP_KO|K682eI3F&2e31?6eO#ZvA0&^C>>ngjGdH6o%_lt6QcBt_$1DbQ{ zd|e|?l5p&FN5mpw>1Gw!?of`zE+%#Up{;?@Vi*iwhOB?tA+Kl!Q;Tu5ZxuVWv=JS$ zMDYQ;lAR0EhK`$~D;b8#+T5dBV(gk!dt&n)XU;qg8jX$h6>zpQV(T%uKIXkbT5o{S2*6p8GCa%FaR0`qncnk*ePf@@PE$~TilTe3?F0I>uWe^L!+ z#Y%e8z_P3kwK^(bxLPfk0{ZK$z&$0?Z8+}%7#0GhXX?PXD98H>E1$sxVIEynO&s@Y 
z^euZ(T1ppf2Mmc>SR>9*SF2UT{^foQr$uG&d_I;hF?GM=^QV(~VIA@hFUKpa=eWLO zaMDx92RX%>#qKT_b;WS5g$DcWPG|F~(GNPRJgD^J4=Q!bDOA`m(f_kr!K^G8sx%tE zVzpuo$&p0u{m{B6SH1^1T~l?wovpoFe-g4N<##5F<|!ucfxxEe{_Nn(5UnK8woxGw z6`mADh1(2CU6NU=C8l+xbNI!{)*_$^9qLwNtkVUNNv4u!+VCN1{a-~b#+|jy7_B>n zWIk)nR_8W3<2LkGCS@M)Q61zR9>JlCi<+T+fCD13w{kT8#O9eEBeAurPk+eRD46(p zJ5fTNm()usEoKnKA&;OFQggnd0BZDXo#Ix^+tc%NmvYueQ$ZE=NyN>QyPj8DOOdM(qHx_sCQEoV}Fw^?LTPt63`#8#;r7d&DQ zq*C*6-rYhoNweyZUM4DJmu%at<;?I$nyv!$?iMRCdFon9r-SWHd6+7$Mh0WOM|I14 zHvt6h!wWCCw2Q;4;Z{m*9(0GC>gWjt=9J{e5#ysWLyk$+#l|GbIM~w-GBU3Q4~E3g zDk<>M4z^Xhd&yaVx#T+>Mdes#sv3147wa1572Ay#ETi!r^45`(V&+F%J(1$yxQaLI z*pyhL)WJbj$Hg63%4*V&T{wyfcuE2+RCKzGIXQPLxEso^$*bC+>z2}RYo!@a#(M1( zY>Hs?aTH%B?S1*UW;t`Uvi~iehkAL;IP3J90t9Xf2=Q>q{?8ZOxr9?2x90aa>&wHD zd`1D7vxl`yb@1+8Y;kc8z`o|?5z~BN!BJ#4ob|`)O{Yl)?jJ)&tPMn#TM>t|-e^q5XuR+xcX?xQdqNn!xA7%!4BJM{uD*Q2q9kV>bGxGD z8~vI8!d$<$W1_w^0uT)@|}ptuKn*ZS+JtpIBftyv)xSsrwzUr6({N zhqbitfi9{dNky5eB+;HF3M>$hwP>QVhTD9>Sy_V61Nw1uxmWg$Gh-J%R+JTSd=tqb znS(YNHVq&5C*ac)!SO|z3gs53WyttX$64;w<({Z`DtaHg2Gqs2XPgmb-U(0CKe^5C zo!dZ!dp?96(UM_=pcMw%Fu3Avdti!dQe;MVA}!dEF-gVM+&%G8Anv`M6)|mQH<0)D z-Puu)g)tGI-sm=Lh~q5>LSsMf7S3Cdst6(p{wdsBRUrBa0`^0ocx<^iFbXb6#FOKg zzaWnEfO-=ruhT$f)a+13qCW8R0G2&hK)&LNA(TE@VQeotrKcRCn?80r6RR6+$$W<+ zw0!p3SVfufXVVa>hAfE03&T$?sw5R}?HfE~zg|TMaErl9UEzaT)CY>rsVHmk!ZO%-A2+OJ^5rEwLlNTF1+zh`W8Xlq(n8;#m-o)p#Cj_$iXjgDeL_OK zo!31vl;2Ctv`}=xtFW5}-+9$v1sz62ayDCDJ4Y0G&p%X1H3rBY|0=XZ{v?4L8Z`jp zl90#U9;^VhFiCg86Dy1VQl3}$pcAR*8aU|d80#|G74r0;YSAM>et5L>!p+;sL5uw3 z^Kg1O6ro05;N=VK-=ZPiST`gKvIAfzKY8tma>*p$C!AN52SWSuac6lZx#=@p@;(eK z5SJ2+2ZQnDfBWMj(F(PGlY`!W2LzybaD6zv7^-zdUmFEVM=*fVS@GqI`*=o2JV}#0 z*g~GPW0O{k!GUhl-`&n+|5KHJ%<|I@m~fjk%x!GqPXXDu7XcUzJeToYQdk|xEp;?u zs1Tq>aZ}hB=Tx)HIzqWVd`-UmVXw4mjoumDOw}nMBoDW4fLY40SD;b%*G)M0QRl*9 zK3?seW?!V&pq|w`D=cL0&VN3Wghbh{4TqUb4)-iIjzn4za)dN} zDcpo;E38y@avP(I*13DK?XUh{m>I+=*G2`gv}+GEB(6RW-iyf=plG}A;$F(I&XB{d 
z4*s4{IGNe7ojT^578w46=Vx|1jS<0A(rf#L_hx@UET(i6wjz(us4(%~18)ETj!-gjwz!@wu;9;9#I$)XU!&IKRiA21!Oj6Z3JFT(&sRiEd}=g_`dutlQ{hm ziMo@U^+5uq3GCZ1ySs4#y-PQ1;4}#K1hkf9{+`j3KypJyo2$6j&uYv>mV1TXYiv2F z{6mew{?b|)eoU+pp8%!kC**BG@DO5TZD^!0?MLw6gUIc+zlFIx_D(mWQA-yH-1VWDz~HZjJ_!pfjW^Ew~LVTq2&`Uh=M*qQD?4&205hQAXb&d_}uf5Sn!@&QlUlyKV;Dmh&g z3UJZI0;0i!8)IkBG&%mvj%?RH)OPnzTuD^Ag}K0y(xF$lFf^Def%UvbyL`zD#X{!HSe)4L4c{Sk`xu5H+6bf~ zP1mfvPg`!l+zzh5y49G16Ac`?3wV8k;N1YO^y#z%yr(M?+e<)GroT^&V4yNF=1S>7 zzp|r(jqJ8?&1m?j{*huJ04UnrvsBnqklZlqT>91c8_$_-Fz_5{B{Bj3>Jv-pavriL zi+D+W936+Q{;*)H+U(7B0eNBfzeW?EtdfU?8AFZd7-3iuJykp`C(ca*TEC7-0;`M@ zY?XOxVsEh_p-$)EzUlWACbWOy@WKv3_W#%IbKj7@WpiG9o1zE8A+GJMyEE#508OP! zh)mZ(vwO|0X!T!7SO-^8m33mesAw+Nb=QRgAME5xeqb63JO%@gJ@Fj>I!L%1L8iTV z17bee9lr2ZJRlJ7Nb2uWk%m0SM7p?BUf(PL5y3D@Zumq`$}~#c=e>fHk|l6sI9j=| z?22PFPE<;@C1gH}0V%esWtQ z{&RS?hJs!POpd8Py>TbS%p>4)&SVp5dGSXdT*P}%o>LLG(g1w8JHkok#K)BRMMsM2 z`K`e=xnc>d-9X$TjGOS7G7rCESdxIFI~qRJVe9eRj36PDIETy ze8BuXn#e{nIBD>6$@f_)#xuSh|kkA%nl{PkG`e)tJnMovO zUQQIoC)#i;=H8?*Gz#pd*6Vgt$gpEdc_g*r>=Z1ImReQgf(jW`(IQxQO?g#94*M4JiF%b&GHA_+H%b z+0J0`uf}9{cN!wQmVnGsav;ej5DYr~pEZ;CgnZGT4Ne07P)Wx~Kr%STYaJCyT08bn z*;Tjjp<98tGKv!lQfBWK1;JDwLlz8toPXMWP_+9WH|kF<7gd-??c1+D|F&}t{iA^E zaeX0EJADz0{NxoQYj0M*WDuO2lWn|-B;^GrQr>OYzIBm{NB5<#1WG8aHorqs*JI;k ze9Yt~i(`GE&=*G6R{^e3fn;ZSJ@lU*^NvX<{P8pEgI1O#ztTOQpkGKW7ToCdb;QG~ zL;=3W&I-i_>vBMGg#t8s1|ScSt1_uRK7YOGgYx3~Pe6$*u&91H5{nAQg!vqJ_`I{JZevCR$lbj7Hv?+9(u*Dvs@Q&a^zfb! 
zd+hR>LTm{Lo(Znqla5Ry3=~$v2om;F%oUEZP>@{}7K^ax3B<`U81VVAMH}hO7=ZH0 zos!L)PwQSqH%u|Kgjy+lW`t%-K9X=bwJw z=AaASw;+Pq(?<`ebiJ5X*?Ah5g>!e6Yrlo)C15Vkq86_+O#9YFx3whRo^WN^S3mkG7jT*pJvAIMN4=k07@SMu`>IjQ705=LeqL}6 zGDg)GlVdcULUJ*xes?6+#qI%n>va5Kd;+BxkQ&!jqo8Vk0=xfq@r%iY++p-WBlgKD z(=ufeJb2ID={{!mdVKSE@uKp^#l6L4&InilOIWCh_3q8rtcC!f2cXgYYkh=LrPYpr zwnU+x2{8MPn1?1Ni(SSU`#fb_248cZ1>`SoL(4NoIFG26AJv7g6QX2q#%D+Pb~#pz z-b+|)ZbF_XnFp?u!^Xbmx8?TjDp~L#P;kBf%QzUR8v(z*vY!-LZ@#(;-OLWed4IHM zEl0;;MUzWjH(K!d*_~GIk{MFJ;3fZJ!j*$}cTV~3c7{3VfKA`UdAsoW0vH^3qWbT2 z_Qs1C?-$|!u$I&XQ%HI$F7vlvfbqX+-r?MV@}1q2_`*&cvu`M_Yf0%qf1W+yw<=Rv zG%@|0h4s#i5>w@=svdefU}xcZ3!GKL??RJa%JydAf@p##p4yInHx*DobM-t{;6GI? zr+tQ;CYYa%_`9Z`;sg;&n=gK6rP=y*0MQF5aS1-ie7AVrMAzrc+6KTL#b|?=m%pFG z<2(had2inTHG0WIxwhkc{>CygxuK|d`#z7>*wOAcX*US7kIwgcXdx#)bTEEr0_a*d#&YldpLX4UT!Rs{jT& z=+zvJC4f||-NAQ|`#|RvU5&2G;ymI>XH3vc$ZF}Kk`B0 zo?!wbWKoCWj5TmFl-s;VTFuqr*`^5om)PrO2G-IQWYzMAUw#YfiaCJetehwrH?bC{-uS$-JdbvLwjgcw7QT2#>xV$^qyqS%`v zS1%43wX1)J3U*v>dUgS#ceM75wQFRI-fUtFtJ4}QpLqyebcC;j5K?A(o9wks*h3+5 z!YS`(eC&9WIvPZ-fJcO0dafvEUn0%teiQ!LBGjR1Z%WIHv4Mo5r^_~97yP*dyOAi- zl~2q5YrGEjNR+YaxQ%eisr;qO^@R$nM|D9B>vJguBfW?sF&sO#;LhsCy7gX!8&WwR zs7#}J2p0}vg>!b;GyRK>;~+`ze?f*?a)F6j3L7hprk83;dTK*M@>t&N2ooEB8=5Mu z$}&~&hMZu=YE$!kP-nUuT1k5i^F>>~NJ^cBwR+7DZP+YOS4h&QCCn8z2s4 zX`gcb;!gRRyEoFvG}xiI^4k{4)*JA|>|U+sh!+gc-}rKusU(bbo3Ol?V$HGGJX#|a zm|MTiMZ9~o*3m5M1Ija(gFes%8rj=84e^G6;7Ss|ruFY&fJey8?ewU;qlkbkQsb7a zg!lvkDK|WPIoaR4r#6?jh&MIptr2zvyYFCRO*uWxFEmdadm_E%Az-d`apnGv9MS5H$j z_`HE_mh9CuJ|EV+)1GZl7OwPQujWj}7o7m~(Nkv+{z@uBPLfL*2Cp94XQ;SE95IGO zL53>WE>;(+Ut0;i=4_;HxT38jNe``rJWk0N!{-DH%)%^Gydz;1L~n0#sP zt#1FEkB9HPCmlK5zV(o?M532VJqw^N0uD+g#3ObDQHu|H7r<-+^=&z{2e?!sm)dDF zvOc>|zJlYs{Vdl=L}ukNXz|j8lAD)kNxnDzhl(hRN`v>^cU4)3O9R`tMWShlo@Rwb zt<2j9V>i@5+uxbeit&feH1QCa4Ij=ksoPbK`8htRwOhkJ-aNIjH~muCXe;OZ5pXi( z=}o$J|7yHa1_FGqEM@o7R9(AlU$n8Fv1FgDZ0-3`SM8SLByE#@Pfm%KKShsziE`I* z?u~!cG)%-B(M%iuGf`bUIi2~gr%xE`KcMQ(^O1zJMw&MRt=TN7>2e4@HN=NLaEsTT 
z_pRE-vH8-O#A+3_Qe~SO?12lJz)^4A<syf!yXD-D+?ut}kk3LtyfmI$DQsXQK(crPoA zmshTpMsx}cE#&1*6(hA;l_npkgaV&7YP;(PuJN+>v$CmbJNNdZ?l%xS*X`{RE4}5GU(Z1~k>9iJMi5?5T zL@-$6M|JgVoHPRmRWYXyybnVRhnm8S3=C zK?EuYrY=?pUc2{DyVbPt-Th#N2N&~LQXIZ+dPb&0yzHIhUpp*M&?~wb8EcW4#)tjQ zR`c%7u@-(WVdITW3HP#jhcxwgE;qDEp6KKT84=mKvnO^Yha=qL1j~m0Z1AarJvU;z zH@vx?rp0#DbA%Yv>0%Y=@{C91cHvdi+7q2V8PP!%m|2fCL*%W_-h@eR(d0moFee;Z z|5ZVz)g&-sk)&tu7jw9{|J+$O;h^S$bSG^KGsieEBY|7F~@4qpjgFFrF} zsBxdHo!x_M@dr3qhNfGRGXE&ER&QghaG8PQ+E1)t+mPdfr(IG{h=GU=v1fzivkPeQ{wKqQ}F6gB>LQ z^p077X9Unw$Q`pQR!XT}c4esB3f%r^yIW_XKw9`PZ2lvN>h8fhuM&c`!M-P;h8S}5 zoznr4fpj9)M>BQ1BI=Le@c_OP)>CZ#g$E|R+wd2fdj0jlBU$+cDdI2@&tt?jd2psM zeUnLXdpkr$mGCthOaA$UsLBz4rlgly?wGLAQ7@t`Mq_$^BXT*&aGi~3@*9;oI{p!{ zo%}#q-Hyo$TsxqE>h6HfRD-n*VR|x}NJE$Pd=`wx|D*Sg7Fy+Hr@UDY&P#__Q(>cx z6X7187;S2U!g9rLjWe`7`5UKAq3)qboG#zhRC?8h)3-&&m-;`gV*t93 z%>Sr!{GUaTrJ<{kJ~qd|=Fs>J=j<+zwHFLo8cN#*-&VY*kD3Tv)WzmCItNRirA90N z3RHdn!7#LJ5;qx4>h&SNICQkg$rmuzCE(U*$+n@JzLu4ZOo_*AAIWoTX zxvwx0`#Jj> zyF8ro>Yvro2i1Kgo(|^o7(r6+*I&C3J2P{<(J0?PxAMgVea6!A;kl3X1yo*My)_vU z>{DN7uZv(hTi*#Dn@-_eK84yHhC9k7F0^~Tr7Enk#g+@(!>U4GpMOi~B3SV$$20fl z=0dt6Q^kAIB8DmBH1ui>LSi4vF-&a@VqTd&F!DL)I*tzYu=VMNB#Ase$mE;;iW2&E zMV?L6+YyW`+-F~!z>8l}*Gm}23~o3-MJP0;-$8%e33I&5Z3@plP`0X=C^t==tAKD( zj{#}xo{m$~0%v*nPGkI0H%;(T2v&)ep!w5Fe1MjMOyHq~-_gD*e7`&SZ4dO+E86fX zPc^mm5w{9jTrywzTP>C;%f!kIN&c>WB-E>$CQVk3axZHH^49Va1%88WyBu`*K{(wh zBT;1#Ol3$?=7TDk$YU*9I=ziNvY6`RX*bo2v1Xqclag0 z)s#3BcxANbRu75Im?yhH%^r&qI#>6>MsxK0a(&>Grq1Hjzrhu!lpa_Sd|DQXX{I>N zWyX}h=J`|17SSFxDt)l z$cvjy4ytzB*y;h6<@&1$EelrGfrETe&x`q0-{wqM(R}wr0M}+4$mN08t4wq6Ex{n0 ze{naPv3Vbe?;nvD8<+qP2b{Z`-D)%&!p8Dj=ZbqWPtI)E^9{`iFsM2BX$m?sg8D@l zn0I(d>*-5pJ$qNO!(Y#4?x zrW=C!7CLc8L@On{9@iXFWLHN|m$wh+bDqo_m_`{JOWo|)`S3eI_}rT|rYt_wo>dTK z8f_B-;h-_K%gno~h=l{O6Y}HdI%^@hN9g+Zq8Xhf+DFG8sZfMVne;G6R$_^w7P^QF zO=J5-wZvs(LjFv7+qZgTdQXd;b(T^#0{Y)(BWJtthhdLl4e5lw!zI%?0!sSv_cJ(x zPAP}2GY04efLK(Q^^h<*9XPa7C|+!>I3VfF1u#do_RkD_E@&W6hLSiz|5d?~5t{?| 
zxMJNgHRL<+du>k&W@b+DPLy=5DNJiMG6PX&hd|7qw1oWe{&Y7{W5{>#ANE$X)F4t= z9~i^tO2%_zZ+J~(w(FrMG;-p2w^Jkw6Nj>I0wiMV_%v}ZfF4J2kd1&2#xXb}A**_Y zo!hzGI8PHCC33VG+wyc{QM}WcT1V7O=ZsemYPO2TB)JMhD!mBfC|;MOX$i&V`^WWw zRPKO3v@FMYaA$Hwd(M{LOAgnxkY7alq>YTkZ`#5Vx`qec(cS;~m_9BdtDg8i{=W%xqxyP9qalJ)y`#==)8nb{AmOpN*Q(BBNr?&}UPFVKPf@_eo%IZ^zsPK4}^XR5PNqWjV#p@neI zYr~s5I#GwWj^5XW#Q%d`vkrG7kpC8s3A23nI+>i+@<5oE>(i3rkVUB^l#bUBKW}RL z^2DrLW9Uw=t?Ze`#F})f)auy3f|U72lQ|-1^tKQ@Xv}#(4g|-aC6{l&&5uHHvMwuo z7ak^w&E?_AkZ2}Ld{n}Dw1iPzyZb2Vi4EQ9F(5#^_>Y(b7eZ7Bv1D3h#W3D zrmQ@0YUlEp@!o5l#SN)?mN>^bm@HCON%$#51a(}bj#|#)O&6b3)i4`*&fVxyc}mJw z`%=GwJ}K|U3B@mZ0+>%PV{-bZh=9&cIu}YhZ>@A1a&D>xh5c5Rs|-1DW=%R}a9_Z| zG)mxlL|(;zOPu>1Cu!hA^_|lmpwsGdndIS&m1q{iWU#M6a_Rlc-cr(=8SXT7=H@a$ zU#NGnuQk6*gDT9Vu>X2lx2kTCF@x^8P@{LmokgM{hMANk>GZoNsH6tvH6E1Y zxh^(61{(4HQ8(T;H)CXlH)EyGD;a)vj^>2Z+**^ZDBiNQ2Sa10eeW=0>NFk$!t=C(gV5$!)R*FG*2aZGPAK!){${p-#T%v zlpLFw8T7P}96sJU~Mit@q}KeY5QDK@Js@yZUZp{2Nw-)hibK zamVcvxz$RuD^<^2g$+{{5~hujU)6q9F7D)t%^1ZWcrw36FE>qLm{{G1C;0>?TLe<6 zoi!rROla2Ob#Xm<6@LgzwOwSOfL}1%;{xQKYtifD&$~Vp)thNZJ$^{hj(Dt@vAVN2 zp9w=aY!u2|_CVR_OEPoO7urdc zPOOyOWPE`*Qj~1RYbCWyer2!sH8zb#C7EUFVV`1a>-VzemF7DRH8`4wlLt&y8|1jL6pu(8B zeCrWfzgj9@?Z$j4@|Y9UpjJMSkLN27s$`fBZzvRT=*^<2XGize?_fE^{Wq7OBv{8H z__niP#kCsRX_cp19F}L&vTR=$AX81w%&Czo%u}lBQCZ1s`teH<+tZiKa z^2l3U!#k<+d8?!T5Xn)f=ed}j85`5#uRO4%&lkQ8gvlo3k&ye%M{^Xh@$6Q!N#};-B!{DUNwVgzAD)zmXlV`} z+qFD!3HC8^Wp$(iJP~uA&{w~+$MsQA#!gX-I9%_w8-j(OD)=Q?PYx-(ddbjbNmBmwj_ljPD#L zn_0{{A$8kKY4nl@T*J4Tj2Bcp&x44LUEpBqh<-~JC6j`;&O*WN1(Hb<`-62Xwn` z_J2Wp?#v($QF9^yl_^!Gn8W>?JqjsRqAkQoFY)G17R3C=hTa6kB3UixKP7|(Hm3|7 zIa1e#g(x!5Qi1sI1mh59q%ZC|e?T#E{l}L6SC0Sh+s$MdI0N%&?F-y-hZjz^Y)OfD zQ-lD4dwoz{%GEMW>Au`T1?!LBy`z*h<8SE0wvTXA%#YuIk-1X%QXusY5DE_k8Z8mx zFI{syJ}-gGlshh{Dj^k(5`Ke!Ej4TbD! 
zmhM%fQ=nIm_h*H<$xof6=1vEQ(oYH5MqCr)teZ58neSAa@Z)X8YrCtMVY?FY&B7QY6~2_23oIl zMRVNhJN^K>hAQe3cnPJ|GDa4J)mR7f%aozssfSILadNFkmOvfsYRlp%*lbQBrw?Tm zrsKfH#F79dwegb^gi&01E>5D#efh7ta%UhM^WB3mB#O4uU@oL2SvqI)>(QeA$S`C^ z;vXECWoRe0am8&2!fzeR3CPq%#z=L!2_P0JCnT9f?tf23Vta4g^R?aHX$0l0jNIBq zer?OlVYY&dQW0b35*uz!=%>Oz^>@9T*ZV)f_=f#Bds5(^|YdIQkHT9gyQigE#>yf7p{XlUvSPI>o5lAq{f-IO2)3hzWVp(?TOXk#uS|hh9Zq;dKL?;P4h zws>Vi&O-rqo^BW4ulWMJ?LKTs$=c_KtB~Vy_fO?Eq^loQG)Gt;-)5RG4-5<>J;=60 znQJ`dEb%XHc^?~<%CR}90|{2m-<}}-+g5Md@tQ7Y@*g{br{B$I3xab6sLwN51>q`$ zklR#=9d}B{$M}ogAe$5_SC$85ped$AaX*``x+pV6%haEGbJ}AA0q;EGz10&<3doxb z3;NJD6d57Qs*sQ~zlhwDynEi15a*Zp_fr@f2aI+>xK=uMX2*a>iB69L`vfRJ4(J|q z8h{Xr(YK~V2%vsQb~~x|Z1Vj2QXi{TPgfXnnH%c=b={7L9Z^y4y2+yYq+V7N-t4hhATK z77h$dtpGlMG5KC%FMc2^Hc4!Hui|nXzGSijaRNkQQCvXnM3@S1QK%JZg_T8ky3YYd zWx^LP*zcWXWE`|xV#JS2_r1+y%FZ)n>{NwAk9r48=~WfIrvC`~_ZSo@jNYEK7)ZO@ z?{)6yHmt_a+g^!I_lRtyW}`K_Lunz|3PG>TDD4bs3`|EUG2w$Y_NdNq2ToX1j8?%k z9Oq$-)&b{V!f(M6%;Uun+-pVEYlh@Lz85-|4V@8;W;xI2i{~c%sm**=-hAhcOUI3P zGt8HPT%(y#3pRP1;$xmKVGEg-JulBnbXzETn-oMucjyigKsl_jB$6wg{^{>UD?5?Ue4`Uj_L!i` zH$bsFJlyVpZOg_AkAJe6GyDQ=m`5R(5v@3bk5^`HkH%;SN3*{o?oB2mL~pOe@2H)v zL-QpXJ^@M6_u@bYpWFYZv3HKnBx<*PlXPr#Y}>ZcvF&thI~_Z3Y}WN3L zcrK!>>enIH%)SVr^q)c2a|u7M0}ahSh(d% zgEnTF+~ zkg#_77gi*b0G{wL3aS*lq_WiJ1w0E%fwHOi9@sb4Gy*AziSmFu6eAXEt>B!{EsH)k+gZ=wtA z;nIs(Um~-?8Y(~|2jn0`({Rx2Y`|yYl)H1(Ss}Q%in&pS8qt#O%nBqh`#fUdsi@zM z{UcipL@PXzH{Mo+P-&s1DU;mQ)3El_$L`;rutNTu)=m*uF2Fp=U*LMoZ%$`cr-!p+ zpEkJ-7lKy*M^Rx+!GxkQfAj%Xn=wJmLh2GvI%gy69N3eeeDU>TX=`Fat~vp^3~#lQ$8Y$j zET7Xa<4b)yf6FxkY~{lgS0R!l_+0uUuGH&Fr$g^z-^Cij>v=LayI=D>_^zn91qpH8 z3rHuh7fOxpN5<8n;EE#-EZCDaE*CA#57>5|ks&iql)MpdHgW_})q;9*MBd-IEIYD4j4{ySCw7rGiu z>#-cS{SH%!-IM;(x+aYj=9L^?Ie(-Rmk_q>LJ{J?dNTxABC03ou>nsLthXa*1{(4q? 
z5E>l-gO6)V&?X!RKh~zw5YmE4UN+kF;lr*rY@`VHz0mwv>K;AO%Ov%*{)%%34nn{8 zXS7}3{@{#b5!Jt7`=6sDc1UaNK#+{P&!t8Pp;K#ss~ZpJ`1vl&;e>yR0=~0%S4(Ak zGJ@V^)~`p&Mi4$5W$x#*s5AIALr&9`SmYWE{0+iW^lC=~)PTnNKh4eH;Jbj!nlYQg-vrz~ zpaybUK1k)Ta?G_RTaukiaSirc>y%szaiQ@bWJ6l#S3|tAUY9wxpiiTnBeyb$j7{D;N2$Q(=v%G2C8ts1)3aONT< zzu!{-178~_@z$1$)`ZL=2ZqZnIo?M>WXJLod?(gyI`9XT_74aazARO$dwdu@ENF9e zR-*y&-hes6&UheWHG-G=3I?4w6VR6zwUu%3miKZZ<>pdB8a@+iaP^4;trGmA<@29C z=`9;Y^Lsv?i29ZQ68| zpJVV%B@vvXm7`J`NbkqSP*3hpS}nUD%h_i2Rf&&M{3@X0QPY~Es$>#HstSke$+ARP z(!Y)}!O0|kbA*eNuAcYCUMm((Eb54b^T zdQvA)n~=%`XP_7W3%^36$YQaByv^8*egl<$+X&wpAD@7xWo!3uvH%HGT72#*(_eps zpnZU0+hr*%A7L@jUe&dgF#IGiM|oR|+N)p7#yF+0QqR$+#KcL$e=!vfhu zdNAC#My21mq~&~Y-`=nQ;eQl^0nXUb5{!!}pNdliHXVdtSK|~=+FlU1&o`Ri7RkYX zYv&%n$m=x%L8+}HL!)MVSsyj)NVP3eKS|0X($8Eyi9w$grvr9w;io=&CQ#947pA1o zrZHH55;M7?yX&S;%jX+XfTwF<9XSg+o_ngv@RBPkGft_KGEed&&fUvm3#Ze!P7=bJrh#(_pL_8}4%@Bho&}ad*@T zSQ>iQsUoTu03xZ_vbgK8G0J14;8pa*o;x^h6@Bd&QYF*%LdByZCRPEmqx+@y7~O3_ z)O*~mWqiQdly5Y!o2-+$Z&5s`fBTGsxJ-vteg zAMu4tGDCZ|P)FSMF=kutszDeiSZ>C}ex8 zoZb3`f+dohUFkipd5|KD)a?WLhp*}s4`e6`3eO(~dVO(!7vuR?i|Mm7I=3g;zt%-I zjDhBIFi zuWD-v)1iz6npKr56Lr9m3i|;_=rpd}S>Yv{k<4rKpXHWC`ZA(s3Dp#qKc3F76!v)yWNH2w^ z1(2~OncFN{@;G72S1?!R;+qxtgY2$DJ+(-7F>*wd>8EdjZeH&H?7(ynlndkGVHj;7Xf;e8P@?NY;;(7yTIKlWGyVeNje!v5FNn z;|EfyM1rrQQ+v`pj&I8-AJ){KV|%aukBxVxP7!bKqENdOQ*@3q6HJqR7_)Kjx&FwX z&H0L|lz^OAk506%!?;%y3V&9znGL_-N9Q5FkM5RQpeRKTG8oUgh(x_U!n&j-cS-yF zI&=*g#lrepe9LEz1IOo3cX{e`*L*TgQmJO{$Wk`6zlP{w#>dbRn}ovC(VD*JtCzHs zGR&6^%3uCmLQ>x{`!fqJ_$6R@d3P-A2A2=?C;gOX)yfX}WgUS3_B=Wfnyv^& zF*|-DPd!+#<8mx7M}l`$uoGj_2eV^YEW^?~4SS-u_SC|TUU1fkrI?t@j?jCM!z!+} zM==hO$yyX|D{{xAXp-x{7V319WqVhw?^;4A0c>fD5BVB1@>L%0c=r(xcde7A4H4w( zs33%VIwDUI$3Bg&V0OniIeegr0s?L9&Ldy9#@#RDa9RPOln|)0ug|j^A2NI+*Lf40 zR4H$3CIyLw8YA-WG_HrX&b^ya;SjNSuS2;bXeS5uxf1^X=4mS~;VlHzd~<)Kb8~!V zZSl7#MQ^KMnifn8>YE02ThDFGfA;1boiR3P&5tIBLkvmKE$A?X*=qzcf7u&Tx%D5yDb>*C%t9!#-8nn zk8g?V5`CKDz1mW~+TJd=J$z3bE5*V(C*{h6BM@u*V<7yt@zAdfMmWS0^qUbH9 
z{)Qiac5;oXdI_Jtwyzmk!uBNgOQDBd74yiIBJ{SpLYx0iu)eR?{+1EXr}I&$?TwY^ zYXxZZWYw)!z-{vlR@J zFTuv6+$I$08&@w4dBTgwTlUp0*Vh@Yc3+B7BK*6-Q1Ev6!DvKlFZHZTjl;YDOcsC2 zUKvi3b83&lTo%N8t;eV3C;(Ly>w;a>xvbkCBR62N<1SeKM~&6|)~Tc&{W@1ys)r$` z2`OueGc2I-ZH>zv>x$QF|N4&Hk=t;)0}WKJ6m@-s08Q;X7u~VYePh$|;S$`p)wB}V zlKY7N4%4HiEc<0C>X3Mh+&WI^4)Aw4O#!E66g*~I%Hd?5*;_3AY%(BFKtwe~)Q8Np zHr|QQ_w|YVT-V0PjoN5V0=!vmY@dv{#H}uL^Ln_)naER&oIvhg?@i|^a5_u%%+h9J zFZSi0vh~g{3r$zyRAuyS`jaOHaXZz`gnN3yC+=LtoCLCNrgos- zN*h6&$Bs?S-#;T0oJM}X@LC|gM;}yE3z{)KxHEx@+R9X>VgLF;i;eb{HfU-uj)3MyPWc)WKW>X7GV?mWR^ngw1sabuQ<)1Xam;!U7lPu`4r~ZFCz$ zDxE)W?cjTC2qd2LLB+^r;@pu)+Tb@T6%80=R2XePYm2!}DV&egzta-9J+8ruND8l2 zk(48wC|~iSadL2dJ;lLU4+it4LA<^KTYYzAT?(Gf`ZB?}{231y(An0{ZzUPLQL7H$ z_pm0KA(^xmI~ ztxpzAR`E%aT^#9~TnNE0r-+qZcaL^HnPDz3w1cF6S2dMqDdiv=sAjwViQ_nl&G7qA z@`{fipMSJTu3L#8hY${GDU8?``ovH$Ok2A31oN2{U=kYrq_yq}!{Gv&R{g3>ak7O) z#`Z&JA%Hd5K#*n*Lymt&hP)xGql@UwYi)MxAkw#z1|3>XO_s`n$F z;A1_#b}6CY9o!Qf-Q6c&MieC?OKT(d+7S8u#u&XWarUA&H+xx{3!*suK;r=u$s@T_ zPjA+8kXij-0DsJ6z;L#fA5Dt$f^I!n7xHu*U(G1p-WZUziVA<-y?F?h$q}!=Tc!T; z;sjcXDHXc~$=3_PY00i?^&gp+J9>^Xf}!*Xl7y8;(-hsF5=*IiGNRr@d}&-XRzK9V z;Z(QB`l==vDqfhFF049&~Zrj=S&7O@3{0=QKRi7#=ffI zwjUb`Nk3{Q>Q_n23$K*+- z17X#CY@Nrm&E!BVbGeB)#HG1Yt!W4N3v-za<$CiIvCkfwQ%xJ{_Cn+$VJgcht20G* znxb*k&fs7Jj!_g05N<%9R2sPc#+4{4h~ck1WxMFJ)g#ex^?k> zhy~>}Dc6THK55H5azRGxVFB%<347aCW=#_2{q(>T1_Fnvbx55Nk;Cy+mP!YfR$Lju zb97=)m0HrlYKSM6pE@U6nS0>pj~4d|1MJs~O#Q+PB|Lcvlgj)RYuHvSSC(T*qk^OTb-B2YILBb zk+d%nC6KKLM%03`auCsV0j6q=@6YneBN~r6z9aUZ_MdumQvrB(S@I>H4(6snSXXR? 
zbJDtdXd}5(5HE-+(~WVMA%K%64=TZ-#(w;dKcX7*`8lLrDS)DtymB#@mJrGBnk>4t~lPv^)Y=C-Dq zO2tQ`CR9B*xx;8Sju#sk3yW+{lSeK$HI;Sdj6`c$nnjfdl9bqc(p z$O5BbyGK7P%n>7LC0%Umiw;JQHXQqEqDbk( z4tLrLhxSN&ol*#q1`A_z&0W=t7@pr^>I_pEE2#0RlXRB#G`YOMFBUQ;gjDa93#0n9 z)~7Y;w(+jc2k9Q-$1S2xrLSE*QJ6tDt%90lc=$gP>oQusKnW3f77Z3U*O~mYe|OT9 zV8?c(AiGLDBec&o7Ip4AlVil!&nc)QoS#{fr;pR4dZD)yP56V{a=q0LQh{(zZBF!P zv?Gy+%)Oy&Rj%3-Z8bmA(71&*^bk--r>(|1hdV>&Xj?ru3&6MpQrE;Ax}9*+n8aNJ@ZJdz3K38mRTHA zs*rPNnDgH=@^1NM=Afr&ZeyF^CNG{(TITRqtaEZM=j7NJsJA2-#^eeRh`mgw83|VL zo3=J^<(*wnXLGxe@=Z`}67KI-gg#5mwbGw{wIbT>d%W1xJFeiWLfYe#gGzM;h~M3G z)tz2Sg6tAEcQ7B$0;Bh++qmSFjzR9) z6a#4D{|}exf0dm7F_~zYV%&!Zw=Wr+TjdcOC8C(+Z7)mG=* zjSJg+cExHZckhJ!?e#1YaUP$-~P{Ej%gw3IyIyG>by)(hx9IXYa2kHWupnE6tE#*I% zYC>J17HT}{0OEF5%YU`Ri=}k@U(7Vr3w{;9E2}+T{Z&Sv-*osmz^tq-ZWlEB_YK=edHP`Rb0cg2A%XzdAA?)aXJibRRzAj{EX9Vjdx9 z7fgqLlsMHs_E@&ip<0BL7=X8a5X`!KUH{tp+9ZvJFNZATJAkcm5!v#m%GgLPm%+_B z!>jNb6(acR2*)U$4v6PGM^TZ!@%y=9azaz{PLFi2+FAc4!Yh>PzSRc=WwpGUBC*1j zDf6{urGGqY9vFqx3|YdgtfS$v5Z#`oKimfVu<=l5Or)3yBSJsjd7#)A*{)MM)Hew6 zd2`g;35m~dmp*939Pa^6Gi%#uKb~JuBZWb;=;qGu$|E9?cW|0#1RTzqY8M$m*v$IW z<+ywaT|SAW(6@?ed38TPofhG}m8Vp59B4f{+Gt#tsDsWrz+(5IjdrnsvsO}5@i|#G z{4I5xx9G~s6csEU8^4nZrZ#iQ!9D)Wpk=r$U)xs^1)Ol zjsRa7U}7$WVC(kUi#xZ6sy(&`yOe`o+lyP(?E~e=&*vC-G6N~D`6{d4srT5nxiJ?m zefWKA3hiE(L}p?RS1M=o0-yBN)Msi#oGweXTfFe31E-0>_*MhfemLJ(?^yyvs}Kd1t%h~X`fALXbfDFOhug@<(|wMg zFZ{5g8w3RF@IO5QKo5n>cp>go*UzVxI=T=tIyI>0s{U0%18qE_v0t`IoLW0O?bw+T zcRD*&(CUWM1UpxA{giAx;?UVlQ3HgnsuHo8SUZ?`E;xzd(o---fE-EBkS zRPQv*X$ntPBz|9&RbK|h7Z{MQlEYqg1vMV1I zv{(T}J~?Z9;9kZ~wkQA15~2#M1e(9J7pF{NmL(hTy|LMKWkg^5TFx&1I3PVJuY53^ z>GTVvapr7D;+@s~^0RN)aW1(2WY#RQFdy?Od`#UtbW{9##JEO)A9y_cZMEbJ=bBo> z=i&4-s-k6xEe`6-Ty)^Wqzr-&%1Wm*v9v>mzEjEcImGZMH#qCZ*PA{1u;?2vTR1}& zH~H;iwB%KWlE&{q8V4dzcXyLk^M_1K?Zp7?j7?=IXV>qLnN;^q%vc-D;ins@YZ#u* zO3i`}vfIgP)AhRljzG~AL?=YPj~CvHo2`D^r!el-M9>a(ErG}8LS`vUl!cda=eqxE z-)E|~bM74awcNG5n=MnfBlOw}9OO}oZbZ@9=jiy^OS6*nTX{jX0B~#I#y;~dN!I2Q 
zSnSq2LMd#Byk<#MmY4Sewdu|@aC%wm@JWZz@pv3?fseB}ebnAesdYUSS46M~!xR^G z5ZGqV5b#FnS(#AY&?74!uT8-itT=@8H|Q{ID=iv{l$0O!ZK6XlZ%>zH-F)8S%5H$0 zYN1jbu|Dt+!!w{uALqE|BbKOLG+7_Tnk}vh@M8>+D>{JNVU~>(Aj`OB zuw%<#eiLelp>>G|>*ZO{-I6IGF0&*>xBS!oKJBBG6NwT$e!-L#c=I`C92~f%iRX55 zTvn5Ig{y*RmK>kyZDlRmwa85FqS6%1*q9S=xOQkr;97IMu38r@VL(@i!h!xzUv6De z!9uBw^@o#r&V*Qx@$P4jdBXcJeSUh)nuw(w&O=%NCWWD99@m7S=k;lE&>6_fI> zUHW_boq#}Bw)Hfb$Ief_XI!k`S%a5Px;fM)uI!g3ed}9nAUT~iCr`W388t8?r~aeV z>ViI>ABe8Sa=)tgz3#+bL8Of`>V#R@0EC8%gD^2QEBCK%GmwuHik{9#lUUl?X5ZmMI%W?|q3(rKaxDo60sZ|#dLb4XWU zo~O^?mAT`Y`vnghm2HxPj{uOxt$PN4oy|lHgVRqp1EDtOXbc1Re82PBwBuHh zD?=4@8oqdS5ey@uwLVtQyyzmMy-3wtI{9Zz+xt^n94ya50u-t3V)5eW$%qpKon*}q ztM!(CF43DKJG&e{)nTKOFUQ!|Yw`zfgwi-`N2rCJ9ii$SF8Ki-e8r!StQ-~{?z^7z zj<%<>4iwl|to7QR>5vTprrb$AQTmiui07>%=)8iY?l~*9WjUI%$ti~}49g=XmHpoq z3}xy*0;g|wire_|HaXIJH4FfFZ^x(%!`~x6-k58(R>2Fg%t3zNjbqKkaQ7@r3#+T) zJTZehvE?jN>}lh%?Zn_7q+8~F>ulRJJDVAZ%F-=Iz>W0<;2uha&b?7|*C5Mqe2}vC z#S94LW9$0Gs`q*3xdKy0aC#p~ooUI$5Kb4i3M+CD#Kv_wRqdza;Ubu5j&P5XbmUt% zmbn>kWSSO$G*20A&B8iK zcM`u*K09Y1-txY~cZDk<5rlVcew=8(7F?QMKk9~Ytm>6`WhNSwJDh|*WJlEcZf5NA z1&TU3nk*pfbvaSMcb+*Ue17xIU+vOwdO2X4EKnSl1B`lZ^6Xfg`@Ux|8H9VBn0Emd z6U5%Dsjaq%V5!s|YlgGsx(z#L$))h9he&q}4Dbm)PuR^9D6~Zgh{xvxWLDo8o7zd} ziSLEeFDKrre;>Ec?ge6Y;SA2%<&DVB6?#DI4&jI8*PU-@|au6G~c>F@vThuBP2Xi zhBKjv=E{#e8<7~zB61A#~Rk?8d`PP35Dv^%)34Y$X*M79Y=lxDAan8%WV1C>Vd6^gJRdk(K z5wU4Zo5N{2+&H3EH*|jfqNarZMqP^#NC|PQ_@-}jcfbu&QoZYl?Q6OS+ZMTJZtK!< zIAeU^pY(sTViPC-zkH_uM$7&;XX*ciqb=e|Ecj#{gOxqx)<3mojqj{6#HA++6x?`k zo!#kk@{B_)_LMqM)|!Fi{&nCD`aOsDRa}yJIXqY^Fu?i*Vh+3Gx@yU1Fm*2S7?9hp z6_St@Hf`wANoVZeuJMwo{lfA{3Pq5kzLIuvR92fz=Du7Qv)!)q;E6< z=aY~#-_O(k=W&CtjiGzuqb)xLRBQi5ORwYo4%tL>v7!UL1$JwfCU&pa+rauU0&kw= z@6NMs(e^$;PtbbScl*@2x7-)fgZVj@lNjfOnJ%o8fI@n?78zei{v90ce`!X~oO&W9jKe<=THa{lYm<`J8^(Zoxc8?>Uf-ib zQ?ya&cIkkK7o7km{ILa9dt+%sT zh1;j~Fz*rWUcBblt_=i-uy~6yeiv-NOb^uDs;ZFkdJuGX^tB1#Tx4@%O&P*t`Z+@rL<4>?`oyA`0UpG7$ z4N&1wQ5l#ghT??ZYXm*p_)Oit!2`^&i;S|cEPuUU44=3HKA;#9APlJ`V^QTc2>!LG 
zI^HV6!!|Fk{#wz|3>Brs)UJ$Sh76(w=^IMjvn9IQ1x00nFtxK|>%tva%j|f|4BU?d zeL;?Ejw|SZBk1ypGqc(tW?tOhFD>cJ7&ct6iu$p(Vhb)QV5z&xBFXPi6$xei8IQv&CGw;r|FBx^fj=lZjhWK$)zLyhVOSh zda(U4nspjp-=W};?q5~Qj>yKGj`=jfvZedI8dwNT7>c@=G+0+PP4~75x37@6*O8`n zoe86ZSBPm0-e41e39K24H|EHx-R7N|ubU$K`^B8=-YSt@F8140ZYGDJ3SJ*{LBGn~ zx`WZV2ey~tfrExFE`8u=zCW|o+(C*o$tobHYikwlFL)wS;H{M0-X$bNLtkPVC4lsK zf$9gn{YIk$kRl=vmc#xqJ10xU(wh__U8>HMmS_n@50yug|2P&A{vPk`APoiBs z;JFouS#=b+XZ;JnkCvqnw@OxtNp{WcV|aK=4z1kla1qD3C2+s21GD2uX3cJe?t1{Dr^aFR)GoM} zNZ7PYaOnyP*6dH$W=f^fNM5eGa<|&nBYJ;)!rs{zvm}|}zF#H+PYn{;g<>J>emA4(xa3S6Wr*{8aNKmy z4}ceWvRHVi?CQy}>=}(4TxW)B;_1R+!+5sLYH@wx;oI*BH?>-UV!#Biyi^b^j=|W! z41#aO&3F#xl*xssaBZ?~2=ngkQbwRbC0{W>l_CjGs(Sb~-h{?mL;mMoD8QI2~+P_Y|&RVMHYPvt!R=!+jyjgbwa-gk-asy}9MiA6!$Qt8qpr@OB-oCkZxq7b zU;sD{=Dl0SCA3=PP$IiN{aci}B`eAXpEaF25d%5%te5v=(dSEsCT9JaNL1p6Lpfna zEO}#YY$t@t=+)HN9@j*dFz9Nj$H2~3xSJ;CkZ_sFXdX6JgDUY;dcgK*5H?UyOU|Yn zZos_OWDOcB{43&c#EH)bgpf1iMAy*#FGrD5cE;rD+1}-=90KPf3I@h>aIQTmGc~;U z5}8k>?Ml{{+1~72k8U>l5b{c5UQI^ygfy8xDif_@oC9^l;^h8P)I*t;kEwfdHEnE4 znjvK8FX?xMBs+Frq#^#FIOlt(B$6KLSZ1lNx`OsC+2MVR=hBWvZBu4}k=0TAiUo(8 z|3YlIoRgAwO5?w{V*KR{`nEgQ{W{#8SkOwsSlu4P$!xoBtt};gkg{X1Fx+0fBlq{H z4Ycn4V{{51s_fIdQ2ABNC9xlmIS~3Iv|2jh`})p0l)PWioOgiDqlHN?o+&)f!mKq% zUCwnjw`Zd}T{|AUyB$Z*xrYmN`n7S&9EC|IrnC66m;_;_q-oL?%J@R9KiG;ZY3VRt zXposakJ$0?)Q5a@4E&Di{1jkt@MBj|uQ9(~{6V#S-FIsiI)Y58&SZ26*3In_`+V`s z!XoV9n+3ke2z|GHOKTv>82ttLmpx)+Uup@-MHb?|(f%T9>>KVIPSZM*$FPV5m}8rm z8MSR6W4!)Oig}y}hF$dhgHqNLMXGf!W{*jjby4c{CMT`rwZ=>OEWR z@jTY#2Dd8K%f1rF_X$9b@=b+m5rf}INi6#k`RSUP2WA}G@t>YiNV=YZgit%k1?0%S zSPl3VS*Pwt;8XC~ zWwH=-47SG?w$f~u{Gp7kHuLfVSlS3R3~mV_+^u$C<~zFT{Au*!~yesaEhE6K$!vF`H5A5R4I2;R6Pl3bl4;Kc* z=j2BnLi@>MJtld(mC3!mSiRAL8HbO1EbWtOuQ=jY$?)C4^e8&!YI!JbI*rg!#J41> zvt;!~wJYoqb|+N{F|SvTs%QVlz|wv_(?Y%1Q2GPIZ>R-j<$vT`zalK>ogGxv+Y<-clW~edUI^IpLZSKb!`Bi!;Jo9ehrvBPq=(C6eIy1Mi#*U+CV0che zNVPcL;97u>SY(E+!bls_m6~vSi=w#ABeYMxC0|>MM=Y58bYgSj3vxs)klU0a_KmVu zt8g``0-*^3S=L~4f 
zP1Qz7nQm7Lk$G#Y(vLNFla08=%rj~wN|~CKgM#?wr|lbmyfw10FrS71Rj}&08Br&` zakmHp@ePX-sPsMmDhTS#h!|*Tw2sSY%IvUhlF3~2HsUsE{XU%-$f4}2f-TpPox+zx zG3pIQlTG|>oT(ji$U0^gkZ3k4L6cf~0zrhuPAyg5I!2~qm=OUYgpVyaxaFjABo zSk%<418(TL{k~9WN2+Ewq{^g=MonL+(9bnCM-Zd+9Rb9@Tt^YDjp? zdZ*z}=M^`aR{lexS^_xXOxM6DQ3c@`o6}YA+Grc!F&UDK10v~w7xg1NwaZCMR5X=e zvJjD}+y)U|mFLL(l+`8$P+1>d_q0yo>uSj&cad+J&}7DijE|g1OedW<{|wdU$7Hk< zNJ0i_@J^b?fcpoG6*5XbuJ`}oygD37B6E?bKJ)6;1G2GakD_d8|EjB9jxLsGZDNmn zIu!sKhV{~qdK~gAj7PMB9;y8HRsRFNBHeunMEyTWxc})<{-6F<{C9Znf9~Q*nziq` zqylojSV8_68Xi7skfGh2=jPJzSR6y)`d?`7J*ENqa*gN}xss$6J#Xq%_9hq(jD&IR p_ZZ&EHNqt%T&2PVRW81MY)e=IxxitJzgr`NNQ=vh)rc4d{$JHt#@7G< diff --git a/.github/readme-images/proposals.png b/.github/readme-images/proposals.png deleted file mode 100644 index 4ea784b996acb29d92519adff03e4cd6a9b8e92c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 76344 zcma%iV|Zj;v}VOdhaKDLBpusMI_lWAZL_29*tTukwr$&)>hIp~-kG`cV}6`^>RG31 z?|t@Jd*NO03X_u&Lx9DG1pojD65_%N003w;@S=f+1WKMLx}gC8LV$#@fTHWl*}9Kv zoYKqFbt6##Bcg6V-f!5VuAJX3p`PegYP1jtdGofjZ$UxvMv}UQ&!rK7YUcGk=5a@< zTC@c@8iSvABtzzAD5kIlSwxf!{*2<@pGhE)xX5Ve_ug8Vi~VD_uiTSq&6DqGhtf2@ zYTZJWQI6)Lslz_D6?G^?flZlp<`78SQz>A_9vUo09K~Eg$O?#9#PLNuCtzq zcWWwv`MQP`ov~0frV*PVKlEZnAwEeN7lR=P&A(p$?Sf=JNiB)ltl9ntSx6(A!qqAn zl`v6Y78X-Ah|lTR2nQJSlZJ^USDi>cbAO@E;K}0!O}bTe>nTzXM(e~4$d{bvW(Zau zUkHYu$x}3P-ykRhk;k$6I9D}lbj>f_3+aAf$1OJr>bT z_XZTbC0(PbT)2Nb59~t*Ol={>JRNtbA0Q?mg(8oaM;gt7ElQ=y4m?cPA5>GOb;Pj(U&-uc6E_V>*I2}%368o-XXU>rd%^#nJ+7-Eb3*)kwUiN|tXkd3`mwFsP?=wpx*R+V(f6S zs}TzI5UBNxW6no!@lj9ace8fd`t?AYbn3qtZlsb@HxW0+&D?7xUk~}UzovZlSS3z; z$=icCv+iK3OqckPgNe)<1odS##}r|u&5^Z}FQC8G_xt7bI5QG+Ye!xmkFQwS_!I`N zzQb5}%dzizm2FM|Qh#$hR-DDq#yyOljWsc56;8Xa(d4UcsRDm9tF@3Vo_C%eP##-3foAe(0^T%%y+pUEE?(Ep&ScEj62O(G8C$V;*c|~TT)Nm7 zbQY}33v)%&p?8iyMN|2CM=3<$oh+#)P+VDzRAL&%pv*VN$UBJcIg7%8iMX^fHaHC~ zkgV3*o%aRf1cb(UG61iZ>m^z!d3t|z$B=q6b0D(&cud$hx0=Fj&l`QqmXe3^qYq82 
z#bGo4B7m5{MHC_qb05T*QW1764Jf&siN+R-H9>3)c9@1z%;>cHsS3gs)X_V+$t~B`rjpFVhjUpSovydA_6z|@m-5df zLchSD!H;vUr)^#Qq|!Ss6InJ7CfKNg+g_w&G{q9zue`}a+lw3p6ZEKTHnbTH#-=j+ zzY{C_S0=Ox(dFZLbJc3|?_Ey1U8O$cW)%Rf#!*3Uj( zc#M_r{RnRHH}lR8!dW|hXm=w48yI<3yhu3A>DMZyqTpIR$4elzhj>9Szed232t&kWv0>;f2j zSNW~KDmdBg!XZ%z%EeW+4Q9-x(4Up%(g6pw-#qHr0+W*yl^M`8c)Iw+})b2ycC1G6$!*n4EoVC zeq#>p_kislNhg|Iq*chM&VY%AKxNjaq6*?SF?j<_n(;PK5_-I%4YV-_e2+yPE>qmv zVu6({lc)4ufPLCU-t=V8AR;k7Vgg8EeY*JglUHt~Y8~TJ&w;|CDI*XhYYp*hqRIW{ z4kwa@M7_)7laIIuu5sfQ??;gz-J8gwmQyO40wSOJ4J=3u27q#=vWdHk&~Z z9A4sEr&Ns#I`C-$vJ17MP-6liuVW(cm_3$Od6|Q8g+EFhypu&;NXQk^MqK4wRZhhe z4_aHYvaIQs+>8awoHk0&{}V>hoN{P1H){-`&A67F2+%&>OlawADX{(&lY5X1T;wG=V6?Pv+@Dy((Ej0 zkT{*kAy2Mi5CjUuvvP0x$GGhPOb*Y_N!5m9onj?ee!s)o7kWeei@d5YGzTgXD@=aN$_4%{Uw7A zMDD(Z6NI5b8C17Ns&v?p+9I=30#F9w&nvEf!LJ3XYZWE{s=KDZ2&22~QhU8X0Vt9L zkjm(dwQ(QePGIUp_w-w!W#Cq%1D-q)|6nO`)yG}?=)kh?@^}L7YzJkW-obt4l}k>b z+7&h;j7%~gukhH@O{CHGyfLMg=zNRe?NkKhIaj@has!2DZq}}xZuQ7ZslHwWv#2vJ zMw*PFXlYq4E@NDo>GpCZ@hxMw66g%X(z0^r)9_6mL9kO2zP8doOMsmX_jp9$ybr(! 
zX(B;#&}>B;5{tEo!ZJLbD*B)8zhLHwfL8-(SdHd!`YM*<=7;Lz9IQ8SiE-|Q+kMNhGHQ>uQ#+1>PKqR<%Es+$0jL}Z zwbp66PU7Bttg0!uRb|m-WL<(K;&<9p%70xqC`F{Uy+oh zxDXFDn?+hX!s<3fcXzmdH`~RkI`<`;MX&g?hOcLWzIhn z^IssT1tQ@4z8=L3YcC;s=i0J`_}}4K>e7yOAZERV&_DCOBDebB_`0}gk?rRwgQznb zZSlIXe(+#FBG0kDKi~$J8(OV8+SgNSy7yqpPB3tO2H(;8r>9>>c_va3kj0kT>kh>V z$&*3-u}}9Zf=&9|d8Cs+e?)1fO&(K$+&X9dRsRLT(_1(|D{~sW`SAN^u`r`7#Mabu zK$Yx;9#`|739g%gt6uOZw9nR~SZ?k8c3vcdg8=xz!Cqk5a%*lroP%)X1+`A^P z1PW{7Pdr%j6m-ur_$_{$b@t&N4O+hkf_}inIAN@xg^tu*$vVoV3 zD=E|l~m>uA$N7^m|jcaL~@mztg`;8E4wQwGGtJ>hjE@ zoZ=oLxv8>0F&HQPN+;1SlR50xwIuj@I@q>;<%0yxU5H zsDH~^_|dq;6k=3AvM1#|v$XRZ4RFuY5}!IfHk7^XTIcW`Cs1_|JeD`{5)8%o(x**Z zx;1%=Jwg|6u(1Fi)Dt%oGq8@B`f2zH3F)Oh=@5-=MxCiXS@-3%+X>*k5M;UPWYq{U z?w1b_%d#=HU<7Y@?Ky_U7%NeV=&iyuZo|^jq4Ds$Js_^cTkF(>!LXfaBf5C#gYG>= zuO6Edas8pos0s;V8FH1WMH=5i2S&fV+AC|@OLtMwzYRlwaT09)!*Tsvw^nt;vj?s7 zyQgjAjO~zZa~}u1tft?g!9$#<${}kW@=ikWNJG@cyPy-G^-y?*<=yb|6r3L&72SCq z;b0MMD*IOTAj+<{%$DBb&`RH-i~nUD$M@0Uawq+1rG5~`)LV+QQ8}keTV)V*ztd=U zl)T{_XOyJZ($3A25~ym+$=iOF7W2U_SN$2ocW_mM`_!Ov_*)enNIr2X>TF0j;%~pMq;;%7+-y9k9Q3oP!EGN7{?yzlBEjuC#!atNSTy z2uPS=H5ba%=wIio`WPA{)^d%3JuDIclT~3YJ_2cr`J`v-x(i&^&Qq4l_xjt-uj{W6 zbef!lGXx~qXK<}v7sCf!ss=E=qKoMRC;+7hmX7<&#l@$NculCSL%{OpSBtz=Y@4oD z#4eTO-idd&djcOmbhFpmld@3+rD;8q(e-Qt%$%V4^zmXfrs_?bO&PXD?IOjN2?gvQ zFnpG)?k1A8l+ikm6tV4WQWdmB!r>2{KR@@S?GO_y;#jx!DvW#~4d<<{*2}%HE)!pmcATue78z(n?JPw_9Lkh2d}?Y9?wU3lYs zmT!R8tVdvr<;$yiow9c2jHkUXC{FD5;n{2H-kmRS&D@0AI&f9&V9i8pjMuX8ou zGWq1WZsOTt4G}KLE#ELSY6?!z`w1R(FOA3S08$A*^?v!V1KL9{3w7C!e;Cx5So3hM z`<|8&Sos#=MzR+`bzu|(Xo|!FZF^yt@aG4j7nfPFuX2}f8LTh#fbp*G7vBZwLduUP zpL0_W(Ew#VXOG9*KYsA1Hk%PNwEJCylUI{=!0TAYVcLU{-gV7slemf7awMqzW!9iD zkC@@ZNVQO0jVHS-lQFEau{Fo)1BT~5_dK)j%X9tZL>)WoQ2Br zozh4p82?FCEFP>%jASBHc7d*3rZ9IAAJ6DO6!h)fN@owovwny-i+j+Fg~=22YX3aae2`m-{c>)iFD81NHkP4mp0vA{fJ)(}jOK<4ZO2;{pcyUX6U?(xs-~jmt{X%rz;Hpvs)aigH?`ri zeyrr&*jq>W;}hK?L?IPvi9zqc@I&1`&QrQr^2^z1Cx8Dw+)Xm_W8A`c4Mrim%yVO} zyA`F?v8>sU<*#2zx@yLgZi9tKPkLW}jqu+L7Re_(?zJ6%=k$yp$s{pxkS`TkeSjYc 
zPj8ws!$mqMpQBThiO@(`Z8_T31k(MjCxXI_ZvKbMnP^owu^uu#K!UZ%=pr zkZdKN{$QR(EL)60z81ewNP<@c9k%Q&4lTX?!<);+Km54mOt5JZqK3A&NaQl>y zI0|O{hQqc9UMxD<)F)!WYIGN?CxFWqg8lQA1K;sNo~eA09ix3aa%V9h&2%y33Q_FY z@j&0XWU$&aitWug5bpfU&UK6Mr?yVWcpoK(8}-opJMKQxV^^~w%}?!3X)_*=G$)22 zSPO!+wdfii9`vJt+*kRB{K6PAK0JEiA0!)hwB#a#cL+`@AI$Bc!^?^R;D9>nmLVl4 zOm*~v6BK|*af^}CRaf(RUCA5;V8zrzTg6z{xn#W_#y2OuuEmlR#1`#J8*BlA3~=T) zRTeSJXW}5RSykWc8F$u`GEk>9{pJ1C;&3o)`rVdmOP?+z8uJ)?0#Ay=y{)K#XuUcv z*Cd|3SR+6XsPu>|r}8V?bL+6RCHs+OullJ2X}-h_g}A$BpW6J0i0g!}t`N?2tZ_ys zjrq=9=X$p(UgeU)jsa)2lDn_+^*;L|X+vWyMHHnY;3IJM>?s=R9^P9!$0YOnW{R0# zxK|e;OEV1@VDxcr3V-4j;;T&Q0o8K? zEF+dfB_;5^03A?=R~^RZ`J(&EqdOZiLvS!dCZfWTn6u?)0C(I*kA9VZUuRnRsVXNxHRtzne5H66>4ScVpa+_|2QQGK3dN zkuU`qug{Xc@@Rjbc82)${4CsBz~;<2_veDLq~GVbW_O&O^I3!}IV*FsyH|j=hKMe` z?l@H23e~TAyvCc8RuA8e!077zE-w1pN&{cUq#`5PFI5Lt(iGfLH=jAlH7BOyoDmo0 zpdnq97|%2em!<(bLln}RAl$I$?)L)KZ$<^AiT;Dq%FbLPui4=ofGH!~@D1!izJ;-E znk=nV2T#s(E$0MKE%I7scic}^)W~I00_ukLu~MBl<;y|7%=6)i&GahH_W0vV>vHt0 zgUT?&x0Di5hF^;u&i6Pnjt4Xn}aJGW{Wy{ZJaWG89FPJs^Kv#h3 zq5;|^-uuB{Ns)Z+8dMA^}S>LVvvx`Iz(c}&>?Olw_tX~ul^>$_DZ ztcC2jH{x%wr6KYOnw|Y;d**OtP`+Q<*n5Lf^LdU}55DqwKGDIX8`l`-pdKRP@9AU8 z_Rz$*c`R^7G5TX}9tG^gP_1m<7TF#Ngq%4^`>-X?yr6*RJ4=Up#|rFSjY2r!w)TD& zjBU?G?W&^fhY_9InxmnIU?%JClax{|0ejX(+~BKWppX1K{{G3r%?M$9O>A! 
z-*+G*4YdMdB5BxeD69F|b<>R}l}pY$B`XmQldi`bRi+^i5gn-l$6w~CiXmyHW;prk zIr&+dnYA(_O+g=r_uk4&FFs=e9JTDKx&uxuY{WjMIUu{$sPbPhm#JR(W%&qL!IBBe zeRd>`BwIBp2ooQ?M6V{l@ZLEo=9uP^Zkl4$Voo>Dw;K zjbvy4R`_$_dftxY+wB<|2KlK=g9;SdvW61~ZP#V$Ftz5d(cG6sh{Dq7@Xgw%U3(i& zz8M^&LQh7WN~*B+;z3s+yy%%WM(;x~I12P%S~47&P$w@=*Wa>1%*Rh8MI@8MtxXL9*sDhy*@*SR2xh+Daf3$_iVq&=#rsn z4gEpN_)XZL-v5OY|8iv6r{mZAiAdc{=lX*!DV}qBaYi{9pmHOHL>w+(Lwy!QK8Z4W zBXx`A*T*AkWkm_oCNOC6T1m5_+0Y;9&Vj5c>e|OeF>aSS_Z;LmX3GzSi?)3_6dg0o zVQ|$8D$W75f>3k`WgNN?gvjsoLxF(p&5>&e=JT5|+{jFjezoR(dk-6QGm@wO;5lZ( zPpS3o{OE`}x^fx*hBufqkHl6b6n!gMhZJRwGbXj6E6+?hhh*TSHk@J+dUL{AhYSlGIF1yesKUB}48P0G zNNSXiSw(>4SeADrO&Q38rj{wVGTWKv)B*>Le?eKmEte3bv=8hh+~Vj%K{5UN4Ed0#*v#is?0TY3inZkI%XXbvF>_q6|l z1-cSD$qRVV&81|d{7K?Xe}DuvJOc}(Z}ENg2F00#Z|iQT@Tkp*q}*Y`cG!SX#pT-^ zL_K+jX{+Ck=_vcwx|oC8_T{cEv9`)IRl2PL0So75?M63yw#_t+2jduOgUh^9mku5+ zXjzq6VUdXx6o=O^4m|-G*FDzkgAhUCVib>IW^%;hR|-`pv^XBSYWc3^RQ93(H%(7R zLXZ(dt<#P{Xm={v*d|QZIvLjfLO#SqdvV(JfS(tNo`W0$Z}R*Sz9C>Do#l}A=;v_Z zO}5<0l^c`$*+#Lh&>r{=!-qb#F~bmX?JEWwTQXjwueBXUf|j_y-2QlK!gkc=9;M<+ zoNc%}@T7fy!wJHsj6*7H3R%yG0puch05rH(bm#PCE4^x9lAo{Th^Eaa zeDi^ri{(E+CJe01`m3@B=u6Qe1&2> zjotiHJG+oIBHr^768S^p?bKA+IojZ(P>RE)0n~x(XAQ5n+GYw%nhjp6`-I`gNX8z9 ze|&n30~fFEYX-?)=QX@kZf0KnO5EElWHhy5rVy7SqZH=j;UF94?R83kaC=jn56_>; zcvezKA+J}6^hx%NH(?OjOc})xVKog=f0y!f~${c`L=3HDxsUdE_a09g4lvF zT`T0~d4zW_fwH=K$i(BW6&Dgw%*)^4EVWG2KBb10H7WBoAt~CS+RYs#{LPATq#Zzw zGdAfX1B4IF$9{iWE%yR-vYZKkMQh)Gf^d-c8_xIgWFqb zvDZs=VdY`~+GX0y+w2cSV)uGf^xj_oP-?plhj?9*FY@`Ui?YeE-?Nvb4hs9xJ=E%} z`e;ahU&wSqN^XvItE2wnYdvO0A8WIm7&}^ zTuM{;V1=_$dQKo*xaExV*M4o#Lgujfw2 z^vT-Jvey7$sFO&v}Tea=29MaAO)WUm<&2gC9TCw48 z<*yxJxRvjH7vtY+bfP;3)O$n7BJlOk3&SloT)IjqTNk_+*(#AAe`O*S)SHiavT3_? 
zj1cIh%MAuir+mZ(j}=y3p|)6Z4DZWt@9}DCbAePwZ7R65pn}*;>}6_6S+Ja`(2(Cs4weVfOBcj# zwB1w|Q+C1|=+&f8Ka=or`Oo%IZhW1|$XQcRUsaWV>%}GJtEJ%frnXMJZ8pu$V}EDH zJ99`v>5%!XDnU{XYR)4c+um*aGUmaO6hfa_5c;r0b-VoWfy?_Y!uB%8;PmCNPPvXZ z76F5!O|I|*=Wc~MV;^;(v$b&I`$>xgnY4T3f!{p-uIgP2(gEg8v%a3tsm{We4{yf& z2!rW@FH2@O945cHUHO{P;&=lGWd@pi2nDVNAfue&$*Il3@g!w_Di-$*5we}UTy(@<)o#wqENovxy$d& z*ve8THKK^GtL@^0>fE-9nz+?DILmCG8c1^(XO<~5ZLazG?9wAZ1x0j$5fAmW(5%l{x39X zb}n5Ix;XX1_W$O9{JXw?IUs`nO$sT`w1BR@zV4H;x9)Iy5kV#(n55-v4*lo(tQ!}` zJ0*RL`=NGBuVjWh`U5K`T1I~jWU5{J zXb41U{{iH=Oy~Hk`s&Y=1n>dJm?TZ$`Z{D>M$GM~bX(z|0iUe^0!Ks&LI}jm`r6U@ zuqBDzrRaCO+U$tJrMrBTx&4$_ob0eKMaqa#0Vaa_#ye(e^=Eb{f(GIxCD;gwOi2fP zH294|Fbp*(a9Iy^B}V4;Z(6{WKo56vq8=Hkox=6nVhtW&yDq2~Zu4S8p4QjGE0r`H zGg1X3*HB*Fv>6k8=XQo46$0{c{($=;3)W>3Vz8@;hvlndw*C0kP2uxBd%8t(*+>DN zC^pS0zD|cQBM2r?lvHcAAroCTi!<_3FcQi&ip1j&GPY!$w|1t)1ZnvMF%@SW=T%)r z>+gIDKUSE)VDe*|REV8UPSH+jujN2mWs$%dNxpWBPUsymK|#|ZI(8=nB4>AaA_|fv zFjZGKg(|YRDXaQNdFcP#9APPVeFW-MKcDKp9((d@ZAhEUBS^zz|Mfq9eS?oBKP8@b z4^*XBVqR#9V!UAHZ%_TF2H0f;>%1o-&=|voj`m>Jw!SeWMlHmhX(-j6{}u97U#L5! 
z!OZJ`Q{05Z)J+q(ly9m2*LD+Q zfZxFV%Bj%c>q`J?4@s*Xu^W1_pPMqB z-#h3vX*drSOT42%wb2i<73gFWxT-7IzD?Wogh7tAq7R*kz`j0I0*Ce*pjNTLqX1eo zuCfDzvEP)SVoITr5XhMCFxPf=!=j^!`jmbFD>62%BFU;!+@-HV$J84omgWj~`keh^ zhN5jIKN!2SE@$ECr2=S$1n{%taCAG={js5eFUUcVm;USfn>49 z>N-oG-q8i-teLMRGZh2hUPx>xT zSIU%x1}1=hVXDBV`lVGPwt)-*lfXuEXy)4`w&0n#I5f+R$2r1}(iX%MyvmuU%4VaA zC_+akA&|!E{j^iXsuuguOp$jGk904sdir3j+Bb<`2@J~Op!_I+e@>L?vOw83RMCP+ z=__IDexcrZe&>;emv7oI^IH&hz{v1)7AQj*skv8Ei4XC@Rid@|r=+xm0n-;JYQL4y z`Dd0?{eTFG^nLipdS^1@GZo90`8V83f#&%Net6JtKy$P1L*&PwYczXx4RHcGGvIC4 z?7pk7_E_kKgV(cDFhQ)R0s9olFon*@H1$sB=@rh*d>%ETKcB*^S@77CnJ21LkzD#Y z8urxrWhaGH4T1;;AOZMi^*JI?)vNl@<<@%IT+7p=$#rDeWg}BsbAH~XLu(Dc_E4{% z?p%T+Xx~E`amYH!f5RAxhQt`=^%uO_&L{q`B_}>kY$9+h&h>yKP>%q0xM>q zSxohCyo>O_>{d}EO&yh{2>eGIPF3RQ@XAX^8{}L5F>8GYEavWd?-roRhTWCO3Qkdt z{lB;HWpzEd{ra7((^;9%TUw42a>M+*()UYhU>21tOenjOWh$6&8n>M~b!=R%;QqH~ zCxfhBl&`Ug|L`B%ktS+#bcC!Cf04!JMJ-iN{b=IAr{H)Y@3_-yx2vw}(|702_-nr1 z+qX!&9Msx zpr>|vu-Tr8dV1=Pr}hUmDSlH)6YXhOi3}OAt*g%4lG`0!$j@bwb2U(d%d|!#HBlm9 zL$zRpm6&ev-2^%@rk!02FAx^ya=n3C7`6t@TW(=j;?A!G#WsPYfPeh31R%7PE3{li z6gK%tBUQ^|+jR#s$HqCmpY6dmP!E7kV6z+uD0!|AgaT)r#Na3Kx86m7sVbVdJF4NC z#|l!{Df$>=oH?;JUBTO4fw6#Pr%eX=@15F-GSl1bGp?p?1|)t=w0QG>N|@i-o4|Ir zR_7f(!duB^5hboXt?VzjgyULMq}e;qn)zK+N4 z_%fnD45&FmrJozCg&v!(wo7tbODIoF7A+KKQl<(bm!<|1-D<<6p&H2mTjMO6twJfl znqzD$d*mZLZ)0|Lq=htv$mxNWNT~Huv)YGCnW~Isar(ji(phXvSyeDw#LdmA>eBF<$sCMy5}>b8{)d_`E|`>2$OVwvGfP z3Eie@qQvsD;p@g}k;OA4Hf30Sf(3z#|2wfZc< zWbz)Wh9HcMg7F#w+n_$|2MKyq6R=XS^fB?Y z%FF{I7e8H|4uB>#R2V^6WJxY1f&KxEc5>)IekcDCG`^PhBM% z1ktUE7|lycp@R^`euh9G1;DCHgX-GNq5?vIdL3cmWPN>93V1eBaeXOW8(^CWrx!n7 zpT|Cdb4f&Mp3NDl{UUS}YHVV*{1Ke^{;aEBIxyx!b0T9Pgr&q{`fk#{8>=&WRPx%= za;-&kW!K@Hl-m+HsUF$$f)mab3n)4URWxZ4*sXo2#-Us6>eW@UVDY>f-FiAjv`|_% zZBN!~KGAB(Zn<5#m9q#f=RADtOXx>?`l^=Lae1}jH4Yp>DdRp$mA@9D+NjM!Q)s{7 zR8+Z2cp$tz@k9njU*wkIL9rH?=iv0QUiAY@kLW-@40ITT4+A`eg=f48)xWO3LDavT zC>NiEknwS?Cc(02JUv;|YPDRGrVncpgOnjZBCgBHFOck0EGiTMK zb$eZ;6x!Q#^PfJxV5+z8$M=_nLQ@eFT?~WsgI_~>Ia!baL`NnkSm{7?uWD;WQ2}3o 
zvm?Y@fD49tL^)Cboq2+q3oFI34Be9gB)&5N)98cXV3~2Qt{7LEy`R&~^lKKiF9M5_%M; zcRCq>rE3u{`6TKt+`9e5;h%Omcv z{^~@{hsW^o`2H%Sv8178k|$KXAuW2rO4r(<%=^sVnLymgNdto1Y_+ z1x~E49vhI}oL4*qkyZr(aTWr9v(gSiRyMAXvrYSugC4JIqG3whtvWYtZO{6Rx@o_}MR z)-1g}yU)mAjcII+)lu-sVdmHBsSzgw(s{mqP69r1{A13}Ek6wDlYO;10sg`6x+aih zon$o+vyS%ns|}}Y3D+PKs&R*Z9-N4qoy}JONkvyUyuN2P4>1j1Jco+duqB8^?p*tO z7n^-GeY9N|lY?t==y>F)1Org)Lg`j48B80oU|%n?{BO8tI<&hFA~;{egLgN{YV1{% z%lG{YB70^_sumb~eh2T`Oz81W2$PldPfA9HS1(6o7#J*$!Llx@%emkcp{v6pk;OWn ztSF^&M3d85q8jYnR4f-%wx-;i;pG|n0%f`ObzM>-}QSr4j98PL-ETSd|H?s(L- zq%JNoVEHQB)%jR<>cQptNQZYx$(*WEQJ2+|^5Z>`@hIrtb#j$o)0fBi<^AfE%@>}= zL{rIkB&4V@ZP}SJbhMI|aELc#eDlo6`>?v@TvebMV=NfSU@w>ED+=V#4cBC`x**ee zbH`^R0{m*k4pvnVG(7H5_t9o@21apl*;hxhpKE9@1}a-@Jy4G-{>htG96Et+Su^HT zVt$*pNM|pQOauUQA2(~V4C-~8vR9rH_~`K>R9zT3-S3_#e|m$r*RhyiA8IhnU3QBe zadBFWEjc=>jpbWmYQs`5Kwc9W4{%6`97Q=H)_H8P-tiqC71O!e|DK5h0f8Mvg#*9K zxaF}kC^Ng^y^r0MQ}3awVoJ;T7B8#|zxshphWGlbq1(~xxyi%B-2u-1T@;#NPb4tDts%1{ zOdlusW+=V+arcPdxATr&Wk28%;jJC-k6Jn~%f;}=va2N==c#Gi6au1k<3Qq4;0~WH zB)S3Y6-AB1hP(Y0A*XXuVTL)&9*f`h(6#`faj=Xsz2EQs7>_-qp6_hySj_46cdYnj zhk)?Ha2|w6ME|U$&`J#9i^sU`Diy>6FUZ*ZqXY9#GjTVb&<(p|Q#swkJ%c;j@iU-t z@-)5AY;7@CUpcJi#w_A`*xuap>&e&<>T0Y6qlR7PpilbHe#g3fp|E7qG&`V(IL7Ha zz~#5{FnH&>bZdC(V2R)IdnlrYduE||P2o7<8O#`{NTx+p`c{-OUUSGJw?~SC)w1CV z+2KfUx9UKtvb0O^9p|#(tc9cH^dsI777*`-kE4w=qeW6=cc#Kr`f!N;caD)>byrhD zfyrniNnfUtHp3qLTazgb52~sg!Y8-rldX;JA3wBj%rPoAXcEKz8d1qpjg|)>ot&F3 zs9&j(XFM(W_5P82MlC2A#urlh?$*3U{FxnYXgduO7ZCG!b4$Sc^g_Fp`o4{3c)q{u z;tb%Rbe}N3rMX~NrMeppSl0D8lO}ZeG+)R7UFzN;10e9}?OGQ?{@|p_JOFVM#vkjh zti|Sf2NT@GBerj~Ru}lHx>$=pzJ1?TTYd0w`TG!Ks}w14c4V$kgzU?rRwQpVY`NZH zbO`0wvw%yk)Mu)i-(y;t3^pG3*w0Rj=8S%44%k(Ra0^Yitd9uB-iO)3-!yzT1KC1f z$1`IF(ssY!Ycd%?0k*bK0r4D`-UdWe2xkH#ZK^Svn01%n%Yd>5t z)~8`XQ#9gf$GDQ;qr}&vzmz$u9PQIqA=dNWO>VG6r3zt5YDOfxIbVM2tW6u5zNn%| z`0jPDrVM6md{S^(8is}O;&U#I#^&6X2)Fz&9(B?gGg1N*tNJ>j=xJ5Ee5HWgj_Lr9 zc+C+V_cjn% z>s-a7XncJ;fM=Ki%5(?{-zGSzzg@ZqZ3;-yQk%Y zNWHTRnr_HIjDqiJxJMWirS 
z2TW*{*_rTAg}@5zOJCI$M__4tFqu^j))3|5a%C0^i$wISb4SNTA0CP6m?93p-KAnM zEmnJ0)8X2{mfEnlYAo*{v&S2qDm)+H5tOlnNWAEW0Cyp0!0Sow4c|dXDWP`)Nr%K< z$6(UVYf(C$dm*@jyjzkXJ+Yi1rIBe#4 zSRkc&>b#X0I6Vf66LtDf(+|Uh)g3M`4xPTv(K>#Q70nnLU?*c=d*ys{e{Ln{UZU3a zaizV@^}%1NZbrOOtPb~OLf5o(0{@BAbV<#Ey#(9&om=i))@H@IqIM#6 zpK%E)+;UflxfvL?tmpThtz}ZE?3d4Vadc!`6jU$1>uT7$tPf3DMj9+mLSfeIy_iA% z%VD)yrvkbH(RDXWsK0HMf15DHkv(qlf9qrZ(`D(Bx}X3K3^+Z^XzENy>3;&5o4O&I-;gvFY)Q%}AEgVHNIei(0HBMH&nch!i@PD`_z^a|)t~_x zpC{yhn>cqah_QgJnsYz?D}bImN7DcCB4@VxiBZ-BrE>1s?B2RyHyS1M5B#P%`6EhZ zDsz-XhrF{jr39i!!7vRGYoz>I6{+DTk$;i42i_)=Dng){31c7%&6dhbh2o?hWW zr$G^!uMyMTV3o_x@+-w@Y3v~S`rVDt3lGVss=DC2$u+f4#IZo@B>6PaXM}Z!NsjB* zZdl#7KRUR0+!1?Ejq1dGe@F2 z6shmK!8$bSI7MI4yn-9-DMqZIiXE{ejFySnpbyRbayxI?uP@l}`|q!l*}OicO0;#cJUQ=VB`$m6+Za>WSXMD}u(; zR4wHo>4HQR=({P2KmfF+)EAncXeODjHJx{2YzS7)G)gRrlB5sbnBL_{gJ-roam|OSc-iIp zq=itgH&&Cez_baxO@+S7P(YosJ>N6roX@vEr z!M@F{Rj@!4*Y#rjIR!n3rO7%ENIU#`B6|MDPABd0ORAs3tW@4^x)?9vR8;Q|RMOqn zSa_{jtP7;2hb_k;s+VyyUt&ha_%hVwY{ggVZ(IuQ5}0vyoVLFkerD{n4T|7%t4{3u z++`D6rnARHH!{DOp6nDjapBYoN(HujKkpWwp~> zmn2<1{vH2Kn&6H1WHi4FeQCc{VR;&z?lVea`ujS*vw4WUkHWdL!UPuG<0aTx+)v7j zub*ny;G8EPeAQ%eM^*~tf}yWOp;iz+%9>@fpZA32)OVl8bUQ?eNKXdhgbC^G4C~h@ zR;9#qHwKPx3<`;49=7;yu>;vZ+@Qi|OF16r94%em1xp*WsP6H{o^K55*c#rWveO(m z!q=hnD5*iT_b01lBG;as>U0_^+}lh3grRZf!0B^D-?xO-@kGfmAm=g`WvBTtUAgr}Z0Q|(xVKrT@aD=z4d<-)IWOAWVjaNu8AziAmCNIlQTj=r z#w458Z9OhU32!r-`ec7S+)*c!T-thH84N|uWj}q*{fkD6GKn*)6e@A%IwVgtb9AKJ zt7zyYuZ5<2zao8#mp%Sx14Z@FbdJ-BgxNG}ONB;HCX%U{lCnwJ6&2CHTmT3o^<#5l za|a+|)Ix#bv`H_s+a9W9sBL1GDDi!)>X|-!S|W18P{EN>f*low3H5X!sD11gQXnTt zFR#bI!d0hy6{)`xmiJUx|eZJ5|Dsny1F^ zdtn~(1Jpf2D#wEDYS&MD^yHpS%5ba0q==c&q*Lnz4N6*{6Yy{0WE3%#1g3DnP6lXw z*^KORC6&6b+)Mrh`^tykxr%+vmLbmfZ<8F)!WIHW!PANii4F{f-XK=q?ajv$F3_uhx(c!~!MUMg%D@W6gl*AU>vc2%1XVDmcUz;!&?(xU;6>#&SoLH>0ySkhO9?h81s6RHEOw(|^mR!+PrV ze0K+C?s~N7{-^on#O@!!!(ePDCT@x$tP(D^5Hz~uD`}jy4h=N&A3E7_X!fx+C(8E1 zhB)iYl)aRQBE8W&8Y|9Sij%KEMWE7>+D2YV&2H75?U 
zCb@IB$J3n!B#DDMFavfDr@5N_g%=QkGkdV)zr;=fI`n+e+cF7?9n#&_kWCpFl8L+Q zZ6Q?r_!$2Fbrkq6-PP)>fP&m7)0v!~yiCKqe>0!@ao5u%TS-Rj&i=$ik{2HCRuAzY z*XOu^5J_DfJc=b~N_?u?YK}q8oD+ z>(5X!%!5OH@z^$*E5Jk@suE8DHfa!jx}Dw_y;;W?VLL zq?9`s5E?*z>@S6=LZU@|HxEfRf6Pw39IWx=<64YZlxS&IFO!y-MAVKT2feG`xsDk_ zwlR>dSGz|yF*5HyPi7EvWX~e2xM@a6n6i7PHYsXt+%T`%Q7GXIM>HhSM<&UGsH+p< zmjyzf)I{N3WDgRl;1z?vpwfhW$B(q%o$6pubk@UG+UZigon_Ol(;%LT5%x8OEK^=~ zu;Om87f-m15Azi!J&2ifx~N>Bk|CgCnjH8pFF27z5)l%$pK$;67yOa(7vFIh2T}sONkOPdxgGZ%hj&BBjMQSKq=rZs%2;DG?tqF# zQ7UJ){vD(Ek3KTl>w`vS>#An6kbwrAP=AHFdp1hO!DVj>^TEpQvc8NNndPsEL3BHf z1-M*^w|dysO(>jrTn5#zq7{4hQmE;iC%>3WWEhN8!B${U=D0Bw3ifQ^oq%_@g**zMVri3N2N!QGPuTf z*P7Th_LEikfh2gTN%mZvhU8ew;z?{)VMd$YY(oinnDz4nl&iJhaPIO3i1}qS49MHI z9=dhwbCjB-?P#}h2LIxrP;-?NOXnf-x8w0BRrzb02KYFkma4xySh z@o_|s2rigSImnx$--_l>A)pA$zte&ETsa^W<7a{L*q{_3Bk zX4?pQACjmtig7**GM=O@zrXeX!=GAwwFNuV_Y#BX*0lCNzla?1rJ0(~x6@59k7n zRJ=;8z_hK4%b~04T?(|owBH4OfW02J#s>}ew<06emL|Gqx_JX#JzLw7h+0B@5WpQ$ z*2>4>33f_9mlN}UjZ;6lDu^@Mob>BB%-xTvSYBh_a`^(`THinnok;e zWPap^)4)}3(ee~sxa&qr=f9wQC_Z(dW;in)oi}F*?7ir2{OhhU_)mo1B0x1*oL|N3iFy_Wx#D?HDG8 z>wp*TyZor4gM+L?@0y`rPIFRn8v}&x|ik6P?+mrw70OW zKpd-~A=}H_0%cfrYY+44r&=k6|5^e4PXibhDi`vPh~WVh#Ppp*wi8G6yc57q0Fmd- z31|~l0|CvTp!}`5taB-s(l%Umx7#|ox@7f9; zu{j_iAb9Yh5lM)d3~ReiZ_e!%*x}DfJ?hI&!s~y2eXJu)>>YV-Gj$C zH{{bc#`yJ@H5H!VmbWV}=%F1G2f4YB!(REcuqtrEUKy;l#=CHK%C3~kR)r-0l%K6O z;CT4*uny6_b_<@ERqYzhlhCo%aN3;&o^^_5Zpqv3>z$~UiaB;FBs8Oo-z$7EwGVl8 z68EKg{#cnIbjg0<&PBF=e0)>s$QYx15n`t)^YO88&a##1Zi{1{(6GzgkNES%=b zQv@&d%KuuMI}P?>{N15EbF6zd;AgNTB6Mo3N!;a9%v2mo+DAtsA$+=p-LZ=j8s=Ky z04GX~>8arjIKjFDIjn8wA-tmsMkoI-*=Po?62J8uHa?#EL74(?DVOl-j_ika+&R3M ze*1lc9Q5Q|VmStis?WBMH;TA)t8e~PO6-lIzGx7?y$w=}X+wK7JpJViY|%`N;?&)L zeA+{_PLxT#xf2Y|;aS+yDM$dcD=RLLv$r4Rd>SvI9DeJ*cz?Z1qHld5fa&UJ=5#y3 zC^=u-vJtRr_ulPwY_Q>vHZ{0^g->fFzq?;>@S)X4DYXV9B5WXKaDe4XOi+!hj%A)l zn~Hj4PB2H&Mwt!s8c$i=g{QXMWV>5)5f84I!oSp!hiR5=FRc)F+tN&M%2zuYvkd+? 
zpzIXVbAAjNjgGdbZ07Lyo9eC_)04)eQf#;va;1O`F1V<6usaQTk4Lh&fWfYJVJgu^ z4(M24IV3(T4l-dGU%x5^eqXPOW+Uvv&t^M&IxH$XFM73jVdEW7zzr4lY_5^xYT1^6 zIVoy~rw6*P!C6?1e25mixL)i>?A(NpG(aphUdma(^4`RLucB@C!mr$zxVZD7R`M*} z>Y-*~&r|WtBTH0H$9unjfAXhiOT+VwYl%S_Q)N3NJ7!y#GcgC9=59mzv z5a}M8SVnYKnO}|*SUF5n|w=Ueb@~e5z9m| z*m(V=kt21GG;h)2W5?(`$Z)uYXn!?bJ7IC-q?pMyTWjtuK-d6AvFWv*tl6G^Z3-2A z>n9n_Ph-uc$A7zUe#H4lfE({Szv$Z%mkoy*qXc@KW5l}*i_$&*q->)}NPF$ZkKt8j z53g@IE9i4p$SHD=*Y*C{9;oE|>nmx;fri6zvC~nJh6$1=;l(6p^qe@I0R!Y8;gjJV zvSUms#qQ4&Oa@G93!m8rQ`(Rk?ArY+^Nwd+0Psc!J$$?H@@cF+=kYe?PYd%w>se>8?}RVWdEMg3DWv=!FRf@|l%e>*#E7hH zgV7eQh?nT%*W~?*wcYN~UCh0TDC5^l6__e2cvn{mMkB>zGo&XxmiY)a=Tn& zqoiUPup43VXS3^cq$#wAOk%8EBZkmBQ5@86q!oQj+~i46lBWziKsIyf94QtRps4=S z$o^&2w^q}$Lwb{rT;Eda#ogFt@VP^8ngU(lBQO>sdm8f~ZyNCcI9f8H|j>?}j5EPI{*3Vy4h zyI02yTn;+?CenVdeqQgXGR}k(jGs^1l)vAEb@aSRS+{&)#S2M&)DHV35=YF6_B5^a zUGu3xP9b@l66JU0ySu?4SIx=H%R_wMkUzHrtgIrz2XCfVnL?8Rm!9<1%lQ~X$HRL)Dbf$8K3uX(g_Qd8;9q+EEDStM?Kc>DZW-Az&ilFnV zQ94)JlJFxZ+<6Y8svgA3dI_c|6#H}w-wKI%pFU7KJ+Qdhuwh8jALdDU|MtS~1Xy+5 zdYIXno;_Jm#ORM-i;#OV@{Ns@z{?VR3WR$S@x3R6j7zeXY_`DCs07tDklSv)tbQ2D zYIFW(R;Gx1YNamXz@Z0jKif?_B$Z6YKU@Y!;dTlwT6?YX;a;%msrhMkRdIHbRR84< z+|V6s>n4i3VYTF3gF6DhyXtMf`eLmNCaZLr-Zex$?ekg=Gt+5)wq!BzcIl33wy!^K z&Gi%w(7Al)B#w>QZmdj<5O}Gj>=NlM+@<7l6AM?hq=1dhA|VIXxKEiMx#(H@WF#Z{ zRouV|3+nU6yqmXoG`_H;r_Nhc_tm!Jj|h&063*4NXA$pW0Vs+#EANw!C7KLld=}M>^&`u+1 zyt^;<>BpZ>@3mp$ns2Wr#MGmB@3Pv6WI;0c)xfyD$rk7zo7>e#je)~`7D(d{H($hn z47rh0$wE7$<+*kxLIcyjJy@@=RwdrqTy%J`PjtTBg)RHWmV%qukg$B>3B%6*fpk3z zF%adG3v8r}cjAC0z}v08Mf`A;kQq+DOm8y4K)$%by?KoL7r753;{b_id5JpP#udaA z5C3He;F%FMgXs9Y>0h4T>4Wb21g7S^nn2R{-p>bWOD4TvHNk@5VAu~el)9CuMH084 z5Z=c7S>4tC)>ZhGP|oHMKJVbjgMK3+s54G_(JKD{!tJ-a=wTmjziz@?6{+yj_!@|~#?(UM^w8Ylv8ylb{Cu%){IK@?bg zJ28vbk^Roxio{TMc8gBP)vee^^kLZel!=d*G8zeb**oTDQ#b~~Qzuw^{ts{Pw zo%_zahg*6$(|TD@&PDkh_WmekiTF7PsD_-kiKyfwz!VYnv@H8}pfcWG`4#$X{OUsb z!5>py3AVAsVsRg(mbNm;@f;Gq*70C%x>EIlSQ{OL#-asvu;&Wfn^x@yTBzkP_y;6u 
ztTzn=<3;Ew>mJ~r_cQ>R=}j+5p)|kUc5@~dIyG{BtZAla{ONGg(3k5}b`cG4*G8UH ze4O^dT}J5-?)|0YPA~Vv8R>LIOD&q z%T63bZ`ovlF0;3DRQMoW%9`54z__NU-~FZ{J^7VolGF<00I<9xzk9Bz+OEM|rS58=-{=|e*e^QR5c8d%D&~Hrg zb5z*bhM#ymdKde<2U3io*CYA||0`re7GOqFsuMt@Bd@8?%+@VziLf$)pX#X^y`b{T z8+n!XkUQI8KXlrs<+fXZiO9FTG}kjDH={+wiEf%a^yWsr*wL?rlW^tH%%f9CrSaHy zJ{nhdGPU^fq6OAHJ8y_Ji=j;R=Yp!7@;;1I`G0Fa-)jn31ZEeO+N^qGr20KrB#gIX&L799Pp1+4ri}_|ftu+HHU7hWPRrMU@dq zntOFUmQ{udC5>`>ZdPUQAIgW!Aowa-@aQ4A3r8VG5Ur;DTO@yf?y=Uj`|v`C-GlK1%Z5Lcu6Wm63Z9ZeJE}G%pvDocw{r-^x@W~m)`+lE{m(Zc9 zVB`b?F41tfrc6(G_?Wd3G~gR^o4`CKCq?e;0YwfPJc#ZAfWpv@KyE(t zvXJ}jt>17hmrzbYQaQy&cXCB{BkJ9OUAJDy5)U8t``Y$1k1(u1@-7VEV6_8pB75<1 zlt^>JJ@kOpsozR(%4&FQ!S*pMqU*L%uI*W-zCpj&xzTu^;$yQ8E@ePGAUtL4X}Mx< z&OD?Qlj#gv)G1x)-<#@HfgnDM1+u@`H|W;s-GZtkr|k^WfQiM80i z3f6G?)E|Wy@UAcC@dKPLpylE5Nbj=R35;3Bd)Ld4_OjAD0PRqBe%QC+@0c<{YvlQx z<#WrutJ(?q>9f=0hV}{dd+Pn0z4d|qUB&vpOC8l7Rr9N8VY!+!sq(gE(YBjy1-;zr z#xIQoB@~}%*0CUH>cb1QnG(diA!3}tpbhf)$#2JB?OOPMlt8;hIcpH4K~I_1>`BeW z?_2SDOhSz%zwBZb_~p+Mpwa#-j_lJX7}NcSo{bVse{B{?Lw`kTu18|Cp7gjnVK+gs zhg~{BE%nmhF_%OGqMAV#UjG+Z3hVsgV4d|~Pw`x8ezb=0lUOXy!wEO>PYQq0qcMxU zVDbQ^QcKso^$@{E4!X$PnTI9q|G})3&}Ih0=lln2Q_XQsF5xLhJBc1mjwPoRTz|4QCCS zaptc3$m8vX1B0DAX9ReA|I4^OXCaQdItZGP+|~Pe@w^3W={V(-eekl>M3>RLJb#Q3 zUt$PBH-nV%DgIRyGqH@`yjzefYhS?+7K3m?QCHhn39=Ix7i_yf0C9 z;~%>^77j={xs{VzdFsm79IC>1yjE6JVC!h5hWjQT9{pYIlW~kN3(sp|CqiB7Lqf3Q z&V-|x>-e`Q3I8F@1>*oZ@Ao}mC-Zm+3+Dr_lOXGv6qX3z@!$WnJp8_Xawr-;zFKv? 
zklrnPM}(J>cE!((9?}TUM<4T4EgbTb0x% z-LBz3sTu-lUqRP@%Q|Yv6Y^$>*7y<{S1e|fd+6HC@zTXUWD7MQkhg;i1Rd)Jq_=(2 z=)Hh7j+j(o8;AQ=zd$h>+hvQTe269qZ;Sv-WiGen&Svi6>~qkymw9ccc0t%~&2UYv z2%O>8w2qsaATw<@$rIAQ>JeYv*7c;je`3E#Q6G95c_bkSwli7Q1QAD3H9%lhx!twZ z5tRDvkJL=^APr%^6ONlTmCH5d_iwgzFiL>TvAX24BN^M*A< z(3Oh7U8lwU2b2bV{(pqPUq!+?(J(O5y+MNK$^SBvmsq z=%sNr86=~#Ftn%*x-pq-o|gF*lWIs1i0w0%jXqsq^QRLzxmE&AQb=q zaRKt`?kEU-c$0_MK~$5$Z(+!fbx?UvMLuqbq&YVJZ;r}#ASM+bgRqS=@A?`b)AU~_ z$+5X%261mwe%oc~+-dHe3urcNbV`nfLyDC2TK6 zEXf=#G;gI}~=Vgm*vvUg`$PQ?#+k1}2O?_C8*6eZw`I!hpLA zw;O>!IzGwJQ7PhaRyT+ETL!J$cv8}oe2->yR#w*QdiyFPaW>@|8I9KHB<_*_j<mPNih#?3OZ zr-Y=a_2U3{y=zI`y}<77g=-^B=uQB*&F`Q?omUw&89bsjt?%E zA(o&2!9KmOhwlWA%1^W%qWMH^(D34)Q;0#PfpFwiAmZ4XGVr3lX_V~OsoqG`4&R$xh+FGRy{aO5VrL5MVdL;^4cSA489eY}Ir*+nFi%~f-xRD8K+%EZL!VS! zkxya_o0)P+{#DTx>3dr!BMmDRS8cE=t@6;EzV>{zAfzaNHT4A9^DDhc838jaQ#D1C zIwecPIl1Q`9)mV^g0@~>#3^Q?rvW))<6m%wC;r30`WKv1C>EwUj56Kw2fkx;Txmv5 zYpQ?8=(pj~r0>LZdJxpsXyL6K@!T!|nV+Vr?-Ll7#MOTKq^5hI*Lr^Kc8l@A&f z4TZ24)#xoE`DJ<<`aGPyuY%PA@gDRoEUQh*?wWYtjtT{(GH~PHB5KvHhfpho(znZ!#vr z7$b1}33iufjp*T)#jXz|2+XZd*)d`|orml!`UtvwN&|KxiK&(qHmUs51QG>3`O7cH zTOY9)^WTcsg zFsE(Fmw8e?z;JeON-Z;VVH!>M(}~13o6HVb4NTlu-)Q5p0x1fK=dWa?&Il86aK{Spp2II>1bp4ZB`Lr_b3LY2dR^TaM&>Za{ zU)F0Wbu)yvKYS~Gm-TX4RTvxF4g}ZY(wfvaE4WUs!vkIFxc1>_md;%uTN98$(T-&CG|Ea#9bHgTQY%#30e_k5u^S`}I4 z#NI9zWB~c$rgtk3XH`W!zw;~Y?dR9e3MvO~7^yarj-^e7tQl)IJvIGV&+YD^$1g19 z`(wfdvfJdd!t>bC^of!~6g1|iwt0CGeQ~=Q?37W*BA|leRcN`JC$_F6@m&^L3AQDL zbg(*s{3Xz|DtvK)>QO4AzJlzL3Umj1ZLD$`5L$6Pi}3F{{D%FzUY(LGL)J)zQGINV znhx%^Sa+$l;>DBlz2!7!dkt&F6^^n+OF`>t@8fCtYr>l**9W+L?fY+t;x5IF*;r?` zS_YFz-tD@~ToFF()=Q<^2p;=(yYS4x~V+YZTkmvjS|WQO?X)kh9JP@CHdk+Y8a<4?Qd^R zV#ykGwJL*t0ZWQlrsHtXU@AFY&$iIX90k3v@15a!v+*c6-4M=hMgbOV@JFzLDVkqq zCcs&T0g5vq$d{lQ^M~RVT?1j7nB9s%iccK|S7?tnUgki3_qcq%An{mB^B-dIU7=Za!p7pBZ!+#$N?nJ}d%t1Vwb&zj{> z9>&sSe99Hz?57jqF*o{I6Y}jJgE1k@iZL{*$Pppx=C#$uwSVwubTbhvRNyxm*&=JW znSJDltrPzg$~WF`1aF4FL>CZ-Sdz@X`%ffPR2R2)qc<6#=|i|4+HkhJEBw)tv&6g7 
z6SVa!N~U=ixcr}2f|-Bpsi)7CZu;MqZy~^X#xr6gA_FG6DUiy zHz~XfBQ9u7d5J9<-LdDV-`+L*#`H$I8d@6S-5XL|`92coZbTL!HBuUd`A{VyuPn(hL{(HLb1`B5(}Uqd-3$%abf3V0W) zEcEf33uB;s>mq>Ux>Ri{emw|(QqRb5ix73mDA8DHFei)T>kN^-V-9}RQ=%0}Of{;{ zkACZ9O}(d#RDzBnF>@kb(J-qvA8vhVWaNW`ClDy;uTb@0=$HG0==oKjtFhX-o8ATr zaW`SzDRq zb(?!AG%U-Zg6Tc8?YtWh=2XoW2kSsYO>1Eto3S&l=O?rtaBOB@H@-gyDERF_1Y!D? zGePT2WNJygBJw>=$q870wN`-FRp-ikBKh~{kzS&zbtMm4#f;OPAPlgTg&dh4{qP01$`BhJE zT#dIZ=9fo2#rdJa+nLYV@1>Rp8oC@$jFYbV>|5&Dz0YxCH*6K`njxQ{+7hcBkppN? zi(n7Ff9(k(B&9svWqnuyPZ0dMabo0$J(pq9y_i;>(s~hpj;G+YCEqyKF5D51P>ej zIg$oIGXFs~jFO~bcl4g?+8CGOZZ&A-mo=@XorO8*)FOa~>sfbItlIT`-XLw9>fF^# z4WA@-n^x0Q(Qa7T=nM26DOJ~3PHcMf#rYiw)&=LMEgB=e_Ccf!DjGdm`hVF1DDwr! zHdp4$bsDG{#5~dBz8OqLY(zGo1Ti&5xb^giRRh4qxX%7)`421N(n28&;vQs} zrq^KaKM)8C-}t(Q|3i-fWo`=*^)o?eKpynYq3kE|i#ktIf|K-_CALWG&*wY9>}q00 zVJ676Bo7s_ks<+;Gn~N~yZSu!z7Gn_gT>~<3;pq1LY-iI-=(0T&$CR!t6xt8e|W{K z1(Lq~yHun9>7SAJfU?7HYPO5#GB4kATrVF>J@~^;k~M8!xa0pqN_{(j4$tKnGa>=Y zZ)@~PyvTU*%|x0gpAKdx;D+21$(*`T74`|gas$8Ns;RumX4C5xT+sFQCzBEcnr~~o zpGy5vkDr_U3f7nt{>}D12o8*g`6KfF+XW~e_z*+q93%T9wO`o&GGVm-ko#jL@yCv$ zLPg>Ff3mLM8oFTkokP)RKrW@tGKNGq{?Ug`z9Xw^TI-MM@Iuy%ontR-eWON%?=POZ zRjL&e5MzoOv`n{1rXM8wDk%#ZoS|0=f`%%2w0ng6UGX{&=Y3tckuKdu;Sl&T(0cr@ zvkP>w6>CNny8mMQl#A0h3ga}v0#w_xdJ}>lq@vc*>cDEctP8|Dl9yu#rordQBmMWoh$gw%u<86 zzs3#< zM-QwN3q?;}+Eso4+IR2eb!vhEdlB@|-6FJYV`NWiIxlVC>arlV%K9&i7N9KO78%I<*85}`KCxN2h@U&&6n z%^~`~DKrjCex88H39b8I3ZWg!Om|y6<#y2Gy7V3Qcpy<-2JrXXW zLHcyGAXJak`>3sDj)gpdk%!z`Dz56_JvydacoRORs#H{Y3#oTEoNb`Qmk-;Ob-FXk z|0L``${tD3J&U5hV9!})bkOwuP@?wm+6Orcx%(GYt z-g4j)i@3~Y(1O`V3Yjqsb?3ZLtoD*5kpX*P@OdZm@Ck3}B6wqLXK2)wb3 zF{ZQUXRFwv)o_nUgba8vyPputmU+JZ>~m4|uD9Z*0n^d`sa6->?G1E(^}rfvK^Ku3 zerK2&*BDA&GSTDJT3B7Jd&8c)W(#^swCRcXG26?V0YP_k%vX$E-h~gp$`!Y$Uxd8d zd-zh4G9%^`)7k*zR z3u+SlSo3g1;-)8{E-mlhQQ?dB(-`A#H<{*6_-!uQBtizT7Hl28TXpvBlCAkz62Fab zzI16v+v~*X`N|7D2i^aC&ZhU8&S2ZAF(EKJwzb7~@>wp(f$Pw|iBf}ye@4S4R@*;4 z#|kfke@Bz*+hyp7i8aIZLBPJCDq|!PS6PNH`F;+K)f` 
z8M5Pm$%I;PRJb^7Ya?!{yR~B9)F*D}*uTY=NMZ-oIkArwJoo9scCf$7Z{~3P3eLLBiD?+!imK7W^HG!CIB_4|ycD&1P;)jAB+pYN61Dp@4|QhhmyJt)6p>e03@!xqNzdTHP42h*49+0Xugfk(e#dE@*n)=< z#UlKn5XTR-T_lkn|AK*ZG+f6sAI;2idCSsk@i-M4#OKb&N<1y9pY%RWN&7H|7-yA- z{ICca-=YBo4AavspHGDgz_lx1Zi_Ri3fi# zyphW7PAV(wrb9W7+X*tr6B$}}k?!IYk1exQsWfKBz?eA#5YEP78W%q~M4f?!4BXABsV+JD0~kV;11XggI`icuSxYfuKek)V38Y5-D^ByiHFM`O?zt||E4k;T;Kdn0 zXgrx!c4$74JMw2;bLv~1(q$}33B1XL-dND{%PuHn=0+XMctT&J{nB~zxc#mvCJD3S zBG+i7al=cyDNl=MTIeCTng*U~xb<8YRWgVN{%t-G8|yM*KipsNpoeTSbxHhjN31`>FsHC{odz|54=2T_9Zt# zu}E((L{3IG;M71aTM)_ntR^2bk5+3H{r&Sa*$k3W(GDl-z2g==>22Bn$GnPqVPy5Q zFmV?|mUK_USO&W9>ud*KO4}P(2H+#k3or1Nkzw#{EI)$y^zD*O+8S0jIaADO-_;*e z`%lT|Ch{D!4J-%pbc$|YU7LG78=83h@1LW*f2pA!mocnuHB)U1w|(yXY49=+YRXQv zA{0{pIH>>3a*UoS=VZ1A{FQGX(a_9v&8`>RXt7!xOMOe%+)ZA%l5i(CMq;QiRJmkt zx$n-tS+X;cJM0O}on;?+dV*Sy1IK>LxeZe3b!}E>qVa&+-KSAqh#;7_gFs4k9R+pIc`_
!U46m!%>Rbi0w_UH{S?ovZ8%EeY z1#J5|1V+&u+K}?tAR_X@>=a3r(1m{40d6lOoQf}Vslp$*87pD-A-h*8whdjb|F(F$ zM>^q>-J6L^S@LN?M0}e@N|(@EpItJgrMkh1?2l1AQ@&(*cLDQf+Xkm{i;{43-tomO zLgU2a?BgruE14HrXR>p#f?p0#GykSaq=nQZ{>Ku^j1r@?Sb*Mko6$A1rR-) z%S<+zPzEi|Y*>W>`ro)Y-M1UGINjzcI^N1(T4jl;P9G>v+K@}+O#@$jllo5yo$zwH zOtS%vXR(4?X0>(}j5_&e$5JMpanUE4(^LqU4zm%~wwXrx9HRVNssQ~pq21`wLf8k! zpU%3t1SaTW(=vo5jfec}ioez9shQlj2Jq#oxt%k3Vk(t*52qc(pSkAMn_twvH7_6x zt;?~#UnS#@v}c<+<|>7LOUi6;w*A3$C>iWnsmCF=;BWFJ z3r9gY>=nW2l!~tD;vOL;y>;lv((uKGn}^nd@cc-^I4qkE-CJN}uz%H|4m@+GPH}!c zfb+pL9Q&if#iPcFyp#7x;zqjxs(neh2jWHcR{h!%ZKy=M#2 zQgD@E4o#GIAIlnLHVt7?SN4_sr+r}{iEf*}SViy7bbsyqfvH`_@MbjcO^(eAoxcJS zbnG;ZoPQ#3pQQajz-Jgd_`??3DKhR_FVopiJiaBhcSu(}{2}@kEpTTIMu$^vp-}EJ z+QbdM;5cQx3&sZ9whN2uHhRC&keN*rXPnqIvT7YGShURVsXLaGY9K1F>zvi*mcq&M zhjoP-YQCR5N0Z#{jEufwLn&&*@myL_xJrIgE(w;hv)!3)_j6V)m>A>atgi~s0y*#H zLg}5q)|HmsYG_-v9E}Ftb=WgKK4Poc(FT()Lz+=5ZCWy_LcMLo!@yTrJg)+F@L&NA z`tsEu*lin4>63R-xA-&B=92P>Q8Ns9u+PwX+DCYUT*<1trFjU5r1 z)Wp)2M@M|wL{_KpZhy}<$|=LeHHLs{nYncT-vToSkibk_Q15=u$!0U++a@{MYcH$C z%pKwkrJa~(XpL!JffYPAW99Qa+q4i5GlIU=tvYQ4NgpW*6we(J^1}!w=gy$to?z@I(b0bYv zX`3Me=E}h6QHVrxxFYxBOR4N1^hKe8JT$?Yeevj*j~htR711qwzVffxT$Roznp5vC zQ=0_~-4IUn>K!`9@5!y%itt8C$LJ7x{Mdb1LkeqqcU`AQ z5OhJ*BsD}mcsci;WmI%sEOzzakMPTmwjsvuZDZaJdh*`%&Vd=cS`V4`!7qMoJgF@W z68Q7WNbXS>dmDx1AQ0{yb$jf|0BizmY5M-Y)TiUldBAn%(vLsn5=$aK;qade3kbfd z9@@8mbzHm0H~_2rTjk7YH|J6ziIket&_mYxrLyO&Tc^|3-0P)Pplau-B#A&Dv`U9x zu21fxDJ2luBYP1h%HZPqmN`ts#<4JWh(&nY0!Q5mk9TfRjiyv8U0 z>{fkRXOZM|1mqI;Wv3gudIXTPlI#u*lE^WK&4hfwz}Ed80~Gq@`n6{J?>8g2{#5Ju z7OJZ_pDUwabp#*roi#@gIKPtk}i^I@F7h znHT20e0YT*X#_?Ie)BYVmHkCX`-hfY%$!a`gz+d4$U8{_D{))MI|(|*#uHm3>(2)* zq+|=cAD#K}NS7B8h0T<`p%N^7P2b%P?2s<8f1DIgi!rW9cc>Bl{8x=%JL#t5J!Q^C zBvs?8jOc;zXA>>X@9_GvY7NkF^iK}amlIOLlaSY!OzPjzVdj^6zf5@A?YG3Vthb=A zQNm&;>zoE704=s-_JW^MINr4L<@afig)AJoxdS=D;h-|XQZgB6NcBXl^;U=Ip zyzcnr1v6*dCzV;o<=r#Cp!nA11n1AEJ=krLyYTfX=}hGcYv~8IT8w@d!{!FkO+&BB zbg-}}-tBA>;}4{)5pzge?v6tOw{uD3 
zv8fie#X{4jU_h$fCkI#fKr@B>@?%SsoW{Ss9_BM0>0N73$b@?SnRz|VY!A0W*=;%D zk?2+o%e4$kUrpjsf?_=I4_WbicxZmSa;KouYu4!hj|+hMgv*!-BO)n#_?d1|qMjgb z7EYT(K3VPbHh4DUF_#7e>|(PHy}QTcg1e)7GgRbut?5IgW8w>A++XU9sP8Oj4PPq^ zURg>b;|lnPeHQLbb@yj(DtxQ{q+5da-1Rh~d_^r&8m$GHz=(u};`V)PgPLcOwxp8= z;g%K@W2LfRH`(bR1&Cip>mb;~jw%E=d$S34O3w02fmtXjKHi10E$2c(DcR3`t1Ff$ z;PJro_(~Dv0Ey&<;-0^D6y_!Z7cn2`x;p+&U0*wM&d~f@Y9oJjPpvoQ({zgRFDy+; zqLe)k+vwhs&JiblT*A3=T9Z7a&|>MRhiwyQtHAs3@Mt5Ac?Vhy!41SZGbI8vk4q=9QdYH51KI`8SJ;XV+2V@Fv2EO@ z{CEKu%9>x%!WDY31JH9H5IP9Bf9v9r_d7lqZ`jx~HAUkv*+`t+uHEL2dU!T*zA{S8#R0S5I@PyP;M-J8nVp8>TF7sepc=T=&5cTx!70iCV*AB>UoLK@C=#3} z*=FDUyGb+X1zcW5&Sgo?lT+a=5oF%p96ssOaWmXxt&>Dgc$4FejAkrDLhE4VRb=wm zk!>)kLi(9d!Jyp^ri7Mp&66{XnTs!IUI}$n_p-cFBWOPp#8$QrkHP*z;(7~k5OqP6 zC{#I}6H%!p=45E5!{Ei(M~kK=x;3pg*l00imyX@WAaWz!T;&CN%02UlzuB=TZfXyt zrWZ@CRA)rPw^*+>UXEM0LmqDd#0&x74tKJpH`50wQkQ|?iqlI(H?eC6SH9djCn<~y zl^$*Ngk2zafwkQ~Ww)Q7Dkf}_3@`~iY7qTjB1-k$|IV>5a0Cu@S1vfQGg|C6WLP3p zuLEJI%pRZdHd>!_<*2bME0=z1XTN!hqS19r&q-)7@Dzz2UBGI-yTAWa^O#r7A{rCvi?@^uzc3rBwtVK>viusv#;r7+?)2bTQyXy zAc;z%sn;gNx`UAhHksS-HM%G(H6JQ(XmzBIY`8Po8KA069KW|^2cNqAx$uSEl^DR7 z9u^tyT&!|=*Q48FZ!7hi|9q_;)7o((`Ia{ERdq?y%=HJ8iVAyrjy;mmO(f*=CH``S zn_rF&XTmZDZ|yZd>wEhHJYMreLx<7-McG@0<dJP6Yxxo1jK@Zy)u=DU`EiAP;uHto1LKrtu*EuQk zb806>tG=zZ#%1EYt_PO*4u1c!moc=?b;w(IHCjq{L+0EV}v>(Ol$Mk|{lNOU0Llv9t@2yB!Jw`*S;Emu+II&c4e5&}VQ6U5lOsJ&Hx6vH2^=Tjv8M`; zxmWj@t;R2LQXOD#aV>tg>zP8cxq-{YlLSj;mJM&dh#fEku%wU`1oM4A7@x}yl3WvLy zR2X`jGiQT7SEyfBJhj}xE7p=$z{q@ppvkKoU?s1P{@O1zyH5P2yXa{x4*m!Z?sY1T zy#PXHf769NAR`_ZiuBG~6Z1$%D41pSl=X}*SF(eF-CpKMQfSu2bIqWRC#hMOp;?vQ z==lnZA}Mu@z`QcP%4n4^^R+hKS=(K}^_x=OW9gba7%1{w(Pi^DYM(%Lx8S0s4iPsD zS8uXCJhn?Qjb@i)sZ)K4=+1{Me~ZX9`_&+1*_Bx1g>_1Wr$gcUPn?GDI8teJ}Q2)L9_sA3O)9y^Buij6?fl zY+Ri7tUw>108t%=YWe}Or+kI-$(ta&Yq#~eBs6c+%_D0x=s6$oO&&aml#8ThPdDB_oj+{MW^AmG33Mk|Yc84CTzMo7B-4k;6)>Sy3 zCG|GOGek_d9p#?AmKHrrMv^vr6V_@V@3Q)GWg?1mc5|_C2`b+eBVp*b5HMh&BhO6V#_bw*c=&SgP~-gcA(D=(oU%$}Y58_c21k>ddWG 
zuweE@f{3UAUZQE%$M7jbwP4wux)Wc|TP0SQq!Ycdb)D_9meH3bf{2Z!w$_egrZwGk z*>|$6tyNy*Qs1{^pNq}udr^#5j~?ty4;Ne`7W3}Uqn3Z=JT-|l3i~k2u*{!i$2Mu& zQgpxcm^PSz)$o13P@%<%=4Fd~vl>l~7PLh_QLjga3pDh$r>-!133VoTONM`xyFd1; z#I0*{I_j2FX1(OUItzAnHf5=haotVl)rJB zT>9jkIkMrcF{Sh{e8Kx(u+`H8JrSBo*;{|b&4f=UBmQgA*K2Ke_E3{`OyTG-L8VCy zX^;~;I?wOABC>~mi#dB^LO3O)Miablgm!t)i`A8|qX#M6->eO$)>YjL-E{l;I1tO~ z5{oHea4N!0zVBD<2)h-YH{)`1;G)U6f(_?7B;yzvZ+lO?(_kALPa6&?_MWw6spn-5 z-7uGAfcY0RznRc&zp(}xVPLN5w)=hcphn=0g&=k5^_iAowS}0@dP7c}FVMri1#ujHgrecKi6CPG~Y_3xWG&leLfKM^aLV%|c;tfad;NaJV z18YV;_jrarS`<#%AMJTAv4_=TDR-s_4w%@Bb>p1sd!hWGGZLKpD(%sb&`ZY`QvW=@ zW=sv-_Hz*l6~3HwHoekZSoG#@y@S zeo%*c$5i9#u9*#IcP1r8sS7AyGVXat6@m@s=xMi6&tO>@!;qk>w@l(S7mg5ypkR+}Fg2P@BQd$Bo^p358x z&#EAkS-XL~ZX~?i21-QV%enVSWn$%ZU2QW=@bqWc;azt2aJXIhx&%JJa zzmA!QE7$NQ+muGNnRaz|=TYm|R6KY2TD=o@Ys+gcjiO(0r+9Rdb*Ic4m+mDmC4$R< zafpo}JdpcLQKNgAOO?!G4VFRFv-zE3?Z?X0BE=ZSweTL1XZ9Sz>}>bjms{i{*7n#f z?NS=?ecY@mn;dJkg?727%@sm7ilk80f+#RQsOSI%(FbVbNKH}KTFja>5C3_0YFU%S z*Y_7ySK^M)W@_>MOcFxZfOUK{Dix(dj|k$btc26^Qhh_zmlY$@p0lIAsquS8>(nT& z#SxY^_saM8HnHBUz2A=vSxMI}oun-DylCSYGwVWCK2zBg&k^EurM=xM##nfULa~I& zN_(SHsZEeD{;md>C_yBVf7*gQ`QCToYK@MQZ|)Q79mrUzpD#10Sz5clnYunix~*K1 z!Hl2K?w)A|c|a(>R3t?5qKEUPcS}Cf(;f>Ne9_Mhe$q%*q08^)RMa%ed5Cjr zHNx`mXVD-{7b;C+^=3&E_W);b>sRaN;YnV;*3p}brK8|wiEP}g{Dgxz;1bjR)f#VC zb~b;&m3W{#X22o&cI{EgS<& z8W1%4V{!%{vgzU5v1yV&tn3gw;y3!IE~N_AKiiS*Vft_eE)AGic)8~BWpH|ovE|e_ zV6IgyMSMxr1>vjoG|gde{1*Begp8#&l;|W)ym6L$p50EXo3z)~^Nr=zb)(Nk%_@D6 z-f7y%o-xVQ+VE%CD7B|Oble)1h{JOhMwiUmx;24-3xvy2chOSaSk+=!-B^)S9oo3n zj}p~IGniVdtyB>DFb=9jhx4!cH<_*J@G7um+?9CKs@&tIEj0EoaWAiEFnB*2WuBDR zpUIwTlA^6X%Q7u?u3BBE@Lro4^Q3n(Y5FypOrck7j)6xE5d}Xi_SAIejqz@U)Qn0g zL$GAH&}^L=c{FiOc3OlxBy;f&(FCn7JvEnH3TE`~WSiH&oI{^ut+7AVWn_5DYe@=q zxZcR?W3RN0?Yede5J}#HIqa|RRaqWl_j8=PW0dAk7sb8o?cY5fm)AoSSmvw$rsKe?x7Qkgg2oWM&Q0 zS|n6yq$YpxB*C+Zb4Cb|6yEnHa=U9^OJ8_GEMaiy-jcN%lt_!-Rtbwz{=~IdIqrAY z&Tl&8e9;ajL0SBhvc39Iuzblo1X<=VQAsKLY*O>mLX`{=*EoCX70PLw+@K{v{v~1G 
zvqVPy0{$f1lb}U@m<0#3D1IoQXZq~yhUS3c1h@rX>_TYhhXvrlLgORiT8(lo){2K(YuSG4Ahr7syS7t zJp0$RZw?igR6do2Tp;Q2jhoR)>Pqa-%4*hUiLS1P(;BT`K05Jq{U8P&oqp!_%++lz zp5xO&Od1^}y8XjS`@q<$?m42}DXPBeRQF^zz?s;e#mMq`f<;=I-O}sY3>Vjh&*l`~ z@b9YEWN);Ay@D<|i(=lHOsVujvBy1O@!)d_^e`Re%NG;|Z!44Kl^9AGGET=GjXIz$ zy2c36YsfN}d=a!IRIV-O-sM#Yb4-J1Ygmy>BLsBg=L8KmdDHE}QtnqyDmXW#y=j<& zv=B>!L_0&q&jnfHM3CLWWa)?{yh_GSnj_sx1-8zvv?*L`i6;e=hmn(i+$Fq5%l>TKypS`L`wj1=m6ZPpz z4Ga0A(vx>`LBw^w@}2jT^Nq13me%*&ioA+G17)TkSN6Qguxid1XCvd}HOXzd$inS% z8i%q2L;j^}bxDnz<|BM%xq9C^(f3FLbIZ$i&rA{>*nYYp(~UDL(CeXm>A9oy}St9 z52fcWySeGz^|a-ZQym2UgW{Mws5S{tb#L%No#Srz*s{Y7MHaR!pImjc5?JZ2z&kme z?lE}2IeYcu4s!?1FuzE7-%f;%c&(7GACGuGKFJVTXZ2?gyisd4#NKxt^xY&SGW9~w zpw^gcc_ZJ?>pa2NUL5!jKW=!#K={;p#jVzb%yR8rUTp2ISk);w3qeIQMhHCjV7?vg zjj3+zwl7yYeto|N-j=G(0R2)LziU)EGu5}bwLvI1Jo2#469}&&)ukXWPD7Sp@^)|Z z@a_hU*NQ#%Cy6_n;F&J>SM6xk$@AFfWZAc&gV}1THhf4`))wdFr3d@9Kpd`FQ|7n$ zgvSj7lpKDC;*ng7xPk5M{mSNfowJm-?tM?LU{FPMJL@x9D+AN>Qk#uw7UP)Zg&gS>H}>9kXRHbBIz7>{{!oLfs$)=6>9Nkj3znPJ2$ z`x}^SUW>Gj#Z&urs<%I~|DhCWLSpzdo)-?6CQ?-|v>$#gCw2VeQ)@YS?ulpAbIf@Y z^a26X`sO72!quJOH?hvoN{?$IqgIBu=zFbcLgRSzmm|S6h3w;psp%%Nl*_VR909tu zxH@~hJ_6FSTPAtAs~1mHu< zH8xB(r(x0O{)FP>f5=j8*XA3rADOp-KH$+E9L#PMFZ;CoI zJ9{}S4l}1w!W;ygFJwVP=;Tmw%KfAJKP-`e(;_aD8~CDvX0s3)#(n&S+P{l^9H9EI z36O*aVE=uGSoN<>z}JK-A1wktLcOeU$tcwkBGsg(nzW+8b1pqmmq-)Ad8 zYAvQkFa;3;s8S_aVCyX&iIK&bjzVprixy)?Px|_j7D>d3M_sXYVH5@_;^8PE{irO$ zWEm~=@K2XRfsStu8Sgt!lVCtDYbXCGVc_W1B{b^?UY!XQXsHiZ^Ob`pZSP46_Bd}F zx;sIle@J$@^P*-@UUgFVbbukeRF(>|J3%;&b)w#?MKbYS_%BCvL|q$0KRjuwLD4rJ{Zp0iK(UsHA#y_F(}-R&2A@0a9C|i|C9fC6+e7tFlflW!HW$la@;&Y^is`cfB~+z%Ur z^qasGF8xB|Zw*aIZ%W$FUa%1>_FE0HWzapMf6ZEDNXUSwn1SZPI=gf+_LyB4F}vON z=L+m{N?;m!u}rom?1U8F!a;fVjOAd_c#AEv@GPf5MZ-MPygn2ygK92(&Fr!;Hq3vf zQzqD}gD0Tp1V-|mO*U%-CujVuB~>hSGNirA;Xf-2ph|@4=v9$|Rv-Kozq2nV#2`xP zHxVgI&^#&T_5&SuClPe9G}#R$9yGg!>#{q6i9b5ZfRzjM`qNN}|4~^Xubg9TdM7(i z=$IKe&hO3wC_N1Y#z=IgG;^%~ta$UYx`F=$2dR*9=4+q0?z#aldspsFP+tLqz5D-4P 
zSo6)%U>;1Zm5(%#I`>y8ltz;~jj%xRGE22KTkcg-BEIyFBH#Uj4lvkE;-SA(K3q#c z;vU-;a*3rmLTZI&s_kOf-~;qt0jEu)t#7C}0TlT5o@Pn&PPwsb_80Y-z7`m| zMg|`TVVoKL`DgWaya%!F3>=bSgQ=MkIi;f1A10m@9#Y4*q8ZbTZH#5VQ(PB+fmf01 zY4vJt-rh=nK+mvldnUkDB}<-PN1C$@fh=8x1a!3Y$6pq7Oz77BsKsaB`L9&m=X)x( z?5I6k{h$p6t{QCe6}A2hU(fwk%St&k8B=~Gis=OX{pUYTR|)49{q_v%6sX;*jFGeL zSWA+^!K5W8{btt3T13$pSS@#=eYRfiWvv8#QV0gWT&b_Ao9`0j z-mh-fVBoPb#8U6TsZeZb(m==vP!XW)=k@q2d|qXRS&^lK_@`Z?+z=cYXQ!Q|VjL-P zA;!Rd-p20%1-$ROJb|rOND0eqN`I8LLfD|X0inCQ4btHCV6#T;!+Jcr4G{_H_`o;t zf2)EvGDJHowCggoX>&7#ohBa)+#VX0p-}d3wy77~BZ{ufTw zxc$KV@x;aBDR9^~Ao4V!!^(6sGX=)ZU^BwHt*t^KDWH~QcO4?P)F{&l$ew6?34(1o zkR@>UnigPcwkXWC^4fYr_*_CoZZ;<=Z6k2Mt02U1;*avo0=!Gw8jr|57qRU+{AU@w z8_yi^FA453`GuCgnigc{-xrX(riW->9*8Bb$;l^MZmjZ$}g3MgVj!LnR3@_R-9`6F32q*LprI!$85 zYi^hx8jjn!rfz8z^i9&YC6J#jFU+9rfPn}25Ij5J! z(mUlYUTznGA(i+;F3)TAliGs5VZcqoG!&P=;`hIC9gRM?C;7F)3SWrI{{0a&d0^mc zeWJGQ>P%*;)0&tfq-Ib~s?-0^pEb4$m#RXu$p@S|T;H~Zk}nGOKjyoQsc>ULlIJoU zV1p_R|N5E~{68$fb_>a6x4dI5wkN`BrYb@Ke9+aV70h zX=M5;JB6gh*o-iWKSo0o>6~CRCwCXL|4_UZg7{z(4Cl9Ao5AAj>M6Ux)+*;`$-5cN zSMS2#q~rmRE}tyQNeMaXct;bF`uC24PG`}N9T=2Fb7Z+1Vuj#mkkn$syYy^dM%Qx! 
zJkWw8p$6u@6PissRg&DNQ@0`N^T*?%s(${MSW^nx5Ry43!dRV0l(P~NhgaO8NT$i8 z9Rpjn<`q(f#Zvb%w4qj6sb1NvPzJaoGUwf2Rll*+qiiou6>E-v(0jNGJ7;I4N~TUy zNVn9vd^mKn4dkg6F>{h0e9yieh z@0P5W7I+^nC9ZiH5T#QOl308l;Uyb(C9Fc6jrSq^67>-aYEi7BjMiPyIz6vcMJi~o zISibf@@m(Nqz&UXX|GvaG?hJ`Duc==wm`w0tesc1xumQ-IVzeA3qG7ZWe1Xv=-(~bf78|GLL3BwkIh0uhST!wu(xGvVh&if zX41bFFn>gjZW}l|GGM45PJu}C9k7roL=nx{vEl=#{%G455c0gJHmhT``TnpJrYSMt z^tcEvz|;gt_VzO2xE7Qni<>CHeS$%V!UKM2Rh$nEHQ~~#zn#`vUOiNGVV-AY%C>09 zeJpbcqRNi)o9GT90rJgFq!{gj`~vAl+-+*z`l{-J(~7VVp6!8EljZ3^R^kl#Nfl)_R)I@n%-tJeQ13?#5IhXVv;YZI012ih1X3&HdAKt8jHHl{%UBi$~9+9?8(IeD_b<4rhe%r+!098lZS34N9t*KjtL?4Ihkf&EA1|0T%* zDm_4Fus@@1^wbv&+88>$tS#3^IjMOYM;-iwx8Hj?7d%=m#zpcdbiEeA6nMi8n*+ah<0Fq$I0id z3u8RnjdYrR)Sd;emVBnajb4?vOPz|&+A-2ut95pYL=90<7|&z4itHE9vSParmacbG zQSXj1J;qeo zKo(T(;}*xUZJqag2|SAS+%W7=y!&Q9Wk%^sFHaX%^jEUxSN^#%k6>x#?1TPMV;Su4 zF2=^ZgZ}>R0<-#XfVx#%k)N21_-8$v1cY7Nsj=O932_9A4MchXpJNiKL88k}|}BfsAws^W43Pv=W5Z2PfFV-;F}r9^<3Lv<=LkIkkoj7p_1R+e38p|HR@Our!pAyw^6*Hw%F4_oXTY?iC%@J0RW4a!(aLmDlW*P03yN16M82!Gen*L8i(V4!nor(KS%6d%0y1 zCyQ#~;J-aAskl!0sOm5Gn8JNK7S^mc9`;<&*{m3ZrSund1veg8(T?8x8TEFK-MEWD#_J-`u$?4;hg6jFY(8t`WEc~G&fGk~pe zMM;3wDt3*xJnlot#_71i%>$c(aH@}edcgI-H-h67=%_5v&&>`F*p+?L91XGU2ja2M zK@)54=~r=-8?|YWyfTlnAj^ezL-uM72pjAq%N=$B39o+|VFEx4K~S3qS*h|O?>>De zB4p)MtC9&?zvXsFr~WlpBCvVC3I!JRuGUGKr8zEfxaS#G{5#+Aiv^mO;IHe=?G|-b zt124mfS*yFT|Lw_I68`U=_Zf_8<>?HP---&_1AoR3zua0paqZK2k$w^x{48cNa(Gx2@)}gF20z3b57T8jM-CfGZ!~QEj`1nv<4o-lW?FIu-n% zv~12cwkHo}KYKhcFM!+ID&DakOX5_12u~m^dh9{E*a?e=;8Bqh)3-;SStcBv5Kj`I*Kfk>y zOnM!U41Z4t*W>Etqy1}$lEC8c7XBfd#_67UOY~OCcf68M<5u(ct&7z7zSizrRdCAn z-M|5(C=eS&9}aYT9-W8zCwlE2EXj@V;0Wl5K`QNEh;LFJKttKULE1wLZ*|eG7tU=+ zUdCM1oo88xNQ6xx@fI4!uUv9CzeN@ z6wVS(X`iJQ^vfD)4ogEU?Jmqd#-24-AYA%gV|5qE5r}q66Mb)me?j8i}4NZN%%&wZr;be!bL|z|k z8N8pxR-hn8*SPzm7VA4U{#z+e0@iG(kB=XJOG75O%Kz ztTibwX?5XFo&ELYW6QjgV;5J;HT~~ks!k>gfYH{FI6M`|s^1ZoJ`THSec?xFo!V2o z^D_A~Lgr(1W3GPkQ3gx>mp?;bfl+)1J9QIp%(7@6S>O3XrTSbZI#3hz?Xx 
zo1fH1!8|X^t`$@@z~32uPWf0_+{MaFc0`j^GUF-w<8%T}u7oV!dnmu#LA;8qnXey7 zZdYe6k-{dfcSSIL*@25@f*{^ExWA%Oz0mO;+{{W^HMka7G*`-T|p! zNJv`@A(~c022EbnOMP9uJ^0VH2SlI#^{S>W{-fkWvSWK)TTe8g^j8bsoAHtBA<<8d zZ&m&c+Md_TNy5pZt`r!Sb4vDF-&1^&uBr_L&iGNWY^^FhrI^zvIgAKfGkL{MT=bu* zDv-tj5)_8z(wc7JjV!v$3BEjx|4hPfg4lR0Bu^PAT44O;{GU4G|L0ge=C;% z%&i->H!^(|cDs~0V7!op^J#tI;(f2TgBl8oth_;l^J55r1~fN=dQ9#KeNveGxJ%-_1D4<#r1q$3F`=F!7egj(&jTv>nT zG^pmL3kpk0;u2ASE#21sesHlDgwn0-;qaGRgrk?}E!A6`iO3oPhV)B5-}T9|dSvSz zOZwU(&aTCnV^hT{R6huTSrrm%kXoMlU#w2wFX)dr)sB7UQK#MtWa=F&_Nd=xoN)RV zivhB>-1xr0Vboji0txa~LydWor>U7IotE$K8)G<9aVR0-3?BPnwsxKprEdU)s=UUY z?87J~XGVF>pF-Gho}w6A0}n^X$ZeFfbnjeO9zM0fq_}oNA^OV@$ftrA&)aI=JRqt**L_WBiFO!fu$S*qr+oE9%`WiXIvjz4sT?D- zMdZ(y=mj1tA@#;Mac}M5!#}G=+UY{#aAq$2F_eXDmQLLRgLPL!IlW0?`bF(52jFBJ zkAs~@NR3Bzd`aHy(74fiGo>M*&e$Slma9|hess9Jo}a#8i87F#!Zpt%7ZocbmdYzS z0k=VzwRDAAbpq#)s`yiz24@u+3U6HTbH*(qoVmv9UYrnor1wb~ye9Fa;C%su8$kue zJ0TwDFyfAsO1Cw6Z0g*$Y~P8~cz*BNUIPqvDy(A z_wS;eiC^ZNf%iGv?QYyJM>2G~G=|S}Wyvp{<%36yJlz7 zVU`O-E1@5vspUKY0fXKe!Z=?PP6`Vk13lI>)h+~gylQj0~2@R zUkcmm@7})d%8+vdNb)pz6#*{jV%oN3ySAr3mn936rf#$M(fVquCNC^jd83?#B4yy= zBpDD%3*(qQD}5SIU$A^-j?7hLrT1ZA5Z>8`kvzU!T)fJobnph2TGiLinp58x-My|Z zzFiTu-s1em?Mr8S8wnwyo6|JYJ{a!N8N0`GfjD~qK~Euy*eQrR-c^%S5oY^}y+>Mn zu)qR4yCy94j)D%I;eIc8yGJB7)X)h$%oD3AOZDrbB?N!~JLsbK@JMF%N2JQ2Qav0? zKB|F??zoL)(8uQa&^eXtbM_nYj0bqARyJzNmU69aEV5~XJ$*}2v~AUZ^-4DqZ}ZL2 zsSw}wZBv57_!7*QtCs{8D`LQNMFC*dTmg?e!H!CAf=%Fpn2z0)l#)gx2<6`qsc1og zg(E^6UNzY9jgRF0`~5tuW98x(@6$EF)tqv$vdTa~o&?;6SV^=Q(GjH8b4ofbr8IDY z7O&GyTb)7ff6ofVZasD1eNV;FdLN4t!GnDDFsDEFSvtI~2MY(2vp~Nqs3KmOiqlI- z9XlzDR`M1@tOPMR@oj4oaf(^7x-C$rMkC>t3pAg?-dh4SwuNXEiTs9meEPy4U{*|? zT(h2}iVJ6@(RK8toc1$Fy!BR|r{<=I3%_!+3~(a8^6bVfmYiTe4Em&9uBAN7cLww4 zTYR)SO`c&jWu?AlHdurAqfw;-p1$e^IQf^XakVS?_ANhzsU*aEGCPkfTh3yo!eMw8 zb$`y! 
zk(_#!NfpuTGRL1gOJNkj<9x-Yn}p>bi@N$G6?+;Bvi5zvwfOr_RLN^1 z^v||%+w*oQE@t@cr**nkC-p-bgt#;LLdeE;6_if$^pfDoo;Z@&(S4L@<}~2<>%e5x z$uH%`H=N(p#%H=+sKm2s;UZ}>dpC(vt_9c+nZhw%uheGV-GfbFxr(r#*oQGSIX3nf zxEY$irIvS1xWPG`3ssS-j_98`d8j`jMLg$%-xh_owix7;(Z3Y}x3_kIAj9Oh%gko- zp!{7W*XsX!1Q-Qm($71Kg6yJ*ZMmx~)bQ>uq8B8VD(_dd4t=%Upt{s6>lIy*ByYoz zYKX$zkmhnSp6-#C4mMQDoMsmjPS%tu01>w*5{x~{z{#Im_R!Ru?Dn+d)k91&eCB9JMvRKB_wGTrDGU zthDyqqCRhLf6C#AyBQc*soq@MJrxz*!rR)Cb@t746*v8@Q~mF~j`bu348r>y+egC0 z4@E_?8&S!krjc`(!&@roc#bw_`D+b@+YQfWstu(E5KhmwPUA=`sPra*x9)Gx;n#=n zJBqE#0}(6}c-Qa=`}2-1I{UK~dlx%c6?QF6iBa0Gi#5a(p#_u#9Q9sIL#gAiUe?G;T z4=?9XA$YoJYy=I&G2l`8x^6t-X!xPl%<=8W+nDB8i+m%unFIB6dkx55TnD4OQ_47E zNeNZnEl}l6_^5HV>n!S%=9kU&6!Ch@0K}rxuv-E z9s4Ek$bD2#tW31IpDDwRO`f-#snSI&Y3*w@wJ;ek61dS6f39^*3#rQZpCm6mbJ|%^ zjG8pA(Es-3jQ`nejQD=&%QFwOl+DK(BxTzwa%u~U!H(74*POnAB331Fgqn%mAP&6) z#oHgHNiZJ@x|XX%eL1m9L#;pl3#*Xjhver3+8nH!mYkI;D33guOZJ12=wuz8@yc_I zk*?=EE})>9xUZY`E8oQ$&-kZpL}tbeMx=##k(&V8_cFO|jD$?KBOzczR0-8DfF`g9`zYL& zg)WfFQ|CA1FUz(B&h`0~445k2sTM8fglrr@rHq8Y*#9wrx2ca<^#bv-2V^56czk2} zNq&gG4VuQt3Jj#8oMS-shP{PWNQy1z%LYhOkd!88d7`PwJ@|g3T;yP1tcArl&lq~}h0{4%jAN>%p?HOTr+?>e7h^^cqAMad=V~wRq?7T?b7$|x_ zv~mPr03!fO)wU_23t|HPa53jU_(zw4Zg{fgWZT*88)F2T4;lV6e*VMRf`VPyghW0C zs7vH5n)XLv_M4<(#ZCb_>q6d8ZK3$Fkhzm)S=JsL(Q>$dOoHT(jeYABrDTeIu@B=u zE&&wuqE^QV(m}aqzpwZ@XtL?5LjY}uC5d@kD#EV*cdS;h z4>bs&z4VR$2$D}>%#+#}BIxK1S>ydx+#67&aZLGk;?N!&i5}-wXOC9nrpwaY=&t%X z4>o1wJarH@So0?%6xDbW?jr>K(v`i`3FU{XfKXt2L{{F^BB%t|y#_UpI*=QDdL9;E zj1mV7IjOms(+KKYC1oDTB&HYc! 
zt{rhD{~rvc@#O!|Dq7C9aikBYo6StgR0Xo%pJg(mXFqvo8ru2YIhs!z*3E21cMKSk zu}hOJWnD9zvuyNI9L!`JMQ4E1ftl52XpXZK-c=h{{gXPpdy)Qc2IjwA8nAK#U?Lll zYuQ8pVBC`HUVFW39iM#uT^dPc@%O*2rJnN>W?xBoMe*`ots4gLXte!a z$qp2NvO$Owm~+Ib-3hNdeZ*h3hVh%=6sDdzS0b@GL2ta>F`(bkZNgc*gA;YKQ8SWWB*(K zCvcMG!)LR2l2S}aEnMGXPto+|4RIr(0P-X<_{B@(#!e#0IlRgtJ>*ctk=BeeY0`f< zAq^{)2Qc!KKXOuT>E-+w+OR}_=lMRcBm;0T*)I~r*?`Y+sixdH<^9Rqjy3r^UDJxt z<<;gfQd?3YKXZa+@yFi+R)9_34u&@O6F|9SC5Ur%U=4ybdWitmzz}GeL7wzPZpq)i znAG`;1P*{R=!;Jqg0X$=e=rC~ti{{MU zzLa|QwH2taL2;HCL#{IcKz#?80CI;f@ytx(j|2&$-}Ta?uIj*HkF|&Y=|Y=1&}w~1 zG$rtW|DiP;_?_kl37ZNO^#@kU|iDLgrOA}s8Qp{AO!S73hXB%C`X$4cIiux4*$*9+>EgWUJwkgOhGK@dXMOoh(H#uv4-lha?mpj+7;T{~zCNToa& z)u305|L~rdGG?Dr>c0LSP=pVu^Z%M%aNB!Z5R>L(gtBF24f`HT{ZN(3`gxbZjaTyJ zg?nHN^z?8hXTZFu56>rhK*lyzrj~EtaLuAKRB@t6a2S{Cs}A&(DOW?L#l`aL4)*_G zJ$0A6tyIn@5rAa@aZ4In|HGSb=?wRl!=EZ`cS-(kSwRQrA zMSDq+uoR@M2~gx=|4~KK5&y%(9PZUZi8iEhe}GQ}!8x~gb+T4A3SY0ngG6O-~7G4PxUNmNyOUZYQoBqCQy!Py zz4~+*o3H7eS@!Gh%Dp6u%rJDV3a28L6tr~>|oXq{Y^=au2>Y-y9(QyN7z2@(>y1b<3?Hx4L%y*lRig}Q7C)=Z%F)!aB zE?G{S&pY-bG&*tf{Z%;C)9I*LPf50YAqyXQj{K!fo+7GsPDs3n7PKE^+Zyf83WRha zgN1KCry@%}Nxh7gYk++%oE^Zwh-6vzE|A?%`80o2PYyN0(9r)&OQdMV?_yp|qGklXkstmjuRKUt)OMRx|uFg9-VT$d)kByY#nC?H(=s_6ELMf*K+ zG!NhA{?pB%wN1zIsg0^TVfxxjI+wMB9V|6ekwhkMYh4{T1bM|=t|T16e2h8PpB6+3 z4lsj2RM0L_^ZFh3UeUF)F9*-A$XsLnp9%p86d{?G>#6D4TS3|l*4E30uvmKJ{suwz zKpOOy5PIan>h>djX2Q<$z&dc(!&g=UB#F_)^e1~@YX3ztgjHopJ@ImvPgs8pgQ}W+ z>Hh~IUJ%%JTSOoZFbNi3q(Qe~@0J0E2#URV*Lpts6i#Q!`CLwyDfS!MrZ?@ez2@Lgo-+1IO3)CJuIa zz{VX%T}jhLHFbWR&@l{|bMW)(N{(9<8O6w+rV4rsllqt2K*=9GPbGBQWTELAP`Uq3 z?2P3p-zo(_H-63=C;&@M%1yH_)y?m%+7V;mc!YqANGl4Eh0ivXtlJh5A7nOQrhH>U z9F%CS4ak=;j5c|n?$Oximl>SS%$!V?*a9;@_&}O+T;hdg%S)vI*NhC<2OOdM?|Sc- zu;@Sio}JeVXn5j-XWZ$0h*+>gS+Z#LAA6SfLvHtOt6PD`k8G?KHmoW-*L~g)(H*aU z-2ojhIuRMYpaRba`R2e6POcI+tR77f*-mqN*rROx$6KP@jm)rBLUKu{*d1^FHyoPa z7;EcchuvQ-d6%yHD~U+~a(#JlG!)wzq^vDGaj-B_Gwcmr@TfDOfz0GB_x03!Vqiiy zRXE~ew~aY|S2@a2v^vK#^F*Op8*}sbRylu9c|$T|4tKCh)MA~s(`rgn73L<&ttFja 
z!(VwAAZ2ln6UVZM)c&Ll*tzI7HWB7&PI8BFKL8SOm z6n!9fsupV%C%zAL&v0jN(vB($D7B2hk&@=Aw}?+Nmsx5;qaJuWC}!$pc+L_da)THb z5|Bd-{D^pYtuodF5AqMmck?M9K92prD0|DGx`J+76n6<8Jh(dqhXBDHf@^ShcM0z9 z7Tn$4-QC^Yee*W?&Utmut^4ZM`?qS>+N*nY&(%G9jxpy@J3Mq<7;C?{=P^HqX*_z+ zt=BX|bvsWF6_7eGO)1rP!5#I?A=WVE^%;(To{%ASlczIOa|DSiUaMgjjdLJLl}~UR z-$?)YFqBykG7o)voHX!jAxNPC`lvEK%|8AQfecID zYZBftOH)R6UdyGZtZgF=E{9GKV)1GPv zFsfc2Re~JYsNL`qX3~-jr~&}JxFc_1%^y`d<7c=VYUFq5GzM|Ghs6ctMpGTSr0SRI z%9al5)mmD~QqMZRhZ0~>&)}+R!iJLxy$r+(e|10X)AzEQl1H~i-+nH0no;%9<+_a! zP;`sPhU*r6jaf76+C}@wIqijnedX)shz6k5%pYT}mewPg6}6t6mzu3FXeQqCU6kyr z+9D`DR7>#AJ^RIX{`z8G>4&LH+e(WKRPD-|n3zBUzS(~3@%MceDs5M?`eP=~oN6vi zSuXiOWXb!%7pfGReaCubSvHsufx?I$3bq)j*;9jMJWID40m|wYm%lOt(PHRfqSTc& zPCRhdzoWMjOlJ)OE*EclMPz`$f zS*(>n3tn$WdxFhj5*m{-GjL7oWpe%rB8^Rf%c-`9p_C%#DhRA8JEzmHhg2luwOx(- z)A9O_a1}6y5VrNDUAS3wPEP3LTW?xX$c91cNtdw#Pc4N=H4S;pSoBvTXpCrdOSe)r zF@%;U${Kyt;j((9vQ52X7OKo;O!34=NL zL?R~4!SZzbY9cBOUnl8VYHiATY@?_$FPY|ZamIEk>Egwu=$H2Qvj$|nao@A^H-BK^ zwMIFl#H(*p4AR9yseX1Kn|*68Y$XnAxc1Wzw@_WnIt z@y|j%350KRBU@weSZiSI+hsV1XP4BqQP|wWw2^dAU)=?F|gI5S!eHrE;0#NG35K7h2^G@iTr?#Iu)$k zkD`%|yAv=?odT-5B-|98{Hn0m5V*U*aM~;U;3&!W%yErNuAw1fCh`Kk=CO9Qc!=VJ zGBs{?i3d_;sDWbs7~UER?H>nhbG|AXSP~Xck@tuaG^wv z_OpLdppfN#XdrLy%avo!o>=TWpPO1od=7DTaa%yDAf?xL|5bFDV^bCu;|^Yp8j5ao z4g=eQDadlpp}sUqORVfF=45+V-3IzDr@$o|ybSs^5#{gb{EnF7*wNycQh340Y6ecq zyPBG+qqs9Ta?na(uvtH>(`!2wPj)RpC)8&f#(lBmw%~y_37y#39CqJkiNdi$Ke|$o z0&cWgsZnXEZfW_QRau2@wWRzM?^vqb@e59=0X~-Dk6-N!qzPm9DXJMW1fg^_ZT0iM z{r$ers`;iA22|Lq~Q4CcBE4lf1*D2 zv1k;M;C<7G>R3Xgt+5`l6x+wFpzrXk_gOJkXhSto8HQOsk=bIQg3BU-WxFf9Beyq| ziePXXDr@2`T|?615kFo`s-cACU&nJ00O>z^O!sLprhcprynskiZgfx3XLA<1N^#)V z4%nm`H8>-Uql9{*PLZc9%U8e!j^Rq5h5a|=XdGDu1})()+BKrG-xC}2%!|RpDnNmK zOu;8ql1E|#mYkPr?Fefuu~dfbp#~p*$sn;bD+Rwjcbc`dKS>BE=UMO(B+EvMm}k-` zRDMZVP=-dPN|`8n;(WrBw^|Gu2wcQ&Ta-0q_YuZkWG*kEMdhEN*`y#z$X4zw{K_t~ zrmU0nPgt`2+b#km#FvWtp@*fBD-0~fogJf=dL6c{(p^y|HrRIMW6Wr~1`xlVY2~|+ 
z+KzTj-eg4@?5gO@k!7Fs?Lx*TDBb!bP_EeNWc%8x1NQL=z~z>hSm@zfFLAI?}d)k8C2WWF1unRFnt z5N0|#-e;K3+j#%T zt$e>+_f|LND=PP3HVg7i@sm6PMb|!rc>TMN+TAh9E=7%Ki+mOPC^3oA>30J|r@|fR z)qY~tE6XTcIo)d^RYjGc)W}u0Xj!^OMIpT@Q+RV#XG7QJkv^)FG4nUy23oXDO2w>E zYLY1mq&{QEPcO~Pf2pDoDPH3Vavp4{1Pip9C86SL%AzXt!h6c8T4KFStH}4&GP|_Z z!8wdR;O4Xq%xbN!PbVp!lY?IHUmoku)JVM-mOByx3+@{3m4uhX9Bn4z|DrX$)5i1;%mXLs20W~8xEr8$jx(d_wMzu5&y=pWJh`0$49 z%b&Z%O>mV>Mx>3R3J&dQePYLXW6l5YNhvys1%tU)qM+}jPgrz>I=O)!su&Sa?dI8S zCSS=vDPeKqUs2rP)yRLsUchpieoLz&qTXO0*4yv5=dwe4rs2r3A}exy{z1%SMmQ*l z7KGiog-m2pD?l|*nCm1sJsp4@1N7r6;PISW$hx}Mfx_+pEU_<~?frbRXaDkgy9Kdr z?Q1FFD*^YzrD2d&Sp>7$)^~!m;hy8%uS>!>$Mto+_&S|~>D?oBLu&BJeCm>wfn_dN zqz8jidM=wQMsB{+ua9;%hY|(Obx$q;D5Q&zMC8HmGXfGCpY70_){RjypY@&mXFFo-%(}gFHUzf-&w-~acg*Gk7998I>URk^@ zz`~3rHX!KUT!;bKtO_W=NA^k2Zi!lyr{fMw{1_Gq*mk7~LyY1bSD$sr>K3{tO`5|) zF5o{4^g4FC!+#HfA(ZG^N0LkJFOXf#v}PdbJ@fCPP#(F>Ca=}jZkB|zLH^TnY47m zz`Df{)sCU*^*+G|YCIQP(hQ55bCR{eZ;S2_twPIWBuI3DsLW;ju-@)ClWOq6a#+lZ z0ugC&x|IGGzR*UT*W0PaFgDBz%1}tXL8wzy(ZO`qH`}l3totG(BT5_ygc2rO%uu}b znZCE|RitC!(6e5D?oMR`OW8YQeBu%VD&6ATDE&rjF~4lbj22is-AW8rk2q=OjKKh( z$7DAI&BKUQ(DsDzhy%}5GEWJtb@|vLtc*`fX{!5twi==PGDAa$*n5~(FT z1Vh(uBzz;0^!Q{^!7n31Vps%&eGye)OcuRaN>r__YM%ig1Xp4%0v5@d6DKF+x?B404tID)!ZGNN!;c>8>HNLr^!RC^G zgQSa9CwL)BMKF`Z`a(T1{9Z&!HF$d7K}OmowcekC$=iwnZbo+Z;?UPt7HKq&T|%)L za%p*4UR#?U5?EjQQi!|tR2?vYbk?$pds*Y~ola*2>Y@6$cM*G?J-W-+U+gIdU9%mV zV}-RcF7&ML`VL8!9NOk|4_ckSO*2RL)traN=1covdmj*?fJtR0rkqeGS7_nHS|O5qo{-36LI&Nr$68Q!iJ5S?5RL!bm4 z30QWdG=(cb@g7^B^6zo|M7Mr23{`K0fFirNkT7j)ZgA_R@mRX%_+lQI2v=X<#hOz( zxs~@aRydGC%);d>k3Taaa=Y`DcFMa^gp4iL5Hzz9Hjg000H#E$-y0kmGWr*rLfCgF zR&bi{A#Xw6!Zw%3g$E8RW~j=SiAHv%ns%J5ae|-j_Y+@Rwe?v`e19NCVYF_?`SI5H zsE*fYJAQ^u6*wiN{TJ=j{s|1VAM}S3-Duw|+4%t_WU8$`)?Ue?0t;5kZPge#>YwjVd$Vzq^~gFP-1(Z~7L zcP!LrDPMpw2HA+SpqSc`$<9~V_Ws%x2M_Zdvgbw8ERj~~_b{)SREUxZ+-e8^&u(41 zZflDjT}`5tRTU<0$$lKp8c(*A9}~?ASJ6<`8uV!|A~zC)IkcJ`KGCoYHG3|UWcS6p z(`;=$|}SWG|(2N3(|oN26OTwn`RHkk``v}nYPti&)s 
z*HbrL?~8NZ_@>1X%JDl2t7KfxOrO_JZlrrl#l2n6!tZxKlJOg`4bOj;-%G+ ze$ItMVT0Y?8IK@WiH%)9b#QrUq`4W3EB{$g=!%+dmc78ta$iTW}w4tOaV!f=eU zS&gwo@doWG3kYNxkAOALR3dr~U6^Ne03h#`K5@9<^D;lEmRoCVGPi;X0!#t-uS3lc zRsyrCe4gk;aXPx^+?cro^BaBANTp7#>}Dllt2r&Q0-QUY`2+ZQL?+ z*=G)zn>cSFt%c0GGh?1<^~mmV&-WHPiI<92RIM-KqiqdEk@S`Hy4!yf!aG$=aRVq$ z)CTR%tXT$FDr04Fq zCGe5jr|a8tgG?~!O90Q7vY1QuyMrlJ1 zb>4C7cMP?5r`c0%T@u8k7}1XB~fobUdd&ndyW!`R4)L>d)4Ji zgW@1pWzwWB-2`U?;q)Gh3uWr&%<@C8sc8bc0-fVp_IJfiC;p|3wsS;RR*iR)7t?DZ z$rV|Wr)!@ManFO+=FJRX9{_vbfbs?Zgwk!_FJg&5y|_O><;nl6Jj27nHb=0XBOw(u z@P5HyjWNd#r|;3DBw0is1Z*l7%rkE$aOdpaKh7r$O=VrDJYzg`?Q9$@;4zp(q-vv<}@>qT5GDL624#_(m|V16$Qrony`X$$*gTI z@AwO@yt&g(aB4lj1F$&IresVdz0qgLpViW%GOMk^JhO6ib&=0H;dAeaveh`S2)+FL zfOtVCe3Jv}qvJFyzjbx87Gsi7=GGc-ex0i^)VR;4(4u{f@x01nW*;wK>OtM$QA!dgpAOT6);WRXB@9JfRbqTE0$k#$t)rB*t+DtEz~%{RHY+Fo7z`z2=M zs~yCvJ7Boj;0!g|591DP5uzH&sc#pR;=2Ev3*fBLGZ`h4>uSyCq~H%UfuH`GqRxJ% znn%jP8Z%t8n}c#>=hKWQ@779C`6<@Zgc%h}H83z^MPM(z3zmZE87S>_M zZkwITb+ChRFCG_USf6{Db6ufEd~c9#HZ0(DK_f5v_e;=JqO`P6-^k+!}9=}G)lv6=hrHwsPfXS1I&K|T*nkuXYDA^FEo~s4o+sv1fNly@# zj!k57M3tnVspP+W8ZDH$3;9AVO{W%*f7(8>>$6j;4#GhV7OP6cUQ&0r!;=l4v^xK2 zWn9b7+r;;?51$*LTtL-tIIc6vh_CB@ISr$=X7bl znr4MP0F!s5-meWYI;&cJOr6m;GQ5lgT7d`I4EWA9_U>&;7mR5 zhzG2+o_BTeoDb*ap_XU5tawP)Fedf^TH9ktsrzU3yc*^@^lVi9C|$11hpPl4wKt^u z=YGCnOBwam>H3tNh?l(yWs|0Gn)s+=)@p>=!;krG84Q{L?AE{f^GWuZQnsu5bpTb< zLFos=?C1DFj!W;D1?Z8cH?e)Lf2LmRzeIjDm=LD);zf`>j4|uYvbkfG<@|i_Yoo~p z^}Ev%YJAk!GwAo4frMRmHOxr6h{9AknUg!SiCfWuX;p;s=;HK9om*kdFuI#OS+J$- z^ZTTn>H{6Y{h36z1c9*VEupsL(ELkJVx!5szO2NWf=TVOy^YS0X_UT$I%eS-;9b_e ziW@xRloH&w_x&3BZNrsuK=H8IZIfq$#{p_wltnHuFPB0bKeoxvGqbf(oJ}?#96-H=M#X*`0LrzNK>L zz`wt^DmBdJp+=w|w@pMFAkbf@JS!jJ=thPlr z0`An_)GJXGT2)$e?KxkVt$izh-GEkw3ZSycXD|u99QDLADw2ojXGncHTIAfCw zGn{8FqCd=ww?#6k0Egp0*Eg{^>U$ds~} zzpy80O$%4BDyl$Kbw;@R-UgJGL9b}h@0!&n&L_5>t#!OOu5?&7anv0$+o+_3-Xtbs zaZ<6e)%BTN%lVZGfidoRh3Sj$3y_NZzS+x1m)5gX!%RC<1VUKjvmU-{%aLlx5TG11{~Tr6OK#pOs>h zz85r?Qhm5WkINE*x7@hHEA5~0Nj{Ms5nm{QtG4nbfY;(dDJ|R 
zte~b{UtVjT@j|OX?D%S0YJI+=GGRqS0|E~?lS|_O@W>WVAv~75wZp|_Rbh7u=mJ%5 zLEh0j9r-bPw1IOt>z?DtxSxonS?>89p>XDVkU~Lqou$M1ZPbd{u)^3xd-{VbJ~k7n zZl!%f57M~lv?jC$$rtmS`44lF!Wn!X)aN_~wnVn37@Gi&xx!^T1uG`i@Jyv`YxN!6 zF2NNU9}$EP`95a0=H*$2j1?ggZr}K;Y{2$IZ^bakjv9=MmJ`G~YmlL=SO50#P5b%- zzmDevtT5{hW?rlOId=+!PmTz_x$3iU|QWAwdu~CgFKyalDKkRlH*`&6arI3O`@#fU^ z!p=0}`t3Ux;1NW{y;E|dDrE^HT~t_-z9lh7#!LBlv&EVZe|>}4dBIH`Q+xYnz1+0p z-BCTYwgRw?uF2=4lF%JEuBgBi?j4ZjIWhCbUc&IHE*xyWu&3FRs{(q%@IH~uaD&r; zxu`o*{5!9KZoH*uUw@G=qC!)tF;9HkcYqFZQ9zS!U4E1#?xgzNoDO{2B^6TPqGYGS z{=T@E`hK|r%{$&m*SN|HwnwR+!P|y_Tf=dE>bx0Baic7df+$#H02nR2Kbha4{O7FU zl-Uj>pj);!1U2aJEmWD}<%(}3QU?6RzsCzx8hxm&NBFth+=kU!i>P+>ReFM;R^Hm! zM8T!t;KkzTAuj?3Z5M+Cc>Ib_lI$b_oNlQXk;? ztQrVBKcB~f?qcMAh9hlPA*v0N^1uA~@)xB8nJgGXFgQ(&xe6HC=GYASFMsiWja&jT zmQP>upJ(}b`3bsw{_qK9eY%+JYKl}~KpchzSW*S%Q2~(E&Y3D)tN>hYVfBD^stNxI zb0c&$pB(nPZA6Nb)<*VwlJ~Q;o;&|!kSKm6SHu*K^JAC4wznzOS&}TH$18zo`@}!i zc}odiu_ekbyxuC|81F2jW|JKC!$KU(P;(Spj+sb0+VN zEyjl_*dEab(s*lG`WYb& zXB)cBwCK5wBYUYM0WXGS0YG=Vr+wGuw-mBEs7(O;;C-IgGutaXuJPbN+XIuAn(UWH zTYW-1klKx`@b^GdTENDg{0DX$e|M&AF4~eUqWO*c4rk9(=psL#=0`wFoyR1IumFZ+I58$l@=lhnJ+?<7oqdf$IfwkKBtEp>*igLZO z`n>0<^b)1$gT)%85HT$E@{b$PAG|)InNxN;?ig5JDMxFv7Y8+`G2Sy&=3%FmRx6t-@9aMR*LdiCa4>~t#^m?|@r zlDE{Ac%v_!WXnBZte=-TsGMSNqnqD-(kR{*%-KeIE>Q+!143>;)4-;HkaVs7AcTbd6)&#l2GR#u=JQgLNa9ts5ORvI^PWUwrDT#891oUDZXn8mP;IIXAK z+mTpZsHiLZZKpxujJLW_BTMc6>w&FOo8?;AXao8qtuAtokUr;RYRFJ2t>ks#!}`zt zEh*tuhc(EfBk0jUSrR!*-ZCe!*=f5W1nsPOT!{*s{u&E+6YRM{ItQFVnn_-%RgyH- zTT6O;Lf1$vQO<=oSKbvR$6XgmEXm~ONutT?%Ep4}#u#j&-mnW;BS!Dx#vl7Tw9&?0+iV^at~{Y{ zYEJ()PVAp*cLY(ix~sVOmx$@eforzYrp+d|{qp`y67G0;Yn~=Pv;rb1ermQb1DDJ_ zmKuC7Ji`F9b!(xy!t*QyOADs4J`3BJ(&iIZ&ZJCx%@Ed;Vx5i+z}Shj&sc}0kB(~! 
zWdkWACUe7v7Zsglr}KU1_&d!fW)ej6rV|>QV>wR-GTC}}#t+3|cH|TlYR`XQBe0%U zHPx~7QIkMt>N;Pm6N%#|6gabe;}zS62>*<`vbBltCl7e7rX$G zkS*P>NFl>hm#Dn!Vd0@(J)L&oC&Ai~L8&*Waw^8a{6kjb)j+>hDRRD4OXJB9P3Mdc z0naF{xXiD$waW5P07TvK*w=jecc>=sb{`~~<5wvfhR3Zi0^aRIM~~QK zBXws|coiJ6k3IgaXFN*=Lc?7V-s7X^?>|O&wRyKRp;xnVD<}7%J?WyB2T?Gn@h;B; z4_x@D>+RI*+85)L`44_z$#R(IRSK`;fJmJ;%m-vWeV3NiW|4hc`8wQSL6R|WZQu>A z0caj>Sw~3eE}9rrW%~HWky&biW+uxsoKrLl(=&D-Ng_yhxoTN&fI6#fS6G305{q-zqHOzMM;Anq7&uwlPpO0l_V#?fJcD;Aa+)c03Mn7Hb01|jXUCP2pFitZc6qBU5WVi6hjYK$fl6x#_a8*3mTa&OH z(s8ad9&#V6XSd8{=qmp^CKea!ZO*qyR~)q*@dt`OHF_>?4FFK0nW<~&i?Ef|{Y-w#@NUt3R7g^m`}<+=E+0hh4<}*QVW9={ z+exa5DE+3%RY%f+%?EKR5S|yOzNhgm6P2`rdKA5)@@)X#J5S=o;xfIrG-%k@LuP_Y zxu4o7O=fuIvUwHLJQo?g1IMk@H8tNSTZ!ydIm@Ai-S0KqE~R^*t+og&wX0l_`Y|T9 z6T}b$vnB<)Gy@?$hj+QT81ri3+@Y9>@OId_Q1y4Y_bM@lGPAQe@XkrC6_kysKr#~Q_6?LUK$1?^q6I-y)T7FX(4rlR9AA6gCVoqTobYVL+??Xk)?33>l&%2Q~%Dq$+K!r*-}P~H1$ zEnnKesVU>{YvU)*UL@Q>b6pWb#`y*emC!o_&wIAZEIH^HGakE$*(fOR6VEs6_A0{VWNSZyt4A;kgo3CZIRtM>>>%b;xM=r7q}ut`Dzj@>o1N=QaD1gH)n!hSDefhFj80UqYqWNr%@I!8>9I| ziaYVo-LpBpqLUCAM_4;mpj9Zx$zFpH%UmiYFmE=3V9(-vRTQ;%;zD7;U2+y~X+{q+ z;J706ldZM;TcCK8hJMld?Nfj^-MvearZwbZ8^5->8<-G2{f3?Fu>XP=rrLBPK<==j zdyBU-3X%D|fida+I^}GF%frL9)p)N3Sf_|7_86{(ZM{mXA3Ja!!tySzJfTI^j}4w5 z7r-)zX|BV|PDuTu%^kGNWjGYi>_Zb%XMw+c;QMRY?XG`gp*zcUY*7rqCFXwI&ghe5 z2P!~sP^e}gtGMghs>M_5$Q8eBIP$FweRNlh+jpX_Jk(bUczReZYj{c@@ikY13Z=?2 z9lEQ4w(K{Job@P!Wzj&{p)LEnny{JHG8gmHN zHb6W>gF(%7juSy&v6Ro$r;w;fcrm>*5|VcI5%AxFBaTKSWa(55wkD7)sccrLWbbpC z8T}H(L+Au`(tT2uRSr&^;@dh*7aCb~EZd5r_Fm=CexsrAuHFMzm+?#<)WPjmBy-!y zijzC!`;7vzvdWC$qXg9VA9)no@lPT+NTNtP}2lYnGdr0X(Ylk@;{NP8ll+%(>r6}GM! 
z*UOE}B`9DBQnP@&VpYs_J*r?W7?Y>})*{THza4Mu;oTNBKFJ@VrGnz2!*L70^wq?~ zaqRvVutbeVsF6ede3bQVW&Q5gTRGOz0nxa)nqXsoE_M5c`W+BaM#o?h_YGCih~9JR zmHK6Y21Bji%7k>=6PClgclGmkcNkm?QsJa$$7E72TtRz_JF50$9$=g^H#cN5^EN|l z^ZSr#RA8cxu<-$dQ*Y%h~M1o*HJA2f$IxU5` zLO^Ta!+eN4WHtMNcUY`_z+=%Zq-m-C(GSXWV1We5v_#Y_dV!y3L+yNUHSn}{Zxv2L zT;g`_Ldh;=cV;i$|AANUV8dc_jnK7t#(5#JHMh}Zbh>Oep0DukLg#3%(>K6=$h?0| z+3*z%2DWmNFJ+leS-K14jdqYdfyUQH`$Qo00$U(i1?5-+_ofa^)Q=6gx$Q0H$~4?; z@37uTcHN{qt9cixn@t|&nWa{#xC;x^=ME_(4?lR% zwZy)Yvu0fpD~LQoD4fImS0^TzzU< z{&}siyo9)x`xHgC22ft>)*>iZA9v(suZ zR*xUDc#lsE3|KH%Hz=_Dv3!%lp0^x2JbHjQAfv`klvRMf*@+{(GL4VL7+raze)VRK z2JHW)Cr8M8>P(B@+KsDRZZ*NHESses?jq);DIhFEkWuy+OZ2nCfr^Swb7OuI}8Y$}~Bc!k2daGig=Pk%;z8KM^3V?T2sjXNCCg)J0_*983{wwQr+nu*X;&mcY%$U0q zl*lf|FKQCePR%@sO@8aGj~haRr37h@8+*@3031{-c81Jz&GeZ0av(P>_@SlHiEgwL69k8zhtzxme=)A1?*bvkHsvu<)OVZs?Q?B$H zt{q0$xku593YGAyQ+mT~$Y8^50=*;Y@OIM~dFQ?hA+Y#d?fM>yZ+XE#rP6!s8;M+I z{{XD1W4cbzF|LX3!c)|yvIufrSYt@8{cd6fc!?Q4RL!Va9H8 z&8(??;lLyZ2^JK=A(95*O1M@dBPVwz*d@khI$Z#&({XN_>%#&J%m8Dh{{p%Ae$DhI z6s~Ci<(JSzyya&sLXgD%3X=ZlcSYfk#i<}Ln}#P17~8|x3*hM6;zHO1$=`h^2ifq< zp79yKruiQ#X@PM%@oFxImaCE7L<+S31doF?#L7L@6jjkPW_Eqf^KrdMhWB@M-`;i&|h0*vmi+W z!r4-of_=f|ygM1P8hQ15Tmq!kd-K9gXFN;N<&OFXAb%4m^r+q5 zsHEv30H+Bz(a zrCr$9^9r>1v^iFS|FSs>*yE7ZOwzmcOJg4fgYP2I$w0F5QQvAEk6!XRZ&+h)Lf8Cf zqLIGPe!13vqUt-!s4JrpW0eX^{qH{^-Gl<4ADTWx=rA_6gGOKfn+s5#{t6*JeVv!w z0?(1g-!P=CzF{@Ffj9m)l-(WJJD`LY0XvwKnoQ~HwbG)g+j{B#OPkRNOx0SH2m_+^ z-#V=!nJ{)k?RM;`bx2J&REf~Ge#!qbKw=_VuM^r>=5KMd$sAt@Yoxl5@W9~Wf+NLq zZk#%(Y|3Iy5hLbN2x7&IN}iosQ9)XjR<*IfT22#a3$1~68Hm=Ru4WA6p#xLQjIJAne+N#Y$@Ujfp*kK*JC{M?t+miUlR#T zKI%&Y?O+=}_tQ8W*2#U#uE6N`QshzsfW zUNf&>r~o|g!(vRZ5RHOg;VHC2LQUucBC z0qq!M)Ay zcqxH0RLvI01E|fJNxj7wJ+KwPFM0ADWXP@b4AS@Cp9ljSQD6CI77{Q8tIWSPoCUq< z4;n%2(BC>R2Mq9H^aEej5L1e&h;)B?F<8cX_t0x5$R=g~ZjqCzh%&CO(Zmz~Az&(} z##x$c+Q^I5S{FSTNjiFI>6na$L3+R!=LYi6Mb-9vf{e!v+`hoo=SD%buDP)P++EIUd$Yv)&WsWZ3}|&GN9M-2`T4+^j`>_yra{;n 
zKW~3y6L$upOTdrQRy5`l{sCSBhxgyt`fqK$#?mu3rU(EIcf^$^42uLnJZ<#69EX^XF2C>j^BPed4yWg95>T(tptw}Y8R z<)$r@TRASVl4g<6vxH7UoesP&k$|6nkjb{Jpee`oW(!ZJvuctXyV8!4UvueK1_Sqp z4Z!}dw11MxI!o!!>Tu-Sf#x&T7lydu*&6^hTc^BeIKN?cJ7Ju zb||x=C|l|#_2FOc4S5=)n!GE5ifNW#k@?eE8&-`!a{|`R<<327*v3Pxy#;wPvRRXU zl{T11>bHl4dj}#$F_hYEP#z|=jMZrZ>45IVMEn_+#_O;b`#@Sh?bE6%@r5tR@Tr>n zf)=N&QsHvP0l0#!QpGrh1y0QS9J+FPXX0z*_^n%74!^B8OO1_d%|9Ol9XP>Xrg_2! z@!O8~Q^VX$F=Ia5qI8`pTMhbh`sD-z?mk-%oO2T#%^M`_g<5t?o9Lmd)H1XKozn>UIke+>@4|?LUvbMGL ziA3sg$U4jRwHw7m7&x`w8(Sjy;dSmSo;37hqBA>;$Q0jx73iy=Fbhgns>QU;?DO)a zokqn?i(yj>2EOJV;||k+a7@1sCqQr%$LnM7AbMwwlbdSqsIw}fae9LG2D$I*j_q$@ z>L%|ayt_V+3jcH|k~;UM%<g4CD$OMNVVS8S={)~UMwR|`g_w9vww8)K zcCKWVeq2!JNZjW!m)6^s9i>uDW{!Ek-x!z|Oz!x4%IJc6d-&3eJi64-&VPIO@b_$y zdF?`3Chtpm;*gzVzw13aAx7~oob>e-wp69Q*H>kQHAU5jS=c zy#drM^-wQU2w#C(ao+qMY21O)0*!1Wnfx;kKEDmg2l-GqY4-B*UdO8=*`cc(j1RYB zn!;m#t+BZ6k!s|R1J)v*qjwO9$NRnz!G`fDQbx+?>B-6(BnV~|CLm`&%?`AlyFE`S z#C&g$J+Wz6=-9C_v}RV#K)E?0J0wNqPf%!^wGrA^bb%h^e2(WpCd0DhH96d2@l5H< z4&QLVV&`Z{%>nPYcUDyPV;Sp>+;*;K;MDVPh)u*G<%mPyXtD1G#aAA> z%!*@&r@UTG@96_0`=!gz6pJwiKqkYEvOO?kNmhoa8=ch*uMKEi*3FH%Xh=>Y$aqc( z*l8IwH9ndX34RnmR;;|1)&hsxp=@CDMp9-^~4#NRf+z)Gk8jV=MS zE9Sg(7{U(NwIEh1+j9wT)q(7z`zI-_;ZSb`!-*cS?|7Bz1Z(~HL&6`E>mX(8nf-HK zfVyKdE_V9-TFVY*I{SWRW&!N7qdi$Me#-UQWQxV~3Rmcc&WWB%_v?#ZfOr7K0T#t7*$fdP}@5zvI?~bGW$7xu7g@tjQIF5MN|T z(zGJJDZ5ERRQ<r+ya+{F0SgThw&Tt2oI3Nb3M+O8`Y%_cfDU-LW+6PYmy5SYy^!Y{1&8aYXWUd5 z^{41T7L#5KHe~jS4wi#~({y?$dptRzz8`2)RN*Q`z@`tdrn*a@&}XQ+)e23YkZQ;{ zXhB%S-&SWA66s6vy6@R!-hzV%bvu&#{21s~3kKxL7&ww&-LobTY1WN)lxb~pLWsRs zF3AeFQcpMAx4Z|5P)&rab3D+kWd-S>@9jc|yzo7vFAWYQTF$+Z&{vuo4@!3nF%(_1 z3T__f-SHKp%x~jDi~FC&99Q**s8XOyjhc25cBfX|>A#kx7qhQ_LBx~)(BM=(MnA6#sZ{1(5a0vCzi#>+~t>jxYuor|~Vj;8f61zo~a|gMJWopTHq^ zY9HG>S%OE#nCk*O+}5J!ey-J1aNU6Aq?nz?L_Q^eP zATaPr9B$W_qQR5gi0t-iIC^$f?%jKJ72NzPxtoDyDqzQ&S7NjPBke6n#b0Zds5^&! 
zDLiJoJq3M?mM@$tqwV|Y>!b8F-&$X?m9teAX1x>XIpUd?u4bUV=zIVfeq{G^JD;re zj3v4?QD-}?$^~v_k@in?qcVd!MSj2P zR(sslgw3?~Bm=3SBsdCc)6(y28?n4T489$KrI*fMDe=MfS?*w2i?*P2clxooA!@0L z3*?Jfw}K@$jFS2v@AvNVpdtWKM2!}~h=9%vDWaC{>dxgOdj3sf&CmYz%g^wUai$IE z%JpYf%xfdRJ(}H+`?Y{jXXjby}hvxKUp@gRe40KN#%6t0y#|rR?kV`-L%AN>0 z-G9~t{|ETj?x{d{>h7l&|9~R~b0i4|7r-~XYYi5GXOERcyZfsLuyv|VqpH%RSk@F3 z_pOb4KHCv1SBQSyVNa4mFZbs7X0l;J4Uf>>GM?uTX07VP&*;87+PKX4uTwV;1R+yy z^Y`1%ji%h#I1e9uWgHkD^%(g3D7#}hfSIB_&b%s;jX6PuBlgvlDc`n{XhOe zb6nLM;Lo#nu$i94D5K zNk)>e`CS5G_|UdU_cTg-5NfCnL>u2ToP|F{uYJ0_;RMv`_vSAJdF7-5(;W*{3XANB zC1PPJaRlbqg(G-XG@erR&Rb<%X-7)cMI7ENRbwPR&H~Iy8KD{zH=$Ibq*I;uR_z%P zbf zttR<-h%HweMeOZ~V5@W*7I+~jvkPpK5jr0#BvV)W61&r7Ltd|%@_@;e9DA7su~Ws6 z83n>wgW?J~9es!{g^XG!rw3RbI#G+>=OMsn0dZYW9W__$4?5?w`NJRF@lgP%Lr(ho zFuaJNb_^$TD8CDZl=4IFORQ5;Wobx~u+vU!rfcV=(}PtMOT0{>!UYQ88uKmO*|5PQ z-KT#f$7)hGz-}gjTylu4gC#SnGJeC3QWAkRJ{=a0SKflx6}_m+K?yYWAF#U-W}Vg- z%|i{Pdo+MNcaF`HE{r+)$ov~0m!kl-6Ertq{iG+u&rQO^Yt zK#K9$ofe1F&SCH21m~3;#$mH<>hvqJZ z`b2MmzgIS;GYfVHlF^Z;Gw>U?l_c-yN!dis1Ps%We#7JoxKN94?wdy#%;RXX{WJW9 z659c%WCreJ@uyVr5Y?a^S!GP;8|zUs;+lYRZjs3)%Qb zipKYAXUg@0igyBgQMvnz4dgbo#&*(^ZjF5>OE~ldF6s~8Kjq7{HkrbaJ}YLnmciXR zQe~(0h;z)AC4GBYt3u-u7^T0B;4jJ34y^TotC(&c9x6fHHJAHWI(kKtBX=j~nU_DL z>`h711syh~hI3Cz@jV=QiUw%qMr*sxm4yd`er8g<%pSt+T(1^T8$6Ba*M-U9~ZrpI6S1_Juwwbb*8{f?Gx_5d`tuxZN;?1u=A{L3|-L${cX6QZi zpkxfdHX2V;??igde&XN>KX?LK+_a$mOLu}35# z^?qsS0upKbK(4D?U);jijv>GSbgcvM)PgKn35CoEuWkDd9j64tz;9X`&t%`H)ibM| z(9lSNc$ExDk*8{Ra9|AIp^FF5hmt9W4rJe)rJ1HQoPw z@+aBs!2>6M_BqO8f^_5a22abKU*l0U3mH=BTKuqmfwTEbG74D z<(h3b$K4fI|JgxAD>@ha$6|x=6rtW~YED8xT{N-j$4mHSmcWIY=aNCYMh6HcLn4yi zedWJBn6w`{Y-Dl$`aa95<-F33;?UE5HUU(&^3Zer2lCXXh+pTw2t_~`o#vSx0JNIJL3;Zw-Ddv1f{At-`Yo& z=mu9E1PP##J{E{6*D1l{yR6at$I@@SN-Y4*tjY3Sf&I^n*%@4NA~-&(n{Fz=)Ro8X z$3MGhj3tk@CDGE0j8mTf{08Jr{ypY;B(s$L_w)fWYt7P_`;_66<)Qt`rKq$KREZh{ zw^MY2z2&s{&-_Jc(7i}bfI=ppA!xJ70c1Axab74&7hd%f3C?uUcGodzxI5pr?&=RB zcygCGcXJv3fDDDt3FoL?Q|*vEQPJhJsE@jlJh_BC`b^Zyz#nfBB(tlF!zp#Y(Xsh6 
zjaB`E$QE%DJbL0%i*v5e^X7WBa58J{mtRKpQ&G~qfF%n;NITm>hHhyy9#&Mf*ILXJ zXB}aSm33cKA7ac$k1f{P!s)@KVnR~=8%FJ$P-t!?M&@$g3<505>1((B{^a39M|SJX zuGDO@$PNUY>W+T%fkIpf?@(ayDwXhYUjmQ_@^%awa1tk`?++rbaZTjiYWI%!&l<J(d1qN$7)2U*7x^j2e(gf;7$EIGd{{$e|XL=9&{ zWlgc`-*)H_M_u^{PZmrF;1eoR{1>36;{g6->+4jf*C?CCn{o~T*$rkaiOWX{3nloO z0KxEZ0M#h#QZ%REi+B;RaK>Vmxh(3+iU_NFKvsS8C;WRKJL`OdpUk?HqKk=`znbJW zTl=q1Exd@rQGb|PJK|2s5n3?d!YmMOkkZh1o*aw{;ThG_g+ zW1?QzhDodM@O8wp5H96hC9a<+%5HW5-oTW(yr$zizNPRAS$RwKL1?5XWRWeZW%MhM z1e-;=aU3xYqfgE5$+Wnx+^2TqAy>TkDKCNy*gK+#y+T^$;2VXh-?w&B4U9|tyH<+I zfDBx)U?mZM@zIdW@dLr%fyV4_7-l4@ui2Y-#BZ3yl{ABm0hM7L=#}lF&OiFn=C%ls{M2RC@2}!w2DV!!l3yw!zZi@#Su}7A zQJ6K+Asrq$Pon?Bh{ks!^w{iG$J;&u#NQQ4^AnsMK*g<;B0&LCGBB zXKlw0L9XC>H4NQi4Y}-@z5W;~)~x)B$S>>5L^okH*9CxwjJ2%etsOo>SdEj_Lbb%1 z1_rE(z&!TByY;|J55hF7r|3^Y2Y))D#PZc&Evu{`)a0B8h%O==|CB>Myjqf@UP@Qh zE!3Xfzmo%}hEd#5%AyJ@xyq-W(l(TQ4bC8@k1YvyxUfA?K|ddr5TT2XMw-{sj;jy5 zMz;x5Y!HK=COXLn1iQc0&deTnY*7GaVdjzhHN$c;l+^w5$3P?eT0+@xfntT0TkP?S z+5QOjPPPiOUxnm(x2XQFu@=nyRF;a zIUG~Q*RJRlF=Khz!$|FE7v~QN@y^*7K1z`x)KS!E5769`+@RyQhhLvG{#Bd644ckC zl3rJKR6iCvhb75{rQrkN-H#~@eqnGCa#?b>S);gh#GW{OtdC6m zChEDxu-2%uRDIYMwZ-DEG)Q`7YJ;3(6OQsNY?d2PxBs{j1Qojbd9J;20?Tx;fW2Ti zO?tr_I~~SvSAn=f$JKlRDU`n5(zY@ULPEk!l>YQljjCW+%Q%at>>~AmRM3G5SZ>sH zUJ}N1Vt&C?qRrb!fpae^(}w)eCTa-O*`PM*{)mLV)ykD7%n%seuf~7*c6AkB4t^^P zh4zBdHKp}z?Pmi)mg|Ut6&d(1vzYs9v!XngT`Nv$DI&dyC^~9sdX{Q8k3Wc;+o%iS z&L-mKp*vc#X>F+q&kXzOX>u|yh*ps-g(2!-^x8bm`}b^H{TH4ygaUgr0h|H4Pz%yt zNYh))RCaveHqXq#^02Pqj{@2UZbOK6=S<|xdD7{mT)}X-k;{~E4K4=W!eAmsrW$LO za1T_%_*1F6u|1l&EWzK2xtVC1U&-oTOwVBpV;>!35(o`+<-@~ z!84#iCz=Ze-0EJ3Ns?^Af#H=Ywta1Ou*lbEf-iqYY66xaso+w9$5*jh%L)AEP{?*j zxD^v2^+j8?&sD0oF~LO!E9-@CKS=XZ*bzDq^wWVFy8*Wj-Fz~3qUkUzB=ih zkZ0%Ng2)v$M+%+l!}I64Wl{j?<)WQTVyCN)cGt)AMRfi@MAP-Z3t9#TW}eq=9J3ct zVzXk~VMQ|MZ_WYq^r(*OgG>CWh^w_!DW`0|5Ef++d>+4L@2F=x+%I&dEr@vGkOV)- zX}zjrH5|^SG?Z+sOE05=&?*(fQf1VgVvsqnD=h-Z2t^+AfzUT2oa$G~`N4)Xuv>C= zn(&`D@f5Q0iyXv7F{ANi`hENFr%H1bk;p!pUHF95p5-j|T-rq)MgDAKTOE&N9C_E; 
z582Kye^0UOmG!|xNIeAmCn4Cb{;d}*Jn=EKCZCX3u-l$_kx-6txQST>Z+f&B4JZId z-(o1re78Qge?PaCgIQf2xF65B^uSGCkDRD=3+|tx!QxN1*H=%Uf?6;zT&eJ%7<2bU zcxZ5zM^afR${@4&p^|MJcJ3OQ4l*n1oFnhY?CxB5xrpeKf_G-FkbkWtfWEsDb2sj_ z)jUjZ>KNx`?t9*6XT*Bg#63zC#Vz#?0_P$1-DP-1 zc{#3QA%kt=|JbitxijMc?) z3f#(%!K!H%9gtWWsmiT&#yYy}{(I!3M~GF?owi)^#c|0i`jBA|qRjcbkdGXLFJ&QprNF8~ z#b+%_6Qaefqobx|0LxV6+)w^=GE?vAnduAM6c=B|Y`eC>!KA_C@D>(d27Z;Ar4_tI z=Wtd-g2`CumR(0VY_$?j&pWPVo1H;l7KAPhbjq9WPf^g`FKVU=!`>~)Ugm6f#O$C- zvU3!@`IW}bBya5~Yxmtl#7mRh&|=C~hx@V=?8T)yzpiERU3j~RTeWo4{zC-KG8zi{EcEs9|N=$^*Ijyd@&_zV}IR;gIv*m0|17)O1*PXhZz#%_j#h%ax@aw{p z*YG;)>k3OosiJ!cy$VRD^B}g2iT%+IW9NmSbXy3_rB zEJ7YqVPh3W6ci5d4<^rwNP@EoXRDoG=TNWZkvDVvE{K5lq_@Q-m;H3OvaX|+?rX0I z+|_$2ErleeO=2ecj?ri(@bV=Z`=R-66z=Ce;XqdE;)4aFnhf&=$+ZY)znXGf^50_O z5^n(t#wYZ}Ba0`qw`2)e@kyaeL#QUFV{$xkDe$BO8 zn|0{h0Ssp5yIMbAF?+Qhs!r!Y*4bj9$$wufX2ceZDmye>m)veyBN*ccR(2Qo;A%3o zfP=I$V@o--1)aElO$aO%l#5(a-Q6xXmWNpI9-O}N1N->se90xMi;5|G-IlOBC1DP$ z-k-Zk8r-d}>%WTmRCzh(LJz9S{z`(?=Vse@{R_WFpgMXR#LRca6xOW2Uw0bo{pNF)0|Tv$c-aXMP=2wa=a2Ps{Fe?O8a)?EKHCWwx03#nyMgtJmF zzw1~^4AeqjGt{Db6mhwjK zQ?I_ZS-oi`voKHj3b7g7dNmnU~5Pq_nJ!&8SOAPsM z;Q!TG@ctY4|C>W{{(*2{Ma$8M2Dos3!&?(6Q5OHF+P|4sDMwLiPJWtoac-Y3)t3DS z6FS=6*Y#E1P)?jjSQYVuF&{q(c#b5tlgn?3wN3=jJ!#byqqYW=nic8skd{#RR4!%| F_#c+y-r4{F From 89fed07f47c01d12ea11c63ae6240e89bae48b85 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 17:36:18 +0000 Subject: [PATCH 245/279] wrap milestone args in aDict --- rocketwatch/plugins/milestones/milestones.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index 4ffef41c..b7afe755 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -2,6 +2,7 @@ import logging from pydantic import BaseModel +from web3.datastructures import MutableAttributeDict from rocketwatch import RocketWatch from utils 
import solidity @@ -77,7 +78,9 @@ async def check_for_new_events(self): f"Goal for milestone {milestone.id} has increased. Triggering Milestone!" ) embed = await assemble( - {"event_name": milestone.id, "result_value": value} + MutableAttributeDict( + {"event_name": milestone.id, "result_value": value} + ) ) payload.append( Event( From f6f9b24bb54fd86b5bde3acbc3d9948fd8e8e94a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 18:04:04 +0000 Subject: [PATCH 246/279] fix typing in lottery.py --- rocketwatch/plugins/lottery/lottery.py | 37 +++++++++++++++++--------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 1f2891c8..91404147 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -1,4 +1,5 @@ import logging +from typing import Literal, TypedDict from discord import Interaction from discord.app_commands import command @@ -14,13 +15,21 @@ log = logging.getLogger("rocketwatch.lottery") +Period = Literal["latest", "next"] + + +class ValidatorEntry(TypedDict): + validator: int + pubkey: str + node_operator: str + class Lottery(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot self.did_check = False - async def _check_indexes(self): + async def _check_indexes(self) -> None: if self.did_check: return log.debug("Checking indexes") @@ -32,7 +41,7 @@ async def _check_indexes(self): log.debug("Indexes checked") @timerun_async - async def load_sync_committee(self, period): + async def load_sync_committee(self, period: Period) -> None: assert period in ["latest", "next"] await self._check_indexes() h = await bacon.get_block("head") @@ -65,7 +74,9 @@ async def load_sync_committee(self, period): await col.delete_many({}) await col.bulk_write(payload) - async def get_validators_for_sync_committee_period(self, period): + async def get_validators_for_sync_committee_period( + 
self, period: Period + ) -> list[ValidatorEntry]: data = await self.bot.db[f"sync_committee_{period}"].aggregate( [ { @@ -91,11 +102,13 @@ async def get_validators_for_sync_committee_period(self, period): ) return await data.to_list() - async def generate_sync_committee_description(self, period): + async def generate_sync_committee_description(self, period: Period) -> str: await self.load_sync_committee(period) validators = await self.get_validators_for_sync_committee_period(period) # get stats about the current period stats = await self.bot.db.sync_committee_stats.find_one({"period": period}) + if stats is None: + return "No data available for this period." perc = len(validators) / 512 description = ( f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" @@ -114,20 +127,20 @@ async def generate_sync_committee_description(self, period): ) # node operators # gather count per - node_operators = {} + node_operator_counts: dict[str, int] = {} for v in validators: - if v["node_operator"] not in node_operators: - node_operators[v["node_operator"]] = 0 - node_operators[v["node_operator"]] += 1 + if v["node_operator"] not in node_operator_counts: + node_operator_counts[v["node_operator"]] = 0 + node_operator_counts[v["node_operator"]] += 1 # sort by count - node_operators = sorted( - node_operators.items(), key=lambda x: x[1], reverse=True + sorted_operators = sorted( + node_operator_counts.items(), key=lambda x: x[1], reverse=True ) description += "_Node Operators:_ " description += ", ".join( [ f"{count}x {await el_explorer_url(node_operator)}" - for node_operator, count in node_operators + for node_operator, count in sorted_operators ] ) return description @@ -151,5 +164,5 @@ async def lottery(self, interaction: Interaction): await interaction.followup.send(embeds=embeds) -async def setup(bot): +async def setup(bot: RocketWatch) -> None: await bot.add_cog(Lottery(bot)) From 6c66775b2c2ccae8151766a789a10e1b907f926d Mon Sep 17 00:00:00 2001 From: 
haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 18:10:19 +0000 Subject: [PATCH 247/279] refactor lottery.py --- rocketwatch/plugins/lottery/lottery.py | 129 ++++++++----------------- 1 file changed, 38 insertions(+), 91 deletions(-) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index 91404147..e5e14eb2 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -1,22 +1,18 @@ import logging -from typing import Literal, TypedDict +from typing import TypedDict from discord import Interaction from discord.app_commands import command from discord.ext import commands -from pymongo import InsertOne from rocketwatch import RocketWatch from utils.embeds import Embed, el_explorer_url from utils.shared_w3 import bacon from utils.solidity import BEACON_EPOCH_LENGTH, BEACON_START_DATE -from utils.time_debug import timerun_async from utils.visibility import is_hidden log = logging.getLogger("rocketwatch.lottery") -Period = Literal["latest", "next"] - class ValidatorEntry(TypedDict): validator: int @@ -24,115 +20,66 @@ class ValidatorEntry(TypedDict): node_operator: str +class SyncCommittee(TypedDict): + start_epoch: int + end_epoch: int + validators: list[ValidatorEntry] + + class Lottery(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - self.did_check = False - - async def _check_indexes(self) -> None: - if self.did_check: - return - log.debug("Checking indexes") - for period in ["latest", "next"]: - col = self.bot.db[f"sync_committee_{period}"] - await col.create_index("validator", unique=True) - await col.create_index("index", unique=True) - self.did_check = True - log.debug("Indexes checked") - @timerun_async - async def load_sync_committee(self, period: Period) -> None: - assert period in ["latest", "next"] - await self._check_indexes() + async def get_sync_committee_data(self, period: str) -> SyncCommittee: h = await bacon.get_block("head") 
sync_period = int(h["data"]["message"]["slot"]) // 32 // 256 if period == "next": sync_period += 1 data = (await bacon.get_sync_committee(sync_period * 256))["data"] - await self.bot.db.sync_committee_stats.replace_one( - {"period": period}, - { - "period": period, - "start_epoch": sync_period * 256, - "end_epoch": (sync_period + 1) * 256, - "sync_period": sync_period * 256, - }, - upsert=True, - ) - validators = data["validators"] - col = self.bot.db[f"sync_committee_{period}"] - # get unique validators from collection - validators_in_db = await col.distinct("validator") - if set(validators) == set(validators_in_db): - return - payload = [ - InsertOne({"index": i, "validator": int(validator)}) - for i, validator in enumerate(validators) - ] - async with self.bot.db.client.start_session() as session: # noqa: SIM117 - async with await session.start_transaction(): - await col.delete_many({}) - await col.bulk_write(payload) - - async def get_validators_for_sync_committee_period( - self, period: Period - ) -> list[ValidatorEntry]: - data = await self.bot.db[f"sync_committee_{period}"].aggregate( - [ - { - "$lookup": { - "from": "minipools", - "localField": "validator", - "foreignField": "validator_index", - "as": "entry", - } - }, - {"$match": {"entry": {"$ne": []}}}, - {"$replaceRoot": {"newRoot": {"$first": "$entry"}}}, + validators = [int(v) for v in data["validators"]] + projection = {"_id": 0, "validator_index": 1, "pubkey": 1, "node_operator": 1} + query = {"validator_index": {"$in": validators}} + minipool_results = await self.bot.db.minipools.find(query, projection).to_list() + megapool_results = await self.bot.db.megapool_validators.find( + query, projection + ).to_list() + results = minipool_results + megapool_results + return { + "start_epoch": sync_period * 256, + "end_epoch": (sync_period + 1) * 256, + "validators": [ { - "$project": { - "_id": 0, - "validator": "$validator_index", - "pubkey": 1, - "node_operator": 1, - } - }, - {"$match": 
{"node_operator": {"$ne": None}}}, - ] - ) - return await data.to_list() + "validator": r["validator_index"], + "pubkey": r["pubkey"], + "node_operator": r["node_operator"], + } + for r in results + if r.get("node_operator") is not None + ], + } - async def generate_sync_committee_description(self, period: Period) -> str: - await self.load_sync_committee(period) - validators = await self.get_validators_for_sync_committee_period(period) - # get stats about the current period - stats = await self.bot.db.sync_committee_stats.find_one({"period": period}) - if stats is None: - return "No data available for this period." + async def generate_sync_committee_description(self, period: str) -> str: + data = await self.get_sync_committee_data(period) + validators = data["validators"] perc = len(validators) / 512 description = ( f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" ) start_timestamp = BEACON_START_DATE + ( - stats["start_epoch"] * BEACON_EPOCH_LENGTH + data["start_epoch"] * BEACON_EPOCH_LENGTH ) - description += f"_Start:_ Epoch {stats['start_epoch']} ()\n" - end_timestamp = BEACON_START_DATE + (stats["end_epoch"] * BEACON_EPOCH_LENGTH) - description += f"_End:_ Epoch {stats['end_epoch']} ()\n" - # validators (called minipools here) - # sort validators + description += f"_Start:_ Epoch {data['start_epoch']} ()\n" + end_timestamp = BEACON_START_DATE + (data["end_epoch"] * BEACON_EPOCH_LENGTH) + description += f"_End:_ Epoch {data['end_epoch']} ()\n" validators.sort(key=lambda x: x["validator"]) description += ( - f"_Minipools:_ `{', '.join(str(v['validator']) for v in validators)}`\n" + f"_Validators:_ `{', '.join(str(v['validator']) for v in validators)}`\n" ) - # node operators - # gather count per node_operator_counts: dict[str, int] = {} for v in validators: if v["node_operator"] not in node_operator_counts: node_operator_counts[v["node_operator"]] = 0 node_operator_counts[v["node_operator"]] += 1 - # sort by count sorted_operators = sorted( 
node_operator_counts.items(), key=lambda x: x[1], reverse=True ) @@ -153,11 +100,11 @@ async def lottery(self, interaction: Interaction): await interaction.response.defer(ephemeral=is_hidden(interaction)) embeds = [ Embed( - title="Current sync committee:", + title="Current Sync Committee", description=await self.generate_sync_committee_description("latest"), ), Embed( - title="Next sync committee:", + title="Next Sync Committee", description=await self.generate_sync_committee_description("next"), ), ] From f78ec763b9e9fc1ca63f58d15d167f6e2dbfb94c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Fri, 20 Mar 2026 18:24:49 +0000 Subject: [PATCH 248/279] refactor some more --- rocketwatch/plugins/lottery/lottery.py | 53 ++++++++++++++------------ 1 file changed, 28 insertions(+), 25 deletions(-) diff --git a/rocketwatch/plugins/lottery/lottery.py b/rocketwatch/plugins/lottery/lottery.py index e5e14eb2..73b5d30b 100644 --- a/rocketwatch/plugins/lottery/lottery.py +++ b/rocketwatch/plugins/lottery/lottery.py @@ -30,12 +30,10 @@ class Lottery(commands.Cog): def __init__(self, bot: RocketWatch): self.bot = bot - async def get_sync_committee_data(self, period: str) -> SyncCommittee: - h = await bacon.get_block("head") - sync_period = int(h["data"]["message"]["slot"]) // 32 // 256 - if period == "next": - sync_period += 1 - data = (await bacon.get_sync_committee(sync_period * 256))["data"] + COMMITTEE_SIZE = 512 + + async def get_sync_committee_data(self, period: int) -> SyncCommittee: + data = (await bacon.get_sync_committee(period * 256))["data"] validators = [int(v) for v in data["validators"]] projection = {"_id": 0, "validator_index": 1, "pubkey": 1, "node_operator": 1} query = {"validator_index": {"$in": validators}} @@ -43,37 +41,34 @@ async def get_sync_committee_data(self, period: str) -> SyncCommittee: megapool_results = await self.bot.db.megapool_validators.find( query, projection ).to_list() - results = minipool_results + 
megapool_results return { - "start_epoch": sync_period * 256, - "end_epoch": (sync_period + 1) * 256, + "start_epoch": period * 256, + "end_epoch": (period + 1) * 256, "validators": [ { - "validator": r["validator_index"], - "pubkey": r["pubkey"], - "node_operator": r["node_operator"], + "validator": result["validator_index"], + "pubkey": result["pubkey"], + "node_operator": result["node_operator"], } - for r in results - if r.get("node_operator") is not None + for result in (minipool_results + megapool_results) + if result.get("node_operator") is not None ], } - async def generate_sync_committee_description(self, period: str) -> str: + async def generate_sync_committee_description(self, period: int) -> str: data = await self.get_sync_committee_data(period) validators = data["validators"] - perc = len(validators) / 512 - description = ( - f"_Rocket Pool Participation:_ {len(validators)}/512 ({perc:.2%})\n" - ) + perc = len(validators) / Lottery.COMMITTEE_SIZE + description = f"**Rocket Pool Participation**: {len(validators)}/{Lottery.COMMITTEE_SIZE} ({perc:.2%})\n" start_timestamp = BEACON_START_DATE + ( data["start_epoch"] * BEACON_EPOCH_LENGTH ) - description += f"_Start:_ Epoch {data['start_epoch']} ()\n" + description += f"**Start**: Epoch {data['start_epoch']} ()\n" end_timestamp = BEACON_START_DATE + (data["end_epoch"] * BEACON_EPOCH_LENGTH) - description += f"_End:_ Epoch {data['end_epoch']} ()\n" + description += f"**End**: Epoch {data['end_epoch']} ()\n" validators.sort(key=lambda x: x["validator"]) description += ( - f"_Validators:_ `{', '.join(str(v['validator']) for v in validators)}`\n" + f"**Validators**: `{', '.join(str(v['validator']) for v in validators)}`\n" ) node_operator_counts: dict[str, int] = {} for v in validators: @@ -83,7 +78,7 @@ async def generate_sync_committee_description(self, period: str) -> str: sorted_operators = sorted( node_operator_counts.items(), key=lambda x: x[1], reverse=True ) - description += "_Node Operators:_ " + 
description += "**Node Operators**: " description += ", ".join( [ f"{count}x {await el_explorer_url(node_operator)}" @@ -98,14 +93,22 @@ async def lottery(self, interaction: Interaction): Get the status of the current and next sync committee. """ await interaction.response.defer(ephemeral=is_hidden(interaction)) + + header = await bacon.get_block("head") + current_period = int(header["data"]["message"]["slot"]) // 32 // 256 + embeds = [ Embed( title="Current Sync Committee", - description=await self.generate_sync_committee_description("latest"), + description=await self.generate_sync_committee_description( + current_period + ), ), Embed( title="Next Sync Committee", - description=await self.generate_sync_committee_description("next"), + description=await self.generate_sync_committee_description( + current_period + 1 + ), ), ] await interaction.followup.send(embeds=embeds) From b68fcbf4ac4e31fcc9cfb34bdb474aa3f2586a3b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 20 Mar 2026 21:47:49 +0000 Subject: [PATCH 249/279] chore(deps): update mongo docker tag to v8.2.6 --- compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yaml b/compose.yaml index d1474c89..6793f86f 100644 --- a/compose.yaml +++ b/compose.yaml @@ -19,7 +19,7 @@ services: com.centurylinklabs.watchtower.enable: true mongodb: - image: mongo:8.2.5 + image: mongo:8.2.6 volumes: - ./mongodb:/data/db restart: unless-stopped From a003c3cba0ed3ddd696c6e12fe1ec5d724c75eef Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 07:46:21 +0000 Subject: [PATCH 250/279] rework CoW module --- rocketwatch/config.toml.sample | 1 + rocketwatch/contracts/GPv2Settlement.abi.json | 51 +++ rocketwatch/plugins/cow_orders/cow_orders.py | 318 +++++++----------- rocketwatch/strings/embeds.en.json | 24 +- rocketwatch/utils/embeds.py | 20 +- 5 files changed, 188 insertions(+), 226 deletions(-) 
create mode 100644 rocketwatch/contracts/GPv2Settlement.abi.json diff --git a/rocketwatch/config.toml.sample b/rocketwatch/config.toml.sample index a70271cc..bbe3862a 100644 --- a/rocketwatch/config.toml.sample +++ b/rocketwatch/config.toml.sample @@ -63,6 +63,7 @@ BalancerVault = "0xBA12222222228d8Ba445958a75a0704d566BF2C8" UniV3_USDC_ETH = "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640" UniV3_rETH_ETH = "0x553e9C493678d8606d6a5ba284643dB2110Df823" RockSolidVault = "0x936faCdf10c8c36294e7b9d28345255539d81bc7" +GPv2Settlement = "0x9008D19f58AAbD9eD0D60971565AA8510560ab41" [modules] include = [] diff --git a/rocketwatch/contracts/GPv2Settlement.abi.json b/rocketwatch/contracts/GPv2Settlement.abi.json new file mode 100644 index 00000000..451d6a85 --- /dev/null +++ b/rocketwatch/contracts/GPv2Settlement.abi.json @@ -0,0 +1,51 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": false, + "internalType": "contract IERC20", + "name": "sellToken", + "type": "address" + }, + { + "indexed": false, + "internalType": "contract IERC20", + "name": "buyToken", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "sellAmount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "buyAmount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "feeAmount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "orderUid", + "type": "bytes" + } + ], + "name": "Trade", + "type": "event" + } +] diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index 7a285090..c59a6900 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -1,11 +1,14 @@ import contextlib import logging -from datetime import UTC, datetime, timedelta +from typing import 
Any, TypedDict, cast -import aiohttp from discord import Interaction from discord.app_commands import command +from eth_typing import BlockNumber, ChecksumAddress +from hexbytes import HexBytes +from web3.contract import AsyncContract from web3.datastructures import MutableAttributeDict as aDict +from web3.types import EventData from rocketwatch import RocketWatch from utils import solidity @@ -18,17 +21,38 @@ log = logging.getLogger("rocketwatch.cow_orders") -class CowOrders(EventPlugin): - def __init__(self, bot: RocketWatch): - super().__init__(bot, timedelta(minutes=5)) - self.state = "OK" - self.collection = bot.db.cow_orders - self._did_setup = False - self.tokens = None +class CoWTradeArgs(TypedDict): + owner: ChecksumAddress + sellToken: ChecksumAddress + buyToken: ChecksumAddress + sellAmount: int + buyAmount: int + feeAmount: int + orderUid: bytes + + +class CoWOrders(EventPlugin): + def __init__(self, bot: RocketWatch) -> None: + super().__init__(bot) + self._settlement: AsyncContract | None = None + self._trade_topic: HexBytes | None = None + self._tokens: list[str] | None = None + + async def _ensure_setup(self) -> None: + if self._settlement is None: + self._settlement = await rp.get_contract_by_name("GPv2Settlement") + # Trade(address,address,address,uint256,uint256,uint256,bytes) + self._trade_topic = w3.keccak( + text="Trade(address,address,address,uint256,uint256,uint256,bytes)" + ) + if self._tokens is None: + self._tokens = [ + str(await rp.get_address_by_name("rocketTokenRPL")).lower(), + str(await rp.get_address_by_name("rocketTokenRETH")).lower(), + ] @command() - async def cow(self, interaction: Interaction, tnx: str): - # https://etherscan.io/tx/0x47d96c6310f08b473f2c9948d6fbeef1084f0b393c2263d2fc8d5dc624f97fe3 + async def cow(self, interaction: Interaction, tnx: str) -> None: if "etherscan.io/tx/" not in tnx: await interaction.response.send_message("nop", ephemeral=True) return @@ -38,235 +62,119 @@ async def cow(self, interaction: 
Interaction, tnx: str): embed = Embed(description=f"[cow explorer]({url})") await interaction.followup.send(embed=embed) - async def _setup_collection(self): - if self._did_setup: - return - if "cow_orders" not in await self.bot.db.list_collection_names(): - await self.bot.db.create_collection("cow_orders", capped=True, size=10_000) - await self.collection.create_index("order_uid", unique=True) - self._did_setup = True - - async def _ensure_tokens(self): - if self.tokens is None: - self.tokens = [ - str(await rp.get_address_by_name("rocketTokenRPL")).lower(), - str(await rp.get_address_by_name("rocketTokenRETH")).lower(), - ] - async def _get_new_events(self) -> list[Event]: - await self._ensure_tokens() - await self._setup_collection() - if self.state == "RUNNING": - log.error( - "Cow Orders plugin was interrupted while running. Re-initializing..." - ) - self.__init__(self.bot) - self.state = "RUNNING" - try: - result = await self.check_for_new_events() - self.state = "OK" - except Exception as e: - log.error(f"Error while checking for new Cow Orders: {e}") - result = [] - self.state = "ERROR" - return result - - # noinspection PyTypeChecker - async def check_for_new_events(self): - log.info("Checking Cow Orders") - payload = [] - - # get all pending orders from the cow api (https://api.cow.fi/mainnet/api/v1/auction) - - async with ( - aiohttp.ClientSession() as session, - session.get("https://api.cow.fi/mainnet/api/v1/auction") as response, - ): - if response.status != 200: - text = await response.text() - log.error("Cow API returned non-200 status code: %s", text) - raise Exception("Cow API returned non-200 status code") - cow_orders = (await response.json())["orders"] + from_block = self.last_served_block + 1 - self.lookback_distance + return await self.get_past_events(BlockNumber(from_block), self._pending_block) + + async def get_past_events( + self, from_block: BlockNumber, to_block: BlockNumber + ) -> list[Event]: + await self._ensure_setup() + assert 
self._settlement is not None + assert self._trade_topic is not None + assert self._tokens is not None + + logs = await w3.eth.get_logs( + { + "address": self._settlement.address, + "topics": [self._trade_topic], + "fromBlock": from_block, + "toBlock": to_block, + } + ) - """ - entity example: - { - "creationDate": "2023-01-25T04:48:02.751347Z", - "owner": "0x40586600a136652f6d0a6cc6a62b6bd1bef7ae9a", - "uid": "0x...", - "availableBalance": "108475037", - "executedBuyAmount": "0", - "executedSellAmount": "0", - "executedSellAmountBeforeFees": "0", - "executedFeeAmount": "0", - "invalidated": false, - "status": "open", - "class": "limit", - "surplusFee": "10050959", - "surplusFeeTimestamp": "2023-01-26T14:51:51.453450Z", - "executedSurplusFee": null, - "settlementContract": "0x9008d19f58aabd9ed0d60971565aa8510560ab41", - "fullFeeAmount": "13254445", - "isLiquidityOrder": false, - "sellToken": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "buyToken": "0x347a96a5bd06d2e15199b032f46fb724d6c73047", - "receiver": "0x40586600a136652f6d0a6cc6a62b6bd1bef7ae9a", - "sellAmount": "20000000", - "buyAmount": "17091759130902", - "validTo": 1675226872, - "appData": "0xc1164815465bff632c198b8455e9a421c07e8ce426c8cd1b59eef7b305b8ca90", - "feeAmount": "0", - "kind": "sell", - "partiallyFillable": false, - "sellTokenBalance": "erc20", - "buyTokenBalance": "erc20", - "signingScheme": "eip712", - "signature": "0x...", - "interactions": { - "pre": [ - ] - } - }, - """ + if not logs: + return [] - # filter all orders that do not contain RPL - cow_orders = [ - order - for order in cow_orders - if order["sellToken"] in self.tokens or order["buyToken"] in self.tokens + # decode logs into Trade events + trades: list[EventData] = [ + self._settlement.events.Trade().process_log(raw_log) for raw_log in logs ] - # filter all orders that are not open - cow_orders = [order for order in cow_orders if order["executed"] == "0"] - - # efficiently check if the orders are already in the database - 
order_uids = [order["uid"] for order in cow_orders] - existing_orders = self.collection.find({"order_uid": {"$in": order_uids}}) - existing_order_uids = [order["order_uid"] async for order in existing_orders] - - # filter all orders that are already in the database - cow_orders = [ - order for order in cow_orders if order["uid"] not in existing_order_uids + # filter for RPL/rETH trades + trades = [ + t + for t in trades + if t["args"]["sellToken"].lower() in self._tokens + or t["args"]["buyToken"].lower() in self._tokens ] - if not cow_orders: + if not trades: return [] - # get rpl price in dai + + # get prices for USD threshold rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) reth_ratio = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) eth_usdc_price = await rp.get_eth_usdc_price() rpl_price = rpl_ratio * eth_usdc_price reth_price = reth_ratio * eth_usdc_price - # generate payloads - for order in cow_orders: - data = aDict({}) - - data["cow_uid"] = order["uid"] - data["cow_owner"] = w3.to_checksum_address(order["owner"]) - decimals = 18 - # base the event_name depending on if its buying or selling RPL - if order["sellToken"] in self.tokens: - token = "reth" if order["sellToken"] == self.tokens[1] else "rpl" - data["event_name"] = f"cow_order_sell_{token}_found" - # token/token ratio - data["ratio"] = int(order["sellAmount"]) / int(order["buyAmount"]) - # store rpl and other token amount - data["ourAmount"] = solidity.to_float(int(order["sellAmount"])) - s = await rp.assemble_contract( - name="ERC20", address=w3.to_checksum_address(order["buyToken"]) - ) + events: list[Event] = [] + for trade in trades: + args = cast(CoWTradeArgs, trade["args"]) + data: aDict[str, Any] = aDict({}) + + data["cow_uid"] = f"0x{args['orderUid'].hex()}" + data["cow_owner"] = w3.to_checksum_address(args["owner"]) + data["transactionHash"] = trade["transactionHash"].to_0x_hex() + + sell_token: str = args["sellToken"].lower() + buy_token: 
str = args["buyToken"].lower() + + if sell_token in self._tokens: + token = "reth" if sell_token == self._tokens[1] else "rpl" + data["event_name"] = f"cow_order_sell_{token}" + data["ourAmount"] = solidity.to_float(args["sellAmount"]) + other_address = w3.to_checksum_address(args["buyToken"]) + decimals = 18 + s = await rp.assemble_contract(name="ERC20", address=other_address) with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - data["otherAmount"] = solidity.to_float( - int(order["buyAmount"]), decimals - ) + data["otherAmount"] = solidity.to_float(args["buyAmount"], decimals) else: - token = "reth" if order["buyToken"] == self.tokens[1] else "rpl" - data["event_name"] = f"cow_order_buy_{token}_found" - # store rpl and other token amount - data["ourAmount"] = solidity.to_float(int(order["buyAmount"])) - s = await rp.assemble_contract( - name="ERC20", address=w3.to_checksum_address(order["sellToken"]) - ) + token = "reth" if buy_token == self._tokens[1] else "rpl" + data["event_name"] = f"cow_order_buy_{token}" + data["ourAmount"] = solidity.to_float(args["buyAmount"]) + other_address = w3.to_checksum_address(args["sellToken"]) + decimals = 18 + s = await rp.assemble_contract(name="ERC20", address=other_address) with contextlib.suppress(Exception): decimals = await s.functions.decimals().call() - data["otherAmount"] = solidity.to_float( - int(order["sellAmount"]), decimals - ) - # our/other ratio + data["otherAmount"] = solidity.to_float(args["sellAmount"], decimals) + data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] + + # skip trades under minimum value + if ((token == "rpl") and (data["ourAmount"] * rpl_price < 10_000)) or ( + (token == "reth") and (data["ourAmount"] * reth_price < 100_000) + ): + continue + try: data["otherToken"] = await s.functions.symbol().call() except Exception: data["otherToken"] = "UNKWN" - if s.address == w3.to_checksum_address( + if other_address == w3.to_checksum_address( 
"0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee" ): data["otherToken"] = "ETH" - data["deadline"] = int(order["validTo"]) - # if the rpl value in usd is less than 25k, ignore it - if ( - data["ourAmount"] * (rpl_price if token == "rpl" else reth_price) - < 25000 - ): - continue - - # request more data from the api - try: - async with ( - aiohttp.ClientSession() as session, - session.get( - f"https://cow-proxy.invis.workers.dev/mainnet/api/v1/orders/{order['uid']}" - ) as t, - ): - if t.status != 200: - log.error( - f"Failed to get more data from the cow api for order {order['uid']}: {await t.text()}" - ) - continue - extra = await t.json() - except Exception as e: - log.error( - f"Failed to get more data from the cow api for order {order['uid']}: {e}" - ) - continue - - if extra: - if extra["invalidated"]: - log.info(f"Order {order['uid']} is invalidated, skipping") - continue - created = datetime.fromisoformat( - extra["creationDate"].replace("Z", "+00:00") - ) - if datetime.now(UTC) - created > timedelta(minutes=15): - log.info(f"Order {order['uid']} is older than 15 minutes, skipping") - continue - data["timestamp"] = int(created.timestamp()) data = await prepare_args(data) embed = await assemble(data) - payload.append( + events.append( Event( embed=embed, - topic="cow_orders", - block_number=self._pending_block, + topic="cow_trade", + block_number=BlockNumber(trade["blockNumber"]), event_name=data["event_name"], - unique_id=f"cow_order_found_{order['uid']}", + unique_id=f"cow_trade_{trade['transactionHash'].hex()}:{trade['logIndex']}", + transaction_index=trade["transactionIndex"], + event_index=trade["logIndex"], ) ) - # don't emit if the db collection is empty - this is to prevent the bot from spamming the channel with stale data - if not await self.collection.count_documents({}): - payload = [] - - # insert all new orders into the database - await self.collection.insert_many( - [{"order_uid": order["uid"]} for order in cow_orders] - ) - log.debug("Finished 
Checking Cow Orders") - return payload + return events -async def setup(bot): - await bot.add_cog(CowOrders(bot)) +async def setup(bot: RocketWatch) -> None: + await bot.add_cog(CoWOrders(bot)) diff --git a/rocketwatch/strings/embeds.en.json b/rocketwatch/strings/embeds.en.json index be34dca8..ea772afd 100644 --- a/rocketwatch/strings/embeds.en.json +++ b/rocketwatch/strings/embeds.en.json @@ -531,21 +531,21 @@ "title": ":money_with_wings: Large stETH Withdrawal Requested", "description": "%{owner} has requested a withdrawal of **%{amountOfStETH} stETH**!" }, - "cow_order_buy_rpl_found": { - "title": ":cow: BUY Order Found", - "description": "%{cow_owner} has placed a buy order for %{ourAmount} RPL!\n Exchanging %{otherAmount} %{otherToken} for %{ourAmount} RPL (%{ratioAmount} RPL/%{otherToken})\nExpires: %{deadline}" + "cow_order_buy_rpl": { + "title": ":cow: RPL Buy", + "description": "%{cow_owner} bought **%{ourAmount} RPL** for %{otherAmount} %{otherToken}!" }, - "cow_order_buy_reth_found": { - "title": ":cow: rETH Order Found", - "description": "%{cow_owner} has placed a buy order for %{ourAmount} rETH!\n Exchanging %{otherAmount} %{otherToken} for %{ourAmount} rETH (%{ratioAmount} rETH/%{otherToken})\nExpires: %{deadline}" + "cow_order_buy_reth": { + "title": ":cow: rETH Buy", + "description": "%{cow_owner} bought **%{ourAmount} rETH** for %{otherAmount} %{otherToken}!" }, - "cow_order_sell_rpl_found": { - "title": ":cow: SELL Order Found", - "description": "%{cow_owner} has placed a sell order for %{ourAmount} RPL!\n Exchanging %{ourAmount} RPL for %{otherAmount} %{otherToken} (%{ratioAmount} RPL/%{otherToken})\nExpires: %{deadline}" + "cow_order_sell_rpl": { + "title": ":cow: RPL Sell", + "description": "%{cow_owner} sold **%{ourAmount} RPL** for %{otherAmount} %{otherToken}!" 
}, - "cow_order_sell_reth_found": { - "title": ":cow: rETH Order Found", - "description": "%{cow_owner} has placed a sell order for %{ourAmount} rETH!\n Exchanging %{ourAmount} rETH for %{otherAmount} %{otherToken} (%{ratioAmount} rETH/%{otherToken})\nExpires: %{deadline}" + "cow_order_sell_reth": { + "title": ":cow: rETH Sell", + "description": "%{cow_owner} sold **%{ourAmount} rETH** for %{otherAmount} %{otherToken}!" }, "finality_delay_event": { "title": ":warning: Finality Delay On Beacon Chain", diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index 5124a6a7..fd7bf35c 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -270,7 +270,6 @@ async def prepare_args(args): address = w3.to_checksum_address(arg_value) prefix = await get_sea_creature_for_address(address) - # handle validators if arg_key == "pubkey": args[arg_key] = await cl_explorer_url(arg_value) elif arg_key == "cow_uid": @@ -291,17 +290,20 @@ async def prepare_args(args): async def assemble(args) -> Embed: e = Embed() - if args.event_name in ["service_interrupted", "finality_delay_event"]: - e.colour = Color.from_rgb(235, 86, 86) - if "sell_rpl" in args.event_name: - e.colour = Color.from_rgb(235, 86, 86) if ( + args.event_name in ["service_interrupted", "finality_delay_event"] + or "sell_rpl" in args.event_name + or "sell_reth" in args.event_name + ): + e.colour = Color.from_rgb(235, 86, 86) # red + elif ( "buy_rpl" in args.event_name + or "buy_reth" in args.event_name or "finality_delay_recover_event" in args.event_name ): - e.colour = Color.from_rgb(86, 235, 86) - if "price_update_event" in args.event_name: - e.colour = Color.from_rgb(86, 235, 235) + e.colour = Color.from_rgb(86, 235, 86) # green + elif "price_update_event" in args.event_name: + e.colour = Color.from_rgb(86, 235, 235) # pink # do this here before the amounts are converted to a string amount = args.get("amount") or args.get("ethAmount", 0) @@ -425,7 +427,7 @@ async def assemble(args) 
-> Embed: e.description = _(f"embeds.{args.event_name}.description", **args) if "cow_uid" in args: - e.add_field(name="Cow Order", value=args.cow_uid, inline=False) + e.add_field(name="CoW Order", value=args.cow_uid, inline=False) if "exchangeRate" in args: e.add_field( From f6805a6e1a13a27aaf69a509cc07d4385a1e6867 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 07:53:15 +0000 Subject: [PATCH 251/279] fix scam detection false positive --- .../plugins/scam_detection/scam_detection.py | 14 ++++++++++++-- tests/message_samples.json | 9 +++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 26820533..38868308 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -470,8 +470,18 @@ def _ticket_system(self, message: Message) -> str | None: [("instant", "live"), "chat"], [("submit"), ("question", "issue", "query")], ) - txt_no_urls = re.sub(r"https?://\S+", "", txt) - if self.__txt_contains(txt_no_urls, strong_keywords): + content_only = txt.split("---")[0] + # Auto-generated embeds from video platforms may contain event/ticket + # language (e.g. YouTube 🎫 TICKETS) — only check content for those. 
+ rich_embed_domains = ("youtube.com", "youtu.be", "twitch.tv") + content_urls = list(self.basic_url_pattern.finditer(content_only)) + if content_urls and all( + any(d in m.group(0) for d in rich_embed_domains) for m in content_urls + ): + strong_check_text = re.sub(r"https?://\S+", "", content_only) + else: + strong_check_text = re.sub(r"https?://\S+", "", txt) + if self.__txt_contains(strong_check_text, strong_keywords): return default_reason # Short directive messages with a URL ("ask here", "get help here") diff --git a/tests/message_samples.json b/tests/message_samples.json index 8a21f62c..a59ec8f4 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -481,6 +481,15 @@ "description": "#BREAKING: Iranian supreme leader confirmed in a coma, and had his leg amputated." } ] + }, + { + "content": "https://www.youtube.com/watch?v=7aTRXZli4zg", + "embeds": [ + { + "title": "Onboard: #3 | Mercedes-AMG Team Verstappen Racing | Mercedes-AMG GT...", + "description": "Buckle up and follow the race from the cockpit perspective.\n\n\ud83c\udfab TICKETS\n\u27a1\ufe0f https://vln.de/tickets\n\n\ud83d\udca5 ENTRY LIST\n\u27a1\ufe0f https://www.nuerburgring-langstrecken-serie.de/wp-content/uploads/ergebnisse/2026-03-21s.pdf\n\n\ud83d\udca5 ALL NLS LIVESTREAMS\nStream with \ud83c\udde9\ud83c\uddea commentary \u27a1\ufe0f https://youtube.com/live/0F6vg-_iGR8\nStream with \ud83c\uddec\ud83c\udde7 com..." 
+ } + ] } ], "unsafe": [ From 466cc7fdf458d9469a351b2c2df11e2a783e072b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 12:58:00 +0000 Subject: [PATCH 252/279] fix typing in apr.py --- pyproject.toml | 8 ---- rocketwatch/plugins/apr/apr.py | 43 ++++++++++++---------- rocketwatch/plugins/proposals/proposals.py | 10 +++-- rocketwatch/plugins/rocksolid/rocksolid.py | 7 ++-- 4 files changed, 35 insertions(+), 33 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d8592199..c8fef924 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,14 +76,6 @@ module = [ ] disable_error_code = ["attr-defined", "union-attr", "index"] -# Matplotlib/numpy stub gaps -[[tool.mypy.overrides]] -module = [ - "rocketwatch.plugins.apr.*", - "rocketwatch.plugins.proposals.*", -] -disable_error_code = ["attr-defined"] - [tool.ruff] target-version = "py312" diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index c45df25a..01e8fb39 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -4,10 +4,12 @@ from io import BytesIO import matplotlib.pyplot as plt +import numpy as np from discord import File, Interaction from discord.app_commands import command from discord.ext import commands, tasks from matplotlib.dates import DateFormatter +from matplotlib.ticker import FuncFormatter from rocketwatch import RocketWatch from utils import solidity @@ -49,7 +51,7 @@ async def task(self): # get latest block update from the db latest_db_block = await self.bot.db.reth_apr.find_one(sort=[("block", -1)]) latest_db_block = 0 if latest_db_block is None else latest_db_block["block"] - cursor_block = (await w3_archive.eth.get_block("latest"))["number"] + cursor_block = (await w3_archive.eth.get_block("latest")).get("number", 0) while True: # get address of rocketNetworkBalances contract at cursor block address = await rp.uncached_get_address_by_name( @@ -62,7 +64,7 @@ async def task(self): ) if 
balance_block == latest_db_block: break - block_time = (await w3.eth.get_block(balance_block))["timestamp"] + block_time = (await w3.eth.get_block(balance_block)).get("timestamp", 0) # abort if the blocktime is older than 120 days if block_time < (datetime.now().timestamp() - 120 * 24 * 60 * 60): break @@ -91,7 +93,8 @@ async def before_loop(self): await self.bot.wait_until_ready() @task.error - async def on_error(self, err: Exception): + async def on_error(self, err: BaseException): + assert isinstance(err, Exception) await self.bot.report_error(err) @command() @@ -209,12 +212,13 @@ async def reth_apr(self, interaction: Interaction): value=f"{y_7d_virtual[-1]:.2%}", inline=False, ) + x_arr = np.array(x) fig = plt.figure() ax1 = plt.gca() - ax2 = plt.twinx() + ax2: plt.Axes = plt.twinx() # type: ignore[assignment] ax2.plot( - x, + x_arr, y, marker="+", linestyle="", @@ -222,24 +226,24 @@ async def reth_apr(self, interaction: Interaction): alpha=0.6, color="orange", ) - # ax2.plot(x, y_virtual, marker="x", linestyle="", label="Period Average (Virtual)", alpha=0.4) - # ax2.plot(x, y_node_operators, marker="+", linestyle="", label="Node Operator APR", alpha=0.4) + # ax2.plot(x_arr, y_virtual, marker="x", linestyle="", label="Period Average (Virtual)", alpha=0.4) + # ax2.plot(x_arr, y_node_operators, marker="+", linestyle="", label="Node Operator APR", alpha=0.4) ax2.plot( - x, + x_arr, y_7d, linestyle="-", label=f"{y_7d_claim:.1f} Day Average", color="orange", ) ax2.plot( - x, + x_arr, y_7d_virtual, linestyle="-", label=f"{y_7d_claim:.1f} Day Average (Virtual)", color="green", ) ax1.plot( - x, + x_arr, y_effectiveness, linestyle="--", label="Effectiveness", @@ -250,13 +254,13 @@ async def reth_apr(self, interaction: Interaction): plt.title("Observed rETH APR values") plt.xlabel("Date") plt.grid(True) - plt.xlim(left=x[38]) + plt.xlim(left=x_arr[38]) plt.xticks(rotation=45) old_formatter = plt.gca().xaxis.get_major_formatter() 
plt.gca().xaxis.set_major_formatter(DateFormatter("%b %d")) - ax2.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) - ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) + ax2.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) + ax1.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.set_ylabel("Effectiveness") ax2.set_ylabel("APR") ax1.set_ylim(top=1) @@ -424,12 +428,13 @@ async def node_apr(self, interaction: Interaction): inline=False, ) + x_arr = np.array(x) fig = plt.figure() ax1 = plt.gca() # solo apr ax1.plot( - x, + x_arr, y_7d_node_operators_leb8_14, linestyle="-.", label=f"{y_7d_claim:.1f} Day Average (leb8 14%)", @@ -438,7 +443,7 @@ async def node_apr(self, interaction: Interaction): ) # use area to show region between leb16 20% and leb16 5%. use a spare dotted fill to show the region between ax1.fill_between( - x, + x_arr, y_7d_node_operators_leb16_20, y_7d_node_operators_leb16_05, alpha=0.2, @@ -447,7 +452,7 @@ async def node_apr(self, interaction: Interaction): ) # plot the leb16 14% line ax1.plot( - x, + x_arr, y_7d_node_operators_leb16_14, linestyle="--", label=f"{y_7d_claim:.1f} Day Average (leb16 14%)", @@ -455,7 +460,7 @@ async def node_apr(self, interaction: Interaction): alpha=0.5, ) ax1.plot( - x, + x_arr, y_7d_solo, linestyle=":", label=f"{y_7d_claim:.1f} Day Average (solo)", @@ -465,13 +470,13 @@ async def node_apr(self, interaction: Interaction): plt.title("Observed NO APR values") plt.grid(True) - plt.xlim(left=x[38]) + plt.xlim(left=x_arr[38]) plt.xticks(rotation=0) plt.ylim(bottom=0.02) old_formatter = plt.gca().xaxis.get_major_formatter() plt.gca().xaxis.set_major_formatter(DateFormatter("%m.%d")) - ax1.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, loc: f"{x:.1%}")) + ax1.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.legend(loc="lower left") img = BytesIO() diff --git a/rocketwatch/plugins/proposals/proposals.py 
b/rocketwatch/plugins/proposals/proposals.py index febdefd3..9a2bb750 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -5,6 +5,7 @@ from datetime import datetime, timedelta from io import BytesIO +import numpy as np from aiohttp.client_exceptions import ClientResponseError from cronitor import Monitor from discord import File, Interaction @@ -399,8 +400,9 @@ async def version_chart(self, interaction: Interaction, days: int = 90): for v in versions ] # add percentage to labels + x_arr = np.array(x) ax = plt.subplot(111, frameon=False) - plt.stackplot(x, *y.values(), labels=labels, colors=colors) + plt.stackplot(x_arr, *y.values(), labels=labels, colors=colors) # hide y axis plt.tick_params( axis="y", which="both", left=False, right=False, labelleft=False @@ -409,11 +411,13 @@ async def version_chart(self, interaction: Interaction, days: int = 90): handles, legend_labels = ax.get_legend_handles_labels() ax.legend(reversed(handles), reversed(legend_labels), loc="upper left") # add a thin line at current time from y=0 to y=1 with a width of 0.5 - plt.plot([max(x), max(x)], [0, 1], color="white", alpha=0.25) # type: ignore[arg-type] + plt.plot([x_arr[-1], x_arr[-1]], [0, 1], color="white", alpha=0.25) # calculate future point to make latest data more visible future_point = x[-1] + timedelta(days=window_length) last_y_values = [[yy[-1]] * 2 for yy in y.values()] - plt.stackplot([x[-1], future_point], *last_y_values, colors=colors) + plt.stackplot( + [x_arr[-1], np.datetime64(future_point)], *last_y_values, colors=colors + ) plt.tight_layout() # respond with image diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 9d0cfa17..8e47d079 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -3,6 +3,7 @@ from io import BytesIO import matplotlib.pyplot as plt +import numpy as np from discord import File, 
Interaction from discord.app_commands import command from discord.ext.commands import Cog @@ -126,11 +127,11 @@ async def get_apy(days: int) -> float | None: fig, ax = plt.subplots(figsize=(6, 2)) ax.grid() - # matplotlib stubs don't allow dates - ax.plot(x, y, color="#50b1f7") # type: ignore[arg-type] + x_arr = np.array(x) + ax.plot(x_arr, y, color="#50b1f7") ax.xaxis.set_major_formatter(DateFormatter("%b %d")) ax.set_ylabel("AUM (rETH)") - ax.set_xlim((x[0], x[-1])) # type: ignore[arg-type] + ax.set_xlim((x_arr[0], x_arr[-1])) ax.set_ylim((y[0], y[-1] * 1.01)) img = BytesIO() From 4c201eac21aaa50923c0c470b16d9ca0f7c5f90c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 12:58:40 +0000 Subject: [PATCH 253/279] higher y limit for /rocksolid --- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 8e47d079..8ff42221 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -132,7 +132,7 @@ async def get_apy(days: int) -> float | None: ax.xaxis.set_major_formatter(DateFormatter("%b %d")) ax.set_ylabel("AUM (rETH)") ax.set_xlim((x_arr[0], x_arr[-1])) - ax.set_ylim((y[0], y[-1] * 1.01)) + ax.set_ylim((y[0], y[-1] * 1.05)) img = BytesIO() fig.tight_layout() From b3fd1300cb3fe4899fea841b12eca1e1a2737009 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 13:22:53 +0000 Subject: [PATCH 254/279] add missing await --- rocketwatch/plugins/event_core/event_core.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index 342322c0..e1233057 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -198,12 +198,12 @@ async 
def process_event_queue(self) -> None: log.debug("No pending events in queue") return - def try_load(_entry: dict, _key: str) -> Any | None: + async def try_load(_entry: dict, _key: str) -> Any | None: try: serialized = _entry.get(_key) return pickle.loads(serialized) if serialized else None except Exception as err: - self.bot.report_error(err) + await self.bot.report_error(err) return None for channel_id in channels: @@ -226,15 +226,15 @@ def try_load(_entry: dict, _key: str) -> Any | None: await self.bot.db.state_messages.delete_one({"channel_id": channel_id}) for event_entry in db_events: - embed: Embed | None = try_load(event_entry, "embed") + embed: Embed | None = await try_load(event_entry, "embed") files = [] - if embed and (image := try_load(event_entry, "image")): + if embed and (image := await try_load(event_entry, "image")): file_name = f"{event_entry['event_name']}_img.png" files.append(image.to_file(file_name)) embed.set_image(url=f"attachment://{file_name}") - if embed and (thumbnail := try_load(event_entry, "thumbnail")): + if embed and (thumbnail := await try_load(event_entry, "thumbnail")): file_name = f"{event_entry['event_name']}_thumb.png" files.append(thumbnail.to_file(file_name)) embed.set_thumbnail(url=f"attachment://{file_name}") From a8749bc2d134ef882baa384263c9a928370ce00c Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 13:23:21 +0000 Subject: [PATCH 255/279] adjust rpl_swapped milestone --- rocketwatch/plugins/milestones/milestones.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/milestones/milestones.json b/rocketwatch/plugins/milestones/milestones.json index 3b3a3c55..9ac6cd4e 100644 --- a/rocketwatch/plugins/milestones/milestones.json +++ b/rocketwatch/plugins/milestones/milestones.json @@ -24,8 +24,8 @@ "function": "get_percentage_rpl_swapped", "args": [], "formatter": "", - "min": 1, - "step_size": 5 + "min": 90, + "step_size": 1 }, { "id": 
"milestone_registered_nodes", From 22e945e28953c3b2e0850d4de9f0d3d6f3d9f453 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 13:27:13 +0000 Subject: [PATCH 256/279] fix tvl.py typing --- rocketwatch/plugins/tvl/tvl.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/tvl/tvl.py b/rocketwatch/plugins/tvl/tvl.py index 1ed1187c..92f1e26c 100644 --- a/rocketwatch/plugins/tvl/tvl.py +++ b/rocketwatch/plugins/tvl/tvl.py @@ -1,4 +1,5 @@ import logging +from typing import Any import humanize from colorama import Style @@ -59,7 +60,7 @@ async def tvl(self, interaction: Interaction, show_all: bool = False): Show the total value locked in the protocol """ await interaction.response.defer(ephemeral=is_hidden(interaction)) - data = { + data: dict[str, Any] = { "Total RPL Locked": { "Staked RPL": { "Minipools": {}, # accurate, live From 8de33690a7c4a832b945a99ff06d58b5de64630b Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 13:55:46 +0000 Subject: [PATCH 257/279] fix typing in scam_detection --- pyproject.toml | 2 - .../plugins/scam_detection/scam_detection.py | 87 ++++++++++--------- 2 files changed, 48 insertions(+), 41 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c8fef924..6b844806 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,11 +60,9 @@ python_version = "3.12" mypy_path = "." 
explicit_package_bases = true ignore_missing_imports = true -# Start lenient, tighten over time check_untyped_defs = true warn_return_any = false warn_unused_ignores = true -# Disabled until the majority of code is annotated disallow_untyped_defs = false disallow_incomplete_defs = false diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 38868308..ae522214 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import io import json import logging from datetime import UTC, datetime, timedelta @@ -14,11 +13,13 @@ ButtonStyle, Color, DeletedReferencedMessage, + Emoji, File, Guild, Interaction, Member, Message, + PartialEmoji, RawBulkMessageDeleteEvent, RawMessageDeleteEvent, RawThreadDeleteEvent, @@ -28,12 +29,14 @@ errors, ui, ) +from discord.abc import Messageable from discord.app_commands import ContextMenu, command, guilds from discord.ext.commands import Cog from rocketwatch import RocketWatch from utils.config import cfg from utils.embeds import Embed +from utils.file import TextFile log = logging.getLogger("rocketwatch.scam_detection") @@ -62,10 +65,13 @@ def __init__(self, plugin: "ScamDetection", reportable: Message | Thread): super().__init__(timeout=None) self.plugin = plugin self.reportable = reportable - self.safu_votes = set() + self.safu_votes: set[int] = set() @ui.button(label="Mark Safu", style=ButtonStyle.blurple) async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: + if interaction.message is None: + return + log.info( f"User {interaction.user.id} marked message {interaction.message.id} as safe" ) @@ -80,12 +86,13 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: ) return - if interaction.user.is_timed_out(): + if isinstance(interaction.user, Member) and interaction.user.is_timed_out(): log.debug( 
f"Timed-out user {interaction.user.id} tried to vote on {self.reportable}" ) - return None + return + reported_user = None if isinstance(self.reportable, Message): reported_user = self.reportable.author db_filter = {"type": "message", "message_id": self.reportable.id} @@ -96,7 +103,7 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: required_lock = self.plugin._thread_report_lock else: log.warning(f"Unknown reportable type {type(self.reportable)}") - return None + return if interaction.user == reported_user: log.debug( @@ -110,7 +117,9 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: self.safu_votes.add(interaction.user.id) - if ScamDetection.is_reputable(interaction.user): + if isinstance(interaction.user, Member) and ScamDetection.is_reputable( + interaction.user + ): user_repr = interaction.user.mention elif len(self.safu_votes) >= self.THRESHOLD: user_repr = "the community" @@ -138,7 +147,9 @@ def __init__(self, bot: RocketWatch): self._message_report_lock = asyncio.Lock() self._thread_report_lock = asyncio.Lock() self._user_report_lock = asyncio.Lock() - self._message_react_cache = TTLCache(maxsize=1000, ttl=300) + self._message_react_cache: TTLCache[ + int, dict[PartialEmoji | Emoji | str, set[User | Member]] + ] = TTLCache(maxsize=1000, ttl=300) self._thread_creation_messages: set[int] = set() self.markdown_link_pattern = re.compile( r"(?<=\[)([^/\] ]*).+?(?<=\(https?:\/\/)([^/\)]*)" @@ -191,6 +202,13 @@ async def cog_unload(self) -> None: self.user_report_menu.name, type=self.user_report_menu.type ) + async def _get_report_channel(self) -> Messageable: + channel = await self.bot.get_or_fetch_channel( + cfg.discord.channels["report_scams"] + ) + assert isinstance(channel, Messageable) + return channel + @staticmethod def _get_message_content(message: Message) -> str: text = "" @@ -262,19 +280,13 @@ async def _generate_message_report( }, indent=2, ) - with io.StringIO(message_structure) as f: 
- attachment = File(f, filename="message.json") + attachment = TextFile(message_structure, filename="message.json") return warning, report, attachment async def _generate_thread_report( self, thread: Thread, reason: str ) -> tuple[Embed, Embed] | None: - try: - thread = await thread.guild.fetch_channel(thread.id) - except (errors.NotFound, errors.Forbidden): - return None - if await self.bot.db.scam_reports.find_one( {"type": "thread", "channel_id": thread.id} ): @@ -314,7 +326,7 @@ async def _add_message_report_to_db( await self.bot.db.scam_reports.insert_one( { "type": "message", - "guild_id": message.guild.id, + "guild_id": message.guild if message.guild else None, "channel_id": message.channel.id, "message_id": message.id, "user_id": message.author.id, @@ -344,9 +356,7 @@ async def report_message(self, message: Message, reason: str) -> None: warning_msg = None log.warning(f"Failed to send warning message in reply to {message.id}") - report_channel = await self.bot.get_or_fetch_channel( - cfg.discord.channels["report_scams"] - ) + report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report, file=attachment) await self._add_message_report_to_db( message, reason, warning_msg, report_msg @@ -376,9 +386,7 @@ async def manual_message_report( warning, report, attachment = components - report_channel = await self.bot.get_or_fetch_channel( - cfg.discord.channels["report_scams"] - ) + report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report, file=attachment) moderator = await self.bot.get_or_fetch_user( @@ -664,10 +672,6 @@ async def on_message(self, message: Message) -> None: log.warning("Ignoring message sent by bot") return - if self.is_reputable(message.author): - log.warning(f"Ignoring message sent by trusted user ({message.author})") - return - if message.guild is None: return @@ -675,6 +679,10 @@ async def on_message(self, message: Message) -> None: log.warning(f"Ignoring 
message in {message.guild.id})") return + if isinstance(message.author, Member) and self.is_reputable(message.author): + log.warning(f"Ignoring message sent by trusted user ({message.author})") + return + checks = [ self._obfuscated_url, self._ticket_system, @@ -696,8 +704,12 @@ async def on_message_edit(self, before: Message, after: Message) -> None: @Cog.listener() async def on_reaction_add(self, reaction: Reaction, user: User) -> None: - if reaction.message.guild.id != cfg.rocketpool.support.server_id: - log.warning(f"Ignoring reaction in {reaction.message.guild.id}") + if ( + reaction.message.guild is None + or reaction.message.guild.id != cfg.rocketpool.support.server_id + ): + log.warning(f"Ignoring reaction in {reaction.message.guild}") + return checks = [self._reaction_spam(reaction, user)] @@ -726,6 +738,7 @@ async def _on_message_delete(self, message_id: int) -> None: return channel = await self.bot.get_or_fetch_channel(report["channel_id"]) + assert isinstance(channel, Messageable) with contextlib.suppress( errors.NotFound, errors.Forbidden, errors.HTTPException ): @@ -760,21 +773,21 @@ async def on_member_ban(self, guild: Guild, user: User) -> None: ): reports = await self.bot.db.scam_reports.find( {"guild_id": guild.id, "user_id": user.id, "user_banned": False} - ).to_list(None) + ).to_list() for report in reports: await self._update_report(report, "User has been banned.") await self.bot.db.scam_reports.update_one( report, {"$set": {"user_banned": True}} ) - async def _update_report(self, report: dict, note: str) -> None: - report_channel = await self.bot.get_or_fetch_channel( - cfg.discord.channels["report_scams"] - ) + async def _update_report(self, report: dict | None, note: str) -> None: + if report is None: + return + report_channel = await self._get_report_channel() try: message = await report_channel.fetch_message(report["report_id"]) embed = message.embeds[0] - embed.description += f"\n\n**{note}**" + embed.description = (embed.description or 
"") + f"\n\n**{note}**" embed.color = ( self.Color.WARN if (embed.color == self.Color.ALERT) else self.Color.OK ) @@ -796,9 +809,7 @@ async def report_thread(self, thread: Thread, reason: str) -> None: log.warning(f"Failed to send warning message in thread {thread.id}") warning_msg = None - report_channel = await self.bot.get_or_fetch_channel( - cfg.discord.channels["report_scams"] - ) + report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) await self.bot.db.scam_reports.insert_one( { @@ -855,9 +866,7 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No content="Failed to report user. They may have already been reported or banned." ) - report_channel = await self.bot.get_or_fetch_channel( - cfg.discord.channels["report_scams"] - ) + report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) await self.bot.db.scam_reports.insert_one( { From 33ed844b065b54ebea5231c722843ffd7508735a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 15:12:58 +0000 Subject: [PATCH 258/279] fix typing in rpips.py --- rocketwatch/plugins/rpips/rpips.py | 56 +++++++++++++++++++----------- 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/rocketwatch/plugins/rpips/rpips.py b/rocketwatch/plugins/rpips/rpips.py index 290277df..53d4c2b0 100644 --- a/rocketwatch/plugins/rpips/rpips.py +++ b/rocketwatch/plugins/rpips/rpips.py @@ -66,7 +66,7 @@ def __str__(self) -> str: @cached(ttl=300, key_builder=lambda _, rpip: rpip.number) @retry(tries=3, delay=1) - async def fetch_details(self) -> dict: + async def fetch_details(self) -> dict[str, str | list[str] | None]: async with ( aiohttp.ClientSession() as session, session.get(self.url) as resp, @@ -74,25 +74,34 @@ async def fetch_details(self) -> dict: html = await resp.text() soup = BeautifulSoup(html, "html.parser") - metadata = {} - - for field in 
soup.main.find("table", {"class": "rpip-preamble"}).find_all( - "tr" - ): - match field_name := field.th.text: - case "Discussion": - metadata[field_name] = field.td.a["href"] - case "Author": - metadata[field_name] = [a.text for a in field.td.find_all("a")] - case _: - metadata[field_name] = field.td.text - + if not soup.main: + return {} + + preamble = soup.main.find("table", {"class": "rpip-preamble"}) + if not preamble: + return {} + + metadata: dict[str, str | list[str]] = {} + for field in preamble.find_all("tr"): + if field.th and field.td: + match field_name := field.th.text: + case "Discussion": + if field.td.a: + metadata[field_name] = field.td.a["href"] + case "Author": + metadata[field_name] = [ + a.text for a in field.td.find_all("a") + ] + case _: + metadata[field_name] = field.td.text + + description_tag = soup.find("big", {"class": "rpip-description"}) return { "type": metadata.get("Type"), "authors": metadata.get("Author"), "created": metadata.get("Created"), "discussion": metadata.get("Discussion"), - "description": soup.find("big", {"class": "rpip-description"}).text, + "description": description_tag.text if description_tag else None, } @property @@ -124,13 +133,20 @@ async def get_all_rpips() -> list["RPIPs.RPIP"]: html = await resp.text() soup = BeautifulSoup(html, "html.parser") - rpips: list[RPIPs.RPIP] = [] + if not soup.table: + return [] + rpips: list[RPIPs.RPIP] = [] for row in soup.table.find_all("tr", recursive=False): - title = row.find("td", {"class": "title"}).text.strip() - rpip_num = int(row.find("td", {"class": "rpipnum"}).text) - status = row.find("td", {"class": "status"}).text.strip() - rpips.append(RPIPs.RPIP(title, rpip_num, status)) + title_td = row.find("td", {"class": "title"}) + num_td = row.find("td", {"class": "rpipnum"}) + status_td = row.find("td", {"class": "status"}) + if title_td and num_td and status_td: + rpips.append( + RPIPs.RPIP( + title_td.text.strip(), int(num_td.text), status_td.text.strip() + ) + ) return 
rpips From 128cb81b74a07a1d1e83d1703ee12d0b43d65efd Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 15:18:35 +0000 Subject: [PATCH 259/279] fix typing in support_utils --- .../plugins/support_utils/support_utils.py | 56 ++++++++++--------- 1 file changed, 31 insertions(+), 25 deletions(-) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 2b09cb8e..8bb87ee3 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -1,15 +1,15 @@ -import io import logging from datetime import UTC, datetime from bson import CodecOptions -from discord import ButtonStyle, File, Interaction, TextStyle, User, app_commands, ui +from discord import ButtonStyle, Interaction, Member, TextStyle, User, app_commands, ui from discord.app_commands import Choice, Group, choices from discord.ext.commands import Cog, GroupCog from rocketwatch import RocketWatch from utils.config import cfg from utils.embeds import Embed +from utils.file import TextFile log = logging.getLogger("rocketwatch.support_utils") @@ -23,10 +23,11 @@ async def generate_template_embed(db, template_name: str): codec_options=CodecOptions(tz_aware=True) ) last_edit = await dumps_col.find_one({"template": template_name}, sort=[("ts", -1)]) - e = Embed(title=template["title"], description=template["description"]) + embed = Embed(title=template["title"]) + embed.description = template["description"] or "" if last_edit and template_name != "announcement": - e.description += f"\n\n*Last Edited by <@{last_edit['author']['id']}> *" - return e + embed.description += f"\n\n*Last Edited by <@{last_edit['author']['id']}> *" + return embed # Define a simple View that gives us a counter button @@ -48,13 +49,15 @@ async def edit(self, interaction: Interaction, button: ui.Button): class DeletableView(ui.View): - def __init__(self, user: User): + def 
__init__(self, user: User | Member): super().__init__(timeout=None) self.user = user @ui.button(emoji="<:delete:1364953621191721002>", style=ButtonStyle.gray) async def delete(self, interaction: Interaction, button: ui.Button): - if (interaction.user == self.user) or has_perms(interaction): + if ( + (interaction.user == self.user) or has_perms(interaction) + ) and interaction.message: await interaction.message.delete() log.warning( f"Support template message deleted by {interaction.user} in {interaction.channel}" @@ -68,10 +71,10 @@ def __init__(self, old_title, old_description, db, template_name): self.old_title = old_title self.old_description = old_description self.template_name = template_name - self.title_field = ui.TextInput( + self.title_field: ui.TextInput[AdminModal] = ui.TextInput( label="Title", placeholder="Enter a title", default=old_title ) - self.description_field = ui.TextInput( + self.description_field: ui.TextInput[AdminModal] = ui.TextInput( label="Description", placeholder="Enter a description", default=old_description, @@ -100,9 +103,8 @@ async def on_submit(self, interaction: Interaction) -> None: ) ) a = await interaction.original_response() - file = File( - io.StringIO(self.description_field.value), - f"{self.title_field.value}.txt", + file = TextFile( + self.description_field.value, f"{self.title_field.value}.txt" ) await a.add_files(file) return @@ -145,18 +147,22 @@ async def on_submit(self, interaction: Interaction) -> None: ) -def has_perms(interaction: Interaction): - return any( - [ - interaction.user.id in cfg.rocketpool.support.user_ids, - any( - r.id in cfg.rocketpool.support.role_ids for r in interaction.user.roles - ), - cfg.discord.owner.user_id == interaction.user.id, - interaction.user.guild_permissions.moderate_members - and interaction.guild.id == cfg.rocketpool.support.server_id, - ] - ) +def has_perms(interaction: Interaction) -> bool: + user = interaction.user + if user.id in cfg.rocketpool.support.user_ids: + return True 
+ if cfg.discord.owner.user_id == user.id: + return True + if isinstance(user, Member): + if any(r.id in cfg.rocketpool.support.role_ids for r in user.roles): + return True + if ( + user.guild_permissions.moderate_members + and interaction.guild + and interaction.guild.id == cfg.rocketpool.support.server_id + ): + return True + return False async def _use(db, interaction: Interaction, name: str, mention: User | None): @@ -323,7 +329,7 @@ async def remove(self, interaction: Interaction, name: str): Choice(name="Last Edited Date", value="last_edited_date"), ] ) - async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): + async def list(self, interaction: Interaction, order_by: Choice[str] | str = "_id"): await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection templates = await ( From 4976ad48efbc352ac0f97f46c80c2d6e299f430a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 15:34:42 +0000 Subject: [PATCH 260/279] fix support utils choices --- rocketwatch/plugins/support_utils/support_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 8bb87ee3..3adaa57d 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -329,7 +329,7 @@ async def remove(self, interaction: Interaction, name: str): Choice(name="Last Edited Date", value="last_edited_date"), ] ) - async def list(self, interaction: Interaction, order_by: Choice[str] | str = "_id"): + async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): # type: ignore[assignment] await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection templates = await ( From 
8b54935a0d1e9d5de398a712f3ef8a1c414e3372 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 15:52:11 +0000 Subject: [PATCH 261/279] fix typing in event_core --- rocketwatch/plugins/event_core/event_core.py | 42 +++++++++++++------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/rocketwatch/plugins/event_core/event_core.py b/rocketwatch/plugins/event_core/event_core.py index e1233057..e8c8674e 100644 --- a/rocketwatch/plugins/event_core/event_core.py +++ b/rocketwatch/plugins/event_core/event_core.py @@ -9,8 +9,9 @@ import discord import pymongo from cronitor import Monitor +from discord.abc import Messageable from discord.ext import commands, tasks -from eth_typing import BlockIdentifier, BlockNumber +from eth_typing import BlockNumber from web3.datastructures import MutableAttributeDict from plugins.support_utils.support_utils import generate_template_embed @@ -36,9 +37,10 @@ def __init__(self, bot: RocketWatch): self.bot = bot self.state = self.State.OK self.channels = cfg.discord.channels - self.head_block: BlockIdentifier = cfg.events.genesis - self.block_batch_size = cfg.events.block_batch_size - self.monitor = Monitor("gather-new-events", api_key=cfg.other.secrets.cronitor) + self.head_block: BlockNumber = BlockNumber(cfg.events.genesis) + self.at_head: bool = False + self.block_batch_size: int = cfg.events.block_batch_size + self.monitor = Monitor("event-core", api_key=cfg.other.secrets.cronitor) self.task.start() async def cog_unload(self) -> None: @@ -89,12 +91,13 @@ async def gather_new_events(self) -> None: ] log.debug(f"Running {len(submodules)} submodules") - if self.head_block == "latest": + if self.at_head: # already caught up to head, just fetch new events - target_block = "latest" + close_to_head = True to_block = latest_block coroutines = [sm.get_new_events() for sm in submodules] # prevent losing state if process is interrupted before updating db + self.at_head = False self.head_block = 
cfg.events.genesis else: # behind chain head, let's see how far @@ -117,14 +120,14 @@ async def gather_new_events(self) -> None: if (latest_block - self.head_block) < self.block_batch_size: # close enough to catch up in a single request - target_block = "latest" + close_to_head = True to_block = latest_block else: # too far, advance one batch - target_block = self.head_block + self.block_batch_size - to_block = target_block + close_to_head = False + to_block = BlockNumber(self.head_block + self.block_batch_size) - from_block: BlockNumber = self.head_block + 1 + from_block = BlockNumber(self.head_block + 1) if to_block < from_block: log.warning(f"Skipping empty block range [{from_block}, {to_block}]") return @@ -136,10 +139,10 @@ async def gather_new_events(self) -> None: coroutines.append( sm.get_past_events(from_block=from_block, to_block=to_block) ) - if target_block == "latest": - sm.start_tracking(to_block + 1) + if close_to_head: + sm.start_tracking(BlockNumber(to_block + 1)) - log.debug(f"{target_block = }") + log.debug(f"{close_to_head = }, {to_block = }") results = await asyncio.gather(*coroutines) @@ -183,7 +186,8 @@ async def gather_new_events(self) -> None: if events: await self.bot.db.event_queue.insert_many(events) - self.head_block = target_block + self.head_block = to_block + self.at_head = close_to_head await self.bot.db.last_checked_block.replace_one( {"_id": "events"}, {"_id": "events", "block": to_block}, upsert=True ) @@ -217,6 +221,7 @@ async def try_load(_entry: dict, _key: str) -> Any | None: log.debug(f"Found {len(db_events)} events for channel {channel_id}.") channel = await self.bot.get_or_fetch_channel(channel_id) + assert isinstance(channel, Messageable) for state_message in await self.bot.db.state_messages.find( {"channel_id": channel_id} @@ -227,6 +232,9 @@ async def try_load(_entry: dict, _key: str) -> Any | None: for event_entry in db_events: embed: Embed | None = await try_load(event_entry, "embed") + if not embed: + continue + 
files = [] if embed and (image := await try_load(event_entry, "image")): @@ -274,7 +282,8 @@ async def _update_status_message(self, channel_name: str, config) -> None: if not (embed := await generate_template_embed(self.bot.db, "announcement")): try: - plugin: StatusPlugin = self.bot.cogs.get(config.plugin) + plugin = self.bot.cogs.get(config.plugin) + assert isinstance(plugin, StatusPlugin) embed = await plugin.get_status() except Exception as err: await self.bot.report_error(err) @@ -310,6 +319,7 @@ async def _replace_or_add_status( if embed and prev_status and (prev_status["channel_id"] == target_channel_id): log.debug(f"Replacing existing status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(target_channel_id) + assert isinstance(channel, Messageable) try: msg = await channel.fetch_message(prev_status["message_id"]) await msg.edit(embed=embed) @@ -326,6 +336,7 @@ async def _replace_or_add_status( if prev_status: log.debug(f"Deleting status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(prev_status["channel_id"]) + assert isinstance(channel, Messageable) msg = await channel.fetch_message(prev_status["message_id"]) await msg.delete() await self.bot.db.state_messages.delete_one(prev_status) @@ -333,6 +344,7 @@ async def _replace_or_add_status( if embed: log.debug(f"Creating new status message for channel {target_channel}") channel = await self.bot.get_or_fetch_channel(target_channel_id) + assert isinstance(channel, Messageable) msg = await channel.send(embed=embed, silent=True) await self.bot.db.state_messages.insert_one( { From 389a375feaf433f1e40c5eb378c24c0a58bf5780 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 15:58:26 +0000 Subject: [PATCH 262/279] fix support_utils again --- rocketwatch/plugins/support_utils/support_utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git 
a/rocketwatch/plugins/support_utils/support_utils.py b/rocketwatch/plugins/support_utils/support_utils.py index 3adaa57d..7843f08c 100644 --- a/rocketwatch/plugins/support_utils/support_utils.py +++ b/rocketwatch/plugins/support_utils/support_utils.py @@ -1,5 +1,6 @@ import logging from datetime import UTC, datetime +from operator import itemgetter from bson import CodecOptions from discord import ButtonStyle, Interaction, Member, TextStyle, User, app_commands, ui @@ -329,7 +330,7 @@ async def remove(self, interaction: Interaction, name: str): Choice(name="Last Edited Date", value="last_edited_date"), ] ) - async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): # type: ignore[assignment] + async def list(self, interaction: Interaction, order_by: str = "_id"): await interaction.response.defer(ephemeral=True) # get all templates and their last edited date using the support_bot_dumps collection templates = await ( @@ -353,9 +354,7 @@ async def list(self, interaction: Interaction, order_by: Choice[str] = "_id"): ) ).to_list() # sort the templates by the specified order - if isinstance(order_by, Choice): - order_by = order_by.value - templates.sort(key=lambda x: x[order_by]) + templates.sort(key=itemgetter(order_by)) # create the embed embed = Embed(title="Templates") embed.description = ( From 1d492817c50e53bbdad5ddf6db12775d5be485aa Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 16:03:04 +0000 Subject: [PATCH 263/279] ignore transaction and events module for now --- .github/workflows/lint.yml | 1 - pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 58f757ea..b22fd578 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -21,7 +21,6 @@ jobs: src: "rocketwatch" typecheck: runs-on: ubuntu-latest - continue-on-error: true steps: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v7 
diff --git a/pyproject.toml b/pyproject.toml index 6b844806..6bfe200a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ module = [ "rocketwatch.plugins.events.*", "rocketwatch.plugins.transactions.*", ] -disable_error_code = ["attr-defined", "union-attr", "index"] +ignore_errors = true [tool.ruff] target-version = "py312" From ab6e8ed87d532ee6d0fb6e1ab142c14e66ce7fc8 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 16:03:21 +0000 Subject: [PATCH 264/279] remove comment in pyproject.toml --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6bfe200a..c6c4fec2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,6 @@ warn_unused_ignores = true disallow_untyped_defs = false disallow_incomplete_defs = false -# Heavy web3 MutableAttributeDict usage — almost all errors are false positives [[tool.mypy.overrides]] module = [ "rocketwatch.plugins.events.*", From 1262cbed56ed88729d999f7df156ba1bea869d66 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 17:32:55 +0000 Subject: [PATCH 265/279] update README --- README.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e8c579f0..60e78c9c 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,12 @@ uv run ruff check rocketwatch/ Configured rules: `B` (bugbear), `E` (pycodestyle), `F` (pyflakes), `I` (isort), `RUF`, `SIM`, `UP` (pyupgrade), `W` (warnings). 
+### Type checking + +```sh +uv run mypy rocketwatch/ +``` + ### Testing ```sh @@ -162,9 +168,9 @@ Plugins can be selectively loaded via the `modules.include` / `modules.exclude` | Workflow | Trigger | Purpose | |---|---|---| -| [Lint](.github/workflows/lint.yml) | Push & PR to main | Ruff linting | +| [Lint](.github/workflows/lint.yml) | Push & PR to main | Ruff linting & mypy type checking | | [Test](.github/workflows/test.yml) | Push & PR to main | pytest suite | -| [Docker CI](.github/workflows/docker-ci.yml) | Push to main | Build & push image to DockerHub | +| [Build](.github/workflows/build.yml) | Push to main | Build & push image to DockerHub | ## License From 6f94bc68aa2ddff1e49fa43c0706d72e51b795a4 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 17:37:31 +0000 Subject: [PATCH 266/279] add Python version badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 60e78c9c..e6162ebc 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![Test](https://github.com/haloooloolo/rocketwatch/actions/workflows/test.yml/badge.svg)](https://github.com/haloooloolo/rocketwatch/actions/workflows/test.yml) [![codecov](https://codecov.io/gh/haloooloolo/rocketwatch/graph/badge.svg)](https://codecov.io/gh/haloooloolo/rocketwatch) +![Python 3.12+](https://img.shields.io/badge/python-3.12%2B-blue) A Discord bot that monitors and reports on [Rocket Pool](https://rocketpool.net) protocol activity across the Ethereum execution and consensus layers. 
From eb2e385741bf7df5f1e64ad6f53095c27d9a6275 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 20:36:36 +0000 Subject: [PATCH 267/279] fix scam detection DB insertion --- .../plugins/scam_detection/scam_detection.py | 83 ++++++++++--------- 1 file changed, 46 insertions(+), 37 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index ae522214..91670585 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -132,9 +132,10 @@ async def mark_safe(self, interaction: Interaction, button: ui.Button) -> None: async with required_lock: report = await self.plugin.bot.db.scam_reports.find_one(db_filter) - await self.plugin._update_report( - report, f"This has been marked as safe by {user_repr}." - ) + if report is not None: + await self.plugin._update_report( + report, f"This has been marked as safe by {user_repr}." 
+ ) await self.plugin.bot.db.scam_reports.update_one( db_filter, {"$set": {"warning_id": None}} ) @@ -326,7 +327,7 @@ async def _add_message_report_to_db( await self.bot.db.scam_reports.insert_one( { "type": "message", - "guild_id": message.guild if message.guild else None, + "guild_id": message.guild.id if message.guild else None, "channel_id": message.channel.id, "message_id": message.id, "user_id": message.author.id, @@ -693,10 +694,14 @@ async def on_message(self, message: Message) -> None: self._bio_redirect, self._spam_wall, ] - for check in checks: - if reason := check(message): - await self.report_message(message, reason) - return + + try: + for check in checks: + if reason := check(message): + await self.report_message(message, reason) + return + except Exception as e: + await self.bot.report_error(e) @Cog.listener() async def on_message_edit(self, before: Message, after: Message) -> None: @@ -780,9 +785,7 @@ async def on_member_ban(self, guild: Guild, user: User) -> None: report, {"$set": {"user_banned": True}} ) - async def _update_report(self, report: dict | None, note: str) -> None: - if report is None: - return + async def _update_report(self, report: dict, note: str) -> None: report_channel = await self._get_report_channel() try: message = await report_channel.fetch_message(report["report_id"]) @@ -811,20 +814,23 @@ async def report_thread(self, thread: Thread, reason: str) -> None: report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) - await self.bot.db.scam_reports.insert_one( - { - "type": "thread", - "guild_id": thread.guild.id, - "channel_id": thread.id, - "user_id": thread.owner_id, - "reason": reason, - "content": thread.name, - "warning_id": warning_msg.id if warning_msg else None, - "report_id": report_msg.id, - "user_banned": False, - "removed": False, - } - ) + try: + await self.bot.db.scam_reports.insert_one( + { + "type": "thread", + "guild_id": thread.guild.id, + "channel_id": thread.id, 
+ "user_id": thread.owner_id, + "reason": reason, + "content": thread.name, + "warning_id": warning_msg.id if warning_msg else None, + "report_id": report_msg.id, + "user_banned": False, + "removed": False, + } + ) + except Exception as e: + await self.bot.report_error(e) @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: @@ -868,18 +874,21 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) - await self.bot.db.scam_reports.insert_one( - { - "type": "user", - "guild_id": user.guild.id, - "user_id": user.id, - "reason": reason, - "content": user.display_name, - "warning_id": None, - "report_id": report_msg.id, - "user_banned": False, - } - ) + try: + await self.bot.db.scam_reports.insert_one( + { + "type": "user", + "guild_id": user.guild.id, + "user_id": user.id, + "reason": reason, + "content": user.display_name, + "warning_id": None, + "report_id": report_msg.id, + "user_banned": False, + } + ) + except Exception as e: + await self.bot.report_error(e) await interaction.followup.send(content="Thanks for reporting!") async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: From 23dc92095ef9fd7984250e39b9ebb5751d08ce07 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 21:01:20 +0000 Subject: [PATCH 268/279] add general exception handler --- .../plugins/scam_detection/scam_detection.py | 69 ++++++++----------- rocketwatch/rocketwatch.py | 7 ++ 2 files changed, 37 insertions(+), 39 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 91670585..205bfbc5 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -695,13 +695,10 @@ async def on_message(self, message: Message) -> None: 
self._spam_wall, ] - try: - for check in checks: - if reason := check(message): - await self.report_message(message, reason) - return - except Exception as e: - await self.bot.report_error(e) + for check in checks: + if reason := check(message): + await self.report_message(message, reason) + return @Cog.listener() async def on_message_edit(self, before: Message, after: Message) -> None: @@ -814,23 +811,20 @@ async def report_thread(self, thread: Thread, reason: str) -> None: report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) - try: - await self.bot.db.scam_reports.insert_one( - { - "type": "thread", - "guild_id": thread.guild.id, - "channel_id": thread.id, - "user_id": thread.owner_id, - "reason": reason, - "content": thread.name, - "warning_id": warning_msg.id if warning_msg else None, - "report_id": report_msg.id, - "user_banned": False, - "removed": False, - } - ) - except Exception as e: - await self.bot.report_error(e) + await self.bot.db.scam_reports.insert_one( + { + "type": "thread", + "guild_id": thread.guild.id, + "channel_id": thread.id, + "user_id": thread.owner_id, + "reason": reason, + "content": thread.name, + "warning_id": warning_msg.id if warning_msg else None, + "report_id": report_msg.id, + "user_banned": False, + "removed": False, + } + ) @Cog.listener() async def on_thread_create(self, thread: Thread) -> None: @@ -874,21 +868,18 @@ async def manual_user_report(self, interaction: Interaction, user: Member) -> No report_channel = await self._get_report_channel() report_msg = await report_channel.send(embed=report) - try: - await self.bot.db.scam_reports.insert_one( - { - "type": "user", - "guild_id": user.guild.id, - "user_id": user.id, - "reason": reason, - "content": user.display_name, - "warning_id": None, - "report_id": report_msg.id, - "user_banned": False, - } - ) - except Exception as e: - await self.bot.report_error(e) + await self.bot.db.scam_reports.insert_one( + { + "type": "user", + 
"guild_id": user.guild.id, + "user_id": user.id, + "reason": reason, + "content": user.display_name, + "warning_id": None, + "report_id": report_msg.id, + "user_banned": False, + } + ) await interaction.followup.send(content="Thanks for reporting!") async def _generate_user_report(self, user: Member, reason: str) -> Embed | None: diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 0ba9c72e..8f46dd38 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -1,4 +1,5 @@ import logging +import sys import traceback from pathlib import Path @@ -77,6 +78,12 @@ def clear_commands(self) -> None: for guild in self.guilds: self.tree.clear_commands(guild=guild) + async def on_error(self, event_method: str, /, *args, **kwargs) -> None: + exc = sys.exc_info()[1] + if isinstance(exc, Exception): + log.error(f"Error in listener {event_method}") + await self.report_error(exc) + async def on_ready(self): assert self.user is not None log.info(f"Logged in as {self.user.name} ({self.user.id})") From ea3dc0a5207d49c00ad62e7fd7ed79654eb94141 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 22:38:23 +0000 Subject: [PATCH 269/279] I blame sno --- rocketwatch/plugins/random/random.py | 88 +++++++++++++++++++++++++++- 1 file changed, 87 insertions(+), 1 deletion(-) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index b14ce075..81ca0851 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -1,4 +1,5 @@ import logging +import random from datetime import datetime import aiohttp @@ -346,7 +347,7 @@ async def odao_challenges(self, interaction: Interaction): @command() async def asian_restaurant_name(self, interaction: Interaction): """ - Randomly generated Asian restaurant names + Randomly generated Asian restaurant name """ await interaction.response.defer(ephemeral=is_hidden(interaction)) async with ( @@ -358,6 +359,91 @@ 
async def asian_restaurant_name(self, interaction: Interaction): a = (await resp.json())["name"] await interaction.followup.send(a) + @command() + async def mexican_restaurant_name(self, interaction: Interaction): + """ + Randomly generated Mexican restaurant name + """ + prefix = random.choice( + [ + "El", + "La", + "Los", + "Las", + "Casa", + "Don", + "Doña", + "Taco", + "Señor", + "Mi", + "Tres", + "Dos", + "El Gran", + "La Casa de", + "Rancho", + "Hacienda", + "Cocina", + "Pueblo", + "Villa", + "Cantina", + ] + ) + middle = random.choice( + [ + "Fuego", + "Sol", + "Luna", + "Loco", + "Grande", + "Diablo", + "Oro", + "Rojo", + "Verde", + "Azteca", + "Maya", + "Jalisco", + "Oaxaca", + "Baja", + "Bravo", + "Charro", + "Gordo", + "Amigo", + "Hermano", + "Fiesta", + "Coyote", + "Tigre", + "Águila", + "Toro", + "Mariposa", + "Cielo", + "Sombrero", + "Guapo", + "Rico", + "Caliente", + "Bonito", + "Fresco", + ] + ) + suffix = random.choice( + [ + "Cantina", + "Grill", + "Kitchen", + "Cocina", + "Taqueria", + "Restaurante", + "Mexican Grill", + "Tex-Mex", + "Cocina & Bar", + "Street Tacos", + "Cantina & Grill", + "Mexican Kitchen", + "Burrito Bar", + "", + ] + ) + await interaction.response.send_message(f"{prefix} {middle} {suffix}") + @command() async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): """ From 6fec3d319c80249134f8381a6f56bd1589ec0106 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 22:50:51 +0000 Subject: [PATCH 270/279] I blame myself for this one --- rocketwatch/plugins/random/random.py | 88 ++++++++++++++++++++++++++-- 1 file changed, 83 insertions(+), 5 deletions(-) diff --git a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index 81ca0851..bd966135 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -144,7 +144,9 @@ async def dev_time(self, interaction: Interaction): await 
interaction.response.send_message(embed=e) @command() - async def sea_creatures(self, interaction: Interaction, address: str | None = None): + async def sea_creatures( + self, interaction: Interaction, address: str | None = None + ) -> None: """List all sea creatures with their required minimum holding.""" await interaction.response.defer(ephemeral=is_hidden(interaction)) e = Embed() @@ -194,7 +196,7 @@ async def sea_creatures(self, interaction: Interaction, address: str | None = No await interaction.followup.send(embed=e) @command() - async def smoothie(self, interaction: Interaction): + async def smoothie(self, interaction: Interaction) -> None: """Show smoothing pool information""" await interaction.response.defer(ephemeral=is_hidden(interaction)) @@ -303,7 +305,7 @@ async def smoothie(self, interaction: Interaction): await interaction.followup.send(embed=e) @command() - async def odao_challenges(self, interaction: Interaction): + async def odao_challenges(self, interaction: Interaction) -> None: """Shows the current oDAO challenges""" await interaction.response.defer(ephemeral=is_hidden(interaction)) c = await rp.get_contract_by_name("rocketDAONodeTrustedActions") @@ -345,7 +347,7 @@ async def odao_challenges(self, interaction: Interaction): await interaction.followup.send(embed=e) @command() - async def asian_restaurant_name(self, interaction: Interaction): + async def asian_restaurant_name(self, interaction: Interaction) -> None: """ Randomly generated Asian restaurant name """ @@ -360,7 +362,7 @@ async def asian_restaurant_name(self, interaction: Interaction): await interaction.followup.send(a) @command() - async def mexican_restaurant_name(self, interaction: Interaction): + async def mexican_restaurant_name(self, interaction: Interaction) -> None: """ Randomly generated Mexican restaurant name """ @@ -444,6 +446,82 @@ async def mexican_restaurant_name(self, interaction: Interaction): ) await interaction.response.send_message(f"{prefix} {middle} {suffix}") + 
@command() + async def austrian_restaurant_name(self, interaction: Interaction) -> None: + """ + Randomly generated Austrian restaurant name + """ + venues = [ + "Gasthaus", + "Gasthof", + "Wirtshaus", + "Beisl", + "Stüberl", + "Heuriger", + "Landgasthof", + "Alpengasthof", + "Berggasthof", + "Café-Restaurant", + "Braugasthof", + "Jausenstation", + ] + # (noun, gender): m = masculine, f = feminine, n = neuter + nouns = [ + ("Adler", "m"), + ("Hirsch", "m"), + ("Bär", "m"), + ("Ochse", "m"), + ("Löwe", "m"), + ("Hahn", "m"), + ("Schwan", "m"), + ("Fuchs", "m"), + ("Wolf", "m"), + ("Steinbock", "m"), + ("Falke", "m"), + ("Auerhahn", "m"), + ("Gamsbock", "m"), + ("Dachs", "m"), + ("Lamm", "n"), + ("Rößl", "n"), + ("Murmeltier", "n"), + ("Kreuz", "n"), + ("Krone", "f"), + ("Forelle", "f"), + ("Linde", "f"), + ("Rose", "f"), + ("Gams", "f"), + ] + adj_stems = [ + "golden", + "schwarz", + "weiß", + "grün", + "wild", + "alt", + "klein", + "groß", + "lustig", + "brav", + "fein", + "rot", + ] + nom_endings = {"m": "er", "f": "e", "n": "es"} + + noun, gender = random.choice(nouns) + stem = random.choice(adj_stems) + + # 30% chance for "Zum/Zur" style (dative), otherwise "Venue" style (nominative) + if random.random() < 0.3: + article = "Zur" if gender == "f" else "Zum" + adj = stem.capitalize() + "en" + name = f"{article} {adj} {noun}" + else: + venue = random.choice(venues) + adj = stem.capitalize() + nom_endings[gender] + name = f"{venue} {adj} {noun}" + + await interaction.response.send_message(name) + @command() async def get_block_by_timestamp(self, interaction: Interaction, timestamp: int): """ From bffc7087cfedb10068fdf15be8427212a5729fc1 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sat, 21 Mar 2026 23:26:29 +0000 Subject: [PATCH 271/279] add megapool validators to smoothie command --- rocketwatch/plugins/random/random.py | 47 +++++++++++++++++----------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git 
a/rocketwatch/plugins/random/random.py b/rocketwatch/plugins/random/random.py index bd966135..0c2f5fb2 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -206,20 +206,29 @@ async def smoothie(self, interaction: Interaction) -> None: await rp.get_address_by_name("rocketSmoothingPool") ) ) + inactive_statuses = [ + "exited_unslashed", + "exited_slashed", + "withdrawal_possible", + "withdrawal_done", + "pending_initialized", + ] data = await ( await self.bot.db.minipools.aggregate( [ + {"$match": {"beacon.status": {"$nin": inactive_statuses}}}, + {"$project": {"node_operator": 1}}, { - "$match": { - "beacon.status": { - "$nin": [ - "exited_unslashed", - "exited_slashed", - "withdrawal_possible", - "withdrawal_done", - "pending_initialized", - ] - } + "$unionWith": { + "coll": "megapool_validators", + "pipeline": [ + { + "$match": { + "beacon.status": {"$nin": inactive_statuses} + } + }, + {"$project": {"node_operator": 1}}, + ], } }, {"$group": {"_id": "$node_operator", "count": {"$sum": 1}}}, @@ -274,31 +283,31 @@ async def smoothie(self, interaction: Interaction) -> None: ) ).to_list() if not data: - await interaction.followup.send("no minipools found", ephemeral=True) + await interaction.followup.send("No validators found.", ephemeral=True) return + data_by_id = {d["_id"]: d for d in data} # node counts total_node_count = ( data_by_id[True]["node_count"] + data_by_id[False]["node_count"] ) smoothie_node_count = data_by_id[True]["node_count"] - # minipool counts - total_minipool_count = data_by_id[True]["count"] + data_by_id[False]["count"] - smoothie_minipool_count = data_by_id[True]["count"] + # validator counts + total_validator_count = data_by_id[True]["count"] + data_by_id[False]["count"] + smoothie_validator_count = data_by_id[True]["count"] d = datetime.now().timestamp() - await rp.call( "rocketRewardsPool.getClaimIntervalTimeStart" ) e.description = ( f"`{smoothie_node_count}/{total_node_count}` nodes 
(`{smoothie_node_count / total_node_count:.2%}`)" f" have joined the smoothing pool.\n" - f" That is `{smoothie_minipool_count}/{total_minipool_count}` minipools " - f"(`{smoothie_minipool_count / total_minipool_count:.2%}`).\n" - f"The current (not overall) balance is **`{smoothie_eth:,.2f}` ETH.**\n" - f"This is over a span of `{pretty_time(d)}`.\n\n" + f" That is `{smoothie_validator_count}/{total_validator_count}`" + f" (`{smoothie_validator_count / total_validator_count:.2%}`) validators.\n" + f"The current balance is **`{smoothie_eth:,.2f}` ETH**, {pretty_time(d)} into the reward period.\n\n" f"{min(smoothie_node_count, 5)} largest nodes:\n" ) lines = [ - f"- `{d['count']:>4}` minipools - {await el_explorer_url(d['address'])}" + f"- `{d['count']:>4}` validators - {await el_explorer_url(d['address'])}" for d in data_by_id[True]["counts"][: min(smoothie_node_count, 5)] ] e.description += "\n".join(lines) From 42c72a7d1bc2d1d7a5cda5c6a245c3bbe3587b1d Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Mar 2026 07:43:42 +0000 Subject: [PATCH 272/279] tweak scam detection --- rocketwatch/plugins/scam_detection/scam_detection.py | 4 ++-- tests/message_samples.json | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/rocketwatch/plugins/scam_detection/scam_detection.py b/rocketwatch/plugins/scam_detection/scam_detection.py index 205bfbc5..1b9a4719 100644 --- a/rocketwatch/plugins/scam_detection/scam_detection.py +++ b/rocketwatch/plugins/scam_detection/scam_detection.py @@ -443,8 +443,8 @@ def _obfuscated_url(self, message: Message) -> str | None: # Fullwidth/homoglyph dots in domain if self.homoglyph_url_pattern.search(message.content): return default_reason - # Heavily percent-encoded domain - if re.search(r"https?://[^\s]*(?:%[0-9a-fA-F]{2}){5}", message.content): + # Heavily percent-encoded ASCII in URL (encoding ASCII is suspicious; non-ASCII like Cyrillic is normal) + if 
re.search(r"https?://[^\s]*(?:%[0-7][0-9a-fA-F]){5}", message.content): return default_reason # Markdown link where visible text looks like a different domain than the actual URL content = parse.unquote(message.content) diff --git a/tests/message_samples.json b/tests/message_samples.json index a59ec8f4..de481870 100644 --- a/tests/message_samples.json +++ b/tests/message_samples.json @@ -490,6 +490,15 @@ "description": "Buckle up and follow the race from the cockpit perspective.\n\n\ud83c\udfab TICKETS\n\u27a1\ufe0f https://vln.de/tickets\n\n\ud83d\udca5 ENTRY LIST\n\u27a1\ufe0f https://www.nuerburgring-langstrecken-serie.de/wp-content/uploads/ergebnisse/2026-03-21s.pdf\n\n\ud83d\udca5 ALL NLS LIVESTREAMS\nStream with \ud83c\udde9\ud83c\uddea commentary \u27a1\ufe0f https://youtube.com/live/0F6vg-_iGR8\nStream with \ud83c\uddec\ud83c\udde7 com..." } ] + }, + { + "content": "https://tenor.com/view/%D0%B4%D0%B6%D0%BE%D0%BD%D0%B0-%D1%85%D0%B8%D0%BB%D0%BB-jonah-hill-facepalm-%D1%80%D1%83%D0%BA%D0%B0-%D0%BB%D0%B8%D1%86%D0%BE-%D0%B4%D0%BE%D0%BB%D0%BE%D0%BD%D1%8F-%D0%BE%D0%B1%D0%BB%D0%B8%D1%87%D1%87%D1%8F-gif-7167197686338732060", + "embeds": [ + { + "title": null, + "description": null + } + ] } ], "unsafe": [ From 7285cfe939d58d11fa41c50caca8e67df9a8dcf6 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Mar 2026 23:43:11 +0000 Subject: [PATCH 273/279] refactor milestones --- .../plugins/milestones/milestones.json | 50 ----------- rocketwatch/plugins/milestones/milestones.py | 89 ++++++++++++------- rocketwatch/utils/rocketpool.py | 7 -- 3 files changed, 58 insertions(+), 88 deletions(-) delete mode 100644 rocketwatch/plugins/milestones/milestones.json diff --git a/rocketwatch/plugins/milestones/milestones.json b/rocketwatch/plugins/milestones/milestones.json deleted file mode 100644 index 9ac6cd4e..00000000 --- a/rocketwatch/plugins/milestones/milestones.json +++ /dev/null @@ -1,50 +0,0 @@ -[ - { - "id": "milestone_rpl_stake", - 
"function": "call", - "args": [ - "rocketNodeStaking.getTotalStakedRPL" - ], - "formatter": "to_float", - "min": 10000, - "step_size": 100000 - }, - { - "id": "milestone_reth_supply", - "function": "call", - "args": [ - "rocketTokenRETH.totalSupply" - ], - "formatter": "to_float", - "min": 1000, - "step_size": 5000 - }, - { - "id": "milestone_rpl_swapped", - "function": "get_percentage_rpl_swapped", - "args": [], - "formatter": "", - "min": 90, - "step_size": 1 - }, - { - "id": "milestone_registered_nodes", - "function": "call", - "args": [ - "rocketNodeManager.getNodeCount" - ], - "formatter": "", - "min": 50, - "step_size": 100 - }, - { - "id": "milestone_rocksolid_tvl", - "function": "call", - "args": [ - "RockSolidVault.totalAssets" - ], - "formatter": "to_float", - "min": 0, - "step_size": 5000 - } -] diff --git a/rocketwatch/plugins/milestones/milestones.py b/rocketwatch/plugins/milestones/milestones.py index b7afe755..c05f644b 100644 --- a/rocketwatch/plugins/milestones/milestones.py +++ b/rocketwatch/plugins/milestones/milestones.py @@ -1,7 +1,7 @@ -import json import logging +from collections.abc import Awaitable, Callable +from dataclasses import dataclass -from pydantic import BaseModel from web3.datastructures import MutableAttributeDict from rocketwatch import RocketWatch @@ -13,49 +13,76 @@ log = logging.getLogger("rocketwatch.milestones") -class MilestoneConfig(BaseModel): +@dataclass(frozen=True, slots=True) +class Milestone: id: str - function: str - args: list[str] - formatter: str min: int step_size: int + call: Callable[[], Awaitable[float | int]] + + +def contract_call( + path: str, formatter: Callable[[int], float] | None = None +) -> Callable[[], Awaitable[float | int]]: + async def call(): + value = await rp.call(path) + return formatter(value) if formatter else value + + return call + + +async def _get_percentage_rpl_swapped() -> float: + value = solidity.to_float(await rp.call("rocketTokenRPL.totalSwappedRPL")) + return round((value / 
18_000_000) * 100, 2) + + +MILESTONES: list[Milestone] = [ + Milestone( + id="milestone_rpl_stake", + min=10_000, + step_size=100_000, + call=contract_call("rocketNodeStaking.getTotalStakedRPL", solidity.to_float), + ), + Milestone( + id="milestone_reth_supply", + min=1_000, + step_size=5_000, + call=contract_call("rocketTokenRETH.totalSupply", solidity.to_float), + ), + Milestone( + id="milestone_rpl_swapped", + min=90, + step_size=1, + call=_get_percentage_rpl_swapped, + ), + Milestone( + id="milestone_registered_nodes", + min=50, + step_size=100, + call=contract_call("rocketNodeManager.getNodeCount"), + ), + Milestone( + id="milestone_rocksolid_tvl", + min=0, + step_size=5000, + call=contract_call("RockSolidVault.totalAssets", solidity.to_float), + ), +] class Milestones(EventPlugin): def __init__(self, bot: RocketWatch): super().__init__(bot) - self._reset() - - def _reset(self) -> None: self.collection = self.bot.db.milestones - self.state = "OK" - - with open("./plugins/milestones/milestones.json") as f: - self.milestones = [MilestoneConfig(**m) for m in json.load(f)] async def _get_new_events(self) -> list[Event]: - if self.state == "RUNNING": - log.error( - "Milestones plugin was interrupted while running. Re-initializing..." 
- ) - self._reset() - - self.state = "RUNNING" - result = await self.check_for_new_events() - self.state = "OK" - return result - - async def check_for_new_events(self): - log.info("Checking Milestones") + log.info("Checking milestones") payload = [] - for milestone in self.milestones: + for milestone in MILESTONES: state = await self.collection.find_one({"_id": milestone.id}) - value = await getattr(rp, milestone.function)(*milestone.args) - if milestone.formatter: - value = getattr(solidity, milestone.formatter)(value) + value = await milestone.call() log.debug(f"{milestone.id}:{value}") if value < milestone.min: continue @@ -96,7 +123,7 @@ async def check_for_new_events(self): {"_id": milestone.id}, {"$set": {"current_goal": latest_goal}} ) - log.debug("Finished Checking Milestones") + log.debug("Finished checking milestones") return payload diff --git a/rocketwatch/utils/rocketpool.py b/rocketwatch/utils/rocketpool.py index e33d750e..29dbaa34 100644 --- a/rocketwatch/utils/rocketpool.py +++ b/rocketwatch/utils/rocketpool.py @@ -337,13 +337,6 @@ async def get_annual_rpl_inflation(self) -> float: intervals_per_year = solidity.years / seconds_per_interval return (inflation_per_interval**intervals_per_year) - 1 - async def get_percentage_rpl_swapped(self) -> float: - value: float = solidity.to_float( - await self.call("rocketTokenRPL.totalSwappedRPL") - ) - percentage = (value / 18_000_000) * 100 - return round(percentage, 2) - async def is_node(self, address: ChecksumAddress) -> bool: return await self.call("rocketNodeManager.getNodeExists", address) From f6248281ab121fc7268c8e115211b192e460b326 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Sun, 22 Mar 2026 23:46:37 +0000 Subject: [PATCH 274/279] smoothie: flip validator status check --- rocketwatch/plugins/random/random.py | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/rocketwatch/plugins/random/random.py 
b/rocketwatch/plugins/random/random.py index 0c2f5fb2..8aa0cc3b 100644 --- a/rocketwatch/plugins/random/random.py +++ b/rocketwatch/plugins/random/random.py @@ -206,27 +206,17 @@ async def smoothie(self, interaction: Interaction) -> None: await rp.get_address_by_name("rocketSmoothingPool") ) ) - inactive_statuses = [ - "exited_unslashed", - "exited_slashed", - "withdrawal_possible", - "withdrawal_done", - "pending_initialized", - ] + active_statuses = ["active_ongoing", "active_exiting"] data = await ( await self.bot.db.minipools.aggregate( [ - {"$match": {"beacon.status": {"$nin": inactive_statuses}}}, + {"$match": {"beacon.status": {"$in": active_statuses}}}, {"$project": {"node_operator": 1}}, { "$unionWith": { "coll": "megapool_validators", "pipeline": [ - { - "$match": { - "beacon.status": {"$nin": inactive_statuses} - } - }, + {"$match": {"beacon.status": {"$in": active_statuses}}}, {"$project": {"node_operator": 1}}, ], } @@ -301,8 +291,8 @@ async def smoothie(self, interaction: Interaction) -> None: e.description = ( f"`{smoothie_node_count}/{total_node_count}` nodes (`{smoothie_node_count / total_node_count:.2%}`)" f" have joined the smoothing pool.\n" - f" That is `{smoothie_validator_count}/{total_validator_count}`" - f" (`{smoothie_validator_count / total_validator_count:.2%}`) validators.\n" + f" That is `{smoothie_validator_count}/{total_validator_count}` validators" + f" (`{smoothie_validator_count / total_validator_count:.2%}`).\n" f"The current balance is **`{smoothie_eth:,.2f}` ETH**, {pretty_time(d)} into the reward period.\n\n" f"{min(smoothie_node_count, 5)} largest nodes:\n" ) From 3703aec4be4e8b1fe0583568c72d600690fcff4a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Mar 2026 15:07:56 +0000 Subject: [PATCH 275/279] log ignored BaseException --- rocketwatch/rocketwatch.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/rocketwatch/rocketwatch.py b/rocketwatch/rocketwatch.py index 
8f46dd38..a5015ffd 100644 --- a/rocketwatch/rocketwatch.py +++ b/rocketwatch/rocketwatch.py @@ -81,8 +81,10 @@ def clear_commands(self) -> None: async def on_error(self, event_method: str, /, *args, **kwargs) -> None: exc = sys.exc_info()[1] if isinstance(exc, Exception): - log.error(f"Error in listener {event_method}") + log.exception(f"Error in listener {event_method}") await self.report_error(exc) + else: + log.exception("Ignoring BaseException in error handler") async def on_ready(self): assert self.user is not None From f317ff1b4b2cb8e7b4804be25a9345e6284f3a5a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Mar 2026 15:08:48 +0000 Subject: [PATCH 276/279] refactor: cow_orders --- rocketwatch/plugins/cow_orders/cow_orders.py | 94 +++++++++----------- 1 file changed, 42 insertions(+), 52 deletions(-) diff --git a/rocketwatch/plugins/cow_orders/cow_orders.py b/rocketwatch/plugins/cow_orders/cow_orders.py index c59a6900..b1f2cf6b 100644 --- a/rocketwatch/plugins/cow_orders/cow_orders.py +++ b/rocketwatch/plugins/cow_orders/cow_orders.py @@ -5,7 +5,6 @@ from discord import Interaction from discord.app_commands import command from eth_typing import BlockNumber, ChecksumAddress -from hexbytes import HexBytes from web3.contract import AsyncContract from web3.datastructures import MutableAttributeDict as aDict from web3.types import EventData @@ -35,30 +34,27 @@ class CoWOrders(EventPlugin): def __init__(self, bot: RocketWatch) -> None: super().__init__(bot) self._settlement: AsyncContract | None = None - self._trade_topic: HexBytes | None = None - self._tokens: list[str] | None = None + self._tokens: list[ChecksumAddress] | None = None async def _ensure_setup(self) -> None: if self._settlement is None: self._settlement = await rp.get_contract_by_name("GPv2Settlement") - # Trade(address,address,address,uint256,uint256,uint256,bytes) - self._trade_topic = w3.keccak( - 
text="Trade(address,address,address,uint256,uint256,uint256,bytes)" - ) if self._tokens is None: self._tokens = [ - str(await rp.get_address_by_name("rocketTokenRPL")).lower(), - str(await rp.get_address_by_name("rocketTokenRETH")).lower(), + await rp.get_address_by_name("rocketTokenRPL"), + await rp.get_address_by_name("rocketTokenRETH"), ] @command() - async def cow(self, interaction: Interaction, tnx: str) -> None: - if "etherscan.io/tx/" not in tnx: - await interaction.response.send_message("nop", ephemeral=True) + async def cow(self, interaction: Interaction, etherscan_url: str) -> None: + if "etherscan.io/tx/" not in etherscan_url: + await interaction.response.send_message( + "Invalid Etherscan URL", ephemeral=True + ) return await interaction.response.defer(ephemeral=is_hidden(interaction)) - url = tnx.replace("etherscan.io", "explorer.cow.fi") + url = etherscan_url.replace("etherscan.io", "explorer.cow.fi") embed = Embed(description=f"[cow explorer]({url})") await interaction.followup.send(embed=embed) @@ -71,13 +67,13 @@ async def get_past_events( ) -> list[Event]: await self._ensure_setup() assert self._settlement is not None - assert self._trade_topic is not None assert self._tokens is not None + trade_event = self._settlement.events.Trade() logs = await w3.eth.get_logs( { "address": self._settlement.address, - "topics": [self._trade_topic], + "topics": [trade_event.topic], "fromBlock": from_block, "toBlock": to_block, } @@ -87,16 +83,13 @@ async def get_past_events( return [] # decode logs into Trade events - trades: list[EventData] = [ - self._settlement.events.Trade().process_log(raw_log) for raw_log in logs - ] - - # filter for RPL/rETH trades + trades: list[EventData] = [trade_event.process_log(raw_log) for raw_log in logs] + # filter for RPL and rETH trades trades = [ - t - for t in trades - if t["args"]["sellToken"].lower() in self._tokens - or t["args"]["buyToken"].lower() in self._tokens + trade + for trade in trades + if 
trade["args"]["sellToken"] in self._tokens + or trade["args"]["buyToken"] in self._tokens ] if not trades: @@ -106,8 +99,8 @@ async def get_past_events( rpl_ratio = solidity.to_float(await rp.call("rocketNetworkPrices.getRPLPrice")) reth_ratio = solidity.to_float(await rp.call("rocketTokenRETH.getExchangeRate")) eth_usdc_price = await rp.get_eth_usdc_price() - rpl_price = rpl_ratio * eth_usdc_price - reth_price = reth_ratio * eth_usdc_price + rpl_price: float = rpl_ratio * eth_usdc_price + reth_price: float = reth_ratio * eth_usdc_price events: list[Event] = [] for trade in trades: @@ -118,40 +111,37 @@ async def get_past_events( data["cow_owner"] = w3.to_checksum_address(args["owner"]) data["transactionHash"] = trade["transactionHash"].to_0x_hex() - sell_token: str = args["sellToken"].lower() - buy_token: str = args["buyToken"].lower() + sell_token: ChecksumAddress = args["sellToken"] + buy_token: ChecksumAddress = args["buyToken"] - if sell_token in self._tokens: - token = "reth" if sell_token == self._tokens[1] else "rpl" - data["event_name"] = f"cow_order_sell_{token}" - data["ourAmount"] = solidity.to_float(args["sellAmount"]) - other_address = w3.to_checksum_address(args["buyToken"]) - decimals = 18 - s = await rp.assemble_contract(name="ERC20", address=other_address) - with contextlib.suppress(Exception): - decimals = await s.functions.decimals().call() - data["otherAmount"] = solidity.to_float(args["buyAmount"], decimals) - else: - token = "reth" if buy_token == self._tokens[1] else "rpl" - data["event_name"] = f"cow_order_buy_{token}" - data["ourAmount"] = solidity.to_float(args["buyAmount"]) + if buy_token in self._tokens: + token = "rETH" if buy_token == self._tokens[1] else "RPL" + token_amount, other_amount = args["buyAmount"], args["sellAmount"] other_address = w3.to_checksum_address(args["sellToken"]) - decimals = 18 - s = await rp.assemble_contract(name="ERC20", address=other_address) - with contextlib.suppress(Exception): - decimals = await 
s.functions.decimals().call() - data["otherAmount"] = solidity.to_float(args["sellAmount"], decimals) - - data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] + data["event_name"] = f"cow_order_buy_{token.lower()}" + else: + token = "rETH" if sell_token == self._tokens[1] else "RPL" + token_amount, other_amount = args["sellAmount"], args["buyAmount"] + other_address = w3.to_checksum_address(args["buyToken"]) + data["event_name"] = f"cow_order_sell_{token.lower()}" + data["ourAmount"] = solidity.to_float(token_amount, 18) # skip trades under minimum value - if ((token == "rpl") and (data["ourAmount"] * rpl_price < 10_000)) or ( - (token == "reth") and (data["ourAmount"] * reth_price < 100_000) + if ((token == "RPL") and (data["ourAmount"] * rpl_price < 10_000)) or ( + (token == "rETH") and (data["ourAmount"] * reth_price < 100_000) ): continue + decimals = 18 + erc20 = await rp.assemble_contract(name="ERC20", address=other_address) + with contextlib.suppress(Exception): + decimals = await erc20.functions.decimals().call() + + data["otherAmount"] = solidity.to_float(other_amount, decimals) + data["ratioAmount"] = data["otherAmount"] / data["ourAmount"] + try: - data["otherToken"] = await s.functions.symbol().call() + data["otherToken"] = await erc20.functions.symbol().call() except Exception: data["otherToken"] = "UNKWN" if other_address == w3.to_checksum_address( From 87dd860d3b3f2c8bf9c70b499dc525d0ba33da19 Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Mar 2026 15:09:08 +0000 Subject: [PATCH 277/279] fix: use new rocketdash domain --- rocketwatch/utils/embeds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketwatch/utils/embeds.py b/rocketwatch/utils/embeds.py index fd7bf35c..62e647fd 100644 --- a/rocketwatch/utils/embeds.py +++ b/rocketwatch/utils/embeds.py @@ -121,7 +121,7 @@ async def el_explorer_url( "rocketNodeManager.getMegapoolAddress", target ) if megapool_address != ADDRESS_ZERO: 
- url = f"https://saturn-1.net/megapool/{megapool_address}{dashboard_network}" + url = f"https://rocketdash.net/megapool/{megapool_address}{dashboard_network}" if await rp.call( "rocketNodeManager.getSmoothingPoolRegistrationState", target, @@ -143,7 +143,7 @@ async def el_explorer_url( _prefix += "🏛️" name = delegate_name elif await rp.is_megapool(target): - url = f"https://saturn-1.net/megapool/{target}{dashboard_network}" + url = f"https://rocketdash.net/megapool/{target}{dashboard_network}" elif await rp.is_minipool(target): if chain == "mainnet": url = f"https://rocketexplorer.net/validator/{target}" From 59a26b0dc38dd39455d1ca4f0825e619efc84a2e Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Mar 2026 15:09:39 +0000 Subject: [PATCH 278/279] combine all global events into single filter --- rocketwatch/plugins/events/events.py | 63 ++++++++++++++++------------ 1 file changed, 37 insertions(+), 26 deletions(-) diff --git a/rocketwatch/plugins/events/events.py b/rocketwatch/plugins/events/events.py index 3a70e8b5..14a9efe8 100644 --- a/rocketwatch/plugins/events/events.py +++ b/rocketwatch/plugins/events/events.py @@ -102,7 +102,9 @@ async def build_direct_filter( partial_filters.append(build_direct_filter) - # generate filters for global events + # generate filter for global events + global_topics: set[HexBytes] = set() + global_topic_decoders: dict[str, type] = {} for group in config["global"]: try: contract = await rp.get_contract_by_name(name=group["contract_name"]) @@ -111,33 +113,42 @@ async def build_direct_filter( continue for event in group["events"]: - event_map[event["event_name"]] = event["name"] - - def super_builder(_contract, _event) -> PartialFilter: - # this is needed to pin nonlocal variables - async def build_topic_filter( - _from: BlockNumber, _to: BlockNumber | Literal["latest"] - ) -> list[EventData]: - event_cls = _contract.events[_event["event_name"]] - event_abi = event_cls.abi - input_types = 
",".join(i["type"] for i in event_abi["inputs"]) - topic0 = w3.keccak( - text=f"{_event['event_name']}({input_types})" - ).hex() - raw_logs = await w3.eth.get_logs( - { - "topics": [topic0], - "fromBlock": _from, - "toBlock": _to, - } - ) - return [ - event_cls().process_log(raw_log) for raw_log in raw_logs - ] + event_name = event["event_name"] + event_map[event_name] = event["name"] + + try: + event_cls = contract.events[event_name] + event_abi = event_cls.abi + input_types = ",".join(i["type"] for i in event_abi["inputs"]) + topic = w3.keccak(text=f"{event_name}({input_types})").hex() + except Exception as e: + log.exception(e) + log.warning(f"Couldn't find global event {event_name}") + continue - return build_topic_filter + global_topics.add(topic) + global_topic_decoders[topic] = event_cls + + if global_topics: + + async def build_global_filter( + _from: BlockNumber, _to: BlockNumber | Literal["latest"] + ) -> list[EventData]: + raw_logs = await w3.eth.get_logs( + { + "topics": [list(global_topics)], + "fromBlock": _from, + "toBlock": _to, + } + ) + return [ + global_topic_decoders[raw_log["topics"][0].hex()]().process_log( + raw_log + ) + for raw_log in raw_logs + ] - partial_filters.append(super_builder(contract, event)) + partial_filters.append(build_global_filter) return partial_filters, event_map, topic_map From 75e55fd1329cd692ad5791e86461eff7750c050a Mon Sep 17 00:00:00 2001 From: haloooloolo <03_sharks.guises@icloud.com> Date: Mon, 23 Mar 2026 17:46:56 +0000 Subject: [PATCH 279/279] use new figures for plots --- rocketwatch/plugins/apr/apr.py | 46 +++++++------------ rocketwatch/plugins/collateral/collateral.py | 6 +-- .../plugins/commissions/commissions.py | 16 +++---- rocketwatch/plugins/metrics/metrics.py | 9 ++-- rocketwatch/plugins/proposals/proposals.py | 24 +++++----- rocketwatch/plugins/rewards/rewards.py | 4 +- rocketwatch/plugins/rocksolid/rocksolid.py | 2 +- rocketwatch/plugins/rpl/rpl.py | 20 ++++---- rocketwatch/plugins/wall/wall.py | 2 +- 
9 files changed, 55 insertions(+), 74 deletions(-) diff --git a/rocketwatch/plugins/apr/apr.py b/rocketwatch/plugins/apr/apr.py index 01e8fb39..00d1fea4 100644 --- a/rocketwatch/plugins/apr/apr.py +++ b/rocketwatch/plugins/apr/apr.py @@ -213,9 +213,8 @@ async def reth_apr(self, interaction: Interaction): inline=False, ) x_arr = np.array(x) - fig = plt.figure() - ax1 = plt.gca() - ax2: plt.Axes = plt.twinx() # type: ignore[assignment] + fig, ax1 = plt.subplots() + ax2: plt.Axes = ax1.twinx() # type: ignore[assignment] ax2.plot( x_arr, @@ -251,13 +250,12 @@ async def reth_apr(self, interaction: Interaction): color="royalblue", ) - plt.title("Observed rETH APR values") - plt.xlabel("Date") - plt.grid(True) - plt.xlim(left=x_arr[38]) - plt.xticks(rotation=45) - old_formatter = plt.gca().xaxis.get_major_formatter() - plt.gca().xaxis.set_major_formatter(DateFormatter("%b %d")) + ax1.set_title("Observed rETH APR values") + ax1.set_xlabel("Date") + ax1.grid(True) + ax1.set_xlim(left=x_arr[38]) + ax1.tick_params(axis="x", rotation=45) + ax1.xaxis.set_major_formatter(DateFormatter("%b %d")) ax2.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) @@ -271,11 +269,7 @@ async def reth_apr(self, interaction: Interaction): fig.tight_layout() fig.savefig(img, format="png") img.seek(0) - fig.clear() - plt.close() - - # reset the x axis formatter - plt.gca().xaxis.set_major_formatter(old_formatter) + plt.close(fig) e.set_image(url="attachment://reth_apr.png") @@ -429,8 +423,7 @@ async def node_apr(self, interaction: Interaction): ) x_arr = np.array(x) - fig = plt.figure() - ax1 = plt.gca() + fig, ax1 = plt.subplots() # solo apr ax1.plot( @@ -468,13 +461,12 @@ async def node_apr(self, interaction: Interaction): alpha=0.5, ) - plt.title("Observed NO APR values") - plt.grid(True) - plt.xlim(left=x_arr[38]) - plt.xticks(rotation=0) - plt.ylim(bottom=0.02) - old_formatter = 
plt.gca().xaxis.get_major_formatter() - plt.gca().xaxis.set_major_formatter(DateFormatter("%m.%d")) + ax1.set_title("Observed NO APR values") + ax1.grid(True) + ax1.set_xlim(left=x_arr[38]) + ax1.tick_params(axis="x", rotation=0) + ax1.set_ylim(bottom=0.02) + ax1.xaxis.set_major_formatter(DateFormatter("%m.%d")) ax1.yaxis.set_major_formatter(FuncFormatter(lambda x, loc: f"{x:.1%}")) ax1.legend(loc="lower left") @@ -483,11 +475,7 @@ async def node_apr(self, interaction: Interaction): fig.tight_layout() fig.savefig(img, format="png") img.seek(0) - fig.clear() - plt.close() - - # reset the x axis formatter - plt.gca().xaxis.set_major_formatter(old_formatter) + plt.close(fig) e.add_field( name="Current Average Effective Commission:", diff --git a/rocketwatch/plugins/collateral/collateral.py b/rocketwatch/plugins/collateral/collateral.py index 3a13d77e..2b782e33 100644 --- a/rocketwatch/plugins/collateral/collateral.py +++ b/rocketwatch/plugins/collateral/collateral.py @@ -188,12 +188,12 @@ def node_minipools(node): # Add a legend for the color-coding on the scatter plot formatToInt = "{x:.0f}" - cb = plt.colorbar(mappable=paths, ax=ax, format=formatToInt) + cb = fig.colorbar(mappable=paths, ax=ax, format=formatToInt) cb.set_label("Minipools") cb.set_ticks([1, 10, 100, max_minipools]) # Add a legend for the color-coding on the hex distribution - cb = plt.colorbar(mappable=polys, ax=ax2, format=formatToInt) + cb = fig.colorbar(mappable=polys, ax=ax2, format=formatToInt) cb.set_label("Nodes") cb.set_ticks([1, 10, 100, max_nodes - 1]) @@ -324,7 +324,7 @@ async def collateral_distribution( f"{x[0]}th percentile: {int(x[1])}% collateral" for x in get_percentiles([50, 75, 90, 99], counts) ] - e.description = f"Total Effective Staked RPL: {sum(bars.values()):,}" + e.description = f"Total Staked RPL: {sum(bars.values()):,.0f}" e.set_footer(text="\n".join(percentile_strings)) await interaction.followup.send(embed=e, files=[f]) img.close() diff --git 
a/rocketwatch/plugins/commissions/commissions.py b/rocketwatch/plugins/commissions/commissions.py index f38dbab4..a1214da9 100644 --- a/rocketwatch/plugins/commissions/commissions.py +++ b/rocketwatch/plugins/commissions/commissions.py @@ -50,24 +50,20 @@ async def commission_history(self, interaction: Interaction): # data[-1] = [x / max(data[-1]) for x in data[-1]] # heatmap distribution over time data_array = np.array(data).T - ax = sns.heatmap( - data_array, cmap="viridis", yticklabels=ygrid, xticklabels=False + fig, ax = plt.subplots() + sns.heatmap( + data_array, cmap="viridis", yticklabels=ygrid, xticklabels=False, ax=ax ) ax.set_yticklabels(ax.get_yticklabels(), rotation=0, fontsize=8) # set y ticks ax.set_ylabel("Node Fee") - plt.tight_layout() - - # save figure to buffer - buf = BytesIO() - plt.savefig(buf, format="png") - buf.seek(0) + fig.tight_layout() # respond with image img = BytesIO() - plt.savefig(img, format="png") + fig.savefig(img, format="png") img.seek(0) - plt.close() + plt.close(fig) e.set_image(url="attachment://chart.png") e.add_field(name="Total Minipools", value=len(minipools)) e.add_field(name="Bar Width", value=f"{step_size} minipools") diff --git a/rocketwatch/plugins/metrics/metrics.py b/rocketwatch/plugins/metrics/metrics.py index 0cf3dd67..2f125da9 100644 --- a/rocketwatch/plugins/metrics/metrics.py +++ b/rocketwatch/plugins/metrics/metrics.py @@ -159,7 +159,7 @@ async def metrics_chart(self, interaction: Interaction): ).to_list(None) # create a new figure - _fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) + fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 10)) # plot the command usage as bars ax1.bar( @@ -184,16 +184,15 @@ async def metrics_chart(self, interaction: Interaction): ) # use minimal whitespace - plt.tight_layout() + fig.tight_layout() # store the graph in an file object file = BytesIO() - plt.savefig(file, format="png") + fig.savefig(file, format="png") file.seek(0) # clear plot from memory - plt.clf() - 
plt.close() + plt.close(fig) e = Embed(title="Command Usage and Event ") e.set_image(url="attachment://metrics.png") diff --git a/rocketwatch/plugins/proposals/proposals.py b/rocketwatch/plugins/proposals/proposals.py index 9a2bb750..1e7fa808 100644 --- a/rocketwatch/plugins/proposals/proposals.py +++ b/rocketwatch/plugins/proposals/proposals.py @@ -401,30 +401,28 @@ async def version_chart(self, interaction: Interaction, days: int = 90): ] # add percentage to labels x_arr = np.array(x) - ax = plt.subplot(111, frameon=False) - plt.stackplot(x_arr, *y.values(), labels=labels, colors=colors) + fig, ax = plt.subplots() + ax.stackplot(x_arr, *y.values(), labels=labels, colors=colors) # hide y axis - plt.tick_params( - axis="y", which="both", left=False, right=False, labelleft=False - ) - plt.gcf().autofmt_xdate() + ax.tick_params(axis="y", which="both", left=False, right=False, labelleft=False) + fig.autofmt_xdate() handles, legend_labels = ax.get_legend_handles_labels() ax.legend(reversed(handles), reversed(legend_labels), loc="upper left") # add a thin line at current time from y=0 to y=1 with a width of 0.5 - plt.plot([x_arr[-1], x_arr[-1]], [0, 1], color="white", alpha=0.25) + ax.plot([x_arr[-1], x_arr[-1]], [0, 1], color="white", alpha=0.25) # calculate future point to make latest data more visible future_point = x[-1] + timedelta(days=window_length) last_y_values = [[yy[-1]] * 2 for yy in y.values()] - plt.stackplot( + ax.stackplot( [x_arr[-1], np.datetime64(future_point)], *last_y_values, colors=colors ) - plt.tight_layout() + fig.tight_layout() # respond with image img = BytesIO() - plt.savefig(img, format="png", bbox_inches="tight", dpi=300) + fig.savefig(img, format="png", bbox_inches="tight", dpi=300) img.seek(0) - plt.close() + plt.close(fig) e.set_image(url="attachment://chart.png") # send data @@ -559,9 +557,9 @@ async def proposal_vs_node_operators_embed( # respond with image img = BytesIO() - plt.savefig(img, format="png") + fig.savefig(img, 
format="png") img.seek(0) - plt.close() + plt.close(fig) e.set_image(url=f"attachment://{attribute}.png") # send data diff --git a/rocketwatch/plugins/rewards/rewards.py b/rocketwatch/plugins/rewards/rewards.py index f829ab54..d4c8ea5e 100644 --- a/rocketwatch/plugins/rewards/rewards.py +++ b/rocketwatch/plugins/rewards/rewards.py @@ -284,13 +284,13 @@ def formatter(_x, _pos) -> str: handles, labels = ax.get_legend_handles_labels() by_label = dict(zip(labels, handles, strict=False)) - plt.legend(by_label.values(), by_label.keys(), loc="lower right") + ax.legend(by_label.values(), by_label.keys(), loc="lower right") fig.tight_layout() img = BytesIO() fig.savefig(img, format="png") img.seek(0) - plt.close() + plt.close(fig) sim_info = [] if rpl_stake > 0: diff --git a/rocketwatch/plugins/rocksolid/rocksolid.py b/rocketwatch/plugins/rocksolid/rocksolid.py index 8ff42221..679435ba 100644 --- a/rocketwatch/plugins/rocksolid/rocksolid.py +++ b/rocketwatch/plugins/rocksolid/rocksolid.py @@ -138,7 +138,7 @@ async def get_apy(days: int) -> float | None: fig.tight_layout() fig.savefig(img, format="png") img.seek(0) - plt.clf() + plt.close(fig) ca_reth = await rp.get_address_by_name("rocketTokenRETH") ca_rock_reth = await rp.get_address_by_name("RockSolidVault") diff --git a/rocketwatch/plugins/rpl/rpl.py b/rocketwatch/plugins/rpl/rpl.py index 7fbf3d98..b5458248 100644 --- a/rocketwatch/plugins/rpl/rpl.py +++ b/rocketwatch/plugins/rpl/rpl.py @@ -162,38 +162,38 @@ async def withdrawable_rpl(self, interaction: Interaction): embed = Embed() # plot the data - plt.plot(x, y, color=str(embed.color)) - plt.plot(rpl_eth_price, current_withdrawable_rpl, "bo") - plt.xlim(min(x), max(x)) + fig, ax = plt.subplots() + ax.plot(x, y, color=str(embed.color)) + ax.plot(rpl_eth_price, current_withdrawable_rpl, "bo") + ax.set_xlim(min(x), max(x)) - plt.annotate( + ax.annotate( f"{rpl_eth_price:.4f}", (rpl_eth_price, current_withdrawable_rpl), textcoords="offset points", xytext=(-10, -5), 
ha="right", ) - plt.annotate( + ax.annotate( f"{current_withdrawable_rpl / 1000000:.2f} million RPL withdrawable", (rpl_eth_price, current_withdrawable_rpl), textcoords="offset points", xytext=(10, -5), ha="left", ) - plt.grid() + ax.grid() - ax = plt.gca() ax.set_ylabel("Withdrawable RPL") ax.set_xlabel("RPL / ETH ratio") ax.yaxis.set_major_formatter(lambda x, _: f"{x / 1000000:.1f}m") ax.xaxis.set_major_formatter(lambda x, _: f"{x:.4f}") img = BytesIO() - plt.tight_layout() - plt.savefig(img, format="png") + fig.tight_layout() + fig.savefig(img, format="png") img.seek(0) - plt.close() + plt.close(fig) embed.title = "Available RPL Liquidity" embed.set_image(url="attachment://graph.png") diff --git a/rocketwatch/plugins/wall/wall.py b/rocketwatch/plugins/wall/wall.py index dbe30dcb..007307fe 100644 --- a/rocketwatch/plugins/wall/wall.py +++ b/rocketwatch/plugins/wall/wall.py @@ -366,7 +366,7 @@ async def on_fail() -> None: buffer = BytesIO() fig = self._plot_data(x, rpl_usd, rpl_eth, cex_data, dex_data) fig.savefig(buffer, format="png") - fig.clf() + plt.close(fig) buffer.seek(0) embed.set_author(name="🔗 Data from CEX APIs and Mainnet")