Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -139,3 +139,5 @@ HYPEREVM_RPC=' '
SEI_RPC=' '

PLASMA_RPC=' '

MONAD_RPC=' '
769 changes: 720 additions & 49 deletions deployments/prod_addresses.json

Large diffs are not rendered by default.

146 changes: 146 additions & 0 deletions deployments/prod_verification.json
Original file line number Diff line number Diff line change
Expand Up @@ -972,6 +972,152 @@
["0x5fD7D0d6b91CC4787Bcb86ca47e0Bd4ea0346d34", 10]
]
],
"143": [
[
"0xcd620187f4846ba5a42ab41270aC550467dc9FbB",
"SwitchboardSimulator",
"contracts/mocks/fee-updater/SwitchboardSimulator.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
"0x8c36353db4F3a2DDDeed38405199d13DCF8a1B82",
143,
1000,
"0x1CAdCd88fC148D3966eDe75D029937C886f66009"
]
],
[
"0x09A03E0d298AA13a07A9a3e36a606d4F6a4b8bB7",
"SimulatorUtils",
"contracts/mocks/fee-updater/SimulatorUtils.sol",
[
"0x8c36353db4F3a2DDDeed38405199d13DCF8a1B82",
"0x1CAdCd88fC148D3966eDe75D029937C886f66009",
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
143
]
],
[
"0x8c36353db4F3a2DDDeed38405199d13DCF8a1B82",
"SocketSimulator",
"contracts/mocks/fee-updater/SocketSimulator.sol",
[
143,
143,
"0x9B8c323468AEC7A7Cb041CeD48F92559bFF33705",
"0x1CAdCd88fC148D3966eDe75D029937C886f66009",
"IMLI"
]
],
[
"0xb4Db1838d0B97BD58C7663b3E487eEbC9a996472",
"Counter",
"contracts/examples/Counter.sol",
["0x0CC93650bF4D98237628DACf87f94E443956D8dF"]
],
[
"0xd9E3a8Ba9Be55919C5C0De6694e3103F5a35820E",
"SocketBatcher",
"contracts/socket/SocketBatcher.sol",
["0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836"]
],
[
"0x3043Ad9C9e01664bc3A68477f0870Df35dC4bFf8",
"OptimisticSwitchboard",
"contracts/switchboard/default-switchboards/OptimisticSwitchboard.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
"0x0CC93650bF4D98237628DACf87f94E443956D8dF",
143,
7200,
"0x1CAdCd88fC148D3966eDe75D029937C886f66009"
]
],
[
"0x525a6489a1df5fF1ae077fAf628E43b7F52298eF",
"FastSwitchboard",
"contracts/switchboard/default-switchboards/FastSwitchboard.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
"0x0CC93650bF4D98237628DACf87f94E443956D8dF",
143,
7200,
"0x1CAdCd88fC148D3966eDe75D029937C886f66009"
]
],
[
"0x657e72B305Dc1c41e98d9efC2350EC10e3c83E21",
"TransmitManager",
"contracts/TransmitManager.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
143,
"0x0CC93650bF4D98237628DACf87f94E443956D8dF",
"0x1CAdCd88fC148D3966eDe75D029937C886f66009"
]
],
[
"0xb3314456567986e657d4C65Ec9e8cB736B92d11D",
"ExecutionManagerDF",
"contracts/ExecutionManagerDF.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
143,
"0x0CC93650bF4D98237628DACf87f94E443956D8dF",
"0x1CAdCd88fC148D3966eDe75D029937C886f66009"
]
],
[
"0x0CC93650bF4D98237628DACf87f94E443956D8dF",
"Socket",
"contracts/socket/Socket.sol",
[
143,
"0x9B8c323468AEC7A7Cb041CeD48F92559bFF33705",
"0xb4Ef469c9d8317851270346070dA0ecE24616E6b",
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
"IMLI"
]
],
[
"0xb4Ef469c9d8317851270346070dA0ecE24616E6b",
"CapacitorFactory",
"contracts/CapacitorFactory.sol",
["0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836", 10]
],
[
"0x9B8c323468AEC7A7Cb041CeD48F92559bFF33705",
"Hasher",
"contracts/utils/Hasher.sol",
["0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836"]
],
[
"0x1CAdCd88fC148D3966eDe75D029937C886f66009",
"SignatureVerifier",
"contracts/utils/SignatureVerifier.sol",
["0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836"]
],
[
"0xbe7241e9D11EC2D1Ac86CE217c4A37b7aD1701cE",
"MultiSigWrapper",
"contracts/utils/multisig/MultiSigWrapper.sol",
[
"0xB0BBff6311B7F245761A7846d3Ce7B1b100C1836",
"0xA49f876C8F49414859cC9b0c4fEF794F0795Ccb4"
]
],
[
"0x1F6bc87f3309B5D31Eb0BdaBE3ED7d3110d3B9c3",
"SafeProxyFactory",
"contracts/utils/multisig/proxies/SafeProxyFactory.sol",
[]
],
[
"0xc8a4D2fd77c155fd52e65Ab07F337aBF84495Ead",
"SafeL2",
"contracts/utils/multisig/SafeL2.sol",
[]
]
],
"146": [
[
"0xeA964e160dEaed1960E31a20c1fA57D079d4e616",
Expand Down
10 changes: 10 additions & 0 deletions hardhat.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ if (isProduction) {
[HardhatChainName.PLUME]: getChainConfig(ChainSlug.PLUME),
[HardhatChainName.KATANA]: getChainConfig(ChainSlug.KATANA),
[HardhatChainName.HYPEREVM]: getChainConfig(ChainSlug.HYPEREVM),
[HardhatChainName.MONAD]: getChainConfig(ChainSlug.MONAD),
};
}

Expand Down Expand Up @@ -194,6 +195,7 @@ const config: HardhatUserConfig = {
plume: process.env.PLUME_API_KEY || "none",
katana: process.env.KATANA_API_KEY || "none",
hyperevm: process.env.HYPEREVM_API_KEY || "none",
monad: process.env.ETHERSCAN_API_KEY || "none",
},
customChains: [
{
Expand Down Expand Up @@ -444,6 +446,14 @@ const config: HardhatUserConfig = {
browserURL: "https://www.hyperscan.com/",
},
},
{
network: "monad",
chainId: ChainId.MONAD,
urls: {
apiURL: "https://api.etherscan.io/v2/api?chainid=143",
browserURL: "https://monadscan.com/",
},
},
Comment on lines +449 to +456
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

Verify Etherscan API endpoint supports MONAD chain ID 143.

The apiURL uses https://api.etherscan.io/v2/api?chainid=143, which routes through Etherscan's infrastructure. MONAD is a separate L1 blockchain - verify that Etherscan actually indexes MONAD (chain ID 143) before contracts can be verified on mainnet.

If Etherscan doesn't support MONAD yet, this configuration will fail during contract verification with hardhat verify.

Alternatively, search for Etherscan's supported chains:


🏁 Script executed:

#!/bin/bash
# Verify if Etherscan API v2 supports MONAD chain ID 143

echo "=== Testing Etherscan API for MONAD (chain ID 143) ==="
curl -s "https://api.etherscan.io/v2/api?chainid=143&module=proxy&action=eth_blockNumber" | jq '.'

echo ""
echo "=== Check if monadscan.com is reachable ==="
curl -s -o /dev/null -w "%{http_code}" "https://monadscan.com/"

echo ""
echo "=== Check Etherscan supported chains documentation ==="
echo "Verify at: https://docs.etherscan.io/v/etherscan-v2/"

Length of output: 292


🌐 Web query:

Does Etherscan API v2 support MONAD chain ID 143?

💡 Result:

Short answer: No — Etherscan API v2 does not list Monad mainnet (chain ID 143) as a supported chain; it lists Monad testnet (chain ID 10143). [1][2][3]

Sources:

  • Etherscan supported-chains (shows Monad Testnet 10143, not 143). [1]
  • Etherscan API V2 multichain overview. [2]
  • Monad docs (confirming Monad mainnet chain ID = 143). [3]

This configuration will fail contract verification—Etherscan API v2 doesn't support MONAD mainnet (chain ID 143).

Etherscan API v2 only supports Monad testnet (chain ID 10143), not mainnet (chain ID 143). The apiURL pointing to Etherscan will return errors when users attempt hardhat verify for MONAD mainnet contracts. Either:

  • Use a different verification service that supports MONAD, or
  • Remove the Etherscan configuration for MONAD until it's officially supported, or
  • Clarify in documentation that contract verification isn't available via Etherscan for MONAD mainnet
🤖 Prompt for AI Agents
In hardhat.config.ts around lines 449 to 456, the MONAD network block uses an
Etherscan v2 apiURL for chainId 143 which is unsupported and will break hardhat
verify; either remove the Etherscan configuration for MONAD, replace the apiURL
with a supported verification endpoint (or correct testnet chainId 10143 if you
intended testnet), or switch to a verification service that supports MONAD
mainnet; update the config accordingly and add a short comment explaining why
the Etherscan entry was removed/changed.

],
},
networks: {
Expand Down
147 changes: 147 additions & 0 deletions scripts/admin/get-outbound-txs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
import { config as dotenvConfig } from "dotenv";
import {
ChainSlug,
DeploymentAddresses,
IntegrationTypes,
getAllAddresses,
} from "../../src";
import { mode } from "../deploy/config/config";
import SingleCapacitorArtifact from "../../out/SingleCapacitor.sol/SingleCapacitor.json";
import { getProviderFromChainSlug } from "../constants";
import { ethers } from "ethers";

// Load environment variables (RPC URLs, API keys) from .env before anything
// else reads process.env.
dotenvConfig();

/**
* Usage
*
* --source Specify the source chain slug.
* This flag is required.
* E.g. npx --source=1 --destination=10 --startblock=12345 --endblock=12456 ts-node scripts/admin/get-outbound-txs.ts
*
* --destination Specify the destination chain slug.
* This flag is required.
*
* --startblock Specify the start block number.
* This flag is required.
*
* --endblock Specify the end block number.
* This flag is required.
*/

// ---------------------------------------------------------------------------
// CLI flag parsing. npm exposes `--source=...` style flags as npm_config_*
// environment variables, which always arrive as strings. Each flag is later
// consumed as a number (chain slug / block number), so validate numerically
// here to fail fast instead of producing NaN deep inside the query logic.
// ---------------------------------------------------------------------------

// Source chain slug (e.g. 1, 10, 143). Required.
const sourceChainSlug = process.env.npm_config_source;
if (!sourceChainSlug || Number.isNaN(parseInt(sourceChainSlug, 10))) {
  console.error("Error: source flag is required and must be a numeric chain slug");
  process.exit(1);
}

// Destination chain slug. Required.
const destinationChainSlug = process.env.npm_config_destination;
if (!destinationChainSlug || Number.isNaN(parseInt(destinationChainSlug, 10))) {
  console.error("Error: destination flag is required and must be a numeric chain slug");
  process.exit(1);
}

// First block of the query window. Required.
const startBlock = process.env.npm_config_startblock;
if (!startBlock || Number.isNaN(parseInt(startBlock, 10))) {
  console.error("Error: startblock flag is required and must be a block number");
  process.exit(1);
}

// Last block of the query window (inclusive). Required.
const endBlock = process.env.npm_config_endblock;
if (!endBlock || Number.isNaN(parseInt(endBlock, 10))) {
  console.error("Error: endblock flag is required and must be a block number");
  process.exit(1);
}
Comment on lines +32 to +54
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Validate and convert chain slugs to numbers immediately after extraction.

The env vars are strings but need to be numeric ChainSlug values. Convert and validate them right after extraction to fail fast and ensure type safety throughout the script.

Apply this diff:

 const sourceChainSlug = process.env.npm_config_source;
 if (!sourceChainSlug) {
   console.error("Error: source flag is required");
   process.exit(1);
 }
+const sourceChainSlugNum = parseInt(sourceChainSlug);
+if (isNaN(sourceChainSlugNum)) {
+  console.error(`Error: source must be a valid number, got: ${sourceChainSlug}`);
+  process.exit(1);
+}
 
 const destinationChainSlug = process.env.npm_config_destination;
 if (!destinationChainSlug) {
   console.error("Error: destination flag is required");
   process.exit(1);
 }
+const destinationChainSlugNum = parseInt(destinationChainSlug);
+if (isNaN(destinationChainSlugNum)) {
+  console.error(`Error: destination must be a valid number, got: ${destinationChainSlug}`);
+  process.exit(1);
+}

Then update lines 59-60 and 98 to use the numeric versions:

-  const sourceChain = sourceChainSlug;
-  const destinationChain = destinationChainSlug;
+  const sourceChain = sourceChainSlugNum as ChainSlug;
+  const destinationChain = destinationChainSlugNum as ChainSlug;
-  const provider = getProviderFromChainSlug(parseInt(sourceChain) as ChainSlug);
+  const provider = getProviderFromChainSlug(sourceChain);

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In scripts/admin/get-outbound-txs.ts around lines 32 to 54, the extracted
npm_config_* environment variables are left as strings but are expected to be
numeric ChainSlug values; convert and validate sourceChainSlug,
destinationChainSlug, startBlock, and endBlock to numbers immediately after
extraction (parseInt or Number), check for NaN and out-of-range/invalid values,
and exit with a clear error if validation fails so the script fails fast; then
update the usages at lines 59-60 and line 98 to reference the new numeric
variables (e.g., sourceChain, destinationChain, startBlockNum/endBlockNum)
instead of the original string variables.


// Deployed contract addresses for the active deployment mode, keyed by chain
// slug (sourced from deployments/prod_addresses.json via getAllAddresses).
const addresses: DeploymentAddresses = getAllAddresses(mode);

/**
 * Queries the source chain's FAST-path SingleCapacitor contract for
 * MessageAdded events between --startblock and --endblock (inclusive) and
 * logs every outbound transaction found on the source -> destination path.
 *
 * Exits the process with code 1 when addresses/integrations are missing for
 * the requested path or when the block range is invalid.
 */
export const main = async () => {
  const sourceChain = sourceChainSlug;
  const destinationChain = destinationChainSlug;

  console.log(`\nProcessing path: ${sourceChain} -> ${destinationChain}\n`);

  // Resolve the deployed addresses recorded for the source chain.
  const sourceAddresses = addresses[sourceChain];
  if (!sourceAddresses) {
    console.error(`Error: No addresses found for source chain ${sourceChain}`);
    process.exit(1);
  }

  const integration = sourceAddresses.integrations?.[destinationChain];
  if (!integration) {
    console.error(
      `Error: No integration found for ${sourceChain} -> ${destinationChain}`
    );
    process.exit(1);
  }

  // Only the FAST integration path (switchboard, socket, capacitor) is used.
  const fastIntegration = integration[IntegrationTypes.fast];
  if (!fastIntegration) {
    console.error(
      `Error: No FAST integration found for ${sourceChain} -> ${destinationChain}`
    );
    process.exit(1);
  }

  const switchboardAddress = fastIntegration.switchboard;
  const capacitorAddress = fastIntegration.capacitor;
  const socketAddress = sourceAddresses.Socket;

  console.log("Addresses:");
  console.log(` Switchboard: ${switchboardAddress}`);
  console.log(` Socket: ${socketAddress}`);
  console.log(` Capacitor: ${capacitorAddress}\n`);

  // Parse and validate the block range up front so we fail fast on bad input
  // instead of issuing RPC queries with NaN or an inverted range.
  const fromBlock = parseInt(startBlock, 10);
  const toBlock = parseInt(endBlock, 10);
  if (Number.isNaN(fromBlock) || Number.isNaN(toBlock) || fromBlock > toBlock) {
    console.error(`Error: invalid block range ${startBlock} -> ${endBlock}`);
    process.exit(1);
  }

  // Build a read-only contract bound to the source chain's provider.
  const provider = getProviderFromChainSlug(
    parseInt(sourceChain, 10) as ChainSlug
  );

  const capacitorContract = new ethers.Contract(
    capacitorAddress,
    SingleCapacitorArtifact.abi,
    provider
  );

  console.log(`Querying events from block ${fromBlock} to ${toBlock}\n`);

  // Query MessageAdded events in fixed-size chunks — many RPC providers cap
  // the block span accepted by eth_getLogs.
  const CHUNK_SIZE = 5000;
  const messageAddedEvents = [];

  for (
    let currentBlock = fromBlock;
    currentBlock <= toBlock;
    currentBlock += CHUNK_SIZE
  ) {
    const chunkEnd = Math.min(currentBlock + CHUNK_SIZE - 1, toBlock);
    console.log(`Querying chunk: ${currentBlock} to ${chunkEnd}`);

    const events = await capacitorContract.queryFilter(
      capacitorContract.filters.MessageAdded(),
      currentBlock,
      chunkEnd
    );

    messageAddedEvents.push(...events);
  }

  console.log(`Found ${messageAddedEvents.length} outbound transactions:\n`);

  for (const event of messageAddedEvents) {
    console.log(`Block: ${event.blockNumber}`);
    console.log(` Transaction Hash: ${event.transactionHash}`);
    console.log(` Packed Message: ${event.args?.packedMessage}`);
    console.log(` Packet Count: ${event.args?.packetCount?.toString()}`);
    console.log(` Root Hash: ${event.args?.newRootHash}`);
    console.log("");
  }

  console.log("Script completed.");
};

// Script entry point: exit 0 on success, log the failure and exit 1
// otherwise. An explicit exit ensures lingering provider connections cannot
// keep the process alive.
void (async () => {
  try {
    await main();
    process.exit(0);
  } catch (error) {
    console.error(error);
    process.exit(1);
  }
})();
Comment on lines +1 to +147
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion | 🟠 Major

Eliminate code duplication with get-seal-events.ts.

This script shares ~95% identical code with get-seal-events.ts. Extract the common logic (env parsing, validation, address resolution, chunked querying) into a shared utility function or base class. The only differences are the contract artifact and event filter.

Consider creating a shared helper like:

// scripts/admin/utils/query-events.ts
export async function queryEventsForPath(
  sourceChain: ChainSlug,
  destinationChain: ChainSlug,
  startBlock: number,
  endBlock: number,
  contractType: 'Socket' | 'SingleCapacitor',
  eventName: string
) {
  // Common logic here
}

Then both scripts become simple wrappers:

// get-outbound-txs.ts
const result = await queryEventsForPath(
  sourceChain,
  destinationChain, 
  startBlock,
  endBlock,
  'SingleCapacitor',
  'MessageAdded'
);
🤖 Prompt for AI Agents
In scripts/admin/get-outbound-txs.ts around lines 1-147, the file duplicates
~95% of the logic from get-seal-events.ts; extract the common logic (env flag
parsing and validation, address resolution via getAllAddresses(mode), provider
creation, chunked query loop and result aggregation) into a shared helper (e.g.,
scripts/admin/utils/query-events.ts) that accepts parameters for sourceChain,
destinationChain, startBlock, endBlock, contract artifact/ABI or a contract
factory, and the event filter name or filter function; change this script to
validate flags then call the helper with the SingleCapacitor artifact and
MessageAdded event, returning the matched events; ensure the helper returns
event objects and that error handling/logging stays consistent; update
get-seal-events.ts to call the same helper with its respective artifact and
event name so both scripts are thin wrappers.

Loading
Loading