diff --git a/README.md b/README.md index 6e32564..ad3841e 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,3 @@ -# Multichain transfers squid - -This [squid](https://docs.subsquid.io/) captures USDC Transfer events on ETH and BSC, stores them in the same database and serves the data over a common GraphQL API. - -The Ethereum processor is located in `src/eth` and similarly the Binance Chain processor can be found in `src/bsc`. The scripts file `commands.json` was updated with the commands `process:eth` and `process:bsc` to run the processors. - -You can find some useful hints on developing multichain squids on the [dedicated documentation page](https://docs.subsquid.io/basics/multichain/). - -Dependencies: Node.js, Docker, Git. - ## Quickstart ```bash @@ -15,8 +5,8 @@ Dependencies: Node.js, Docker, Git. npm i -g @subsquid/cli # 1. Clone the repo -git clone https://github.com/subsquid-labs/multichain-transfers-example -cd multichain-transfers-example +git clone https://github.com/polymerdao/evm-indexer +cd evm-indexer # 2. Install dependencies npm ci @@ -37,8 +27,8 @@ A GraphiQL playground will be available at [localhost:4350/graphql](http://local You can also run individual services separately: ```bash -sqd process:eth # Ethereum processor -sqd process:bsc # BSC processor +PROCESSOR_NAME=optimism sqd process # Optimism processor +PROCESSOR_NAME=base sqd process # Base processor sqd serve # GraphQL server ``` @@ -80,90 +70,55 @@ To set up a new chain for indexing contracts and/or transactions, follow these s 1. Decide whether you want to track contracts, transactions, or both for the new chain. -2. Choose a unique processor name for your new chain (e.g., 'arbitrum', 'polygon'). This name will be used throughout the setup process. Create a new file in the appropriate directory: - - For contracts: `src/chains/contracts/{processorName}.ts` - - For transactions: `src/chains/wallets/{processorName}.ts` - -3. 
In this new file, import the necessary functions and handler: - - For contracts: - ```typescript - import { runProcessor } from "../../utils/ibc-processor"; - import { handler } from "../../handlers"; - - runProcessor('{processorName}', handler) - ``` - - For transactions: - ```typescript - import { runProcessor } from '../../utils/ibc-processor' - import { handler } from "../../handlers/wallets"; - - runProcessor('{processorName}_txs', handler) - ``` +2. Choose a unique processor name for your new chain (e.g., 'arbitrum', 'polygon'). This name will be used as the `PROCESSOR_NAME` environment variable. - Replace '{processorName}' with your chosen unique processor name. - -4. Update the configuration file (specified by the `CONFIG_FILE` environment variable) to include the new chain. Add an entry for your chain with the relevant contracts and/or transaction addresses: +3. Update the configuration file (specified by the `CONFIG_FILE` environment variable) to include the new chain. Add an entry for your chain with the relevant configuration: ```yaml - {PROCESSOR_NAME}: + {processorName}: contracts: - "0x1234567890123456789012345678901234567890" transactions: - "0x0987654321098765432109876543210987654321" + rpc: "https://rpc.example.com" + rpcRateLimit: 10 + maxBatchCallSize: 100 + gateway: "https://gateway.example.com" + fromBlock: 1000000 + finalityConfirmation: 20 + version: 1 ``` -5. Set the following environment variables for the new chain: + All fields are optional and can be overridden by environment variables. Note that if both `contracts` and `transactions` are omitted in the config, the processor won't perform any actual work and will exit after starting. + +4. 
Set the following environment variables for the new chain: - - `{PROCESSOR_NAME}_RPC`: The RPC endpoint for the new chain - - `{PROCESSOR_NAME}_GATEWAY`: (Optional) The gateway for the new chain - - `DISPATCHER_ADDRESS_{PROCESSOR_NAME}_START_BLOCK`: The starting block number for indexing - - `{PROCESSOR_NAME}_VERSION`: (Optional) The version number for the processor state schema + - `PROCESSOR_NAME`: Set this to your chosen processor name + - `CONFIG_FILE`: Path to the configuration file + - `{PROCESSOR_NAME}_RPC`: The RPC endpoint for the new chain (overrides config) + - `{PROCESSOR_NAME}_GATEWAY`: The gateway for the new chain (overrides config) + - `DISPATCHER_ADDRESS_{PROCESSOR_NAME}_START_BLOCK`: The starting block number for indexing (overrides config's `fromBlock`) + - `{PROCESSOR_NAME}_VERSION`: The version number for the processor state schema (overrides config) + - `RPC_RATE_LIMIT`: Global RPC rate limit (can be overridden per chain) + - `MAX_BATCH_CALL_SIZE`: Global max batch call size (can be overridden per chain) + - `FINALITY_CONFIRMATION`: Global finality confirmation (can be overridden per chain) - You can also set custom rate limits and batch call sizes: + You can also set chain-specific overrides: - `{PROCESSOR_NAME}_RPC_RATE_LIMIT`: Custom RPC rate limit for this chain - `{PROCESSOR_NAME}_MAX_BATCH_CALL_SIZE`: Custom max batch call size for this chain -6. If needed, update the appropriate handler file: - - For contracts: `src/handlers/index.ts` - - For transactions: `src/handlers/wallets.ts` - Include any chain-specific logic in the handler function. - -7. 
Add new squid commands for the new chain in the `commands.json` file: - - For contracts: - ```json - "process:{processorName}": { - "description": "Load .env and start the {ProcessorName} squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/contracts/{processorName}.js"] - }, - "process:prod:{processorName}": { - "description": "Start the {ProcessorName} squid processor", - "cmd": ["node", "lib/chains/contracts/{processorName}.js"], - "hidden": true - } - ``` + Note: Environment variables take precedence over configuration file values. + +5. To run the processor for the new chain, use the following command: - For transactions: - ```json - "process:{processorName}:wallets": { - "description": "Load .env and start the {ProcessorName} Wallets squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/wallets/{processorName}.js"] - }, - "process:prod:{processorName}:wallets": { - "description": "Start the {ProcessorName} Wallets squid processor", - "cmd": ["node", "lib/chains/wallets/{processorName}.js"], - "hidden": true - } + ```bash + PROCESSOR_NAME={processorName} sqd process ``` - Replace '{processorName}' with your chosen unique processor name and '{ProcessorName}' with a capitalized version. + This command will use the same processor code but with the configuration specific to the new chain. -8. Rebuild and restart your squid to include the new chain in the indexing process. +6. Rebuild your squid to ensure all changes are compiled. Remember to replace '{processorName}' and '{PROCESSOR_NAME}' with your actual unique processor name in all the above examples. The `ibc-processor.ts` utility will automatically set up the processor with the correct configuration based on the environment variables and the config file. 
-Note: If you're tracking both contracts and transactions for the new chain, you'll need to create two separate files (one in each directory) and set up both processors and commands. \ No newline at end of file +Note: This setup allows you to use the same processor code for multiple chains, simplifying maintenance and reducing code duplication. You only need to specify different `PROCESSOR_NAME` environment variables to run the processor for different chains. \ No newline at end of file diff --git a/commands.json b/commands.json index 0648b77..51bca2e 100644 --- a/commands.json +++ b/commands.json @@ -44,65 +44,15 @@ "description": "Generate data access classes for an ABI file(s) in the ./abi folder", "cmd": ["squid-evm-typegen", "./src/abi", {"glob": "./abi/*.json"}, "--multicall"] }, - "process:optimism": { - "description": "Load .env and start the OP squid processor", + "process": { + "description": "Load .env and start a squid processor", "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/optimism.js"] + "cmd": ["node", "--require=dotenv/config", "lib/chains/processor.js"] }, - "process:base": { - "description": "Load .env and start the Base squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/base.js"] - }, - "process:backfill": { - "description": "Load .env and start the Backfill squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/backfill.js"] - }, - "process:eth:wallets": { - "description": "Load .env and start the Wallets squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/wallets/eth.js"] - }, - "process:base:wallets": { - "description": "Load .env and start the Base Wallets squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/wallets/base.js"] - }, - 
"process:optimism:wallets": { - "description": "Load .env and start the Optimism Wallets squid processor", - "deps": ["build", "migration:apply"], - "cmd": ["node", "--require=dotenv/config", "lib/chains/wallets/optimism.js"] - }, - "process:prod:optimism": { - "description": "Start the Optimism squid processor", + "process:prod": { + "description": "Start the squid processor", "deps": ["migration:apply"], - "cmd": ["node", "lib/chains/optimism.js"], - "hidden": true - }, - "process:prod:base": { - "description": "Start the Base squid processor", - "cmd": ["node", "lib/chains/base.js"], - "hidden": true - }, - "process:prod:backfill": { - "description": "Start the backfill squid processor", - "cmd": ["node", "lib/chains/backfill.js"], - "hidden": true - }, - "process:prod:eth:wallets": { - "description": "Start the wallets squid processor", - "cmd": ["node", "lib/chains/wallets/eth.js"], - "hidden": true - }, - "process:prod:base:wallets": { - "description": "Start the wallets squid processor", - "cmd": ["node", "lib/chains/wallets/base.js"], - "hidden": true - }, - "process:prod:optimism:wallets": { - "description": "Start the wallets squid processor", - "cmd": ["node", "lib/chains/wallets/optimism.js"], + "cmd": ["node", "lib/chains/processor.js"], "hidden": true }, "serve": { diff --git a/package-lock.json b/package-lock.json index ff27f78..7f36b5c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,7 +14,6 @@ "@subsquid/typeorm-store": "^1.5.1", "bluebird": "^3.7.2", "dotenv": "^16.4.5", - "dotenv-expand": "^11.0.6", "dset": "^3.1.4", "ethers": "^6.12.1", "express": "^4.21.0", @@ -2379,20 +2378,6 @@ "url": "https://dotenvx.com" } }, - "node_modules/dotenv-expand": { - "version": "11.0.6", - "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-11.0.6.tgz", - "integrity": "sha512-8NHi73otpWsZGBSZwwknTXS5pqMOrk9+Ssrna8xCaxkzEpU9OTf9R5ArQGVw03//Zmk9MOwLPng9WwndvpAJ5g==", - "dependencies": { - "dotenv": "^16.4.4" - }, - "engines": { - 
"node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, "node_modules/dset": { "version": "3.1.4", "resolved": "https://registry.npmjs.org/dset/-/dset-3.1.4.tgz", diff --git a/package.json b/package.json index a158506..f315628 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "@subsquid/typeorm-store": "^1.5.1", "bluebird": "^3.7.2", "dotenv": "^16.4.5", - "dotenv-expand": "^11.0.6", "dset": "^3.1.4", "ethers": "^6.12.1", "express": "^4.21.0", diff --git a/squid.yaml b/squid.yaml index 1c18a8a..9f15051 100644 --- a/squid.yaml +++ b/squid.yaml @@ -10,15 +10,15 @@ deploy: - name: optimism-processor cmd: - sqd - - process:prod:optimism + - process:prod env: - RPC_ENDPOINT_ETH: ${{ secrets.RPC_ENDPOINT_ETH }} + PROCESSOR_NAME: optimism - name: base-processor cmd: - sqd - - process:prod:base + - process:prod env: - RPC_ENDPOINT_BSC: ${{ secrets.RPC_ENDPOINT_BSC }} + PROCESSOR_NAME: base api: cmd: - sqd diff --git a/src/chains/backfill.ts b/src/chains/backfill.ts deleted file mode 100644 index aa1e018..0000000 --- a/src/chains/backfill.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { runProcessor } from '../utils/ibc-processor' -import { handler } from "../handlers/backfill"; - -runProcessor('backfill', handler) \ No newline at end of file diff --git a/src/chains/optimism.ts b/src/chains/optimism.ts deleted file mode 100644 index 92c5dd3..0000000 --- a/src/chains/optimism.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { runProcessor } from "../utils/ibc-processor"; -import { handler } from "../handlers"; - -runProcessor('optimism', handler) diff --git a/src/chains/base.ts b/src/chains/processor.ts similarity index 76% rename from src/chains/base.ts rename to src/chains/processor.ts index 2c50468..7e34df7 100644 --- a/src/chains/base.ts +++ b/src/chains/processor.ts @@ -1,4 +1,4 @@ import { runProcessor } from "../utils/ibc-processor"; import { handler } from "../handlers"; -runProcessor('base', handler) +runProcessor(handler) diff --git 
a/src/chains/wallets/base.ts b/src/chains/wallets/base.ts deleted file mode 100644 index 1c87336..0000000 --- a/src/chains/wallets/base.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { runProcessor } from '../../utils/ibc-processor' -import { handler } from "../../handlers/wallets"; - -runProcessor('base_txs', handler) diff --git a/src/chains/wallets/eth.ts b/src/chains/wallets/eth.ts deleted file mode 100644 index 135e88b..0000000 --- a/src/chains/wallets/eth.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { runProcessor } from '../../utils/ibc-processor' -import { handler } from "../../handlers/wallets"; - -runProcessor('eth_txs', handler) \ No newline at end of file diff --git a/src/chains/wallets/optimism.ts b/src/chains/wallets/optimism.ts deleted file mode 100644 index 178a28c..0000000 --- a/src/chains/wallets/optimism.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { runProcessor } from '../../utils/ibc-processor' -import { handler } from "../../handlers/wallets"; - -runProcessor('op_txs', handler) - diff --git a/src/utils/ibc-processor.ts b/src/utils/ibc-processor.ts index 04550a2..1b7fe23 100644 --- a/src/utils/ibc-processor.ts +++ b/src/utils/ibc-processor.ts @@ -5,17 +5,31 @@ import { z } from "zod"; import { topics } from "./topics"; import { Context } from "./types"; import { TypeormDatabase } from "@subsquid/typeorm-store"; -import dotenvExpand from 'dotenv-expand'; const ConfigSchema = z.record(z.string(), z.object({ transactions: z.array(z.string()).optional(), contracts: z.array(z.string()).optional(), + rpc: z.string().optional(), + rpcRateLimit: z.number().optional(), + maxBatchCallSize: z.number().optional(), + gateway: z.string().optional(), + fromBlock: z.number().optional(), + finalityConfirmation: z.number().optional(), + version: z.number().default(1).optional(), })) type Config = z.infer -export function IbcProcessor(processorName: string) { +export function IbcProcessor(processorName?: string) { + if (!processorName) { + processorName = 
process.env.PROCESSOR_NAME + } + + if (!processorName) { + throw new Error('PROCESSOR_NAME environment variable is not set') + } + let capProcessorName = processorName.toUpperCase(); // Read the config file @@ -27,17 +41,10 @@ export function IbcProcessor(processorName: string) { let config: Config try { const fileContents = fs.readFileSync(configPath, 'utf8') - const rawConfig = yaml.load(fileContents) as Record - - // Interpolate environment variables - const interpolatedConfig = Object.entries(rawConfig).reduce((acc, [key, value]) => { - acc[key] = { - transactions: value.transactions?.map((t: string) => dotenvExpand.expand({ parsed: { VALUE: t } }).parsed!.VALUE), - contracts: value.contracts?.map((c: string) => dotenvExpand.expand({ parsed: { VALUE: c } }).parsed!.VALUE), - } - return acc - }, {} as Record) - + const interpolatedFileContents = fileContents.replace(/\$\{([A-Z0-9_]+)}/g, (match, p1) => { + return process.env[p1] || match; // Replace with env var value, or keep the placeholder if not found + }); + const interpolatedConfig = yaml.load(interpolatedFileContents) as Record config = ConfigSchema.parse(interpolatedConfig) } catch (error) { throw new Error(`Failed to read or parse config file: ${error}`) @@ -68,53 +75,52 @@ export function IbcProcessor(processorName: string) { } }); - let rpcUrl = process.env[`${capProcessorName}_RPC`] + let rpcUrl = config[processorName]?.rpc ?? process.env[`${capProcessorName}_RPC`] if (!rpcUrl) { throw new Error(`Missing RPC endpoint for chain ${capProcessorName}`) } - let rpcRateLimit = process.env.RPC_RATE_LIMIT - - if (!rpcRateLimit) { - throw new Error(`Missing RPC rate limit env var`) + let rpcRateLimit = config[processorName]?.rpcRateLimit ?? 
Number(process.env.RPC_RATE_LIMIT) + if (rpcRateLimit === undefined || isNaN(rpcRateLimit)) { + throw new Error(`Missing RPC rate limit`) } - let customRateLimit = process.env[`${capProcessorName}_RPC_RATE_LIMIT`] - if (customRateLimit) { + let customRateLimit = Number(process.env[`${capProcessorName}_RPC_RATE_LIMIT`]) + if (!isNaN(customRateLimit)) { rpcRateLimit = customRateLimit } - let maxBatchCallSize = process.env.MAX_BATCH_CALL_SIZE ?? "100" - if (!maxBatchCallSize) { - throw new Error(`Missing max batch call size env var`) + let maxBatchCallSize = config[processorName]?.maxBatchCallSize ?? Number(process.env.MAX_BATCH_CALL_SIZE ?? "100") + if (maxBatchCallSize === undefined || isNaN(maxBatchCallSize)) { + throw new Error(`Missing max batch call size`) } - let customMaxBatchCallSize = process.env[`${capProcessorName}_MAX_BATCH_CALL_SIZE`] - if (customMaxBatchCallSize) { + let customMaxBatchCallSize = Number(process.env[`${capProcessorName}_MAX_BATCH_CALL_SIZE`]) + if (!isNaN(customMaxBatchCallSize)) { maxBatchCallSize = customMaxBatchCallSize } processor = processor.setRpcEndpoint({ url: rpcUrl, - rateLimit: Number(rpcRateLimit), - maxBatchCallSize: Number(maxBatchCallSize), + rateLimit: rpcRateLimit, + maxBatchCallSize: maxBatchCallSize, }) - let gateway = process.env[`${capProcessorName}_GATEWAY`] + let gateway = config[processorName]?.gateway ?? process.env[`${capProcessorName}_GATEWAY`] if (gateway) { processor = processor.setGateway(gateway) } - let fromBlock = process.env[`DISPATCHER_ADDRESS_${capProcessorName}_START_BLOCK`] + let fromBlock = config[processorName]?.fromBlock ?? Number(process.env[`DISPATCHER_ADDRESS_${capProcessorName}_START_BLOCK`]) if (fromBlock) { processor = processor.setBlockRange({ - from: Number(fromBlock), + from: fromBlock, }) } - let finalityConfirmation = process.env.FINALITY_CONFIRMATION; + let finalityConfirmation = config[processorName]?.finalityConfirmation ?? 
Number(process.env.FINALITY_CONFIRMATION); if (finalityConfirmation) { - processor = processor.setFinalityConfirmation(Number(finalityConfirmation)) + processor = processor.setFinalityConfirmation(finalityConfirmation) } if (config[processorName]) { @@ -136,13 +142,17 @@ export function IbcProcessor(processorName: string) { } } - return processor + return {processor, processorName, config} } -export function runProcessor(processorName: string, handler: (ctx: Context) => Promise) { - const processor = IbcProcessor(processorName) +export function runProcessor(handler: (ctx: Context) => Promise) { + const {processor, processorName, config} = IbcProcessor() + + let version = process.env[`${processorName.toUpperCase()}_VERSION`] ?? config[processorName]?.version ?? 1; + if (!version) { + throw new Error('Version not set') + } - let version = process.env[`${processorName.toUpperCase()}_VERSION`] ?? '1'; processor.run(new TypeormDatabase({ supportHotBlocks: true, isolationLevel: "REPEATABLE READ",