# .github/workflows/live-test.yml
# Live end-to-end roundtrip test against the deployed bridge.
# Manually triggered only (workflow_dispatch): burns WNCG on Ethereum, drives the
# bridge Lambda, verifies the NCG/WNCG roundtrip, and pings Slack on failure.
name: Live Bridge E2E Test
on:
  workflow_dispatch:

jobs:
  e2e:
    # NOTE(review): the original `if: true` was a no-op condition and has been removed.
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '20'
      # Install workspace dependencies with pnpm.
      - run: npm install -g pnpm && pnpm install
        working-directory: bridge
      - name: Run live roundtrip test
        working-directory: bridge
        env:
          # Bridge operation env vars (from secrets)
          GRAPHQL_API_ENDPOINT: ${{ secrets.GRAPHQL_API_ENDPOINT }}
          KMS_PROVIDER_URL: ${{ secrets.KMS_PROVIDER_URL }}
          KMS_PROVIDER_KEY_ID: ${{ secrets.KMS_PROVIDER_KEY_ID }}
          KMS_PROVIDER_REGION: ${{ secrets.KMS_PROVIDER_REGION }}
          KMS_PROVIDER_AWS_ACCESSKEY: ${{ secrets.KMS_PROVIDER_AWS_ACCESSKEY }}
          KMS_PROVIDER_AWS_SECRETKEY: ${{ secrets.KMS_PROVIDER_AWS_SECRETKEY }}
          KMS_PROVIDER_PUBLIC_KEY: ${{ secrets.KMS_PROVIDER_PUBLIC_KEY }}
          WNCG_CONTRACT_ADDRESS: ${{ secrets.WNCG_CONTRACT_ADDRESS }}
          BSC_KMS_PROVIDER_URL: ${{ secrets.BSC_KMS_PROVIDER_URL }}
          BSC_WNCG_CONTRACT_ADDRESS: ${{ secrets.BSC_WNCG_CONTRACT_ADDRESS }}
          NCG_MINTER: ${{ secrets.NCG_MINTER }}
          # E2E test-specific secrets
          E2E_TEST_PRIVATE_KEY: ${{ secrets.E2E_TEST_PRIVATE_KEY }}
          E2E_TEST_9C_PRIVATE_KEY: ${{ secrets.E2E_TEST_9C_PRIVATE_KEY }}
          E2E_TEST_9C_ADDRESS: ${{ secrets.E2E_TEST_9C_ADDRESS }}
          BRIDGE_LAMBDA_ARN: ${{ secrets.BRIDGE_LAMBDA_ARN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.E2E_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.E2E_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: us-east-1
        run: npx tsx e2e/live-roundtrip.ts
      - name: Notify Slack on failure
        if: failure()
        run: |
          curl -s -X POST "${{ secrets.SLACK_WEBHOOK_URL }}" \
            -H 'Content-type: application/json' \
            -d '{"text":"🚨 *브릿지 라이브 E2E 테스트 실패*\n배포 후 라운드트립 검증 실패 — 즉시 확인 필요"}'
diff --git a/bridge/e2e/live-roundtrip.ts b/bridge/e2e/live-roundtrip.ts new file mode 100644 index 0000000..62167e0 --- /dev/null +++ b/bridge/e2e/live-roundtrip.ts @@ -0,0 +1,312 @@ +/** + * Live Bridge E2E Roundtrip Test + * + * Phase 1 (ETH -> 9c): Burns WNCG on Ethereum, invokes Lambda, waits for NCG receipt on 9c. + * Phase 2 (9c -> ETH): Transfers NCG on 9c back to bridge, invokes Lambda, waits for WNCG mint on ETH. + * + * Required env vars: + * KMS_PROVIDER_URL - Ethereum RPC + * WNCG_CONTRACT_ADDRESS - WNCG contract on Ethereum + * GRAPHQL_API_ENDPOINT - 9c GraphQL endpoint + * E2E_TEST_PRIVATE_KEY - Plain ETH private key for test account + * E2E_TEST_9C_PRIVATE_KEY - Plain 9c private key (hex) + * E2E_TEST_9C_ADDRESS - 9c test account address (0x-prefixed hex) + * BRIDGE_LAMBDA_ARN - Lambda ARN to invoke + * KMS_PROVIDER_PUBLIC_KEY - Bridge's ETH address public key (base64) + * NCG_MINTER - NCG minter address for transfer action + */ + +import { ethers } from "ethers"; +import Web3 from "web3"; +import crypto from "crypto"; +import { RawPrivateKey } from "@planetarium/account"; +import { signTransaction } from "@planetarium/sign"; +import { encodeUnsignedTx } from "@planetarium/tx"; +import { RecordView, encode } from "@planetarium/bencodex"; +import { HeadlessGraphQLClient } from "../src/headless-graphql-client"; +import { + sleep, + getWNCGBalance, + waitForNCGReceipt, + waitForWNCGMint, + invokeLambda, +} from "./utils"; + +const BURN_WNCG_AMOUNT = ethers.utils.parseEther("120"); // 120 WNCG +const GRAPHQL_REQUEST_RETRY = 5; +const PHASE_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes + +const wNCGTokenAbi = [ + "function balanceOf(address owner) view returns (uint256)", + "function burn(uint256 amount, bytes32 _to) external", + "event Burn(address indexed _sender, bytes32 indexed _to, uint256 amount)", +]; + +function requireEnv(name: string): string { + const val = process.env[name]; + if (!val) { + throw new Error(`Missing required env var: ${name}`); + 
} + return val; +} + +async function burnWNCG( + wallet: ethers.Wallet, + contractAddress: string, + amount: ethers.BigNumber, + recipient9cAddress: string +): Promise<{ txHash: string; blockNumber: number }> { + const contract = new ethers.Contract( + contractAddress, + wNCGTokenAbi, + wallet + ); + + // recipient9cAddress is a 0x-prefixed 40-char hex address; pad to bytes32 + const recipient = recipient9cAddress.toLowerCase().startsWith("0x") + ? recipient9cAddress + : "0x" + recipient9cAddress; + const recipientBytes32 = ethers.utils.hexZeroPad(recipient, 32); + + console.log( + `[Phase 1] Burning ${ethers.utils.formatEther(amount)} WNCG to 9c address ${recipient9cAddress}` + ); + + const tx = await contract.burn(amount, recipientBytes32); + console.log(`[Phase 1] Burn tx submitted: ${tx.hash}`); + + const receipt = await tx.wait(1); + console.log( + `[Phase 1] Burn tx confirmed at block ${receipt.blockNumber}: ${tx.hash}` + ); + + return { txHash: tx.hash, blockNumber: receipt.blockNumber }; +} + +async function transferNCGToBridge( + privateKeyHex: string, + senderAddress: string, + bridgeAddress: string, + amount: string, + memo: string, + graphqlClient: HeadlessGraphQLClient, + ncgMinter: string +): Promise { + const account = RawPrivateKey.fromHex(privateKeyHex); + const publicKey = await account.getPublicKey(); + const publicKeyBytes = publicKey.toBytes("uncompressed"); + + const recipient = Buffer.from( + Web3.utils.hexToBytes(bridgeAddress) + ); + const sender = Buffer.from( + Web3.utils.hexToBytes(senderAddress) + ); + + const nonce = BigInt(await graphqlClient.getNextTxNonce(senderAddress)); + const genesisHash = Buffer.from( + await graphqlClient.getGenesisHash(), + "hex" + ); + + const ncgAmount = Math.floor(parseFloat(amount) * 100); + const updatedAddresses = new Set([recipient, sender]); + + const action = new RecordView( + { + type_id: "transfer_asset5", + values: { + amount: [ + new RecordView( + { + decimalPlaces: Buffer.from([0x02]), + 
minters: [ + Buffer.from( + Web3.utils.hexToBytes(ncgMinter) + ), + ], + ticker: "NCG", + }, + "text" + ), + BigInt(ncgAmount), + ], + memo, + recipient, + sender, + }, + }, + "text" + ); + + const MEAD_CURRENCY = { + ticker: "Mead", + decimalPlaces: 18, + minters: null, + totalSupplyTrackable: false, + maximumSupply: null, + }; + + const unsignedTx = encodeUnsignedTx({ + nonce, + publicKey: Buffer.from(publicKeyBytes), + signer: sender, + timestamp: new Date(), + updatedAddresses, + genesisHash, + maxGasPrice: { + currency: MEAD_CURRENCY, + rawValue: 10n ** 13n, + }, + gasLimit: 4n, + actions: [action], + }); + + const tx = await signTransaction( + Buffer.from(encode(unsignedTx)).toString("hex"), + account + ); + + const staged = await graphqlClient.stageTx( + Buffer.from(tx, "hex").toString("base64") + ); + if (!staged) { + throw new Error("[Phase 2] Failed to stage NCG transfer transaction"); + } + + const txId = crypto + .createHash("sha256") + .update(tx, "hex") + .digest() + .toString("hex"); + + console.log(`[Phase 2] NCG transfer staged: txId=${txId}`); + return txId; +} + +async function main(): Promise { + console.log("[E2E] Starting live bridge roundtrip test"); + + const KMS_PROVIDER_URL = requireEnv("KMS_PROVIDER_URL"); + const WNCG_CONTRACT_ADDRESS = requireEnv("WNCG_CONTRACT_ADDRESS"); + const GRAPHQL_API_ENDPOINT = requireEnv("GRAPHQL_API_ENDPOINT"); + const E2E_TEST_PRIVATE_KEY = requireEnv("E2E_TEST_PRIVATE_KEY"); + const E2E_TEST_9C_PRIVATE_KEY = requireEnv("E2E_TEST_9C_PRIVATE_KEY"); + const E2E_TEST_9C_ADDRESS = requireEnv("E2E_TEST_9C_ADDRESS"); + const BRIDGE_LAMBDA_ARN = requireEnv("BRIDGE_LAMBDA_ARN"); + const KMS_PROVIDER_PUBLIC_KEY = requireEnv("KMS_PROVIDER_PUBLIC_KEY"); + const NCG_MINTER = requireEnv("NCG_MINTER"); + + // Derive bridge ETH address from public key + const web3 = new Web3(); + const bridgeEthAddress = + "0x" + + web3.utils + .keccak256( + "0x" + + Buffer.from(KMS_PROVIDER_PUBLIC_KEY, "base64") + .toString("hex") + 
.slice(2) + ) + .slice(26); + + console.log(`[E2E] Bridge ETH address: ${bridgeEthAddress}`); + + const provider = new ethers.providers.JsonRpcProvider(KMS_PROVIDER_URL); + const wallet = new ethers.Wallet(E2E_TEST_PRIVATE_KEY, provider); + const graphqlClient = new HeadlessGraphQLClient( + GRAPHQL_API_ENDPOINT, + GRAPHQL_REQUEST_RETRY, + process.env.JWT_SECRET_KEY + ); + + // Pre-flight balance check + const balance = await getWNCGBalance( + wallet.address, + provider, + WNCG_CONTRACT_ADDRESS + ); + console.log( + `[E2E] Test account WNCG balance: ${ethers.utils.formatEther(balance)} WNCG` + ); + + if (balance.lt(BURN_WNCG_AMOUNT)) { + console.warn( + `[E2E] Insufficient WNCG balance (${ethers.utils.formatEther(balance)} < 120). Skipping test.` + ); + process.exit(0); + } + + // ── Phase 1: ETH β†’ 9c ──────────────────────────────────────────────────── + + console.log("\n[E2E] === Phase 1: ETH β†’ 9c ==="); + + const currentBlock = await provider.getBlockNumber(); + + const { txHash: burnTxHash } = await burnWNCG( + wallet, + WNCG_CONTRACT_ADDRESS, + BURN_WNCG_AMOUNT, + E2E_TEST_9C_ADDRESS + ); + + // Give the RPC a moment to settle before invoking Lambda + await sleep(5000); + await invokeLambda(BRIDGE_LAMBDA_ARN); + + console.log("[Phase 1] Waiting for NCG receipt on 9c..."); + const ncgTxId = await waitForNCGReceipt( + E2E_TEST_9C_ADDRESS, + burnTxHash, + graphqlClient, + { timeoutMs: PHASE_TIMEOUT_MS } + ); + console.log(`[Phase 1] SUCCESS β€” NCG received: txId=${ncgTxId}`); + + // ── Phase 2: 9c β†’ ETH ──────────────────────────────────────────────────── + + console.log("\n[E2E] === Phase 2: 9c β†’ ETH ==="); + + // Transfer the received NCG back to the bridge (120 NCG) + const ncgAmount = "120.00"; + const ncg9cTxId = await transferNCGToBridge( + E2E_TEST_9C_PRIVATE_KEY, + E2E_TEST_9C_ADDRESS, + bridgeEthAddress, + ncgAmount, + wallet.address, // ETH address as memo so bridge mints to our test wallet + graphqlClient, + NCG_MINTER + ); + + console.log( + 
`[Phase 2] NCG transfer to bridge submitted: txId=${ncg9cTxId}` + ); + + // Wait a bit for transaction to propagate + await sleep(10000); + await invokeLambda(BRIDGE_LAMBDA_ARN); + + console.log("[Phase 2] Waiting for WNCG mint on Ethereum..."); + const mintTxHash = await waitForWNCGMint( + wallet.address, + currentBlock, + provider, + WNCG_CONTRACT_ADDRESS, + { timeoutMs: PHASE_TIMEOUT_MS } + ); + console.log(`[Phase 2] SUCCESS β€” WNCG minted: txHash=${mintTxHash}`); + + // ── Summary ────────────────────────────────────────────────────────────── + + console.log("\n[E2E] === ROUNDTRIP COMPLETE ==="); + console.log(` ETH burn tx: ${burnTxHash}`); + console.log(` 9c NCG receipt tx: ${ncgTxId}`); + console.log(` 9c NCG transfer: ${ncg9cTxId}`); + console.log(` ETH WNCG mint tx: ${mintTxHash}`); +} + +main().catch((error) => { + console.error("[E2E] FAILED:", error); + process.exit(1); +}); diff --git a/bridge/e2e/utils.ts b/bridge/e2e/utils.ts new file mode 100644 index 0000000..794f8c9 --- /dev/null +++ b/bridge/e2e/utils.ts @@ -0,0 +1,142 @@ +import { ethers } from "ethers"; +import { + LambdaClient, + InvokeCommand, + InvocationType, +} from "@aws-sdk/client-lambda"; +import { IHeadlessGraphQLClient } from "../src/interfaces/headless-graphql-client"; + +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export async function getWNCGBalance( + address: string, + provider: ethers.providers.JsonRpcProvider, + contractAddress: string +): Promise { + const abi = ["function balanceOf(address owner) view returns (uint256)"]; + const contract = new ethers.Contract(contractAddress, abi, provider); + return contract.balanceOf(address); +} + +export async function waitForNCGReceipt( + address: string, + burnTxHash: string, + graphqlClient: IHeadlessGraphQLClient, + opts: { timeoutMs?: number; pollIntervalMs?: number } = {} +): Promise { + const timeoutMs = opts.timeoutMs ?? 
10 * 60 * 1000; + const pollIntervalMs = opts.pollIntervalMs ?? 15 * 1000; + const deadline = Date.now() + timeoutMs; + + console.log( + `[waitForNCGReceipt] Waiting for NCG receipt for burn tx ${burnTxHash}` + ); + + while (Date.now() < deadline) { + try { + const tipIndex = await graphqlClient.getTipIndex(); + const tipHash = await graphqlClient.getBlockHash(tipIndex); + const events = await graphqlClient.getNCGTransferredEvents( + tipHash, + address + ); + + for (const event of events) { + if (event.memo === burnTxHash) { + console.log( + `[waitForNCGReceipt] Found NCG receipt: txId=${event.txId}, amount=${event.amount}` + ); + return event.txId; + } + } + } catch (e) { + console.error("[waitForNCGReceipt] Error polling:", e); + } + + await sleep(pollIntervalMs); + } + + throw new Error( + `[waitForNCGReceipt] Timed out waiting for NCG receipt for burn tx ${burnTxHash}` + ); +} + +export async function waitForWNCGMint( + recipientAddress: string, + fromBlock: number, + provider: ethers.providers.JsonRpcProvider, + contractAddress: string, + opts: { timeoutMs?: number; pollIntervalMs?: number } = {} +): Promise { + const timeoutMs = opts.timeoutMs ?? 10 * 60 * 1000; + const pollIntervalMs = opts.pollIntervalMs ?? 
15 * 1000; + const deadline = Date.now() + timeoutMs; + + const abi = [ + "event Transfer(address indexed from, address indexed to, uint256 value)", + ]; + const contract = new ethers.Contract(contractAddress, abi, provider); + const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"; + + console.log( + `[waitForWNCGMint] Waiting for WNCG mint to ${recipientAddress} from block ${fromBlock}` + ); + + while (Date.now() < deadline) { + try { + const currentBlock = await provider.getBlockNumber(); + const filter = contract.filters.Transfer( + ZERO_ADDRESS, + recipientAddress + ); + const events = await contract.queryFilter( + filter, + fromBlock, + currentBlock + ); + + if (events.length > 0) { + const txHash = events[0].transactionHash; + console.log( + `[waitForWNCGMint] Found WNCG mint tx: ${txHash}` + ); + return txHash; + } + } catch (e) { + console.error("[waitForWNCGMint] Error polling:", e); + } + + await sleep(pollIntervalMs); + } + + throw new Error( + `[waitForWNCGMint] Timed out waiting for WNCG mint to ${recipientAddress}` + ); +} + +export async function invokeLambda(lambdaArn: string): Promise { + const region = process.env.AWS_REGION ?? "us-east-1"; + const client = new LambdaClient({ region }); + + console.log(`[invokeLambda] Invoking Lambda ${lambdaArn}`); + + const result = await client.send( + new InvokeCommand({ + FunctionName: lambdaArn, + InvocationType: InvocationType.RequestResponse, + }) + ); + + if (result.FunctionError) { + const payload = result.Payload + ? 
Buffer.from(result.Payload).toString("utf-8") + : "(no payload)"; + throw new Error( + `Lambda invocation failed: ${result.FunctionError} β€” ${payload}` + ); + } + + console.log(`[invokeLambda] Lambda invocation succeeded`); +} diff --git a/bridge/package.json b/bridge/package.json index aded6c8..9e5b46f 100644 --- a/bridge/package.json +++ b/bridge/package.json @@ -5,6 +5,7 @@ "main": "dist/src/index.js", "scripts": { "start": "npx tsx ./src/index.ts", + "start:lambda": "npx tsx ./src/lambda.ts", "test": "run-s test:*", "test:aws": "jest --config=./aws.jest.config.js", "test:bridge": "jest --config=./bridge.jest.config.js", @@ -13,7 +14,8 @@ "build": "tsc", "mint": "node scripts/mint-script.js mint", "transfer": "node scripts/transfer-script.js transfer", - "feeTransfer": "node scripts/fee-transfer.js feeTransfer" + "feeTransfer": "node scripts/fee-transfer.js feeTransfer", + "e2e:live": "npx tsx ./e2e/live-roundtrip.ts" }, "repository": { "type": "git", @@ -69,7 +71,9 @@ "web3-core-promievent": "^1.5.3" }, "dependencies": { + "@aws-sdk/client-dynamodb": "^3.721.0", "@aws-sdk/client-kms": "^3.721.0", + "@aws-sdk/client-lambda": "^3.721.0", "@pagerduty/pdjs": "^2.2.3", "@planetarium/account": "4.0.6", "@planetarium/account-aws-kms": "^0.0.2", diff --git a/bridge/src/dynamo-exchange-history-store.ts b/bridge/src/dynamo-exchange-history-store.ts new file mode 100644 index 0000000..04be94c --- /dev/null +++ b/bridge/src/dynamo-exchange-history-store.ts @@ -0,0 +1,183 @@ +import { + ExchangeHistory, + IExchangeHistoryStore, +} from "./interfaces/exchange-history-store"; +import { TransactionStatus } from "./types/transaction-status"; +import { + DynamoDBClient, + PutItemCommand, + GetItemCommand, + UpdateItemCommand, + ScanCommand, + QueryCommand, + QueryCommandOutput, + ScanCommandOutput, + ConditionalCheckFailedException, +} from "@aws-sdk/client-dynamodb"; + +export class DynamoExchangeHistoryStore implements IExchangeHistoryStore { + private readonly _client: 
DynamoDBClient; + private readonly _tableName: string; + + private constructor(client: DynamoDBClient, tableName: string) { + this._client = client; + this._tableName = tableName; + } + + static create(): DynamoExchangeHistoryStore { + const tableName = + process.env.DYNAMODB_TABLE_EXCHANGE_HISTORY ?? + "bridge-exchange-histories"; + const client = new DynamoDBClient({}); + return new DynamoExchangeHistoryStore(client, tableName); + } + + async put(history: ExchangeHistory): Promise { + const { network, tx_id, sender, recipient, timestamp, amount, status } = + history; + + try { + await this._client.send( + new PutItemCommand({ + TableName: this._tableName, + Item: { + tx_id: { S: tx_id }, + network: { S: network }, + sender: { S: sender }, + recipient: { S: recipient }, + timestamp: { S: timestamp }, + amount: { N: String(amount) }, + status: { S: status }, + }, + ConditionExpression: "attribute_not_exists(tx_id)", + }) + ); + } catch (e) { + if (e instanceof ConditionalCheckFailedException) { + // Already exists β€” idempotent, ignore + return; + } + throw e; + } + } + + async exist(tx_id: string): Promise { + const result = await this._client.send( + new GetItemCommand({ + TableName: this._tableName, + Key: { + tx_id: { S: tx_id }, + }, + }) + ); + + return result.Item !== undefined; + } + + async transferredAmountInLast24Hours( + network: string, + sender: string + ): Promise { + const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(); + let amountSum = 0; + let lastEvaluatedKey: QueryCommandOutput["LastEvaluatedKey"] = undefined; + + do { + const result: QueryCommandOutput = await this._client.send( + new QueryCommand({ + TableName: this._tableName, + IndexName: "network-timestamp-index", + KeyConditionExpression: + "network = :network AND #ts > :cutoff", + FilterExpression: "sender = :sender", + ExpressionAttributeNames: { + "#ts": "timestamp", + }, + ExpressionAttributeValues: { + ":network": { S: network }, + ":cutoff": { S: cutoff }, + 
":sender": { S: sender }, + }, + ExclusiveStartKey: lastEvaluatedKey, + }) + ); + + const items = result.Items ?? []; + amountSum += items.reduce( + ( + sum: number, + item: Record + ) => { + return sum + parseFloat(item.amount?.N ?? "0"); + }, + 0 + ); + + lastEvaluatedKey = result.LastEvaluatedKey; + } while (lastEvaluatedKey !== undefined); + + return amountSum; + } + + async updateStatus( + tx_id: string, + status: TransactionStatus.COMPLETED | TransactionStatus.FAILED + ): Promise { + await this._client.send( + new UpdateItemCommand({ + TableName: this._tableName, + Key: { + tx_id: { S: tx_id }, + }, + UpdateExpression: "SET #s = :status", + ExpressionAttributeNames: { + "#s": "status", + }, + ExpressionAttributeValues: { + ":status": { S: status }, + }, + }) + ); + } + + async getPendingTransactions(): Promise { + const pendingTransactions: ExchangeHistory[] = []; + let lastEvaluatedKey: ScanCommandOutput["LastEvaluatedKey"] = undefined; + + do { + const result: ScanCommandOutput = await this._client.send( + new ScanCommand({ + TableName: this._tableName, + FilterExpression: "#s = :pending", + ExpressionAttributeNames: { + "#s": "status", + }, + ExpressionAttributeValues: { + ":pending": { S: TransactionStatus.PENDING }, + }, + ExclusiveStartKey: lastEvaluatedKey, + }) + ); + + const items = result.Items ?? []; + pendingTransactions.push( + ...items.map( + (item: Record) => ({ + network: item.network?.S ?? "", + tx_id: item.tx_id?.S ?? "", + sender: item.sender?.S ?? "", + recipient: item.recipient?.S ?? "", + timestamp: item.timestamp?.S ?? "", + amount: parseFloat(item.amount?.N ?? "0"), + status: (item.status?.S ?? 
}

// bridge/src/dynamo-monitor-state-store.ts
import { IMonitorStateStore } from "./interfaces/monitor-state-store";
import { TransactionLocation } from "./types/transaction-location";
import {
    DynamoDBClient,
    PutItemCommand,
    GetItemCommand,
} from "@aws-sdk/client-dynamodb";

/**
 * DynamoDB-backed monitor state store: one item per network, keyed by
 * `network`, recording the last observed block hash and (optionally) tx id.
 */
export class DynamoMonitorStateStore implements IMonitorStateStore {
    private readonly _client: DynamoDBClient;
    private readonly _tableName: string;

    private constructor(client: DynamoDBClient, tableName: string) {
        this._client = client;
        this._tableName = tableName;
    }

    /**
     * Builds a store against the table named by DYNAMODB_TABLE_MONITOR_STATE
     * (default "bridge-monitor-state"), using ambient AWS credentials.
     */
    static create(): DynamoMonitorStateStore {
        const tableName =
            process.env.DYNAMODB_TABLE_MONITOR_STATE ?? "bridge-monitor-state";
        const client = new DynamoDBClient({});
        return new DynamoMonitorStateStore(client, tableName);
    }

    /** Upserts the latest transaction location observed for `network`. */
    async store(
        network: string,
        transactionLocation: TransactionLocation
    ): Promise<void> {
        await this._client.send(
            new PutItemCommand({
                TableName: this._tableName,
                Item: {
                    network: { S: network },
                    block_hash: { S: transactionLocation.blockHash },
                    // txId may legitimately be absent (block-only checkpoint);
                    // persist an explicit NULL in that case.
                    tx_id: transactionLocation.txId
                        ? { S: transactionLocation.txId }
                        : { NULL: true },
                },
            })
        );
    }

    /**
     * Loads the last stored location for `network`, or null when the network
     * has never been checkpointed.
     *
     * @throws Error when the stored item lacks `block_hash` (previously an
     *         unchecked non-null assertion silently yielded `undefined`).
     */
    async load(network: string): Promise<TransactionLocation | null> {
        const result = await this._client.send(
            new GetItemCommand({
                TableName: this._tableName,
                Key: {
                    network: { S: network },
                },
            })
        );

        if (!result.Item) {
            return null;
        }

        const blockHash = result.Item.block_hash?.S;
        if (blockHash === undefined) {
            throw new Error(
                `Monitor state for network '${network}' is missing block_hash`
            );
        }

        return {
            blockHash,
            txId: result.Item.tx_id?.S ?? null,
        };
    }
}

// bridge/src/lambda.ts
import Web3 from "web3";
import { KmsProvider } from "@planetarium/aws-kms-provider";

import { IWrappedNCGMinter } from "./interfaces/wrapped-ncg-minter";
import { WrappedNCGMinter } from "./wrapped-ncg-minter";
import { wNCGTokenAbi } from "./wrapped-ncg-token";
import { HeadlessGraphQLClient } from "./headless-graphql-client";
import { ContractDescription } from "./types/contract-description";
import { IMonitorStateStore } from "./interfaces/monitor-state-store";
import { DynamoMonitorStateStore } from "./dynamo-monitor-state-store";
import { WebClient } from "@slack/web-api";
import { OpenSearchClient } from "./opensearch-client";
import { Configuration } from "./configuration";
import { NCGTransferredEventObserver } from "./observers/nine-chronicles";
import { EthereumBurnEventObserver } from "./observers/burn-event-observer";
import { KMSNCGSigner } from "./kms-ncg-signer";
import { NCGKMSTransfer } from "./ncg-kms-transfer";
import Decimal from "decimal.js";
import { IExchangeHistoryStore } from "./interfaces/exchange-history-store";
import { DynamoExchangeHistoryStore } from "./dynamo-exchange-history-store";
import { AddressBanPolicy } from "./policies/address-ban";
import {
    GasPriceLimitPolicy,
    GasPricePolicies,
    GasPriceTipPolicy,
    IGasPricePolicy,
} from "./policies/gas-price";
import { Integration } from "./integrations";
import { PagerDutyIntegration } from "./integrations/pagerduty";
import { SlackMessageSender } from "./slack-message-sender";
import {
    FixedExchangeFeeRatioPolicy,
    IExchangeFeeRatioPolicy,
} from "./policies/exchange-fee-ratio";
import { SlackChannel } from "./slack-channel";
import { AwsKmsSigner, AwsKmsSignerCredentials } from "./ethers-aws-kms-signer";
import { SafeWrappedNCGMinter } from "./safe-wrapped-ncg-minter";
+import { ethers } from "ethers"; +import { whitelistAccounts } from "./whitelist/whitelist-accounts"; +import { SpreadsheetClient } from "./spreadsheet-client"; +import { google } from "googleapis"; +import { MultiPlanetary } from "./multi-planetary"; +import { PendingTransactionHandler } from "./pending-transactions"; +import { + relayEthereum, + relayBSC, + relayNineChronicles, + EthereumRelayerDeps, + NineChroniclesRelayerDeps, +} from "./relayer"; + +async function initializeDeps() { + const GRAPHQL_API_ENDPOINT: string = Configuration.get( + "GRAPHQL_API_ENDPOINT" + ); + const NCG_MINTER: string = Configuration.get("NCG_MINTER"); + const KMS_PROVIDER_URL: string = Configuration.get("KMS_PROVIDER_URL"); + const KMS_PROVIDER_KEY_ID: string = Configuration.get( + "KMS_PROVIDER_KEY_ID" + ); + const KMS_PROVIDER_REGION: string = Configuration.get( + "KMS_PROVIDER_REGION" + ); + const KMS_PROVIDER_AWS_ACCESSKEY: string = Configuration.get( + "KMS_PROVIDER_AWS_ACCESSKEY" + ); + const KMS_PROVIDER_AWS_SECRETKEY: string = Configuration.get( + "KMS_PROVIDER_AWS_SECRETKEY" + ); + const KMS_PROVIDER_PUBLIC_KEY: string = Configuration.get( + "KMS_PROVIDER_PUBLIC_KEY" + ); + const WNCG_CONTRACT_ADDRESS: string = Configuration.get( + "WNCG_CONTRACT_ADDRESS" + ); + const BSC_KMS_PROVIDER_URL: string = Configuration.get( + "BSC_KMS_PROVIDER_URL" + ); + const BSC_WNCG_CONTRACT_ADDRESS: string = Configuration.get( + "BSC_WNCG_CONTRACT_ADDRESS" + ); + const MINIMUM_NCG: number = Configuration.get("MINIMUM_NCG", true, "float"); + const MAXIMUM_NCG: number = Configuration.get("MAXIMUM_NCG", true, "float"); + const MAXIMUM_WHITELIST_NCG: number = Configuration.get( + "MAXIMUM_WHITELIST_NCG", + true, + "float" + ); + const BASE_FEE_CRITERION: number = Configuration.get( + "BASE_FEE_CRITERION", + true, + "float" + ); + const BASE_FEE: number = Configuration.get("BASE_FEE", true, "float"); + const FEE_RANGE_DIVIDER_AMOUNT: number = Configuration.get( + "FEE_RANGE_DIVIDER_AMOUNT", + 
true, + "float" + ); + + const FEE_RANGE1_RATIO: number = Configuration.get( + "FEE_RANGE1_RATIO", + true, + "float" + ); + const FEE_RANGE2_RATIO: number = Configuration.get( + "FEE_RANGE2_RATIO", + true, + "float" + ); + + const SLACK_WEB_TOKEN: string = Configuration.get("SLACK_WEB_TOKEN"); + const FAILURE_SUBSCRIBERS: string = Configuration.get( + "FAILURE_SUBSCRIBERS" + ); + const OPENSEARCH_ENDPOINT: string = Configuration.get( + "OPENSEARCH_ENDPOINT" + ); + const OPENSEARCH_AUTH: string = Configuration.get("OPENSEARCH_AUTH"); + const OPENSEARCH_INDEX: string = + Configuration.get("OPENSEARCH_INDEX", false) || "9c-eth-bridge"; + const SLACK_CHANNEL_NAME: string = + Configuration.get("SLACK_CHANNEL_NAME", false) || + "#nine-chronicles-bridge-bot"; + const EXPLORER_ROOT_URL: string = Configuration.get("EXPLORER_ROOT_URL"); + const NCSCAN_URL: string | undefined = Configuration.get( + "NCSCAN_URL", + false + ); + const USE_NCSCAN_URL: boolean = Configuration.get( + "USE_NCSCAN_URL", + false, + "boolean" + ); + const ETHERSCAN_ROOT_URL: string = Configuration.get("ETHERSCAN_ROOT_URL"); + + const SLACK_URL: string = Configuration.get("SLACK_URL"); + + const GOOGLE_SPREADSHEET_URL: string = Configuration.get( + "GOOGLE_SPREADSHEET_URL" + ); + const GOOGLE_SPREADSHEET_ID: string = Configuration.get( + "GOOGLE_SPREADSHEET_ID" + ); + const GOOGLE_CLIENT_EMAIL: string = Configuration.get( + "GOOGLE_CLIENT_EMAIL" + ); + const GOOGLE_CLIENT_PRIVATE_KEY: string = Configuration.get( + "GOOGLE_CLIENT_PRIVATE_KEY" + ); + const USE_GOOGLE_SPREAD_SHEET: boolean = Configuration.get( + "USE_GOOGLE_SPREAD_SHEET", + false, + "boolean" + ); + const SHEET_MINT: string = Configuration.get("SHEET_MINT"); + const SHEET_BURN: string = Configuration.get("SHEET_BURN"); + + if (BASE_FEE >= BASE_FEE_CRITERION) { + throw Error( + `BASE_FEE(value: ${BASE_FEE}) should be less than BASE_FEE_CRITERION(value: ${BASE_FEE_CRITERION})` + ); + } + + if (BASE_FEE_CRITERION > FEE_RANGE_DIVIDER_AMOUNT) 
{ + throw Error( + `BASE_FEE_CRITERION(value: ${BASE_FEE_CRITERION}) should be less than or Equal FEE_RANGE_DIVIDER_AMOUNT(value: ${FEE_RANGE_DIVIDER_AMOUNT})` + ); + } + + if (FEE_RANGE_DIVIDER_AMOUNT > MAXIMUM_NCG) { + throw Error( + `FEE_RANGE_DIVIDER_AMOUNT(value: ${FEE_RANGE_DIVIDER_AMOUNT}) should be less than or Equal MAXIMUM_NCG(value: ${MAXIMUM_NCG})` + ); + } + + const ncgExchangeFeeRatioPolicy: IExchangeFeeRatioPolicy = + new FixedExchangeFeeRatioPolicy( + new Decimal(MAXIMUM_NCG), + new Decimal(FEE_RANGE_DIVIDER_AMOUNT), + { + criterion: new Decimal(BASE_FEE_CRITERION), + fee: new Decimal(BASE_FEE), + }, + { + range1: new Decimal(FEE_RANGE1_RATIO), + range2: new Decimal(FEE_RANGE2_RATIO), + } + ); + + const authorize = new google.auth.JWT( + GOOGLE_CLIENT_EMAIL, + undefined, + GOOGLE_CLIENT_PRIVATE_KEY, + [GOOGLE_SPREADSHEET_URL] + ); + const googleSheet = google.sheets({ + version: "v4", + auth: authorize, + }); + + const spreadsheetClient = new SpreadsheetClient( + googleSheet, + GOOGLE_SPREADSHEET_ID, + USE_GOOGLE_SPREAD_SHEET, + SLACK_URL, + { + mint: SHEET_MINT, + burn: SHEET_BURN, + }, + ncgExchangeFeeRatioPolicy + ); + + const PRIORITY_FEE: number = Configuration.get( + "PRIORITY_FEE", + true, + "float" + ); + + const GAS_TIP_RATIO_STRING: string = Configuration.get( + "GAS_TIP_RATIO", + true, + "string" + ); + const GAS_TIP_RATIO = new Decimal(GAS_TIP_RATIO_STRING); + + const MAX_GAS_PRICE_STRING: string = Configuration.get( + "MAX_GAS_PRICE", + true, + "string" + ); + const MAX_GAS_PRICE = new Decimal(MAX_GAS_PRICE_STRING); + + const PAGERDUTY_ROUTING_KEY: string = Configuration.get( + "PAGERDUTY_ROUTING_KEY", + true, + "string" + ); + + const STAGE_HEADLESSES: string[] = + Configuration.get("STAGE_HEADLESSES").split(","); + + const USE_SAFE_WRAPPED_NCG_MINTER: boolean = Configuration.get( + "USE_SAFE_WRAPPED_NCG_MINTER", + false, + "boolean" + ); + const SAFE_OWNER_CREDENTIALS: AwsKmsSignerCredentials[] | null = + USE_SAFE_WRAPPED_NCG_MINTER + 
? [1, 2, 3].map((value) => { + return { + region: Configuration.get( + `SAFE_OWNER_${value}_AWS_REGION`, + true, + "string" + ), + accessKeyId: Configuration.get( + `SAFE_OWNER_${value}_AWS_ACCESS_KEY_ID`, + true, + "string" + ), + secretAccessKey: Configuration.get( + `SAFE_OWNER_${value}_AWS_SECRET_ACCESS_KEY`, + true, + "string" + ), + keyId: Configuration.get( + `SAFE_OWNER_${value}_AWS_KEY_ID`, + true, + "string" + ), + }; + }) + : null; + const SAFE_TX_SERVICE_URL: string | undefined = Configuration.get( + "SAFE_TX_SERVICE_URL", + USE_SAFE_WRAPPED_NCG_MINTER, + "string" + ); + const SAFE_ADDRESS: string | undefined = Configuration.get( + "SAFE_ADDRESS", + USE_SAFE_WRAPPED_NCG_MINTER, + "string" + ); + + const PLANET_ODIN_ID: string | undefined = Configuration.get( + "PLANET_ODIN_ID", + true, + "string" + ); + const PLANET_HEIMDALL_ID: string | undefined = Configuration.get( + "PLANET_HEIMDALL_ID", + true, + "string" + ); + const ODIN_TO_HEIMDALL_VALUT_ADDRESS: string | undefined = + Configuration.get("ODIN_TO_HEIMDALL_VALUT_ADDRESS", true, "string"); + + const monitorStateStore: IMonitorStateStore = + DynamoMonitorStateStore.create(); + const exchangeHistoryStore: IExchangeHistoryStore = + DynamoExchangeHistoryStore.create(); + const slackWebClient = new WebClient(SLACK_WEB_TOKEN); + const opensearchClient = new OpenSearchClient( + OPENSEARCH_ENDPOINT, + OPENSEARCH_AUTH, + OPENSEARCH_INDEX + ); + const GRAPHQL_REQUEST_RETRY = 5; + const JWT_SECRET_KEY = Configuration.get("JWT_SECRET_KEY"); + const headlessGraphQLCLient = new HeadlessGraphQLClient( + GRAPHQL_API_ENDPOINT, + GRAPHQL_REQUEST_RETRY, + JWT_SECRET_KEY + ); + const stageGraphQLClients = STAGE_HEADLESSES.map( + (endpoint) => + new HeadlessGraphQLClient( + endpoint, + GRAPHQL_REQUEST_RETRY, + JWT_SECRET_KEY + ) + ); + const integration: Integration = new PagerDutyIntegration( + PAGERDUTY_ROUTING_KEY + ); + const kmsProvider = new KmsProvider(KMS_PROVIDER_URL, { + region: KMS_PROVIDER_REGION, + keyIds: 
[KMS_PROVIDER_KEY_ID], + credential: { + accessKeyId: KMS_PROVIDER_AWS_ACCESSKEY, + secretAccessKey: KMS_PROVIDER_AWS_SECRETKEY, + }, + }); + const web3 = new Web3(kmsProvider); + + const wNCGToken: ContractDescription = { + abi: wNCGTokenAbi, + address: WNCG_CONTRACT_ADDRESS, + }; + + const bscWNCGToken: ContractDescription = { + abi: wNCGTokenAbi, + address: BSC_WNCG_CONTRACT_ADDRESS, + }; + + if (!web3.utils.isAddress(NCG_MINTER)) { + throw Error("NCG_MINTER is invalid - it is not valid address format."); + } + + const kmsAddresses = await kmsProvider.getAccounts(); + if (kmsAddresses.length != 1) { + throw Error("NineChronicles.EthBridge is supported only one address."); + } + const kmsAddress = kmsAddresses[0]; + console.log(kmsAddress); + const gasPriceLimitPolicy: IGasPricePolicy = new GasPriceLimitPolicy( + MAX_GAS_PRICE + ); + const gasPriceTipPolicy: IGasPricePolicy = new GasPriceTipPolicy( + GAS_TIP_RATIO + ); + const gasPricePolicy: IGasPricePolicy = new GasPricePolicies([ + gasPriceTipPolicy, + gasPriceLimitPolicy, + ]); + + const provider = new ethers.providers.JsonRpcProvider(KMS_PROVIDER_URL); + const bscProvider = new ethers.providers.JsonRpcProvider( + BSC_KMS_PROVIDER_URL + ); + + const FEE_COLLECTOR_ADDRESS: string = Configuration.get( + "FEE_COLLECTOR_ADDRESS" + ); + if (!web3.utils.isAddress(FEE_COLLECTOR_ADDRESS)) { + throw Error( + "FEE_COLLECTOR_ADDRESS is invalid - it is not valid address format." 
+ ); + } + + async function makeSafeWrappedNCGMinter(): Promise { + if ( + !USE_SAFE_WRAPPED_NCG_MINTER || + !SAFE_TX_SERVICE_URL || + !SAFE_OWNER_CREDENTIALS || + !SAFE_ADDRESS + ) { + throw new Error("Unsufficient environment variables were given."); + } + + const [owner1Signer, owner2Signer, owner3Signer] = + SAFE_OWNER_CREDENTIALS.map( + (credentials) => new AwsKmsSigner(credentials, provider) + ); + + return await SafeWrappedNCGMinter.create( + SAFE_TX_SERVICE_URL, + SAFE_ADDRESS, + WNCG_CONTRACT_ADDRESS, + owner1Signer, + owner2Signer, + owner3Signer, + provider, + gasPricePolicy + ); + } + + const minter: IWrappedNCGMinter = USE_SAFE_WRAPPED_NCG_MINTER + ? await makeSafeWrappedNCGMinter() + : new WrappedNCGMinter( + web3, + wNCGToken, + kmsAddress, + gasPricePolicy, + new Decimal(PRIORITY_FEE) + ); + const signer = new KMSNCGSigner(KMS_PROVIDER_REGION, KMS_PROVIDER_KEY_ID, { + accessKeyId: KMS_PROVIDER_AWS_ACCESSKEY, + secretAccessKey: KMS_PROVIDER_AWS_SECRETKEY, + }); + const derivedAddress = + "0x" + + web3.utils + .keccak256( + "0x" + + Buffer.from(KMS_PROVIDER_PUBLIC_KEY, "base64") + .toString("hex") + .slice(2) + ) + .slice(26); + if (kmsAddress.toLowerCase() !== derivedAddress.toLowerCase()) { + throw Error( + "KMS_PROVIDER_PUBLIC_KEY variable seems invalid because it doesn't match to address from KMS." + ); + } + + const ncgKmsTransfer = new NCGKMSTransfer( + [headlessGraphQLCLient, ...stageGraphQLClients], + kmsAddress, + KMS_PROVIDER_PUBLIC_KEY, + [NCG_MINTER], + signer + ); + + // Nine Coparations' cold wallet addresses. 
+ const addressBanPolicy = new AddressBanPolicy([ + "0xa1ef9701F151244F9aA7131639990c4664d2aEeF", + "0xf2fAe7aAF4c8AAC267EAB6962Fc294bc876d7b08", + "0x4b56280B84a7DC0B1Da1CdE43Aa109a33354Da1f", + "0xb3a2025bEbC87E2fF9DfD065F8e622b1583eDF19", + "0x0bbBd789280AF719Ee886cb3A0430F63D04bDc2b", + "0x7cA620bAc4b96dA636BD4Cb2141A42b55C5f6Fdd", + "0xebCa4032529221a9BCd3fF3a17C26e7d4f829695", + "0x310518163256A9642364FDadb0eB2b218cfa86c6", + "0xEc20402FD4426CDeb233a7F04B6c42af9f3bb5B5", + "0x47D082a115c63E7b58B1532d20E631538eaFADde", + "0xB3bCa3b3c6069EF5Bdd6384bAD98F11378Dc360E", + "0xa86E321048C397C0f7f23C65B1EE902AFE24644e", + ]); + + const slackChannel = new SlackChannel(slackWebClient, SLACK_CHANNEL_NAME); + const slackMessageSender = new SlackMessageSender(slackChannel); + const planetIds = { + odin: PLANET_ODIN_ID, + heimdall: PLANET_HEIMDALL_ID, + }; + const planetVaultAddress = { + heimdall: ODIN_TO_HEIMDALL_VALUT_ADDRESS, + }; + const multiPlanetary = new MultiPlanetary(planetIds, planetVaultAddress); + + const pendingTransactionRetryHandler = new PendingTransactionHandler( + exchangeHistoryStore, + ncgKmsTransfer, + multiPlanetary, + slackMessageSender + ); + + const ethereumBurnEventObserver = new EthereumBurnEventObserver( + ncgKmsTransfer, + slackMessageSender, + opensearchClient, + spreadsheetClient, + monitorStateStore, + exchangeHistoryStore, + EXPLORER_ROOT_URL, + NCSCAN_URL, + USE_NCSCAN_URL, + ETHERSCAN_ROOT_URL, + integration, + multiPlanetary, + FAILURE_SUBSCRIBERS + ); + + const bscBurnEventObserver = new EthereumBurnEventObserver( + ncgKmsTransfer, + slackMessageSender, + opensearchClient, + spreadsheetClient, + monitorStateStore, + exchangeHistoryStore, + EXPLORER_ROOT_URL, + NCSCAN_URL, + USE_NCSCAN_URL, + ETHERSCAN_ROOT_URL, + integration, + multiPlanetary, + FAILURE_SUBSCRIBERS, + "bsc" + ); + + const ncgTransferredEventObserver = new NCGTransferredEventObserver( + ncgKmsTransfer, + minter, + slackMessageSender, + opensearchClient, + 
spreadsheetClient, + monitorStateStore, + exchangeHistoryStore, + EXPLORER_ROOT_URL, + NCSCAN_URL, + USE_NCSCAN_URL, + ETHERSCAN_ROOT_URL, + ncgExchangeFeeRatioPolicy, + { + maximum: MAXIMUM_NCG, + whitelistMaximum: MAXIMUM_WHITELIST_NCG, + minimum: MINIMUM_NCG, + }, + addressBanPolicy, + integration, + FAILURE_SUBSCRIBERS, + whitelistAccounts, + FEE_COLLECTOR_ADDRESS + ); + + const ethDeps: EthereumRelayerDeps = { + provider, + contractDescription: wNCGToken, + monitorStateStore, + burnEventObserver: ethereumBurnEventObserver, + networkKey: "ethereum", + }; + + const bscDeps: EthereumRelayerDeps = { + provider: bscProvider, + contractDescription: bscWNCGToken, + monitorStateStore, + burnEventObserver: bscBurnEventObserver, + networkKey: "bsc", + }; + + const ncDeps: NineChroniclesRelayerDeps = { + headlessGraphQLClient: headlessGraphQLCLient, + monitorStateStore, + ncgTransferredEventObserver, + address: kmsAddress, + }; + + return { + pendingTransactionRetryHandler, + ethDeps, + bscDeps, + ncDeps, + }; +} + +export async function handler(): Promise { + console.log("[lambda] Invoked"); + + const { pendingTransactionRetryHandler, ethDeps, bscDeps, ncDeps } = + await initializeDeps(); + + await pendingTransactionRetryHandler.messagePendingTransactions(); + + const relayResults = await Promise.allSettled([ + relayEthereum(ethDeps), + relayBSC(bscDeps), + relayNineChronicles(ncDeps), + ]); + const relayNames = ["ethereum", "bsc", "nineChronicles"]; + relayResults.forEach((result, index) => { + if (result.status === "rejected") { + console.error( + `[lambda] ${relayNames[index]} relay failed`, + result.reason + ); + } + }); + + console.log("[lambda] Done"); +} + +// Allow running directly for local testing +if (require.main === module) { + handler().catch((error) => { + console.error(error); + process.exit(-1); + }); +} diff --git a/bridge/src/observers/burn-event-observer.ts b/bridge/src/observers/burn-event-observer.ts index 7d688cf..dc241f4 100644 --- 
a/bridge/src/observers/burn-event-observer.ts +++ b/bridge/src/observers/burn-event-observer.ts @@ -37,6 +37,7 @@ export class EthereumBurnEventObserver private readonly _integration: Integration; private readonly _multiPlanetary: MultiPlanetary; private readonly _failureSubscribers: string; + private readonly _networkKey: string; constructor( ncgTransfer: INCGTransfer, slackMessageSender: ISlackMessageSender, @@ -50,7 +51,8 @@ export class EthereumBurnEventObserver etherscanUrl: string, integration: Integration, multiPlanetary: MultiPlanetary, - failureSubscribers: string + failureSubscribers: string, + networkKey: string = "ethereum" ) { this._ncgTransfer = ncgTransfer; this._slackMessageSender = slackMessageSender; @@ -65,6 +67,7 @@ export class EthereumBurnEventObserver this._integration = integration; this._multiPlanetary = multiPlanetary; this._failureSubscribers = failureSubscribers; + this._networkKey = networkKey; } async notify(data: { @@ -73,7 +76,7 @@ export class EthereumBurnEventObserver }): Promise { const { blockHash, events } = data; if (events.length === 0) { - await this._monitorStateStore.store("ethereum", { + await this._monitorStateStore.store(this._networkKey, { blockHash, txId: null, }); @@ -120,7 +123,7 @@ export class EthereumBurnEventObserver } await this._exchangeHistoryStore.put({ - network: "ethereum", + network: this._networkKey, tx_id: transactionHash, sender, recipient: user9cAddress, @@ -165,7 +168,7 @@ export class EthereumBurnEventObserver memo ); - await this._monitorStateStore.store("ethereum", { + await this._monitorStateStore.store(this._networkKey, { blockHash, txId: transactionHash, }); diff --git a/bridge/src/relayer.ts b/bridge/src/relayer.ts new file mode 100644 index 0000000..059aa33 --- /dev/null +++ b/bridge/src/relayer.ts @@ -0,0 +1,209 @@ +import { ethers } from "ethers"; +import { ContractDescription } from "./types/contract-description"; +import { IMonitorStateStore } from "./interfaces/monitor-state-store"; 
+import { IHeadlessGraphQLClient } from "./interfaces/headless-graphql-client"; +import { IObserver } from "./observers"; +import { BlockHash } from "./types/block-hash"; +import { EventData } from "web3-eth-contract"; +import { TransactionLocation } from "./types/transaction-location"; +import { NCGTransferredEvent } from "./types/ncg-transferred-event"; + +const CONFIRMATIONS = 10; +const ETH_LOGS_CHUNK_SIZE = 2000; +const BURN_EVENT_SIG = "Burn(address,bytes32,uint256)"; + +export interface EthereumRelayerDeps { + provider: ethers.providers.JsonRpcProvider; + contractDescription: ContractDescription; + monitorStateStore: IMonitorStateStore; + burnEventObserver: IObserver<{ + blockHash: BlockHash; + events: (EventData & TransactionLocation)[]; + }>; + networkKey: string; +} + +export interface NineChroniclesRelayerDeps { + headlessGraphQLClient: IHeadlessGraphQLClient; + monitorStateStore: IMonitorStateStore; + ncgTransferredEventObserver: IObserver<{ + blockHash: BlockHash; + events: (NCGTransferredEvent & TransactionLocation)[]; + }>; + address: string; +} + +function parseEthLogs( + contract: ethers.Contract, + logs: ethers.providers.Log[] +): (EventData & TransactionLocation)[] { + return logs.map((log) => { + const parsedEvent = contract.interface.parseLog(log); + return { + ...log, + ...parsedEvent, + txId: log.transactionHash, + returnValues: { + ...parsedEvent.args, + amount: ethers.BigNumber.from( + parsedEvent.args.amount + ).toString(), + }, + raw: { + data: log.data, + topics: log.topics, + }, + event: parsedEvent.name, + } as EventData & TransactionLocation; + }); +} + +async function fetchEthBurnEvents( + provider: ethers.providers.JsonRpcProvider, + contract: ethers.Contract, + contractAddress: string, + fromBlock: number, + toBlock: number +): Promise> { + const eventsByBlock = new Map(); + + for ( + let chunkFrom = fromBlock; + chunkFrom <= toBlock; + chunkFrom += ETH_LOGS_CHUNK_SIZE + ) { + const chunkTo = Math.min(chunkFrom + 
ETH_LOGS_CHUNK_SIZE - 1, toBlock); + const filter = { + address: contractAddress, + topics: [ethers.utils.id(BURN_EVENT_SIG)], + fromBlock: chunkFrom, + toBlock: chunkTo, + }; + const logs = await provider.getLogs(filter); + const parsed = parseEthLogs(contract, logs); + + for (const event of parsed) { + const blockNum = (event as any).blockNumber as number; + if (!eventsByBlock.has(blockNum)) { + eventsByBlock.set(blockNum, []); + } + eventsByBlock.get(blockNum)!.push(event); + } + } + + return eventsByBlock; +} + +export async function relayEthereum(deps: EthereumRelayerDeps): Promise { + const { + provider, + contractDescription, + monitorStateStore, + burnEventObserver, + networkKey, + } = deps; + + const contract = new ethers.Contract( + contractDescription.address, + contractDescription.abi, + provider + ); + + const tipBlock = await provider.getBlockNumber(); + const confirmedTip = tipBlock - CONFIRMATIONS; + + const lastState = await monitorStateStore.load(networkKey); + let fromBlock: number; + + if (lastState === null) { + fromBlock = confirmedTip; + } else { + const lastBlock = await provider.getBlock(lastState.blockHash); + fromBlock = lastBlock.number + 1; + } + + if (fromBlock > confirmedTip) { + console.log( + `[relayEthereum:${networkKey}] No new blocks to process (fromBlock=${fromBlock}, confirmedTip=${confirmedTip})` + ); + return; + } + + console.log( + `[relayEthereum:${networkKey}] Processing blocks ${fromBlock} to ${confirmedTip}` + ); + + const eventsByBlock = await fetchEthBurnEvents( + provider, + contract, + contractDescription.address, + fromBlock, + confirmedTip + ); + + for (let blockNum = fromBlock; blockNum <= confirmedTip; blockNum++) { + const block = await provider.getBlock(blockNum); + const blockHash = block.hash; + const events = eventsByBlock.get(blockNum) ?? 
[]; + + await burnEventObserver.notify({ blockHash, events }); + } +} + +export async function relayBSC(deps: EthereumRelayerDeps): Promise { + return relayEthereum(deps); +} + +export async function relayNineChronicles( + deps: NineChroniclesRelayerDeps +): Promise { + const { + headlessGraphQLClient, + monitorStateStore, + ncgTransferredEventObserver, + address, + } = deps; + + const tipIndex = await headlessGraphQLClient.getTipIndex(); + const lastState = await monitorStateStore.load("nineChronicles"); + + let fromIndex: number; + if (lastState === null) { + fromIndex = tipIndex; + } else { + const lastBlockIndex = await headlessGraphQLClient.getBlockIndex( + lastState.blockHash + ); + fromIndex = lastBlockIndex + 1; + } + + if (fromIndex > tipIndex) { + console.log( + `[relayNineChronicles] No new blocks to process (fromIndex=${fromIndex}, tipIndex=${tipIndex})` + ); + return; + } + + console.log( + `[relayNineChronicles] Processing blocks ${fromIndex} to ${tipIndex}` + ); + + for (let blockIndex = fromIndex; blockIndex <= tipIndex; blockIndex++) { + const blockHash = await headlessGraphQLClient.getBlockHash(blockIndex); + const events = await headlessGraphQLClient.getNCGTransferredEvents( + blockHash, + address + ); + + const eventsWithLocation = events.map((event) => ({ + ...event, + blockHash, + txId: (event as any).txId ?? null, + })); + + await ncgTransferredEventObserver.notify({ + blockHash, + events: eventsWithLocation, + }); + } +} diff --git a/infra/bootstrap-dynamodb.sh b/infra/bootstrap-dynamodb.sh new file mode 100755 index 0000000..760b5b4 --- /dev/null +++ b/infra/bootstrap-dynamodb.sh @@ -0,0 +1,119 @@ +#!/bin/bash +# Creates bridge-monitor-state and bridge-exchange-histories DynamoDB tables, +# and sets up the EventBridge rule to trigger the Lambda every 5 minutes. +# +# Lambda timeout must be set to 5 minutes (300s) to match the schedule interval. +# EventBridge fires every 5 minutes β€” Lambda must finish before the next invocation. 
#
# Usage:
#   AWS_REGION=us-east-1 LAMBDA_FUNCTION_NAME=bridge-relayer bash bootstrap-dynamodb.sh

set -e

REGION="${AWS_REGION:-us-east-1}"
LAMBDA_FUNCTION_NAME="${LAMBDA_FUNCTION_NAME:-bridge-relayer}"
SCHEDULE_EXPRESSION="rate(5 minutes)" # Must match Lambda timeout (300s)

# create_table_if_missing TABLE_NAME [create-table args...]
# BUGFIX: 'aws dynamodb create-table' fails with ResourceInUseException when
# the table already exists, and under 'set -e' that aborted the whole script
# before the EventBridge wiring below ever ran. Make creation idempotent so
# the bootstrap can be re-run safely.
create_table_if_missing() {
    local TABLE_NAME="$1"
    shift
    if aws dynamodb describe-table \
        --region "$REGION" \
        --table-name "$TABLE_NAME" > /dev/null 2>&1; then
        echo "${TABLE_NAME} already exists, skipping creation."
        return 0
    fi
    aws dynamodb create-table \
        --region "$REGION" \
        --table-name "$TABLE_NAME" \
        "$@"
}

echo "Creating DynamoDB tables in region: ${REGION}"

# bridge-monitor-state table: one checkpoint row per network key.
echo "Creating bridge-monitor-state..."
create_table_if_missing bridge-monitor-state \
    --attribute-definitions AttributeName=network,AttributeType=S \
    --key-schema AttributeName=network,KeyType=HASH \
    --billing-mode PAY_PER_REQUEST

echo "Waiting for bridge-monitor-state to become active..."
aws dynamodb wait table-exists \
    --region "$REGION" \
    --table-name bridge-monitor-state

echo "bridge-monitor-state created."

# bridge-exchange-histories table with a GSI for per-network time queries.
echo "Creating bridge-exchange-histories..."
create_table_if_missing bridge-exchange-histories \
    --attribute-definitions \
        AttributeName=tx_id,AttributeType=S \
        AttributeName=network,AttributeType=S \
        AttributeName=timestamp,AttributeType=S \
    --key-schema AttributeName=tx_id,KeyType=HASH \
    --global-secondary-indexes '[
        {
            "IndexName": "network-timestamp-index",
            "KeySchema": [
                {"AttributeName": "network", "KeyType": "HASH"},
                {"AttributeName": "timestamp", "KeyType": "RANGE"}
            ],
            "Projection": {"ProjectionType": "ALL"}
        }
    ]' \
    --billing-mode PAY_PER_REQUEST

echo "Waiting for bridge-exchange-histories to become active..."
aws dynamodb wait table-exists \
    --region "$REGION" \
    --table-name bridge-exchange-histories

echo "bridge-exchange-histories created."

echo ""
echo "All DynamoDB tables created successfully."

# --- EventBridge rule: invoke Lambda every 5 minutes ---
echo ""
echo "Setting up EventBridge schedule (${SCHEDULE_EXPRESSION})..."
+ +RULE_NAME="bridge-relayer-schedule" + +RULE_ARN=$(aws events put-rule \ + --region "$REGION" \ + --name "$RULE_NAME" \ + --schedule-expression "$SCHEDULE_EXPRESSION" \ + --state ENABLED \ + --description "Triggers bridge-relayer Lambda every 5 minutes" \ + --query 'RuleArn' \ + --output text) + +echo "EventBridge rule created: ${RULE_ARN}" + +LAMBDA_ARN=$(aws lambda get-function \ + --region "$REGION" \ + --function-name "$LAMBDA_FUNCTION_NAME" \ + --query 'Configuration.FunctionArn' \ + --output text) + +# Allow EventBridge to invoke the Lambda +aws lambda add-permission \ + --region "$REGION" \ + --function-name "$LAMBDA_FUNCTION_NAME" \ + --statement-id "AllowEventBridgeInvoke" \ + --action "lambda:InvokeFunction" \ + --principal "events.amazonaws.com" \ + --source-arn "$RULE_ARN" \ + 2>/dev/null || echo " (permission already exists, skipping)" + +aws events put-targets \ + --region "$REGION" \ + --rule "$RULE_NAME" \ + --targets "Id=bridge-relayer-target,Arn=${LAMBDA_ARN}" + +echo "EventBridge rule wired to Lambda: ${LAMBDA_FUNCTION_NAME}" + +# --- Lambda timeout: enforce 300s (5 minutes) --- +echo "" +echo "Setting Lambda timeout to 300s..." +aws lambda update-function-configuration \ + --region "$REGION" \ + --function-name "$LAMBDA_FUNCTION_NAME" \ + --timeout 300 + +echo "" +echo "Bootstrap complete." +echo " Schedule : ${SCHEDULE_EXPRESSION}" +echo " Lambda : ${LAMBDA_FUNCTION_NAME} (timeout: 300s)" +echo " Tables : bridge-monitor-state, bridge-exchange-histories" diff --git a/infra/create-lambda.sh b/infra/create-lambda.sh new file mode 100755 index 0000000..26acd12 --- /dev/null +++ b/infra/create-lambda.sh @@ -0,0 +1,278 @@ +#!/bin/bash +# Creates the bridge-relayer Lambda function with all required configuration. +# Run this ONCE before the first deploy-lambda.sh. 
+# +# Prerequisites: +# - AWS CLI configured with permissions: +# lambda:CreateFunction, lambda:AddPermission, +# iam:CreateRole, iam:AttachRolePolicy, iam:PassRole +# - Copy this file's env var section and fill in actual values +# (refer to existing EC2 instance environment variables) +# +# Usage: +# Fill in the variables below, then: +# bash create-lambda.sh + +set -e + +REGION="${AWS_REGION:-us-east-1}" +FUNCTION_NAME="${LAMBDA_FUNCTION_NAME:-bridge-relayer}" +ROLE_NAME="bridge-relayer-role" + +# --------------------------------------------------------------------------- +# Environment variables for the Lambda function. +# Copy values from existing EC2 instances (via SSM or direct SSH). +# --------------------------------------------------------------------------- + +# --- Required: NineChronicles GraphQL --- +GRAPHQL_API_ENDPOINT="" # e.g. https://9c-main-full-state.nine-chronicles.com/graphql +STAGE_HEADLESSES="" # comma-separated fallback endpoints +JWT_SECRET_KEY="" + +# --- Required: KMS (ETH) --- +KMS_PROVIDER_URL="" # ETH RPC endpoint +KMS_PROVIDER_KEY_ID="" # AWS KMS key ID +KMS_PROVIDER_REGION="" # e.g. us-east-1 +KMS_PROVIDER_AWS_ACCESSKEY="" +KMS_PROVIDER_AWS_SECRETKEY="" +KMS_PROVIDER_PUBLIC_KEY="" # base64-encoded uncompressed public key + +# --- Required: KMS (BSC) --- +BSC_KMS_PROVIDER_URL="" # BSC RPC endpoint + +# --- Required: Contract Addresses --- +WNCG_CONTRACT_ADDRESS="" # ETH WNCG contract +BSC_WNCG_CONTRACT_ADDRESS="" # BSC WNCG contract +NCG_MINTER="" # 9c minter address +FEE_COLLECTOR_ADDRESS="" + +# --- Required: Multi-planetary --- +PLANET_ODIN_ID="" +PLANET_HEIMDALL_ID="" +ODIN_TO_HEIMDALL_VALUT_ADDRESS="" # typo matches original code + +# --- Required: Fee policy --- +MINIMUM_NCG="" # e.g. 100 +MAXIMUM_NCG="" # e.g. 100000 +MAXIMUM_WHITELIST_NCG="" # e.g. 1000000 +BASE_FEE="" # e.g. 0.01 +BASE_FEE_CRITERION="" # e.g. 1000 +FEE_RANGE_DIVIDER_AMOUNT="" # e.g. 10000 +FEE_RANGE1_RATIO="" # e.g. 0.01 +FEE_RANGE2_RATIO="" # e.g. 
0.02 +PRIORITY_FEE="" # e.g. 2 +GAS_TIP_RATIO="" # e.g. 1.2 +MAX_GAS_PRICE="" # e.g. 500 (gwei) + +# --- Required: Slack --- +SLACK_WEB_TOKEN="" +SLACK_URL="" # webhook URL +SLACK_CHANNEL_NAME="" # e.g. #nine-chronicles-bridge-bot +FAILURE_SUBSCRIBERS="" # Slack user IDs to mention on failure + +# --- Required: OpenSearch --- +OPENSEARCH_ENDPOINT="" +OPENSEARCH_AUTH="" # user:password +OPENSEARCH_INDEX="" # e.g. 9c-eth-bridge + +# --- Required: Block explorers --- +EXPLORER_ROOT_URL="" # 9c explorer +ETHERSCAN_ROOT_URL="" # etherscan URL prefix +NCSCAN_URL="" # optional +USE_NCSCAN_URL="false" + +# --- Required: Google Sheets --- +USE_GOOGLE_SPREAD_SHEET="false" +GOOGLE_SPREADSHEET_URL="" +GOOGLE_SPREADSHEET_ID="" +GOOGLE_CLIENT_EMAIL="" +GOOGLE_CLIENT_PRIVATE_KEY="" # JSON private key (escape newlines as \n) +SHEET_MINT="" +SHEET_BURN="" + +# --- Required: PagerDuty --- +PAGERDUTY_ROUTING_KEY="" + +# --- Optional: Safe multisig (set to false unless using Safe) --- +USE_SAFE_WRAPPED_NCG_MINTER="false" +SAFE_ADDRESS="" +SAFE_TX_SERVICE_URL="" + +# --------------------------------------------------------------------------- +# Validation: check required fields +# --------------------------------------------------------------------------- +REQUIRED_VARS=( + GRAPHQL_API_ENDPOINT STAGE_HEADLESSES JWT_SECRET_KEY + KMS_PROVIDER_URL KMS_PROVIDER_KEY_ID KMS_PROVIDER_REGION + KMS_PROVIDER_AWS_ACCESSKEY KMS_PROVIDER_AWS_SECRETKEY KMS_PROVIDER_PUBLIC_KEY + BSC_KMS_PROVIDER_URL + WNCG_CONTRACT_ADDRESS BSC_WNCG_CONTRACT_ADDRESS NCG_MINTER FEE_COLLECTOR_ADDRESS + PLANET_ODIN_ID PLANET_HEIMDALL_ID ODIN_TO_HEIMDALL_VALUT_ADDRESS + MINIMUM_NCG MAXIMUM_NCG MAXIMUM_WHITELIST_NCG + BASE_FEE BASE_FEE_CRITERION FEE_RANGE_DIVIDER_AMOUNT + FEE_RANGE1_RATIO FEE_RANGE2_RATIO PRIORITY_FEE GAS_TIP_RATIO MAX_GAS_PRICE + SLACK_WEB_TOKEN SLACK_URL SLACK_CHANNEL_NAME FAILURE_SUBSCRIBERS + OPENSEARCH_ENDPOINT OPENSEARCH_AUTH OPENSEARCH_INDEX + EXPLORER_ROOT_URL ETHERSCAN_ROOT_URL + 
PAGERDUTY_ROUTING_KEY +) + +echo "=== Validating required variables ===" +MISSING=0 +for VAR in "${REQUIRED_VARS[@]}"; do + VAL="${!VAR}" + if [ -z "$VAL" ]; then + echo " MISSING: $VAR" + MISSING=1 + fi +done +if [ "$MISSING" = "1" ]; then + echo "" + echo "Fill in the missing variables in this script and re-run." + exit 1 +fi +echo " All required variables set." + +# --------------------------------------------------------------------------- +# Step 1: Create IAM role +# --------------------------------------------------------------------------- +echo "" +echo "[1/3] Creating IAM role: ${ROLE_NAME}..." + +TRUST_POLICY='{ + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Principal": {"Service": "lambda.amazonaws.com"}, + "Action": "sts:AssumeRole" + }] +}' + +ROLE_ARN=$(aws iam create-role \ + --role-name "${ROLE_NAME}" \ + --assume-role-policy-document "${TRUST_POLICY}" \ + --query 'Role.Arn' \ + --output text 2>/dev/null \ + || aws iam get-role --role-name "${ROLE_NAME}" --query 'Role.Arn' --output text) + +echo " Role ARN: ${ROLE_ARN}" + +# Attach managed policies +for POLICY in \ + "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" \ + "arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess"; do + aws iam attach-role-policy \ + --role-name "${ROLE_NAME}" \ + --policy-arn "${POLICY}" \ + 2>/dev/null && echo " Attached: ${POLICY}" \ + || echo " Already attached: ${POLICY}" +done + +# KMS decrypt permission (inline policy) +ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +aws iam put-role-policy \ + --role-name "${ROLE_NAME}" \ + --policy-name "bridge-relayer-kms" \ + --policy-document "{ + \"Version\": \"2012-10-17\", + \"Statement\": [{ + \"Effect\": \"Allow\", + \"Action\": [\"kms:Sign\",\"kms:GetPublicKey\",\"kms:DescribeKey\"], + \"Resource\": \"arn:aws:kms:${KMS_PROVIDER_REGION}:${ACCOUNT_ID}:key/${KMS_PROVIDER_KEY_ID}\" + }] + }" +echo " KMS policy attached." 
+ +echo " Waiting 10s for IAM role propagation..." +sleep 10 + +# --------------------------------------------------------------------------- +# Step 2: Build environment variables JSON for Lambda +# --------------------------------------------------------------------------- +echo "" +echo "[2/3] Creating Lambda function: ${FUNCTION_NAME}..." + +# Build the --environment Variables=... string +ENV_VARS="Variables={" +ENV_VARS+="GRAPHQL_API_ENDPOINT=${GRAPHQL_API_ENDPOINT}," +ENV_VARS+="STAGE_HEADLESSES=${STAGE_HEADLESSES}," +ENV_VARS+="JWT_SECRET_KEY=${JWT_SECRET_KEY}," +ENV_VARS+="KMS_PROVIDER_URL=${KMS_PROVIDER_URL}," +ENV_VARS+="KMS_PROVIDER_KEY_ID=${KMS_PROVIDER_KEY_ID}," +ENV_VARS+="KMS_PROVIDER_REGION=${KMS_PROVIDER_REGION}," +ENV_VARS+="KMS_PROVIDER_AWS_ACCESSKEY=${KMS_PROVIDER_AWS_ACCESSKEY}," +ENV_VARS+="KMS_PROVIDER_AWS_SECRETKEY=${KMS_PROVIDER_AWS_SECRETKEY}," +ENV_VARS+="KMS_PROVIDER_PUBLIC_KEY=${KMS_PROVIDER_PUBLIC_KEY}," +ENV_VARS+="BSC_KMS_PROVIDER_URL=${BSC_KMS_PROVIDER_URL}," +ENV_VARS+="WNCG_CONTRACT_ADDRESS=${WNCG_CONTRACT_ADDRESS}," +ENV_VARS+="BSC_WNCG_CONTRACT_ADDRESS=${BSC_WNCG_CONTRACT_ADDRESS}," +ENV_VARS+="NCG_MINTER=${NCG_MINTER}," +ENV_VARS+="FEE_COLLECTOR_ADDRESS=${FEE_COLLECTOR_ADDRESS}," +ENV_VARS+="PLANET_ODIN_ID=${PLANET_ODIN_ID}," +ENV_VARS+="PLANET_HEIMDALL_ID=${PLANET_HEIMDALL_ID}," +ENV_VARS+="ODIN_TO_HEIMDALL_VALUT_ADDRESS=${ODIN_TO_HEIMDALL_VALUT_ADDRESS}," +ENV_VARS+="MINIMUM_NCG=${MINIMUM_NCG}," +ENV_VARS+="MAXIMUM_NCG=${MAXIMUM_NCG}," +ENV_VARS+="MAXIMUM_WHITELIST_NCG=${MAXIMUM_WHITELIST_NCG}," +ENV_VARS+="BASE_FEE=${BASE_FEE}," +ENV_VARS+="BASE_FEE_CRITERION=${BASE_FEE_CRITERION}," +ENV_VARS+="FEE_RANGE_DIVIDER_AMOUNT=${FEE_RANGE_DIVIDER_AMOUNT}," +ENV_VARS+="FEE_RANGE1_RATIO=${FEE_RANGE1_RATIO}," +ENV_VARS+="FEE_RANGE2_RATIO=${FEE_RANGE2_RATIO}," +ENV_VARS+="PRIORITY_FEE=${PRIORITY_FEE}," +ENV_VARS+="GAS_TIP_RATIO=${GAS_TIP_RATIO}," +ENV_VARS+="MAX_GAS_PRICE=${MAX_GAS_PRICE}," 
+ENV_VARS+="SLACK_WEB_TOKEN=${SLACK_WEB_TOKEN}," +ENV_VARS+="SLACK_URL=${SLACK_URL}," +ENV_VARS+="SLACK_CHANNEL_NAME=${SLACK_CHANNEL_NAME}," +ENV_VARS+="FAILURE_SUBSCRIBERS=${FAILURE_SUBSCRIBERS}," +ENV_VARS+="OPENSEARCH_ENDPOINT=${OPENSEARCH_ENDPOINT}," +ENV_VARS+="OPENSEARCH_AUTH=${OPENSEARCH_AUTH}," +ENV_VARS+="OPENSEARCH_INDEX=${OPENSEARCH_INDEX}," +ENV_VARS+="EXPLORER_ROOT_URL=${EXPLORER_ROOT_URL}," +ENV_VARS+="ETHERSCAN_ROOT_URL=${ETHERSCAN_ROOT_URL}," +ENV_VARS+="NCSCAN_URL=${NCSCAN_URL}," +ENV_VARS+="USE_NCSCAN_URL=${USE_NCSCAN_URL}," +ENV_VARS+="USE_GOOGLE_SPREAD_SHEET=${USE_GOOGLE_SPREAD_SHEET}," +ENV_VARS+="GOOGLE_SPREADSHEET_URL=${GOOGLE_SPREADSHEET_URL}," +ENV_VARS+="GOOGLE_SPREADSHEET_ID=${GOOGLE_SPREADSHEET_ID}," +ENV_VARS+="GOOGLE_CLIENT_EMAIL=${GOOGLE_CLIENT_EMAIL}," +ENV_VARS+="GOOGLE_CLIENT_PRIVATE_KEY=${GOOGLE_CLIENT_PRIVATE_KEY}," +ENV_VARS+="SHEET_MINT=${SHEET_MINT}," +ENV_VARS+="SHEET_BURN=${SHEET_BURN}," +ENV_VARS+="PAGERDUTY_ROUTING_KEY=${PAGERDUTY_ROUTING_KEY}," +ENV_VARS+="USE_SAFE_WRAPPED_NCG_MINTER=${USE_SAFE_WRAPPED_NCG_MINTER}" +ENV_VARS+="}" + +# Create a minimal placeholder zip (will be overwritten by deploy-lambda.sh) +echo "exports.handler = async () => 'placeholder';" > /tmp/placeholder.js +cd /tmp && zip placeholder.zip placeholder.js > /dev/null + +aws lambda create-function \ + --region "${REGION}" \ + --function-name "${FUNCTION_NAME}" \ + --runtime nodejs20.x \ + --role "${ROLE_ARN}" \ + --handler "src/lambda.handler" \ + --zip-file "fileb:///tmp/placeholder.zip" \ + --timeout 300 \ + --memory-size 512 \ + --environment "${ENV_VARS}" \ + --description "NineChronicles ETH+BSC bridge relayer (serverless)" \ + --query 'FunctionArn' \ + --output text | xargs -I{} echo " Created: {}" + +rm /tmp/placeholder.js /tmp/placeholder.zip + +# --------------------------------------------------------------------------- +# Step 3: Print next steps +# --------------------------------------------------------------------------- +echo "" +echo 
"[3/3] Done." +echo "" +echo "Next steps:" +echo " 1. Run bootstrap: bash bootstrap-dynamodb.sh" +echo " 2. Run deploy: ETH_RPC_URL=... BSC_RPC_URL=... GRAPHQL_API_ENDPOINT=... bash deploy-lambda.sh" +echo " 3. Verify: aws lambda invoke --function-name ${FUNCTION_NAME} --region ${REGION} /tmp/out.json && cat /tmp/out.json" +echo " 4. E2E test: cd ../bridge && pnpm e2e:live" +echo " 5. Terminate EC2 instances after confirming stable operation." diff --git a/infra/deploy-lambda.sh b/infra/deploy-lambda.sh new file mode 100755 index 0000000..513de7e --- /dev/null +++ b/infra/deploy-lambda.sh @@ -0,0 +1,236 @@ +#!/bin/bash +# Manual local deploy script for bridge-relayer Lambda. +# Run this instead of using CI/CD to reduce supply chain attack surface. +# +# Prerequisites: +# - AWS CLI configured with sufficient permissions +# - pnpm installed +# - zip installed +# +# Usage: +# AWS_REGION=us-east-1 \ +# LAMBDA_FUNCTION_NAME=bridge-relayer \ +# ETH_RPC_URL=https://... \ +# BSC_RPC_URL=https://... \ +# GRAPHQL_API_ENDPOINT=https://... \ +# bash deploy-lambda.sh +# +# To skip DynamoDB checkpoint initialization (e.g. already initialized): +# SKIP_CHECKPOINT_INIT=true bash deploy-lambda.sh + +set -e + +REGION="${AWS_REGION:-us-east-1}" +LAMBDA_FUNCTION_NAME="${LAMBDA_FUNCTION_NAME:-bridge-relayer}" +SKIP_CHECKPOINT_INIT="${SKIP_CHECKPOINT_INIT:-false}" + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +BRIDGE_DIR="${SCRIPT_DIR}/../bridge" +BUILD_DIR="${BRIDGE_DIR}/dist" +PACKAGE_DIR="/tmp/lambda-package-$$" +ZIP_PATH="/tmp/bridge-relayer-$$.zip" + +echo "=== Bridge Relayer Lambda Deploy ===" +echo " Region : ${REGION}" +echo " Function : ${LAMBDA_FUNCTION_NAME}" +echo " Skip checkpoint init: ${SKIP_CHECKPOINT_INIT}" +echo "" + +# --- Step 1: Build TypeScript --- +echo "[1/4] Building TypeScript..." 
+cd "${BRIDGE_DIR}" +pnpm install --frozen-lockfile +pnpm build +echo " Build complete: ${BUILD_DIR}" + +# --- Step 2: Package Lambda zip --- +echo "" +echo "[2/4] Packaging Lambda zip..." +rm -rf "${PACKAGE_DIR}" +mkdir -p "${PACKAGE_DIR}" + +# Copy compiled output +cp -r "${BUILD_DIR}/." "${PACKAGE_DIR}/" + +# Copy production node_modules (exclude devDependencies) +cp -r "${BRIDGE_DIR}/node_modules" "${PACKAGE_DIR}/node_modules" + +# Remove dev-only packages to reduce zip size (best-effort) +for pkg in \ + "@babel" \ + "jest" \ + "jest-cli" \ + "jest-config" \ + "babel-jest" \ + "prettier" \ + "eslint" \ + "husky" \ + "lint-staged" \ + "ts-node" \ + "typescript" \ + "nyc" \ + "npm-run-all" \ + "web3-core-promievent" \ + "@typescript-eslint"; do + rm -rf "${PACKAGE_DIR}/node_modules/${pkg}" 2>/dev/null || true +done + +# Create zip +rm -f "${ZIP_PATH}" +cd "${PACKAGE_DIR}" +zip -r "${ZIP_PATH}" . -x "*.map" -x "**/*.d.ts" > /dev/null +ZIP_SIZE=$(du -sh "${ZIP_PATH}" | cut -f1) +echo " Package size: ${ZIP_SIZE} β†’ ${ZIP_PATH}" + +# --- Step 3: Deploy to Lambda --- +echo "" +echo "[3/4] Deploying to Lambda..." + +# Check if function exists +if aws lambda get-function \ + --region "${REGION}" \ + --function-name "${LAMBDA_FUNCTION_NAME}" \ + --query 'Configuration.FunctionName' \ + --output text 2>/dev/null | grep -q "${LAMBDA_FUNCTION_NAME}"; then + + echo " Updating existing function..." + aws lambda update-function-code \ + --region "${REGION}" \ + --function-name "${LAMBDA_FUNCTION_NAME}" \ + --zip-file "fileb://${ZIP_PATH}" \ + --query 'CodeSize' \ + --output text | xargs -I{} echo " Deployed: {} bytes" + + # Wait for update to complete + echo " Waiting for update to complete..." + aws lambda wait function-updated \ + --region "${REGION}" \ + --function-name "${LAMBDA_FUNCTION_NAME}" + echo " Function updated successfully." +else + echo " ERROR: Lambda function '${LAMBDA_FUNCTION_NAME}' not found in region ${REGION}." 
+ echo " Create the function first, then re-run this script." + echo " (Tip: set up the function via the AWS console or terraform before deploying code.)" + exit 1 +fi + +# --- Step 4: Initialize DynamoDB checkpoints --- +echo "" +echo "[4/4] Initializing DynamoDB checkpoints..." + +if [ "${SKIP_CHECKPOINT_INIT}" = "true" ]; then + echo " Skipped (SKIP_CHECKPOINT_INIT=true)" +else + # ETH checkpoint + if [ -z "${ETH_RPC_URL}" ]; then + echo " WARNING: ETH_RPC_URL not set, skipping ETH checkpoint" + else + echo " Querying ETH tip block..." + ETH_TIP_HEX=$(curl -s -X POST "${ETH_RPC_URL}" \ + -H 'Content-Type: application/json' \ + -d '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \ + | python3 -c "import sys,json; print(json.load(sys.stdin)['result'])" 2>/dev/null || echo "") + + if [ -n "${ETH_TIP_HEX}" ]; then + ETH_TIP=$(python3 -c "print(int('${ETH_TIP_HEX}', 16))" 2>/dev/null || echo "") + # Get the actual block hash for the tip block + ETH_BLOCK_HASH=$(curl -s -X POST "${ETH_RPC_URL}" \ + -H 'Content-Type: application/json' \ + -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getBlockByNumber\",\"params\":[\"${ETH_TIP_HEX}\",false],\"id\":1}" \ + | python3 -c "import sys,json; print(json.load(sys.stdin)['result']['hash'])" 2>/dev/null || echo "") + + if [ -n "${ETH_BLOCK_HASH}" ] && [ -n "${ETH_TIP}" ]; then + echo " ETH tip: block ${ETH_TIP} (${ETH_BLOCK_HASH})" + aws dynamodb put-item \ + --region "${REGION}" \ + --table-name bridge-monitor-state \ + --item "{\"network\":{\"S\":\"ethereum\"},\"blockHash\":{\"S\":\"${ETH_BLOCK_HASH}\"}}" \ + --condition-expression "attribute_not_exists(network)" \ + 2>/dev/null && echo " ETH checkpoint written." \ + || echo " ETH checkpoint already exists, skipped." 
+ else + echo " WARNING: Could not get ETH block hash, skipping ETH checkpoint" + fi + else + echo " WARNING: Could not query ETH tip, skipping ETH checkpoint" + fi + fi + + # BSC checkpoint + if [ -z "${BSC_RPC_URL}" ]; then + echo " WARNING: BSC_RPC_URL not set, skipping BSC checkpoint" + else + echo " Querying BSC tip block..." + BSC_TIP_HEX=$(curl -s -X POST "${BSC_RPC_URL}" \ + -H 'Content-Type: application/json' \ + -d '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \ + | python3 -c "import sys,json; print(json.load(sys.stdin)['result'])" 2>/dev/null || echo "") + + if [ -n "${BSC_TIP_HEX}" ]; then + BSC_TIP=$(python3 -c "print(int('${BSC_TIP_HEX}', 16))" 2>/dev/null || echo "") + BSC_BLOCK_HASH=$(curl -s -X POST "${BSC_RPC_URL}" \ + -H 'Content-Type: application/json' \ + -d "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getBlockByNumber\",\"params\":[\"${BSC_TIP_HEX}\",false],\"id\":1}" \ + | python3 -c "import sys,json; print(json.load(sys.stdin)['result']['hash'])" 2>/dev/null || echo "") + + if [ -n "${BSC_BLOCK_HASH}" ] && [ -n "${BSC_TIP}" ]; then + echo " BSC tip: block ${BSC_TIP} (${BSC_BLOCK_HASH})" + aws dynamodb put-item \ + --region "${REGION}" \ + --table-name bridge-monitor-state \ + --item "{\"network\":{\"S\":\"bsc\"},\"blockHash\":{\"S\":\"${BSC_BLOCK_HASH}\"}}" \ + --condition-expression "attribute_not_exists(network)" \ + 2>/dev/null && echo " BSC checkpoint written." \ + || echo " BSC checkpoint already exists, skipped." + else + echo " WARNING: Could not get BSC block hash, skipping BSC checkpoint" + fi + else + echo " WARNING: Could not query BSC tip, skipping BSC checkpoint" + fi + fi + + # NineChronicles checkpoint + if [ -z "${GRAPHQL_API_ENDPOINT}" ]; then + echo " WARNING: GRAPHQL_API_ENDPOINT not set, skipping 9c checkpoint" + else + echo " Querying NineChronicles tip block..." 
+ NC_RESPONSE=$(curl -s -X POST "${GRAPHQL_API_ENDPOINT}" \ + -H 'Content-Type: application/json' \ + -d '{"query":"{ nodeStatus { tip { hash index } } }"}' 2>/dev/null || echo "") + + NC_BLOCK_HASH=$(echo "${NC_RESPONSE}" | python3 -c \ + "import sys,json; d=json.load(sys.stdin); print(d['data']['nodeStatus']['tip']['hash'])" \ + 2>/dev/null || echo "") + NC_TIP=$(echo "${NC_RESPONSE}" | python3 -c \ + "import sys,json; d=json.load(sys.stdin); print(d['data']['nodeStatus']['tip']['index'])" \ + 2>/dev/null || echo "") + + if [ -n "${NC_BLOCK_HASH}" ] && [ -n "${NC_TIP}" ]; then + echo " 9c tip: block ${NC_TIP} (${NC_BLOCK_HASH})" + aws dynamodb put-item \ + --region "${REGION}" \ + --table-name bridge-monitor-state \ + --item "{\"network\":{\"S\":\"nineChronicles\"},\"blockHash\":{\"S\":\"${NC_BLOCK_HASH}\"}}" \ + --condition-expression "attribute_not_exists(network)" \ + 2>/dev/null && echo " 9c checkpoint written." \ + || echo " 9c checkpoint already exists, skipped." + else + echo " WARNING: Could not query 9c tip, skipping 9c checkpoint" + fi + fi +fi + +# --- Cleanup --- +rm -rf "${PACKAGE_DIR}" +rm -f "${ZIP_PATH}" + +echo "" +echo "=== Deploy complete ===" +echo " Function : ${LAMBDA_FUNCTION_NAME}" +echo " Region : ${REGION}" +echo "" +echo "Next steps:" +echo " 1. Verify the Lambda runs clean: aws lambda invoke --function-name ${LAMBDA_FUNCTION_NAME} --region ${REGION} /tmp/lambda-out.json && cat /tmp/lambda-out.json" +echo " 2. Run E2E test locally: cd bridge && pnpm e2e:live" +echo " 3. Once confirmed stable, terminate the old EC2 instances."