diff --git a/.env.example b/.env.example index 97bade47..e6e3c5d0 100644 --- a/.env.example +++ b/.env.example @@ -12,13 +12,13 @@ CUSTOM_CHAINS_CONFIG_PATH=# optional DEFAULT_USER_OPS_BATCH_GAS_LIMIT=# default: 8_000_000 DEFAULT_NUM_SIMULATOR_WORKERS_PER_CHAIN=# default: 1 DEFAULT_SIMULATOR_WORKER_CONCURRENCY=# default: 10 -DEFAULT_SIMULATOR_TRACE_CALL_RETRY_DELAY=# default: 2 +DEFAULT_SIMULATOR_TRACE_CALL_RETRY_DELAY=# default: 75 milliseconds -DEFAULT_SIMULATOR_STALLED_JOBS_RETRY_INTERVAL=# default: 5 +DEFAULT_SIMULATOR_STALLED_JOBS_RETRY_INTERVAL=# default: 1 second DEFAULT_SIMULATOR_RATE_LIMIT_MAX_REQUESTS_PER_INTERVAL=# default: 100 DEFAULT_SIMULATOR_RATE_LIMIT_DURATION=# default: 1 -DEFAULT_EXECUTOR_STALLED_JOBS_RETRY_INTERVAL=# default: 5 +DEFAULT_EXECUTOR_STALLED_JOBS_RETRY_INTERVAL=# default: 1 second DEFAULT_EXECUTOR_RATE_LIMIT_MAX_REQUESTS_PER_INTERVAL=# default: 100 DEFAULT_EXECUTOR_RATE_LIMIT_DURATION=# default: 1 @@ -34,7 +34,7 @@ REDIS_PORT=# default: 6379 ## executor EXECUTOR_QUEUE_JOB_ATTEMPTS=# default: 3 EXECUTOR_QUEUE_JOB_BACKOFF_TYPE=# default: fixed, options: fixed|exponential -EXECUTOR_QUEUE_JOB_BACKOFF_DELAY=# default: 1 (second) +EXECUTOR_QUEUE_JOB_BACKOFF_DELAY=# default: 75 milliseconds ## health check INITIAL_HEALTH_CHECK_DELAY=# default: 1 @@ -54,7 +54,7 @@ ORACLE_PRICE_FEED_TTL=# default: 60 (seconds) ## simulator SIMULATOR_QUEUE_JOB_ATTEMPTS=# default: 10 SIMULATOR_QUEUE_JOB_BACKOFF_TYPE=# default: fixed, options: fixed|exponential -SIMULATOR_QUEUE_JOB_BACKOFF_DELAY=# default: 1 (seconds) +SIMULATOR_QUEUE_JOB_BACKOFF_DELAY=# default: 75 milliseconds ## workers NUM_CLUSTER_WORKERS=# default: 1 diff --git a/src/api/bootstrap.ts b/src/api/bootstrap.ts index 153673e8..05b5e952 100644 --- a/src/api/bootstrap.ts +++ b/src/api/bootstrap.ts @@ -2,6 +2,7 @@ import process from "node:process"; import { ChainsService } from "@/chains"; import { parsePort } from "@/common"; import { Logger } from "@/core/logger"; +import { GasManagerService } from "@/gas-manager"; import { HealthCheckService } from "@/health-check"; import { NodeService } from "@/node"; import { RpcManagerService } from "@/rpc-manager"; @@ -25,6 +26,7 @@ export async function bootstrap() { const healthCheckService = Container.get(HealthCheckService); const nodeService = Container.get(NodeService); const rpcManagerService = Container.get(RpcManagerService); + const gasManagerService = Container.get(GasManagerService); await nodeService.readVersion(); @@ -76,6 +78,11 @@ export async function bootstrap() { ); break; } + + case "gasInfoSync": { + gasManagerService.syncGasInfo(data.chainId, data.gasInfo); + break; + } } }); } diff --git a/src/master/bootstrap.ts b/src/master/bootstrap.ts index aba9d0a2..401fb991 100644 --- a/src/master/bootstrap.ts +++ b/src/master/bootstrap.ts @@ -2,6 +2,7 @@ import { BatcherService } from "@/batcher"; import { ChainsService } from "@/chains"; import { Logger } from "@/core/logger"; import { ExecutorService } from "@/executor"; +import { GasManagerService } from "@/gas-manager"; import { HealthCheckService } from "@/health-check"; import { NodeService } from "@/node"; import { RpcChainConfig, RpcManagerService } from "@/rpc-manager"; @@ -16,12 +17,16 @@ export async function bootstrap() { const logger = Container.get(Logger); const batcherService = Container.get(BatcherService); const chainsService = Container.get(ChainsService); + const gasManagerService = Container.get(GasManagerService); const healthCheckService = Container.get(HealthCheckService); const nodeService = 
Container.get(NodeService); const workersService = Container.get(WorkersService); const rpcManagerService = Container.get(RpcManagerService); + // Chain settings will be initialized await chainsService.initialize(); + + // Workers will be spawned await workersService.initialize(); const chainsSettings = chainsService.getChainsSettings(); @@ -98,9 +103,52 @@ export async function bootstrap() { }, ); }) + // RPC providers will be initialized .setup(rpcConfigs, true); + const gasManagerConfigs = chainsSettings.map((chainSettings) => ({ + chainId: chainSettings.chainId, + gasFetchInterval: chainSettings.gasCacheDuration, + })); + + const threadGasInfoSync = true; + + // Gas manager will be initialized + await gasManagerService.initialize(gasManagerConfigs, threadGasInfoSync); + + gasManagerService.on("sync", (gasInfoByChain) => { + workersService.notifyClusterWorkers({ + type: "gasInfoSync", + data: gasInfoByChain, + }); + + workersService.notifyThreadWorkers( + { + type: "gasInfoSync", + data: gasInfoByChain, + }, + { + chainId: gasInfoByChain.chainId, + workerType: "simulator", + }, + ); + + workersService.notifyThreadWorkers( + { + type: "gasInfoSync", + data: gasInfoByChain, + }, + { + chainId: gasInfoByChain.chainId, + workerType: "executor", + }, + ); + }); + + // Batcher will be initialized and this can be initialzed anytime after chain service. await batcherService.initialize(); + + // node service will be initialized which depends on RPC manager. await nodeService.initialize(); await healthCheckService @@ -153,5 +201,6 @@ export async function bootstrap() { ); } }) + // Finally the health service is initialized and workers will be activated for execution processing .initialize(); } diff --git a/src/modules/batcher/batcher.service.ts b/src/modules/batcher/batcher.service.ts index 6284d07c..ee39c00a 100644 --- a/src/modules/batcher/batcher.service.ts +++ b/src/modules/batcher/batcher.service.ts @@ -1,4 +1,5 @@ import { ChainsService } from "@/chains"; +import { withTrace } from "@/common"; import { type ConfigType, InjectConfig } from "@/core/config"; import { Logger } from "@/core/logger"; import { ExecutorService } from "@/executor"; @@ -68,100 +69,107 @@ export class BatcherService { maxBatchGasLimitCap: bigint, ) { try { - const { gasLimitOverrides } = - this.chainsService.getChainSettings(chainId); - - const executorJobCount = await this.executorService.getJobCounts(chainId); - - // UserOp blocks are still waiting to be executed. So wait for new block creation which will result in - // more userOps being batched. 
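
The master bootstrap above subscribes to the gas manager's "sync" event and fans the payload out to cluster workers and to the per-chain simulator/executor threads, while src/api/bootstrap.ts handles the matching "gasInfoSync" message. Below is a minimal sketch of that flow; the message envelope and the EventEmitter standing in for the project's WorkersService IPC are assumptions, not the real wiring.

```typescript
import { EventEmitter } from "node:events";

// Envelope the master broadcasts; `type`/`data` mirror the switch in src/api/bootstrap.ts.
// The exact GasInfo fields live in the gas-manager interfaces module, so they stay `unknown` here.
interface GasInfoSyncMessage {
  type: "gasInfoSync";
  data: { chainId: string; gasInfo: unknown };
}

// Hypothetical worker-side consumer: keeps a local snapshot so gas reads never hit an RPC.
class WorkerGasCache {
  private byChain = new Map<string, unknown>();

  handle(message: GasInfoSyncMessage): void {
    if (message.type === "gasInfoSync") {
      this.byChain.set(message.data.chainId, message.data.gasInfo);
    }
  }

  latest(chainId: string): unknown {
    return this.byChain.get(chainId);
  }
}

// A plain EventEmitter stands in for the WorkersService IPC channel.
const channel = new EventEmitter();
const cache = new WorkerGasCache();
channel.on("message", (m: GasInfoSyncMessage) => cache.handle(m));

channel.emit("message", {
  type: "gasInfoSync",
  data: { chainId: "8453", gasInfo: { maxPriorityFee: 1_000_000n } },
});
```
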
This is a smart batching feature - if (executorJobCount.waiting) { - return; - } - - const simulatorCompleteJobs = - await this.simulatorService.getCompletedJobs(chainId); - - // Simulated jobs will be fetched from completed simulator queue - if (!simulatorCompleteJobs.length) { - return; - } - - const batches: MeeUserOpBatch[] = []; - - let batch: MeeUserOpBatch = { - batchGasLimit: 0n, - meeUserOps: [], - }; - - for (const { data: simulatedJobData } of simulatorCompleteJobs) { - const { meeUserOp } = simulatedJobData; - const { meeUserOpHash } = meeUserOp; - const unpackedUserOp = unpackPackedUserOp(meeUserOp.userOp); - - const paymasterVerificationGasLimit = - gasLimitOverrides?.paymasterVerificationGasLimit || - BigInt(this.gasEstimatorConfiguration.paymasterVerificationGas); - - const maxGasLimit = // add all limits except preVerificationGas (that one is not spent during execution) - unpackedUserOp.verificationGasLimit + - unpackedUserOp.callGasLimit + - paymasterVerificationGasLimit + - BigInt(this.gasEstimatorConfiguration.paymasterPostOpGas); - - // If a userOp's gas limit is more than batch gas limit cap. There is something wrong in - // the gas limit itself. This userOp will be skipped from being executed - if (maxGasLimit > maxBatchGasLimitCap) { - this.logger.error( - { - meeUserOpHash, - maxGasLimit, - maxBatchGasLimitCap, - }, - "Invalid maxGasLimit is being used", - ); - - // Update error status in background to improve latency - this.storageService.setUserOpCustomField( - meeUserOpHash, - "error", - "Invalid maxGasLimit", - ); - - continue; - } + await withTrace( + "batcher.userOpBatching", + async () => { + const { gasLimitOverrides } = + this.chainsService.getChainSettings(chainId); + + const executorJobCount = + await this.executorService.getJobCounts(chainId); + + // UserOp blocks are still waiting to be executed. So wait for new block creation which will result in + // more userOps being batched. This is a smart batching feature + if (executorJobCount.waiting) { + return; + } + + const simulatorCompleteJobs = + await this.simulatorService.getCompletedJobs(chainId); + + // Simulated jobs will be fetched from completed simulator queue + if (!simulatorCompleteJobs.length) { + return; + } + + const batches: MeeUserOpBatch[] = []; + + let batch: MeeUserOpBatch = { + batchGasLimit: 0n, + meeUserOps: [], + }; + + for (const { data: simulatedJobData } of simulatorCompleteJobs) { + const { meeUserOp } = simulatedJobData; + const { meeUserOpHash } = meeUserOp; + const unpackedUserOp = unpackPackedUserOp(meeUserOp.userOp); + + const paymasterVerificationGasLimit = + gasLimitOverrides?.paymasterVerificationGasLimit || + BigInt(this.gasEstimatorConfiguration.paymasterVerificationGas); + + const maxGasLimit = // add all limits except preVerificationGas (that one is not spent during execution) + unpackedUserOp.verificationGasLimit + + unpackedUserOp.callGasLimit + + paymasterVerificationGasLimit + + BigInt(this.gasEstimatorConfiguration.paymasterPostOpGas); + + // If a userOp's gas limit is more than batch gas limit cap. There is something wrong in + // the gas limit itself. 
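
The batching pass is now wrapped in `withTrace`, and the same helper is used throughout the executor changes below, but `withTrace` itself is not part of this diff. Judging by its call sites, `withTrace(name, fn, attributes)()`, it wraps an async function and returns a thunk (note the trailing invocation). A minimal sketch under that assumption, with a latency log standing in for whatever tracer the real helper reports to:

```typescript
type TraceAttributes = Record<string, unknown>;

// Minimal stand-in for the real @/common withTrace: wraps an async fn and
// returns a zero-arg function, which matches the trailing `()` at call sites.
function withTrace<T>(
  name: string,
  fn: () => Promise<T>,
  attributes: TraceAttributes = {},
): () => Promise<T> {
  return async () => {
    const startedAt = Date.now();
    try {
      return await fn();
    } finally {
      // The real helper presumably reports to a tracing backend; a log keeps the sketch self-contained.
      console.debug({ name, attributes, durationMs: Date.now() - startedAt });
    }
  };
}

// Usage mirroring the batcher: the whole batching pass is measured per chain.
async function example(chainId: string): Promise<void> {
  await withTrace(
    "batcher.userOpBatching",
    async () => {
      /* batching work goes here */
    },
    { chainId },
  )();
}
```
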
This userOp will be skipped from being executed + if (maxGasLimit > maxBatchGasLimitCap) { + this.logger.error( + { + meeUserOpHash, + maxGasLimit, + maxBatchGasLimitCap, + }, + "Invalid maxGasLimit is being used", + ); + + // Update error status in background to improve latency + this.storageService.setUserOpCustomField( + meeUserOpHash, + "error", + "Invalid maxGasLimit", + ); + + continue; + } + + // If a userOp doesn't fit into the block, the existing block will be finalized as full block + // and the userOp will be assigned for a new batch + if (batch.batchGasLimit + maxGasLimit > maxBatchGasLimitCap) { + batches.push(batch); + batch = { batchGasLimit: 0n, meeUserOps: [] }; + } + + batch.meeUserOps.push(meeUserOp); + batch.batchGasLimit += maxGasLimit; + } + + if (batch.meeUserOps.length > 0) { + batches.push(batch); + } + + for (const batch of batches) { + this.logger.info( + { + from: "batcher", + chainId, + meeUserOpHashes: batch.meeUserOps.map( + (meeUserOp) => meeUserOp.meeUserOpHash, + ), + }, + `Generated a block of ${batch.meeUserOps.length} meeUserOp(s) for chain (${chainId}) with blockGasLimit (${batch.batchGasLimit})`, + ); + } - // If a userOp doesn't fit into the block, the existing block will be finalized as full block - // and the userOp will be assigned for a new batch - if (batch.batchGasLimit + maxGasLimit > maxBatchGasLimitCap) { - batches.push(batch); - batch = { batchGasLimit: 0n, meeUserOps: [] }; - } + await this.executorService.addJobs(chainId, batches); - batch.meeUserOps.push(meeUserOp); - batch.batchGasLimit += maxGasLimit; - } - - if (batch.meeUserOps.length > 0) { - batches.push(batch); - } - - for (const batch of batches) { - this.logger.info( - { - from: "batcher", - chainId, - meeUserOpHashes: batch.meeUserOps.map( - (meeUserOp) => meeUserOp.meeUserOpHash, - ), - }, - `Generated a block of ${batch.meeUserOps.length} meeUserOp(s) for chain (${chainId}) with blockGasLimit (${batch.batchGasLimit})`, - ); - } - - await this.executorService.addJobs(chainId, batches); - - await Promise.all(simulatorCompleteJobs.map((job) => job.remove())); + await Promise.all(simulatorCompleteJobs.map((job) => job.remove())); + }, + { chainId }, + )(); } catch (error) { this.logger.error(error); } diff --git a/src/modules/chains/constants.ts b/src/modules/chains/constants.ts index 10731eb4..20166464 100644 --- a/src/modules/chains/constants.ts +++ b/src/modules/chains/constants.ts @@ -240,10 +240,10 @@ export const CHAIN_CONFIG_DEFAULTS = { simulator: { stalledJobsRetryInterval: parseSeconds( process.env.DEFAULT_SIMULATOR_STALLED_JOBS_RETRY_INTERVAL, - 5, + 1, { - min: 5, - max: 30, + min: 1, + max: 10, }, ), rateLimitMaxRequestsPerInterval: parseNum( @@ -276,21 +276,21 @@ export const CHAIN_CONFIG_DEFAULTS = { min: 1, }, ), - traceCallRetryDelay: parseSeconds( + traceCallRetryDelay: parseNum( process.env.DEFAULT_SIMULATOR_TRACE_CALL_RETRY_DELAY, - 2, + 75, { - min: 1, + min: 75, }, ), }, executor: { stalledJobsRetryInterval: parseSeconds( process.env.DEFAULT_EXECUTOR_STALLED_JOBS_RETRY_INTERVAL, - 5, + 1, { - min: 5, - max: 30, + min: 1, + max: 10, }, ), rateLimitMaxRequestsPerInterval: parseNum( diff --git a/src/modules/core/redis/redis.service.ts b/src/modules/core/redis/redis.service.ts index eaf4fc5f..f172b351 100644 --- a/src/modules/core/redis/redis.service.ts +++ b/src/modules/core/redis/redis.service.ts @@ -85,6 +85,8 @@ export class RedisService ...this.config, ...redisOptions, lazyConnect: false, + // Automatic pipelining for the redis calls which improves 
performance by reducing the roundtrip + enableAutoPipelining: true, }); client.client("SETNAME", name).catch((err) => this.logger.error(err)); diff --git a/src/modules/executor/executor.config.ts b/src/modules/executor/executor.config.ts index a8a25324..6d8aaf2f 100644 --- a/src/modules/executor/executor.config.ts +++ b/src/modules/executor/executor.config.ts @@ -1,5 +1,5 @@ import process from "node:process"; -import { parseNum, parseSeconds } from "@/common"; +import { parseNum } from "@/common"; import { registerConfigAs } from "@/core/config"; import { type JobOptions, @@ -28,8 +28,8 @@ export const executorConfig = registerConfigAs<{ process.env.EXECUTOR_QUEUE_JOB_BACKOFF_TYPE, "fixed", ), - delay: parseSeconds(process.env.EXECUTOR_QUEUE_JOB_BACKOFF_DELAY, 1, { - min: 1, + delay: parseNum(process.env.EXECUTOR_QUEUE_JOB_BACKOFF_DELAY, 75, { + min: 75, }), }, }, diff --git a/src/modules/executor/executor.processor.ts b/src/modules/executor/executor.processor.ts index 1892e84c..a1c26a04 100644 --- a/src/modules/executor/executor.processor.ts +++ b/src/modules/executor/executor.processor.ts @@ -1,5 +1,6 @@ import { ChainsService } from "@/chains"; import { + BadRequestException, SomethingWentWrongException, sanitizeUrl, unixTimestamp, @@ -12,6 +13,7 @@ import { EntryPointService } from "@/entry-point"; import { GasConditions } from "@/gas-estimator"; import { GasEstimatorServiceV2 } from "@/gas-estimator/gas-estimator-v2.service"; import { NODE_ACCOUNT_TOKEN, type NodeAccount } from "@/node"; +import { NonceManagerService } from "@/nonce-manager"; import { RpcManagerService } from "@/rpc-manager"; import { trackSimulationTransactionData } from "@/simulator/utils"; import { StorageService } from "@/storage"; @@ -31,7 +33,6 @@ import { stringify, toHex, } from "viem"; -import { base } from "viem/chains"; import { BLOCK_GAS_LIMIT_EXCEEDS_ERROR_MESSAGES, GAS_LIMIT_ERROR_MESSAGES, @@ -67,6 +68,7 @@ export class ExecutorProcessor implements Processor { private readonly userOpService: UserOpService, private readonly entryPointService: EntryPointService, private readonly rpcManagerService: RpcManagerService, + private readonly nonceManagerService: NonceManagerService, ) { logger.setCaller(ExecutorProcessor); } @@ -178,66 +180,75 @@ export class ExecutorProcessor implements Processor { const authHashMap = new Map(); const meeUserOpHashes: Hash[] = []; - const now = Date.now(); - - const userOpDuplicationFilterResult = await Promise.all( - meeUserOps.map(async (meeUserOp) => { - const { meeUserOpHash, maxGasLimit } = meeUserOp; - - // Check for duplicate userOp execution - const isNonDuplicateMeeUserOp = await withTrace( - "executorPhase.isNonDuplicateMeeUserOp", - async () => - await this.storageService.setUserOpCustomField( - meeUserOpHash, - "batchHash", - batchHash, - ), - { - chainId, - meeUserOpHash, - }, - )(); - - if (isNonDuplicateMeeUserOp) { - return { isDuplicate: false, meeUserOp, gasLimit: 0n }; - } - - this.logger.trace( - { - chainId, - batchHash, - eoaWorkerAddress: this.nodeAccount.address, - meeUserOpHash, - }, - "Duplicate userOp execution is identified. 
Removing the userop from block", - ); - - return { - isDuplicate: true, - meeUserOp: null, - gasLimit: maxGasLimit, - }; - }), - ); - - const nonDuplicateUserOps: SignedPackedMeeUserOp[] = []; - let gasLimitToReduce = 0n; - - for (const { - meeUserOp, - gasLimit, - isDuplicate, - } of userOpDuplicationFilterResult) { - if (isDuplicate) { - gasLimitToReduce += gasLimit; - } else if (meeUserOp) { - nonDuplicateUserOps.push(meeUserOp); - } - } - - meeUserOps = nonDuplicateUserOps; - batchGasLimit -= gasLimitToReduce; + const initialMeeUserOpsLength = meeUserOps.length; + + // Ignoreing the meeUserOp duplication check optimisitically to reduce latency here. + // If incase the userOp duplication happens here ? Either they might be executed for the first time where there is no problem. + // If it is being executed second time ? The entire block will be reverted and this duplicate userOp will be considered as faulty userOp + // Faulty userOps will be removed from the block and remaining userOps will be executed again. So it is not a big deal to disable this now + // IMPORTANT NOTE: the code is being commented and kept here to re-enable if something bad happens with any worst case scenarios + + // const userOpDuplicationFilterResult = await Promise.all( + // meeUserOps.map(async (meeUserOp) => { + // const { meeUserOpHash, maxGasLimit } = meeUserOp; + + // // Check for duplicate userOp execution + // const isNonDuplicateMeeUserOp = await withTrace( + // "executorPhase.isNonDuplicateMeeUserOp", + // async () => + // await this.storageService.setUserOpCustomField( + // meeUserOpHash, + // "batchHash", + // batchHash, + // ), + // { + // chainId, + // meeUserOpHash, + // }, + // )(); + + // if (isNonDuplicateMeeUserOp) { + // return { isDuplicate: false, meeUserOp, gasLimit: 0n }; + // } + + // this.logger.trace( + // { + // chainId, + // batchHash, + // eoaWorkerAddress: this.nodeAccount.address, + // meeUserOpHash, + // }, + // "Duplicate userOp execution is identified. Removing the userop from block", + // ); + + // return { + // isDuplicate: true, + // meeUserOp: null, + // gasLimit: maxGasLimit, + // }; + // }), + // ); + + // const nonDuplicateUserOps: SignedPackedMeeUserOp[] = []; + // let gasLimitToReduce = 0n; + + // for (const { + // meeUserOp, + // gasLimit, + // isDuplicate, + // } of userOpDuplicationFilterResult) { + // if (isDuplicate) { + // gasLimitToReduce += gasLimit; + // } else if (meeUserOp) { + // nonDuplicateUserOps.push(meeUserOp); + // } + // } + + // // initialMeeUserOpsLength !== nonDuplicateUserOps.length => true ? It means there are duplicate userOps and it will be removed + // if (initialMeeUserOpsLength !== nonDuplicateUserOps.length) { + // meeUserOps = nonDuplicateUserOps; + // batchGasLimit -= gasLimitToReduce; + // } // Validate the userOp execution window. 
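
For reference, the duplication guard disabled in the commented-out block above amounts to a set-if-absent write keyed by the userOp hash. A sketch of that idea with ioredis, assuming `setUserOpCustomField` has HSETNX-like "only the first writer wins" semantics; the key layout below is hypothetical.

```typescript
import Redis from "ioredis";

const redis = new Redis();

// HSETNX returns 1 only for the first writer, so the first batch to claim a
// userOp "wins" and any later batch sees it as a duplicate.
async function claimUserOpForBatch(
  meeUserOpHash: string,
  batchHash: string,
): Promise<boolean> {
  const created = await redis.hsetnx(
    `userop:${meeUserOpHash}`, // hypothetical key layout
    "batchHash",
    batchHash,
  );
  return created === 1; // true => not a duplicate, safe to execute in this batch
}
```

Skipping this guard saves one Redis round trip per userOp on the hot path; the cost, as the comment notes, is that a rare duplicate surfaces later as a reverted batch and is retried without the faulty userOp.
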
If it is expired, the expired userOps will be marked as failed const { validMeeUserOps, expiredMeeUserOps } = @@ -245,19 +256,32 @@ export class ExecutorProcessor implements Processor { const currentTime = Date.now(); - // First before removing the userOp from job, mark the expired userOp as failed - await Promise.all( - expiredMeeUserOps.map((expiredMeeUserOp) => { - return this.storageService.updateUserOpCustomFields( - expiredMeeUserOp.meeUserOpHash, - { - error: "Execution deadline limit exceeded", - executionStartedAt: currentTime, - executionFinishedAt: currentTime, - }, - ); - }), - ); + if (expiredMeeUserOps.length > 0) { + // Optimistically marking the expired userOp as failed in background to improve latency + Promise.all( + expiredMeeUserOps.map((expiredMeeUserOp) => { + return this.storageService.updateUserOpCustomFields( + expiredMeeUserOp.meeUserOpHash, + { + error: "Execution deadline limit exceeded", + executionStartedAt: currentTime, + executionFinishedAt: currentTime, + }, + ); + }), + ).catch((error) => { + this.logger.error({ + chainId, + batchHash, + expiredMeeUserOpHashes: expiredMeeUserOps.map( + (meeUserOp) => meeUserOp.meeUserOpHash, + ), + eoaWorkerAddress: this.nodeAccount.address, + executionAttempts, + error, + }); + }); + } // New block gas limit with validMeeUserOps const newBlockGasLimit = validMeeUserOps.reduce( @@ -265,26 +289,30 @@ export class ExecutorProcessor implements Processor { 0n, ); + // This is unexpired valid userOps and non duplicates meeUserOps = validMeeUserOps; batchGasLimit = newBlockGasLimit; - await job.updateData({ - batchGasLimit, - meeUserOps, - }); + // initialMeeUserOpsLength !== meeUserOps.length => true ? It means there are either duplicate or expired userOps and it will be removed + if (initialMeeUserOpsLength !== meeUserOps.length) { + await job.updateData({ + batchGasLimit, + meeUserOps, + }); - if (!meeUserOps.length) { - this.logger.trace( - { - chainId, - batchHash, - eoaWorkerAddress: this.nodeAccount.address, - }, - "No userop remaining to execute. The entire block is full of duplicate userOps", - ); + if (!meeUserOps.length) { + this.logger.trace( + { + chainId, + batchHash, + eoaWorkerAddress: this.nodeAccount.address, + }, + "No userop remaining to execute. 
The entire block is full of duplicate userOps", + ); - // Returning a value resolves the processor and block will be marked as executed by default - return false; + // Returning a value resolves the processor and block will be marked as executed by default + return false; + } } for (const { eip7702Auth, meeUserOpHash } of meeUserOps) { @@ -312,7 +340,7 @@ export class ExecutorProcessor implements Processor { this.storageService.createUserOpCustomField( meeUserOpHash, "executionStartedAt", - now, + currentTime, ); meeUserOpHashes.push(meeUserOpHash); @@ -332,12 +360,25 @@ export class ExecutorProcessor implements Processor { ); const [feeData, nonce] = await Promise.all([ - this.gasEstimatorService.getCurrentGasConditions(chainId), - this.rpcManagerService.executeRequest(chainId, (chainClient) => { - return chainClient.getTransactionCount({ - address: this.nodeAccount.address, - }); - }), + withTrace( + "executorPhase.getCurrentGasConditions", + () => this.gasEstimatorService.getCurrentGasConditions(chainId), + { + chainId, + }, + )(), + withTrace( + "executorPhase.getNonce", + () => + this.nonceManagerService.getNonce( + this.nodeAccount.address, + chainId, + ), + { + chainId, + workerAddress: this.nodeAccount.address, + }, + )(), ]); let executionOptions = { @@ -396,12 +437,25 @@ export class ExecutorProcessor implements Processor { USER_OP_EXECUTION_ERRORS.TRANSACTION_RECEIPT_TIMEOUT_ERROR && txHash ) { - // Add the current tx hash into the job - await job.updateData({ - meeUserOps, - batchGasLimit, - previousTxHash: txHash, - }); + await Promise.all([ + // If there is a transaction receipt timeout error, it means the transaction is either dropped or pending in the mempool. + // For nonce manager, there is a chance where futuristic nonce might be used for transaction and it will be always pending due to + // nonce sequential dependency. eg: current nonce is 1 but we sent the tx with nonce 2 + // In this case the tx will be stuck without errors. If this worst case happens, it will usually endup as tx receipt timeout error + // So to handle this future nonce case, we forcefully fetch the latest nonce from RPC which will automatically sync the local nonce in cache + // for the RPC manager for this worker. To know more about this edge case, refer the nonce manager code description at the top + this.nonceManagerService.getNonce( + this.nodeAccount.address, + chainId, + true, // force fetch nonce to refresh the cache and fetch latest proper nonce + ), + // Add the current tx hash into the job + job.updateData({ + meeUserOps, + batchGasLimit, + previousTxHash: txHash, + }), + ]); // If it is a retriable error, throwing an error will trigger a queue retry throw new SomethingWentWrongException( @@ -506,6 +560,7 @@ export class ExecutorProcessor implements Processor { if ( error instanceof UnrecoverableError || error instanceof SomethingWentWrongException || + error instanceof BadRequestException || error instanceof Error ) { errorMessage = error.message; @@ -754,6 +809,16 @@ export class ExecutorProcessor implements Processor { { chainId, batchHash }, )(); + if (transactionReceipt) { + // If the transaction receipt is fetched. It guaruntees that the transaction has been mined irrespective of whether it is successful or reverted on chain. 
+ // So we mark the nonce as used and increase the current nonce to sync the cache in nonce manager to have a fresh nonce for next executions for this worker + this.nonceManagerService.markNonceAsUsed( + this.nodeAccount.address, + chainId, + nonce, + ); + } + // This will skip the quick fast block status update if (transactionReceipt?.status === "reverted") { throw new SomethingWentWrongException( @@ -814,6 +879,16 @@ export class ExecutorProcessor implements Processor { }, { chainId, batchHash }, )(); + + if (transactionReceipt) { + // If the transaction receipt is fetched. It guaruntees that the transaction has been mined irrespective of whether it is successful or reverted on chain. + // So we mark the nonce as used and increase the current nonce to sync the cache in nonce manager to have a fresh nonce for next executions for this worker + this.nonceManagerService.markNonceAsUsed( + this.nodeAccount.address, + chainId, + nonce, + ); + } } catch (error) { if (error instanceof Error) { const err = error as Error; @@ -897,6 +972,7 @@ export class ExecutorProcessor implements Processor { if ( error instanceof Error || error instanceof SomethingWentWrongException || + error instanceof BadRequestException || error instanceof ContractFunctionExecutionError || error instanceof ContractFunctionRevertedError ) { @@ -1047,6 +1123,20 @@ export class ExecutorProcessor implements Processor { } } + if (!isConfirmed) { + this.logger.info( + { + chainId, + batchHash, + eoaWorkerAddress: this.nodeAccount.address, + meeUserOpHashes: meeUserOps.map( + (meeUserOp) => meeUserOp.meeUserOpHash, + ), + }, + "Fast block transaction confirmation handled", + ); + } + // Check if all the userOps are processed and updated in storage. If not, something bad happened, // so we mark them as failed to execute to avoid pending state in all the extreme worst case scenario's for (const { meeUserOpHash } of meeUserOps) { @@ -1138,7 +1228,7 @@ export class ExecutorProcessor implements Processor { if (errorMessage.includes(configuredErrorMessage.toLowerCase())) { return { isRetriableError: false, - errorType: USER_OP_EXECUTION_ERRORS.PRIORITY_FEE_TOO_HIGH, + errorType: USER_OP_EXECUTION_ERRORS.BLOCK_GAS_LIMIT_EXCEEDS_ERROR, }; } } @@ -1208,23 +1298,53 @@ export class ExecutorProcessor implements Processor { switch (errorType) { case USER_OP_EXECUTION_ERRORS.GAS_TOO_LOW: case USER_OP_EXECUTION_ERRORS.MAX_FEE_TOO_LOW: + case USER_OP_EXECUTION_ERRORS.PRIORITY_FEE_TOO_HIGH: case USER_OP_EXECUTION_ERRORS.NONCE_EXPIRED: { - const nonce = await this.rpcManagerService.executeRequest( - chainId, - (chainClient) => { - return chainClient.getTransactionCount({ - address: this.nodeAccount.address, - }); + // If it is a nonce error, we ignore cache and forcefully fetch the new nonce from RPC call + // This handles errors such as nonce already known, nonce too low and etc... 
+ const forceFetchNonce = + errorType === USER_OP_EXECUTION_ERRORS.NONCE_EXPIRED; + + const nonce = await withTrace( + "executorPhase.getNonceOnRetry", + async () => + await this.nonceManagerService.getNonce( + this.nodeAccount.address, + chainId, + forceFetchNonce, + ), + { + chainId, + workerAddress: this.nodeAccount.address, + forceFetchNonce, }, - ); + )(); + + // If it is a gas related error, we ignore cache and forcefully fetch the new new gas info from RPC call + const forceFetchGasConditions = + errorType === USER_OP_EXECUTION_ERRORS.GAS_TOO_LOW || + errorType === USER_OP_EXECUTION_ERRORS.PRIORITY_FEE_TOO_HIGH || + errorType === USER_OP_EXECUTION_ERRORS.MAX_FEE_TOO_LOW; + + const currentGasConditions = await withTrace( + "executorPhase.getCurrentGasConditionsOnRetry", + async () => + await this.gasEstimatorService.getCurrentGasConditions( + chainId, + forceFetchGasConditions, + ), + { + chainId, + forceFetchGasConditions, + }, + )(); return { maxFeePerGas: - (options.executeOptions.maxFeePerGas * - (100n + options.percentage)) / + (currentGasConditions.maxFeePerGas * (100n + options.percentage)) / 100n, maxPriorityFeePerGas: - (options.executeOptions.maxPriorityFeePerGas * + (currentGasConditions.maxPriorityFeePerGas * (100n + options.percentage)) / 100n, nonce, @@ -1253,15 +1373,16 @@ export class ExecutorProcessor implements Processor { // If all the userOps are faulty ? New block will not be created if (validMeeUserOps.length > 0) { - // Remove the valid userOps from duplication record for the current block, so that it will be executed in a different block - await Promise.all( - validMeeUserOps.map(({ meeUserOpHash }) => - this.storageService.unsetUserOpCustomField( - meeUserOpHash, - "batchHash", - ), - ), - ); + // UserOp duplication checks are disabled optimistically for latency improvements + // // Remove the valid userOps from duplication record for the current block, so that it will be executed in a different block + // await Promise.all( + // validMeeUserOps.map(({ meeUserOpHash }) => + // this.storageService.unsetUserOpCustomField( + // meeUserOpHash, + // "batchHash", + // ), + // ), + // ); // If there is a block gas limit error, the userOps will be separated into individual blocks and it gets executed. 
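
`NonceManagerService` is introduced as a dependency here but its implementation is not part of this diff. From the call sites above, `getNonce(address, chainId, forceFetch?)` and `markNonceAsUsed(address, chainId, nonce)`, the following is a minimal in-memory sketch of the contract the executor relies on; the real service may persist nonces elsewhere and handle gaps differently.

```typescript
// Minimal in-memory sketch of the nonce manager used above; illustrative only.
class LocalNonceManager {
  private nonces = new Map<string, number>();

  constructor(
    private readonly fetchOnChainNonce: (
      address: string,
      chainId: string,
    ) => Promise<number>, // e.g. a getTransactionCount RPC read
  ) {}

  private key(address: string, chainId: string): string {
    return `${chainId}:${address.toLowerCase()}`;
  }

  // Returns the cached nonce; forceFetch re-reads the chain and resyncs the cache,
  // which is how a stuck "futuristic nonce" gets repaired after a receipt timeout.
  async getNonce(
    address: string,
    chainId: string,
    forceFetch = false,
  ): Promise<number> {
    const key = this.key(address, chainId);
    if (forceFetch || !this.nonces.has(key)) {
      this.nonces.set(key, await this.fetchOnChainNonce(address, chainId));
    }
    return this.nonces.get(key) as number;
  }

  // Once a receipt is seen (success or revert), the nonce is consumed on chain,
  // so advance the local counter past it for the next execution.
  markNonceAsUsed(address: string, chainId: string, nonce: number): void {
    const key = this.key(address, chainId);
    const current = this.nonces.get(key) ?? 0;
    this.nonces.set(key, Math.max(current, nonce + 1));
  }
}
```
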
if (splitUserOpsIntoSeparateBlocks) { diff --git a/src/modules/gas-estimator/gas-estimator-v2.service.test.ts b/src/modules/gas-estimator/gas-estimator-v2.service.test.ts index 0cea76a6..873ce3ac 100644 --- a/src/modules/gas-estimator/gas-estimator-v2.service.test.ts +++ b/src/modules/gas-estimator/gas-estimator-v2.service.test.ts @@ -2,9 +2,10 @@ import type { ChainsService } from "@/chains"; import { setupEnvs } from "@/common/setup"; import { ContractsService } from "@/contracts"; import type { Logger } from "@/core/logger"; +import { EncoderAndDecoderService } from "@/encoder-and-decoder"; +import { GasManagerService } from "@/gas-manager"; import { NodeService } from "@/node"; import { RpcManagerService } from "@/rpc-manager"; -import { StorageService } from "@/storage"; import { http, createPublicClient, formatGwei } from "viem"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { GasEstimatorServiceV2 } from "./gas-estimator-v2.service"; @@ -25,12 +26,14 @@ describe("GasEstimatorServiceV2", () => { }; let mockContractsService: Record; let mockNodeService: Record; + let mockEncoderService: Record; let mockRpcManagerService: { executeRequest: ReturnType; }; - let mockStorageService: { - setCache: ReturnType; - getCache: ReturnType; + let mockGasManagerService: { + getLatestGasPrice: ReturnType; + getLatestFeeHistory: ReturnType; + getLatestMaxPriorityFee: ReturnType; }; let mockLogger: { trace: ReturnType; @@ -214,6 +217,7 @@ describe("GasEstimatorServiceV2", () => { mockContractsService = {}; mockNodeService = {}; + mockEncoderService = {}; mockRpcManagerService = { executeRequest: vi .fn() @@ -224,9 +228,17 @@ describe("GasEstimatorServiceV2", () => { }); }), }; - mockStorageService = { - setCache: vi.fn(), - getCache: vi.fn(), + mockGasManagerService = { + getLatestGasPrice: vi.fn(), + getLatestFeeHistory: vi + .fn() + .mockImplementation((chainId: string, fn: unknown) => { + // Return an object containing baseFeePerGas for test purposes + return Promise.resolve({ + baseFeePerGas: [15000000000n, 16000000000n], + }); + }), + getLatestMaxPriorityFee: vi.fn(), }; mockLogger = { trace: vi.fn(), @@ -249,8 +261,9 @@ describe("GasEstimatorServiceV2", () => { mockChainsService as unknown as ChainsService, mockContractsService as unknown as ContractsService, mockNodeService as unknown as NodeService, - mockStorageService as unknown as StorageService, mockRpcManagerService as unknown as RpcManagerService, + mockEncoderService as unknown as EncoderAndDecoderService, + mockGasManagerService as unknown as GasManagerService, ); }); @@ -382,8 +395,9 @@ describe("GasEstimatorServiceV2", () => { realChainsService as unknown as ChainsService, mockContractsService as unknown as ContractsService, mockNodeService as unknown as NodeService, - mockStorageService as unknown as StorageService, mockRpcManagerService as unknown as RpcManagerService, + mockEncoderService as unknown as EncoderAndDecoderService, + mockGasManagerService as unknown as GasManagerService, ); // Test 1: Get gas conditions with forceRefresh=true, fetchL1GasPrice=false @@ -391,7 +405,6 @@ describe("GasEstimatorServiceV2", () => { await realGasEstimatorService.getCurrentGasConditions( chain.chainId, true, // forceRefresh - true, // addBuffer false, // fetchL1GasPrice ); @@ -402,7 +415,6 @@ describe("GasEstimatorServiceV2", () => { await realGasEstimatorService.getCurrentGasConditions( chain.chainId, true, // forceRefresh - true, // addBuffer true, // fetchL1GasPrice ); } catch (l1Error) { diff --git 
a/src/modules/gas-estimator/gas-estimator-v2.service.ts b/src/modules/gas-estimator/gas-estimator-v2.service.ts index 304c645b..1bb785e7 100644 --- a/src/modules/gas-estimator/gas-estimator-v2.service.ts +++ b/src/modules/gas-estimator/gas-estimator-v2.service.ts @@ -2,14 +2,29 @@ import { ChainSettings, ChainsService } from "@/chains"; import { BadRequestException, withTrace } from "@/common"; import { ContractsService } from "@/contracts"; import { Logger } from "@/core/logger"; +import { EncoderAndDecoderService } from "@/encoder-and-decoder"; +import { GasManagerService } from "@/gas-manager"; import { NodeService } from "@/node"; +import type { QuoteType } from "@/quotes"; import { RpcManagerService } from "@/rpc-manager"; -import { StorageService } from "@/storage"; +import { type GrantPermissionResponse } from "@biconomy/abstractjs/dist/_types/modules/validators/smartSessions/decorators/grantPermission"; import { Inject, Service } from "typedi"; -import { Hex } from "viem"; +import type { Address, Hex } from "viem"; import { publicActionsL2 } from "viem/op-stack"; -import { ARBITRUM_ORACLE_ADDRESS } from "./constants"; -import { GasConditions } from "./interfaces"; +import { + APPROXIMATE_SMART_SESSION_BATCH_CALLDATA_DECODING_GAS_LIMIT, + APPROXIMATE_SMART_SESSION_SINGLE_CALLDATA_DECODING_GAS_LIMIT, + ARBITRUM_ORACLE_ADDRESS, + FIXED_SMART_SESSIONS_COLD_GET_POLICY_GAS_LIMIT, + FIXED_SMART_SESSIONS_POLICY_COLD_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE, + FIXED_SMART_SESSIONS_POLICY_WARM_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE, + FIXED_SMART_SESSIONS_USE_MODE_GAS_LIMIT, + FIXED_SMART_SESSIONS_WARM_GET_POLICY_GAS_LIMIT, + FIXED_SMART_SESSION_CHECK_ACTION_GAS_LIMIT, + FIXED_SMART_SESSION_COLD_CHECK_ACTION_POLICY_GAS_LIMIT, + FIXED_SMART_SESSION_WARM_CHECK_ACTION_POLICY_GAS_LIMIT, +} from "./constants"; +import { GasConditions, GasEstimationLookupKey } from "./interfaces"; @Service() export class GasEstimatorServiceV2 { @@ -19,38 +34,18 @@ export class GasEstimatorServiceV2 { private readonly chainsService: ChainsService, private readonly contractsService: ContractsService, private readonly nodeService: NodeService, - private readonly storageService: StorageService, private readonly rpcManagerService: RpcManagerService, + private readonly encoderAndDecoderService: EncoderAndDecoderService, + private readonly gasManagerService: GasManagerService, ) {} async getCurrentGasConditions( chainId: string, forceRefresh = false, - addBuffer = true, // keep the same interface as the old service fetchL1GasPrice = false, ): Promise { const chainSettings = this.chainsService.getChainSettings(chainId); try { - // Cache duration is originally configured as milliseconds and we are converting that into seconds for redis cache here - const cacheDuration = chainSettings.gasCacheDuration / 1000; - const cacheKey = `gas-conditions-${chainSettings.name}`; - - // If it is force refresh ? the cache will be always null - const cached = !forceRefresh - ? 
await withTrace( - "gasEstimator.getCachedGasConditions", - async () => - await this.storageService.getCache<{ conditions: GasConditions }>( - cacheKey, - ), - { cacheKey }, - )() - : null; - - if (cached) { - return cached.conditions; - } - let conditions: GasConditions; if (!chainSettings.eip1559) { @@ -58,11 +53,9 @@ export class GasEstimatorServiceV2 { const gasPrice = await withTrace( "gasEstimator.legacyGasPrice", async () => - await this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.getGasPrice(); - }, + await this.gasManagerService.getLatestGasPrice( + chainId, + forceRefresh, ), { chainId, @@ -77,15 +70,13 @@ export class GasEstimatorServiceV2 { }; } else { // EIP-1559 chain - use fee history for prediction - const rewardPercentiles = [50]; // 50th percentile for "standard" mode - const [feeHistory, rpcMaxPriorityFee] = await withTrace( "gasEstimator.eip1559GasPrice", async () => await Promise.all([ - this.getFeeHistory(chainId, rewardPercentiles), + this.getFeeHistory(chainId, forceRefresh), // Fetch max priority fee using eth_maxPriorityFeePerGas RPC call - this.getMaxPriorityFee(chainId), + this.getMaxPriorityFee(chainId, forceRefresh), ]), { chainId, @@ -151,7 +142,6 @@ export class GasEstimatorServiceV2 { const fetchedL1GasPrice = await this.getCurrentGasConditions( chainSettings.l1ChainId, false, - false, ); this.logger.trace({ fetchedL1GasPrice }, "L1 gas price"); @@ -169,19 +159,6 @@ export class GasEstimatorServiceV2 { }; } - await withTrace( - "gasEstimator.setCachedGasConditions", - async () => - await this.storageService.setCache<{ conditions: GasConditions }>( - cacheKey, - { - conditions, - }, - { ttl: cacheDuration }, - ), - { cacheKey }, - )(); - this.logger.trace( { conditions }, `Current gas conditions on chain ${chainSettings.name}`, @@ -308,47 +285,194 @@ export class GasEstimatorServiceV2 { } } - async getMaxPriorityFee(chainId: string): Promise { + async getMaxPriorityFee( + chainId: string, + forceRefresh = false, + ): Promise { // Fetch max priority fee using eth_maxPriorityFeePerGas RPC call let rpcMaxPriorityFee = 0n; try { - const rpcResponse = await withTrace( + rpcMaxPriorityFee = await withTrace( "gasEstimator.maxPriorityFeePerGas", async () => - await this.rpcManagerService.executeRequest( + await this.gasManagerService.getLatestMaxPriorityFee( chainId, - (chainClient) => { - return chainClient.request({ - method: "eth_maxPriorityFeePerGas", - }); - }, + forceRefresh, ), { chainId, }, )(); - rpcMaxPriorityFee = (BigInt(rpcResponse as string) * 110n) / 100n; // 10% buffer } catch (rpcError) { - this.logger.warn(rpcError, "Failed to fetch eth_maxPriorityFeePerGas:"); + this.logger.warn({ rpcError }, "Failed to fetch maxPriorityFee"); rpcMaxPriorityFee = 0n; // Fallback to 0 gwei } return rpcMaxPriorityFee; } - async getFeeHistory(chainId: string, rewardPercentiles: number[]) { + async getFeeHistory(chainId: string, forceRefresh = false) { return await withTrace( "gasEstimator.getFeeHistory", async () => - await this.rpcManagerService.executeRequest(chainId, (chainClient) => { - return chainClient.getFeeHistory({ - blockCount: 10, // looking at last 10 blocks - blockTag: "pending", - rewardPercentiles, - }); - }), + await this.gasManagerService.getLatestFeeHistory(chainId, forceRefresh), { chainId }, )(); } + + getGasEstimationLookupKey(options: { + quoteType: QuoteType; + isPermitRequired?: boolean; + isRedeemDelegationRequired?: boolean; + isSafeExecutionRequired?: boolean; + }): 
GasEstimationLookupKey { + const { + quoteType, + isPermitRequired, + isRedeemDelegationRequired, + isSafeExecutionRequired, + } = options; + + const permitKey: "active" | "unactive" = isPermitRequired + ? "active" + : "unactive"; + + const delegationKey: "delegation" | "non-delegation" = + isRedeemDelegationRequired ? "delegation" : "non-delegation"; + + const safeExecutionKey: "safe-execution" | "non-safe-execution" = + isSafeExecutionRequired ? "safe-execution" : "non-safe-execution"; + + switch (quoteType) { + case "simple": + return `${quoteType}-mode`; + case "permit": + return `${permitKey}-${quoteType}-mode`; + case "onchain": + return `${quoteType}-mode`; + case "mm-dtk": + return `${delegationKey}-${quoteType}-mode`; + case "safe-sa": + return `${safeExecutionKey}-${quoteType}-mode`; + default: + throw new BadRequestException("Failed to estimate gas"); + } + } + + // Gas estimation algorithm to calculate the vgl for Smart sessions use mode + getSmartSessionUseModeVerificationGasLimit( + userOpCalldata: Hex, + sessionDetails: GrantPermissionResponse, + ) { + try { + // This fixed gas limit will be always there no matter different execution mode or number of actions or number of action policies + let verificationGasLimit = FIXED_SMART_SESSIONS_USE_MODE_GAS_LIMIT; + + let decodedCalldataArray: { + target: Address; + value: bigint; + calldata: Hex; + }[] = []; + + try { + decodedCalldataArray = + this.encoderAndDecoderService.decodeERC7579Calldata(userOpCalldata); + } catch { + throw new BadRequestException( + "Failed to decode userop calldata to estimate gas for Smart Sessions use mode", + ); + } + + if (decodedCalldataArray.length === 0) { + throw new BadRequestException( + "Failed to estimate gas for Smart Sessions use mode", + ); + } + + const isBatch = decodedCalldataArray.length > 1; + + // Based on execution calldata (batch vs single), the approximate decoding calldata gas limit will be added + verificationGasLimit += isBatch + ? APPROXIMATE_SMART_SESSION_BATCH_CALLDATA_DECODING_GAS_LIMIT + : APPROXIMATE_SMART_SESSION_SINGLE_CALLDATA_DECODING_GAS_LIMIT; + + const hasActionAlreadySeen = new Set(); + + for (const decodedCalldata of decodedCalldataArray) { + // Contract address where permission was enabled via policy + const actionTarget = decodedCalldata.target.toLowerCase(); + // Function selector where permission was enabled via policy + const actionTargetSelector = decodedCalldata.calldata + .slice(0, 10) + .toLowerCase(); + + // Fetch the action to calculate gas limits + const [action] = + sessionDetails.enableSessionData.enableSession.sessionToEnable.actions.filter( + (action) => { + return ( + action.actionTarget.toLowerCase() === actionTarget && + action.actionTargetSelector.toLowerCase() === + actionTargetSelector + ); + }, + ); + + // If the action doesn't exists in sessionDetails, skip it. Reverts will be handled by simulation and no need to worry about this case here + if (action) { + const actionKey = `${actionTarget}::${actionTargetSelector}`; + + // Sometime the gas is being low drastically due to warm access. So we track this flag to toggle between cold vs warm storage access + const isColdAccess = !hasActionAlreadySeen.has(actionKey); + + // Fixed gas limits added for check action flow + verificationGasLimit += FIXED_SMART_SESSION_CHECK_ACTION_GAS_LIMIT; + + // Fixed gas limits added for check action policy flow + verificationGasLimit += isColdAccess + ? 
FIXED_SMART_SESSION_COLD_CHECK_ACTION_POLICY_GAS_LIMIT + : FIXED_SMART_SESSION_WARM_CHECK_ACTION_POLICY_GAS_LIMIT; + + const actionPolicyLength = BigInt(action.actionPolicies.length); + + // Fixed get policy gas limit will be multiplied by number of policies per action + verificationGasLimit += + (isColdAccess + ? FIXED_SMART_SESSIONS_COLD_GET_POLICY_GAS_LIMIT + : FIXED_SMART_SESSIONS_WARM_GET_POLICY_GAS_LIMIT) * + actionPolicyLength; + + for (const { policy } of action.actionPolicies) { + const policyGasLimit = isColdAccess + ? FIXED_SMART_SESSIONS_POLICY_COLD_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE[ + policy.toLowerCase() + ] + : FIXED_SMART_SESSIONS_POLICY_WARM_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE[ + policy.toLowerCase() + ]; + + // If there is no policy match ? default 20k gas will be used and this is approximate safe value + // This is to handle unknown policies being used in the smart sessions flow if any + const defaultPolicyGasLimit = isColdAccess ? 20_000n : 10_000n; + + verificationGasLimit += policyGasLimit || defaultPolicyGasLimit; + } + + // Mark the action as already seen to reduce gas limit which has warm storage access + hasActionAlreadySeen.add(actionKey); + } + } + + return verificationGasLimit; + } catch (error) { + this.logger.error( + { error }, + "Failed to estimate gas for Smart Sessions use mode", + ); + throw new BadRequestException( + "Failed to estimate gas for Smart Sessions use mode", + ); + } + } } diff --git a/src/modules/gas-estimator/gas-estimator.service.ts b/src/modules/gas-estimator/gas-estimator.service.ts deleted file mode 100644 index 671531b8..00000000 --- a/src/modules/gas-estimator/gas-estimator.service.ts +++ /dev/null @@ -1,408 +0,0 @@ -import { ChainSettings, ChainsService } from "@/chains"; -import { BadRequestException } from "@/common"; -import { ContractsService } from "@/contracts"; -import { Logger } from "@/core/logger"; -import { EncoderAndDecoderService } from "@/encoder-and-decoder"; -import { NodeService } from "@/node"; -import { type QuoteType } from "@/quotes"; -import { RpcManagerService } from "@/rpc-manager"; -import { type GrantPermissionResponse } from "@biconomy/abstractjs/dist/_types/modules/validators/smartSessions/decorators/grantPermission"; -import { Service } from "typedi"; -import { type Address, Hex } from "viem"; -import { publicActionsL2 } from "viem/op-stack"; -import { - APPROXIMATE_SMART_SESSION_BATCH_CALLDATA_DECODING_GAS_LIMIT, - APPROXIMATE_SMART_SESSION_SINGLE_CALLDATA_DECODING_GAS_LIMIT, - ARBITRUM_ORACLE_ADDRESS, - FIXED_SMART_SESSIONS_COLD_GET_POLICY_GAS_LIMIT, - FIXED_SMART_SESSIONS_POLICY_COLD_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE, - FIXED_SMART_SESSIONS_POLICY_WARM_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE, - FIXED_SMART_SESSIONS_USE_MODE_GAS_LIMIT, - FIXED_SMART_SESSIONS_WARM_GET_POLICY_GAS_LIMIT, - FIXED_SMART_SESSION_CHECK_ACTION_GAS_LIMIT, - FIXED_SMART_SESSION_COLD_CHECK_ACTION_POLICY_GAS_LIMIT, - FIXED_SMART_SESSION_WARM_CHECK_ACTION_POLICY_GAS_LIMIT, -} from "./constants"; -import { GasConditions, type GasEstimationLookupKey } from "./interfaces"; - -// This service is not being used for gas price calculation, so latency tracking is not added here -@Service() -export class GasEstimatorService { - constructor( - private readonly logger: Logger, - private readonly chainsService: ChainsService, - private readonly encoderAndDecoderService: EncoderAndDecoderService, - private readonly contractsService: ContractsService, - private readonly nodeService: NodeService, - private readonly rpcManagerService: 
RpcManagerService, - ) {} - - private gasCache: Map< - string, - { conditions: GasConditions; timestamp: number } - > = new Map(); - - async getCurrentGasConditions( - chainId: string, - forceRefresh = false, - addBuffer = true, - fetchL1GasPrice = false, - ): Promise { - const chainSettings = this.chainsService.getChainSettings(chainId); - try { - const now = Date.now(); - const cached = this.gasCache.get(chainSettings.name); - const cacheDuration = chainSettings.gasCacheDuration; - - if (!forceRefresh && cached && now - cached.timestamp < cacheDuration) { - return cached.conditions; - } - - let conditions: GasConditions; - - if (chainSettings.eip1559) { - const [latestBlockData, maxPriorityFeePerGas] = await Promise.all([ - this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.getBlock(); - }, - ), - this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.estimateMaxPriorityFeePerGas(); - }, - ), - ]); - - const baseFee = latestBlockData.baseFeePerGas || 0n; - const priorityFee = maxPriorityFeePerGas; - - // maxFeePerGas should be at least baseFee + maxPriorityFeePerGas - const minMaxFee = baseFee + priorityFee; - - // Add 50% buffer to ensure it's high enough - let maxFeePerGas = minMaxFee; - - if (addBuffer) { - maxFeePerGas = (minMaxFee * 3n) / 2n; - } - - let l1GasPrice = 0n; - - if (chainSettings.l1ChainId && fetchL1GasPrice) { - this.logger.trace( - { l1ChainId: chainSettings.l1ChainId }, - "Getting L1 gas price", - ); - - const fetchedL1GasPrice = await this.getCurrentGasConditions( - chainSettings.l1ChainId, - false, - false, - ); - - this.logger.trace({ fetchedL1GasPrice }, "L1 gas price"); - - if (fetchedL1GasPrice) { - l1GasPrice = fetchedL1GasPrice.baseFee; - } - } - - this.logger.trace( - { maxFeePerGas, maxPriorityFeePerGas, l1GasPrice, baseFee }, - `Current gas conditions on chain ${chainSettings.name}`, - ); - - conditions = { - l1GasPrice: l1GasPrice, - maxFeePerGas: maxFeePerGas, - maxPriorityFeePerGas: priorityFee, - baseFee, - }; - } else { - const gasPrice = await this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.getGasPrice(); - }, - ); - - conditions = { - l1GasPrice: 0n, - maxFeePerGas: gasPrice, - maxPriorityFeePerGas: gasPrice, - baseFee: gasPrice, - }; - } - - this.gasCache.set(chainSettings.name, { - conditions, - timestamp: now, - }); - - return conditions; - } catch (error) { - this.logger.error( - { error }, - `Failed to get gas conditions on chain ${chainSettings.name}`, - ); - throw new BadRequestException(`Failed to get gas conditions: ${error}`); - } - } - - async getL1Gas( - chainSettings: ChainSettings, - callData: Hex, - l2BaseFee: bigint, - ): Promise<{ l1GasUsedInTermsOfL2: bigint; l1Fee: bigint }> { - try { - const entryPointV7Address = this.contractsService.getContractAddress( - "entryPointV7", - chainSettings.chainId, - ); - - switch (chainSettings.type) { - case "optimism": { - this.logger.trace("Estimating L1 on Optimism gas"); - - const l1Fee = await this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.extend(publicActionsL2()).estimateL1Gas({ - account: this.nodeService.getMasterAccount().address, - to: entryPointV7Address, - data: callData, - }); - }, - ); - - this.logger.trace( - { l1Fee, l2BaseFee, chainId: chainSettings.chainId }, - "Got L1 fee", - ); - - if (l1Fee === 0n) { - throw new BadRequestException( - "Failed to estimate 
L1 fee. Received 0", - ); - } - - return { l1GasUsedInTermsOfL2: l1Fee / l2BaseFee, l1Fee }; - } - - case "arbitrum": { - this.logger.trace("Estimating L1 on Arbitrum gas"); - - const arbitrumOracleContractAbi = - this.contractsService.getContractAbi("arbitrumOracle"); - - const simulation = await this.rpcManagerService.executeRequest( - chainSettings.chainId, - (chainClient) => { - return chainClient.simulateContract({ - abi: arbitrumOracleContractAbi, - address: ARBITRUM_ORACLE_ADDRESS, - functionName: "gasEstimateL1Component", - args: [entryPointV7Address, false, callData || "0x"], - }); - }, - ); - - if (simulation?.result?.length <= 0) { - throw new BadRequestException("Failed to estimate L1 gas"); - } - - const gasEstimateForL1 = simulation.result[0]; - const baseFee = simulation.result[1]; - const l1BaseFeeEstimate = simulation.result[2]; - - this.logger.trace( - { gasEstimateForL1, baseFee, l1BaseFeeEstimate }, - "Got L1 gas components on Arb based chain", - ); - - /** - * Formula taken from here - * https://github.com/OffchainLabs/arbitrum-tutorials/blob/master/packages/gas-estimation/scripts/exec.ts - */ - const parentChainCost = gasEstimateForL1 * baseFee; - const parentChainEstimatedPrice = l1BaseFeeEstimate * 16n; - - const parentChainSize = - parentChainEstimatedPrice === 0n - ? 0n - : parentChainCost / parentChainEstimatedPrice; - - const l1Cost = parentChainEstimatedPrice * parentChainSize; - const l1GasUsedInTermsOfL2 = l1Cost / baseFee; - return { l1GasUsedInTermsOfL2, l1Fee: l1Cost }; - } - default: - return { l1GasUsedInTermsOfL2: 0n, l1Fee: 0n }; - } - } catch (error) { - this.logger.trace( - { error }, - `Failed to estimate L1 gas on ${chainSettings.name}`, - ); - throw new BadRequestException(`Failed to estimate L1 gas: ${error}`); - } - } - - getGasEstimationLookupKey(options: { - quoteType: QuoteType; - isPermitRequired?: boolean; - isRedeemDelegationRequired?: boolean; - isSafeExecutionRequired?: boolean; - }): GasEstimationLookupKey { - const { - quoteType, - isPermitRequired, - isRedeemDelegationRequired, - isSafeExecutionRequired, - } = options; - - const permitKey: "active" | "unactive" = isPermitRequired - ? "active" - : "unactive"; - - const delegationKey: "delegation" | "non-delegation" = - isRedeemDelegationRequired ? "delegation" : "non-delegation"; - - const safeExecutionKey: "safe-execution" | "non-safe-execution" = - isSafeExecutionRequired ? 
"safe-execution" : "non-safe-execution"; - - switch (quoteType) { - case "simple": - return `${quoteType}-mode`; - case "permit": - return `${permitKey}-${quoteType}-mode`; - case "onchain": - return `${quoteType}-mode`; - case "mm-dtk": - return `${delegationKey}-${quoteType}-mode`; - case "safe-sa": - return `${safeExecutionKey}-${quoteType}-mode`; - default: - throw new BadRequestException("Failed to estimate gas"); - } - } - - // Gas estimation algorithm to calculate the vgl for Smart sessions use mode - getSmartSessionUseModeVerificationGasLimit( - userOpCalldata: Hex, - sessionDetails: GrantPermissionResponse, - ) { - try { - // This fixed gas limit will be always there no matter different execution mode or number of actions or number of action policies - let verificationGasLimit = FIXED_SMART_SESSIONS_USE_MODE_GAS_LIMIT; - - let decodedCalldataArray: { - target: Address; - value: bigint; - calldata: Hex; - }[] = []; - - try { - decodedCalldataArray = - this.encoderAndDecoderService.decodeERC7579Calldata(userOpCalldata); - } catch { - throw new BadRequestException( - "Failed to decode userop calldata to estimate gas for Smart Sessions use mode", - ); - } - - if (decodedCalldataArray.length === 0) { - throw new BadRequestException( - "Failed to estimate gas for Smart Sessions use mode", - ); - } - - const isBatch = decodedCalldataArray.length > 1; - - // Based on execution calldata (batch vs single), the approximate decoding calldata gas limit will be added - verificationGasLimit += isBatch - ? APPROXIMATE_SMART_SESSION_BATCH_CALLDATA_DECODING_GAS_LIMIT - : APPROXIMATE_SMART_SESSION_SINGLE_CALLDATA_DECODING_GAS_LIMIT; - - const hasActionAlreadySeen = new Set(); - - for (const decodedCalldata of decodedCalldataArray) { - // Contract address where permission was enabled via policy - const actionTarget = decodedCalldata.target.toLowerCase(); - // Function selector where permission was enabled via policy - const actionTargetSelector = decodedCalldata.calldata - .slice(0, 10) - .toLowerCase(); - - // Fetch the action to calculate gas limits - const [action] = - sessionDetails.enableSessionData.enableSession.sessionToEnable.actions.filter( - (action) => { - return ( - action.actionTarget.toLowerCase() === actionTarget && - action.actionTargetSelector.toLowerCase() === - actionTargetSelector - ); - }, - ); - - // If the action doesn't exists in sessionDetails, skip it. Reverts will be handled by simulation and no need to worry about this case here - if (action) { - const actionKey = `${actionTarget}::${actionTargetSelector}`; - - // Sometime the gas is being low drastically due to warm access. So we track this flag to toggle between cold vs warm storage access - const isColdAccess = !hasActionAlreadySeen.has(actionKey); - - // Fixed gas limits added for check action flow - verificationGasLimit += FIXED_SMART_SESSION_CHECK_ACTION_GAS_LIMIT; - - // Fixed gas limits added for check action policy flow - verificationGasLimit += isColdAccess - ? FIXED_SMART_SESSION_COLD_CHECK_ACTION_POLICY_GAS_LIMIT - : FIXED_SMART_SESSION_WARM_CHECK_ACTION_POLICY_GAS_LIMIT; - - const actionPolicyLength = BigInt(action.actionPolicies.length); - - // Fixed get policy gas limit will be multiplied by number of policies per action - verificationGasLimit += - (isColdAccess - ? FIXED_SMART_SESSIONS_COLD_GET_POLICY_GAS_LIMIT - : FIXED_SMART_SESSIONS_WARM_GET_POLICY_GAS_LIMIT) * - actionPolicyLength; - - for (const { policy } of action.actionPolicies) { - const policyGasLimit = isColdAccess - ? 
FIXED_SMART_SESSIONS_POLICY_COLD_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE[ - policy.toLowerCase() - ] - : FIXED_SMART_SESSIONS_POLICY_WARM_ACCESS_GAS_ESTIMATION_LOOKUP_TABLE[ - policy.toLowerCase() - ]; - - // If there is no policy match ? default 20k gas will be used and this is approximate safe value - // This is to handle unknown policies being used in the smart sessions flow if any - const defaultPolicyGasLimit = isColdAccess ? 20_000n : 10_000n; - - verificationGasLimit += policyGasLimit || defaultPolicyGasLimit; - } - - // Mark the action as already seen to reduce gas limit which has warm storage access - hasActionAlreadySeen.add(actionKey); - } - } - - return verificationGasLimit; - } catch (error) { - this.logger.error( - { error }, - "Failed to estimate gas for Smart Sessions use mode", - ); - throw new BadRequestException( - "Failed to estimate gas for Smart Sessions use mode", - ); - } - } -} diff --git a/src/modules/gas-estimator/index.ts b/src/modules/gas-estimator/index.ts index a22eefda..e6d8ea91 100644 --- a/src/modules/gas-estimator/index.ts +++ b/src/modules/gas-estimator/index.ts @@ -1,4 +1,3 @@ export * from "./interfaces"; -export * from "./gas-estimator.service"; export * from "./gas-estimator-v2.service"; export * from "./constants"; diff --git a/src/modules/gas-manager/gas-manager.service.ts b/src/modules/gas-manager/gas-manager.service.ts new file mode 100644 index 00000000..834d743a --- /dev/null +++ b/src/modules/gas-manager/gas-manager.service.ts @@ -0,0 +1,354 @@ +import { BadRequestException, withTrace } from "@/common"; +import { Logger } from "@/core/logger"; +import { RpcManagerService } from "@/rpc-manager"; +import { Service } from "typedi"; +import { + type GasInfo, + type GasManagerConfig, + type GasManagerEventHandler, + type GasManagerEventHandlers, + type GasManagerEvents, +} from "./interfaces"; + +@Service() +export class GasManagerService { + private gasInfoFetchingInProgress: Map = new Map(); + private gasInfosByChain: Map = new Map(); + private readonly eventHandlers: GasManagerEventHandlers = {}; + private sync = false; + + constructor( + private readonly logger: Logger, + private readonly rpcManagerService: RpcManagerService, + ) { + logger.setCaller(GasManagerService); + } + + /** + * Starts gas info fetch intervals for all provided chain configs. + * @param configs List of chain/configs to fetch gas info for. + */ + async initialize(configs: GasManagerConfig[], sync = false) { + // This should be only enabled on master thread so it will be synced to worker threads + this.sync = sync; + + await Promise.all( + configs.map((config) => + this.fetchGasInfoOnInterval(config.chainId, config.gasFetchInterval), + ), + ); + + this.logger.trace( + { + configs, + }, + "Gas Manager initialized", + ); + } + + /** + * Retrieves the latest known gas price for the given chainId. + * Throws if no info is available. + * @param chainId Chain identifier. + * @returns Gas price. + */ + async getLatestGasPrice(chainId: string, forceFetch = false) { + if (forceFetch) { + await this.fetchGasInfo(chainId); + } + + const gasInfo = this.gasInfosByChain.get(chainId); + + if (!gasInfo) { + throw new BadRequestException("Failed to fetch gas price"); + } + + if (gasInfo.gasPrice === null) { + throw new BadRequestException("Failed to fetch gas price"); + } + + return gasInfo.gasPrice; + } + + /** + * Retrieves the latest known fee history for the given chainId. + * Throws if no info is available. + * @param chainId Chain identifier. + * @returns Fee history. 
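
The `./interfaces` module the new GasManagerService imports from is not included in this diff. The shapes below are inferred from how the service uses them (and from the fee-history object returned by the test mock), so the exact field types should be read as assumptions:

```typescript
// Inferred from GasManagerService usage; the real ./interfaces file is not in this diff.
export interface GasManagerConfig {
  chainId: string;
  gasFetchInterval?: number; // milliseconds between background fetches
}

export interface GasInfo {
  gasPrice: bigint | null; // legacy (non-EIP-1559) chains
  feeHistory: { baseFeePerGas: bigint[]; reward?: bigint[][] } | null; // EIP-1559 chains
  maxPriorityFee: bigint | null; // already includes the 10% buffer applied on fetch
}

// Payload emitted on "sync" and forwarded to cluster/thread workers by the master bootstrap.
export interface GasInfoSyncPayload {
  chainId: string;
  gasInfo: GasInfo;
}

export type GasManagerEvents = "sync";
export type GasManagerEventHandler = (payload: GasInfoSyncPayload) => void;
export type GasManagerEventHandlers = Partial<
  Record<GasManagerEvents, GasManagerEventHandler>
>;
```
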
+ */ + async getLatestFeeHistory(chainId: string, forceFetch = false) { + if (forceFetch) { + await this.fetchGasInfo(chainId); + } + + const gasInfo = this.gasInfosByChain.get(chainId); + + if (!gasInfo) { + throw new BadRequestException("Failed to fetch fee history"); + } + + if (gasInfo.feeHistory === null) { + throw new BadRequestException("Failed to fetch fee history"); + } + + return gasInfo.feeHistory; + } + + /** + * Retrieves the latest known max priority fee for the given chainId. + * Throws if no info is available. + * @param chainId Chain identifier. + * @returns Max priority fee. + */ + async getLatestMaxPriorityFee(chainId: string, forceFetch = false) { + if (forceFetch) { + await this.fetchGasInfo(chainId); + } + + const gasInfo = this.gasInfosByChain.get(chainId); + + if (!gasInfo) { + throw new BadRequestException("Failed to fetch max priority fee"); + } + + if (gasInfo.maxPriorityFee === null) { + throw new BadRequestException("Failed to fetch max priority fee"); + } + + return gasInfo.maxPriorityFee; + } + + /** + * Fetches latest gas information for a chain and updates the state. + * Ensures only one fetch at a time per chain. + * @param chainId Chain identifier. + */ + private async fetchGasInfo(chainId: string) { + try { + const isInProgress = this.gasInfoFetchingInProgress.get(chainId); + if (isInProgress) return; + + this.gasInfoFetchingInProgress.set(chainId, true); + + const [gasPrice, feeHistory, maxPriorityFee] = await Promise.all([ + this.getGasPrice(chainId), + this.getFeeHistroy(chainId), + this.getMaxPriorityFee(chainId), + ]); + + const gasInfo: GasInfo = { + gasPrice, + feeHistory, + maxPriorityFee, + }; + + this.logger.trace({ gasInfo, chainId }, "Fetched gas info for chain"); + + this.setGasInfo(chainId, gasInfo); + } catch (error) { + this.logger.error({ error, chainId }, "Failed to fetch gas info"); + } finally { + this.gasInfoFetchingInProgress.set(chainId, false); + } + } + + /** + * Starts an interval timer to fetch gas info repeatedly for a given chain. + * Immediately triggers a fetch, then continues at specified interval. + * @param chainId Chain identifier. + * @param intervalMs Milliseconds between fetches (default 2000). + * @returns A function to stop the interval timer. + */ + private async fetchGasInfoOnInterval( + chainId: string, + intervalMs = 2000, + ): Promise<() => void> { + // Immediately fetch once, then schedule subsequent fetches + await this.fetchGasInfo(chainId); + const intervalId = setInterval( + () => this.fetchGasInfo(chainId), + intervalMs, + ); + + // Return a function to clear the interval (stop fetching) + return () => { + clearInterval(intervalId); + }; + } + + /** + * Fetches the current legacy gas price for a given chain. + * @param chainId Chain identifier. + * @returns Gas price or null if fetching fails. + */ + private async getGasPrice(chainId: string) { + try { + return await withTrace( + "gasManager.legacyGasPrice", + async () => + await this.rpcManagerService.executeRequest( + chainId, + (chainClient) => { + return chainClient.getGasPrice(); + }, + ), + { + chainId, + }, + )(); + } catch (error) { + this.logger.error( + { + chainId, + error, + }, + "Failed to fetch the gas price", + ); + return null; + } + } + + /** + * Fetches the current fee history for a given chain. + * @param chainId Chain identifier. + * @param rewardPercentiles Percentiles for reward calculation. Default [50]. + * @returns Fee history result or null if fetching fails. 
+ */ + private async getFeeHistroy( + chainId: string, + // 50th percentile for "standard" mode + rewardPercentiles: number[] = [50], + ) { + try { + return await withTrace( + "gasManager.getFeeHistory", + async () => + await this.rpcManagerService.executeRequest( + chainId, + (chainClient) => { + return chainClient.getFeeHistory({ + blockCount: 10, // looking at last 10 blocks + blockTag: "pending", + rewardPercentiles, + }); + }, + ), + { chainId }, + )(); + } catch (error) { + this.logger.error( + { + chainId, + error, + }, + "Failed to fetch the fee history", + ); + return null; + } + } + + /** + * Fetches the current max priority fee for a given chain and applies a 10% buffer. + * @param chainId Chain identifier. + * @returns Max priority fee (with buffer) or null if fetching fails. + */ + private async getMaxPriorityFee(chainId: string) { + try { + const maxPriorityFee = await withTrace( + "gasManager.maxPriorityFeePerGas", + async () => + await this.rpcManagerService.executeRequest( + chainId, + (chainClient) => { + return chainClient.request({ + method: "eth_maxPriorityFeePerGas", + }); + }, + ), + { + chainId, + }, + )(); + + return (BigInt(maxPriorityFee as string) * 110n) / 100n; // 10% buffer + } catch (error) { + this.logger.error( + { + chainId, + error, + }, + "Failed to fetch the max priority fee", + ); + return null; + } + } + + /** + * Updates the stored gas info for a chain. Maintains existing values if new data is null. + * @param chainId Chain identifier. + * @param newGasInfo GasInfo object containing latest data. + */ + private setGasInfo(chainId: string, newGasInfo: GasInfo): void { + const previousGasInfo = this.gasInfosByChain.get(chainId); + + if (!previousGasInfo) { + this.gasInfosByChain.set(chainId, newGasInfo); + return; + } + + // If somehow the gas fields of the new gas info value is null, we will stick with old value itself + const updatedGasInfo = { + gasPrice: + newGasInfo.gasPrice !== null + ? newGasInfo.gasPrice + : previousGasInfo.gasPrice, + feeHistory: + newGasInfo.feeHistory !== null + ? newGasInfo.feeHistory + : previousGasInfo.feeHistory, + maxPriorityFee: + newGasInfo.maxPriorityFee !== null + ? 
newGasInfo.maxPriorityFee
+          : previousGasInfo.maxPriorityFee,
+    };
+
+    this.gasInfosByChain.set(chainId, updatedGasInfo);
+
+    // Syncing is enabled only on the master thread, which forwards the update to the worker threads
+    if (this.sync) {
+      this.emit("sync", { chainId, gasInfo: updatedGasInfo });
+    }
+  }
+
+  syncGasInfo(chainId: string, newGasInfo: GasInfo) {
+    this.setGasInfo(chainId, newGasInfo);
+  }
+
+  on<K extends keyof GasManagerEvents>(
+    event: K,
+    handler: GasManagerEventHandler<GasManagerEvents[K]>,
+  ) {
+    if (!this.eventHandlers[event]) {
+      this.eventHandlers[event] = [];
+    }
+
+    this.eventHandlers[event].push(handler);
+
+    return this;
+  }
+
+  private async emit<K extends keyof GasManagerEvents>(
+    event: K,
+    payload: unknown,
+  ) {
+    const handlers = this.eventHandlers[event];
+
+    if (!handlers) {
+      return;
+    }
+
+    for (const handler of handlers) {
+      await handler(payload as GasManagerEvents[K]);
+    }
+  }
+}
diff --git a/src/modules/gas-manager/index.ts b/src/modules/gas-manager/index.ts
new file mode 100644
index 00000000..e2b602bc
--- /dev/null
+++ b/src/modules/gas-manager/index.ts
@@ -0,0 +1,2 @@
+export * from "./gas-manager.service";
+export * from "./interfaces";
diff --git a/src/modules/gas-manager/interfaces.ts b/src/modules/gas-manager/interfaces.ts
new file mode 100644
index 00000000..3f991423
--- /dev/null
+++ b/src/modules/gas-manager/interfaces.ts
@@ -0,0 +1,22 @@
+import type { GetFeeHistoryReturnType, GetGasPriceReturnType } from "viem";
+
+export type GasManagerConfig = {
+  chainId: string;
+  gasFetchInterval: number;
+};
+
+export type GasInfo = {
+  gasPrice: GetGasPriceReturnType | null;
+  feeHistory: GetFeeHistoryReturnType | null;
+  maxPriorityFee: bigint | null;
+};
+
+export type GasManagerEvents = {
+  sync: { chainId: string; gasInfo: GasInfo };
+};
+
+export type GasManagerEventHandler<T> = (payload: T) => void | Promise<void>;
+
+export type GasManagerEventHandlers = {
+  [K in keyof GasManagerEvents]?: GasManagerEventHandler<GasManagerEvents[K]>[];
+};
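For illustration only (this block is not part of the diff): a consumer of the gas manager cache would typically combine the fee history and the priority fee into EIP-1559 fee fields. The buildFeeFields helper and the 2x base-fee headroom policy below are assumptions, not code from this change.

// Illustrative consumer of GasManagerService; not part of this diff.
async function buildFeeFields(gasManager: GasManagerService, chainId: string) {
  const [feeHistory, maxPriorityFeePerGas] = await Promise.all([
    gasManager.getLatestFeeHistory(chainId),
    gasManager.getLatestMaxPriorityFee(chainId),
  ]);

  // The last baseFeePerGas entry corresponds to the pending block.
  const baseFee = feeHistory.baseFeePerGas.at(-1) ?? 0n;

  return {
    maxPriorityFeePerGas,
    // Assumed policy: 2x base-fee headroom plus the priority fee.
    maxFeePerGas: baseFee * 2n + maxPriorityFeePerGas,
  };
}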
diff --git a/src/modules/nonce-manager/index.ts b/src/modules/nonce-manager/index.ts
new file mode 100644
index 00000000..2fa9e301
--- /dev/null
+++ b/src/modules/nonce-manager/index.ts
@@ -0,0 +1 @@
+export * from "./nonce-manager.service";
diff --git a/src/modules/nonce-manager/nonce-manager.service.ts b/src/modules/nonce-manager/nonce-manager.service.ts
new file mode 100644
index 00000000..52e06e66
--- /dev/null
+++ b/src/modules/nonce-manager/nonce-manager.service.ts
@@ -0,0 +1,242 @@
+import { Logger } from "@/core/logger";
+import { RpcManagerService } from "@/rpc-manager";
+import { Service } from "typedi";
+import { type Address } from "viem";
+
+/**
+ * Nonce manager context: MUST READ before changing anything in this file.
+ *
+ * The nonce manager fetches the latest nonce from the RPC provider and caches it in memory per worker address and chain id.
+ * It also tracks the last used nonce per worker address and chain id.
+ * For every transaction it first tries to serve the nonce from the cache. If no nonce is cached, or the cached value equals the last used nonce,
+ * the cache is considered out of sync and the nonce is fetched from the RPC provider.
+ *
+ * Advantages: this avoids the nonce-fetching RPC call most of the time, which reduces latency in the execution phase.
+ *
+ * Edge cases:
+ * 1. If the nonce is out of sync, the nonce manager fetches the latest nonce from the RPC provider and updates the cache.
+ *
+ * 2. If the nonce is out of sync and the last used / latest nonces were not updated properly, an already used nonce is reused for the next transaction,
+ *    which fails with a "nonce too low" error. With immediate retry mode, the nonce is force fetched whenever the error is nonce related, which yields a
+ *    proper nonce and also refreshes the cache for the next transaction.
+ *
+ * 3. If there are RPC failures, or gas/nonce configuration issues, the transaction fails and is never broadcast, so the nonce is never marked as used.
+ *
+ * 4. If the txReceipt reports a revert status, the transaction was reverted on chain but the nonce was still consumed. So we always mark the nonce as used
+ *    whenever a tx receipt exists, irrespective of its status.
+ *
+ * 5. IMPORTANT: if a nonce is somehow marked as used but the transaction is never broadcast, the cached latest nonce becomes a future nonce. As a consequence,
+ *    the next broadcast transaction carries a future nonce and is stuck in the mempool waiting on the sequential nonce dependency. This never surfaces as an
+ *    error: the RPC node accepts the transaction and returns a txHash. When we then try to fetch the txReceipt for that txHash, it will most likely fail with a
+ *    tx receipt timeout error, so the previous tx hash is stored in the job and retried later.
+ *
+ * From here there are three cases:
+ * A) If the same job is retried immediately by the same worker, the txReceipt is not yet available during the retry. Because the nonce was force synced
+ *    before the prevTxHash was recorded, the nonce manager is already in sync and the retry executes properly. After that tx, the earlier transaction with
+ *    the future nonce becomes valid and is effectively a double-spend attempt, but it fails on AA nonce duplication, which is what we want.
+ *
+ * B) If the same worker picks up a different job, it already has a proper nonce because the nonce was force synced before the prevTxHash was added to the
+ *    previous job, so the new job's transaction executes correctly. After that tx, the previous tx with the future nonce becomes valid and is executed by
+ *    the RPC nodes in the background, but its status is not yet synced in the MEE node. When the stuck job is retried by any worker, the prevTx receipt is
+ *    fetched before the retry, which syncs the tx status and completes the job.
+ *
+ * C) If the stuck tx/job is picked up by a different worker, the prevTxHash lookup times out because the tx is still stuck in the mempool, so the new worker
+ *    retries the job with a proper nonce and the tx is executed. After that, the previous transaction (same calldata) with the other worker's future nonce
+ *    becomes valid and is effectively a double-spend attempt, but it fails on AA nonce duplication, which is what we want.
+ *
+ * NOTE: there are many permutations of edge case #5; all of them are handled in the same way, just in different contexts.
+ */
+
+// IMPORTANT NOTE:
+// This nonce manager service assumes a non-concurrent environment: each EOA worker executes only one transaction at a time.
+// Since there is no concurrency within a single EOA worker, concurrency and conflict management are unnecessary in this service.
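For illustration only (this sketch is not part of the diff): the call pattern around a transaction send described above looks roughly like the following. The sendTransaction callback and the isNonceError check are hypothetical stand-ins for the executor's own wiring; only getNonce and markNonceAsUsed come from the service below.

// Illustrative usage sketch; not part of this diff.
async function sendWithManagedNonce(
  nonceManager: NonceManagerService,
  workerAddress: Address,
  chainId: string,
  sendTransaction: (nonce: number) => Promise<string>,
  isNonceError: (error: unknown) => boolean,
) {
  let nonce = await nonceManager.getNonce(workerAddress, chainId);

  try {
    const txHash = await sendTransaction(nonce);
    // Broadcast succeeded, so the nonce is consumed (per edge case 4, reverts still consume it).
    nonceManager.markNonceAsUsed(workerAddress, chainId, nonce);
    return txHash;
  } catch (error) {
    if (isNonceError(error)) {
      // Edge case 2: force fetch resyncs the cache and yields a usable nonce for the retry.
      nonce = await nonceManager.getNonce(workerAddress, chainId, true);
      const txHash = await sendTransaction(nonce);
      nonceManager.markNonceAsUsed(workerAddress, chainId, nonce);
      return txHash;
    }
    // RPC/gas/nonce configuration failures: nothing was broadcast, so the nonce stays unused (edge case 3).
    throw error;
  }
}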
+@Service()
+export class NonceManagerService {
+  // Tracks the next unused nonce for each (chainId, workerAddress) pair.
+  private unusedNonceTrackerByChain: Map<string, Map<string, number>> =
+    new Map();
+
+  // Tracks the latest used nonce for each (chainId, workerAddress) pair.
+  private usedNonceTrackerByChain: Map<string, Map<string, number>> = new Map();
+
+  constructor(
+    private readonly logger: Logger,
+    private readonly rpcManagerService: RpcManagerService,
+  ) {
+    logger.setCaller(NonceManagerService);
+  }
+
+  /**
+   * Returns the next nonce for a worker address on a given chain.
+   * - If forceFetch: always fetch from RPC.
+   * - Otherwise: use the cached unused nonce unless it is out of sync, in which case fetch from RPC.
+   * @param workerAddress Address to get nonce for.
+   * @param chainId Chain id.
+   * @param forceFetch Force RPC fetch?
+   * @returns Next nonce to use.
+   */
+  async getNonce(
+    workerAddress: Address,
+    chainId: string,
+    forceFetch?: boolean,
+  ): Promise<number> {
+    if (forceFetch) {
+      return await this.getNonceFromRpc(workerAddress, chainId);
+    }
+
+    let unusedNonce = this.getUnusedNonce(workerAddress, chainId);
+
+    if (unusedNonce !== undefined) {
+      this.logger.trace(
+        {
+          workerAddress,
+          chainId,
+          unusedNonce,
+        },
+        "Fetched unused nonce for the worker address from cache",
+      );
+
+      const usedNonce = this.getUsedNonce(workerAddress, chainId);
+
+      // If the "unused" nonce matches the last used nonce, the cache might be
+      // out of sync, so fetch a fresh nonce via an RPC call.
+      if (unusedNonce === usedNonce) {
+        this.logger.trace(
+          {
+            workerAddress,
+            chainId,
+            unusedNonce,
+            usedNonce,
+          },
+          "Unused nonce for the worker is out of sync. Fetching the new nonce from RPC provider",
+        );
+        unusedNonce = await this.getNonceFromRpc(workerAddress, chainId);
+      }
+    } else {
+      // If not found in cache, load from RPC and cache it.
+      unusedNonce = await this.getNonceFromRpc(workerAddress, chainId);
+    }
+
+    return unusedNonce;
+  }
+
+  /**
+   * Requests the latest nonce for the worker address from the chain RPC and
+   * caches the fetched nonce as the unused nonce.
+   * @param workerAddress The address of the sender
+   * @param chainId Chain identifier
+   * @returns The nonce returned by the chain client for the current address
+   */
+  async getNonceFromRpc(
+    workerAddress: Address,
+    chainId: string,
+  ): Promise<number> {
+    const nonce = await this.rpcManagerService.executeRequest(
+      chainId,
+      (chainClient) => {
+        return chainClient.getTransactionCount({ address: workerAddress });
+      },
+    );
+
+    this.logger.trace(
+      {
+        workerAddress,
+        chainId,
+        nonce,
+      },
+      "Fetched nonce from RPC for the worker",
+    );
+
+    this.setUnusedNonce(workerAddress, chainId, nonce);
+
+    return nonce;
+  }
+
+  /**
+   * Marks a nonce as used for the given worker and advances the unused nonce to the next value.
+   * Should be called after a transaction is sent.
+   * @param workerAddress The address of the sender
+   * @param chainId Chain identifier
+   * @param nonce The nonce that was just used
+   */
+  markNonceAsUsed(
+    workerAddress: Address,
+    chainId: string,
+    nonce: number,
+  ): void {
+    // Note: calling this function multiple times with the same nonce value during error handling
+    // behaves the same as calling it once, because the current nonce is passed explicitly instead
+    // of auto-incrementing from the last known nonce.
+
+    // Mark the current nonce as used
+    this.setUsedNonce(workerAddress, chainId, nonce);
+    // Advance the unused nonce to the next fresh value
+    this.setUnusedNonce(workerAddress, chainId, nonce + 1);
+  }
+
+  /**
+   * Gets the last used nonce for a given worker address and chain ID from the tracker.
+   * Creates the inner map if not present.
+ * @param workerAddress The address of the sender + * @param chainId Chain identifier + * @returns The last used nonce for this worker+chain, or undefined + */ + getUsedNonce(workerAddress: Address, chainId: string): number | undefined { + let usedNonceTracker = this.usedNonceTrackerByChain.get(chainId); + + if (!usedNonceTracker) { + usedNonceTracker = new Map(); + this.usedNonceTrackerByChain.set(chainId, usedNonceTracker); + } + + return usedNonceTracker.get(workerAddress.toLowerCase()); + } + + /** + * Gets the next unused nonce for a given worker address and chain ID from the tracker. + * Creates the inner map if not present. + * @param workerAddress The address of the sender + * @param chainId Chain identifier + * @returns The next available unused nonce, or undefined + */ + getUnusedNonce(workerAddress: Address, chainId: string): number | undefined { + let unusedNonceTracker = this.unusedNonceTrackerByChain.get(chainId); + + if (!unusedNonceTracker) { + unusedNonceTracker = new Map(); + this.unusedNonceTrackerByChain.set(chainId, unusedNonceTracker); + } + + return unusedNonceTracker.get(workerAddress.toLowerCase()); + } + + /** + * Sets the last used nonce for a worker address on a given chain, creating the inner map if missing. + * @param workerAddress The address of the sender + * @param chainId Chain identifier + * @param nonce The nonce to set as last used + */ + setUsedNonce(workerAddress: Address, chainId: string, nonce: number): void { + let usedNonceTracker = this.usedNonceTrackerByChain.get(chainId); + + if (!usedNonceTracker) { + usedNonceTracker = new Map(); + this.usedNonceTrackerByChain.set(chainId, usedNonceTracker); + } + + usedNonceTracker.set(workerAddress.toLowerCase(), nonce); + } + + /** + * Sets the next unused nonce for a worker address on a given chain, creating the inner map if missing. 
+ * @param workerAddress The address of the sender + * @param chainId Chain identifier + * @param nonce The nonce to set as next unused + */ + setUnusedNonce(workerAddress: Address, chainId: string, nonce: number): void { + let unusedNonceTracker = this.unusedNonceTrackerByChain.get(chainId); + + if (!unusedNonceTracker) { + unusedNonceTracker = new Map(); + this.unusedNonceTrackerByChain.set(chainId, unusedNonceTracker); + } + + unusedNonceTracker.set(workerAddress.toLowerCase(), nonce); + } +} diff --git a/src/modules/quotes/quotes.service.ts b/src/modules/quotes/quotes.service.ts index ecebc2f5..eee8d84f 100644 --- a/src/modules/quotes/quotes.service.ts +++ b/src/modules/quotes/quotes.service.ts @@ -433,10 +433,14 @@ export class QuotesService { paymentInfo.token, ); - const [paymentTokenPrice, paymentTokenDecimals] = await Promise.all([ - this.priceFeedsService.getPaymentTokenPrice(paymentToken), - this.priceFeedsService.getPaymentTokenDecimals(paymentToken), - ]); + const [paymentTokenPrice, paymentTokenDecimals] = await withTrace( + "paymentToken.getTokenPriceAndDecimals", + async () => + await Promise.all([ + this.priceFeedsService.getPaymentTokenPrice(paymentToken), + this.priceFeedsService.getPaymentTokenDecimals(paymentToken), + ]), + )(); tokenAmount = this.paymentService.dollarCostToTokenAmount( paymentInfo.token, @@ -532,18 +536,25 @@ export class QuotesService { packMeeUserOp(meeUserOp), ); - const merkleTree = this.merkleTreeService.createMerkleTree( - packedMeeUserOps, - quoteType === "simple", - isEIP712SupportedMeeVersion, - ); + const merkleTree = await withTrace( + "requestQuote.createMerkleTree", + async () => + await this.merkleTreeService.createMerkleTree( + packedMeeUserOps, + quoteType === "simple", + isEIP712SupportedMeeVersion, + ), + )(); const quoteResponse: MeeQuote = { hash: merkleTree.root as Hex, node: this.nodeService.address, // New slashing algorithm implementation will move this quote level commitment to execution level commitment // So that the node don't blindly give commitment to the supertransaction execution - commitment: await this.nodeService.signMessage(merkleTree.root), + commitment: await withTrace( + "requestQuote.signMessage", + async () => await this.nodeService.signMessage(merkleTree.root), + )(), paymentInfo: { ...paymentInfo, tokenAmount: round( @@ -1131,33 +1142,44 @@ export class QuotesService { this.logger.trace( { requestId, superTransactionHash: hash }, - "Simulating validation for the payment userOp", + "Simulating the payment userOp", ); const paymentMeeUserOp = signedPackedMeeUserOps.at( 0, ) as SignedPackedMeeUserOp; - const { sigFailed, validAfter, validUntil } = await withTrace( - "exec.simulateSimulateHandleOp", - async () => { - return await this.entryPointService.simulateSimulateHandleOp( - paymentMeeUserOp, - { - retries: 20, - useStorage: false, - }, - ); - }, - )(); + // ethCall and debugTraceCall simulations are concurrent now. This will reduce the latency a lot + const [{ sigFailed, validAfter, validUntil }, execFailedErr] = + await Promise.all([ + withTrace("exec.simulateSimulateHandleOp", async () => { + return await this.entryPointService.simulateSimulateHandleOp( + paymentMeeUserOp, + { + retries: 20, + useStorage: false, + }, + ); + })(), + withTrace("exec.simulateHandleOps", async () => { + return await this.entryPointService.simulateHandleOps( + paymentMeeUserOp, + { + retries: signatureType === MeeSignatureType.ON_CHAIN ? 
20 : 0, + useStorage: false, + }, + ); + })(), + ]); this.logger.trace( { requestId, superTransactionHash: hash, validationResult: { sigFailed, validAfter, validUntil }, + executionValidationResult: { execFailedErr }, }, - "Payment userOp simulation validation result is fetched", + "Payment userOp simulation result is fetched", ); if (sigFailed) { @@ -1203,19 +1225,6 @@ export class QuotesService { ); } - const execFailedErr = await withTrace( - "exec.simulateHandleOps", - async () => { - return await this.entryPointService.simulateHandleOps( - paymentMeeUserOp, - { - retries: signatureType === MeeSignatureType.ON_CHAIN ? 20 : 0, - useStorage: false, - }, - ); - }, - )(); - if (execFailedErr.isError) { throw new BadRequestException( "Your supertransaction gas payment userOp has been reverted", @@ -1246,7 +1255,10 @@ export class QuotesService { if (isCleanUpUserOp) userOpsForceExecuteByChain[chainId] = true; } - await Promise.all( + // The userOp simulation execution scheduling is done in the background now. This is inspired from RPC nodes implementation logic + // where sometime the transaction is accepted and returns txHash but in worst case the tx is not broadcasted internally. + // This is a optimistic scheduling which reduces a latency a bit + Promise.all( entries(userOpsByChain).map(async ([chainId, meeUserOps]) => this.simulatorService.addJobs( chainId, @@ -1258,7 +1270,16 @@ export class QuotesService { }), ), ), - ); + ).catch((error) => { + this.logger.error( + { + error, + requestId, + superTransactionHash: hash, + }, + "Failed to schedule the supertransaction for simulation and execution phase", + ); + }); return { hash, @@ -1317,6 +1338,17 @@ export class QuotesService { } } + if (!userOp.isConfirmed) { + this.logger.info( + { + executionStatus, + executionData, + hash, + }, + "TX status for explorer", + ); + } + return { ...omit(rest, ["simulationAttempts", "batchHash"]), executionStatus, diff --git a/src/modules/simulator/simulation.service.ts b/src/modules/simulator/simulation.service.ts index f28d07fb..a6746f8b 100644 --- a/src/modules/simulator/simulation.service.ts +++ b/src/modules/simulator/simulation.service.ts @@ -1,4 +1,3 @@ -import { ChainsService } from "@/chains"; import { BadRequestException, packUint128Pair, withTrace } from "@/common"; import { TokenWithPermitAbi } from "@/contracts/resources/erc20-with-permit"; import { type ConfigType, InjectConfig } from "@/core/config"; @@ -11,7 +10,7 @@ import { import { EntryPointService } from "@/entry-point"; import { GAS_ESTIMATION_LOOKUP_TABLE, - GasEstimatorService, + GasEstimatorServiceV2, NONCE_VALIDATION_AND_UPDATION_GAS_LIMIT, } from "@/gas-estimator"; import { gasEstimatorConfig } from "@/gas-estimator/gas-estimator.config"; @@ -72,12 +71,11 @@ import { resolveStateOverrides } from "./utils"; export class SimulationService { constructor( private readonly userOpService: UserOpService, - private readonly gasEstimatorService: GasEstimatorService, + private readonly gasEstimatorService: GasEstimatorServiceV2, private readonly encoderAndDecoderService: EncoderAndDecoderService, private readonly entryPointService: EntryPointService, private readonly storageService: StorageService, private readonly nodeService: NodeService, - private readonly chainsService: ChainsService, private readonly tokenSlotDetectionService: TokenSlotDetectionService, private readonly rpcManagerService: RpcManagerService, @InjectConfig(gasEstimatorConfig) diff --git a/src/modules/simulator/simulator.config.ts 
b/src/modules/simulator/simulator.config.ts index a90beda0..2452bdaf 100644 --- a/src/modules/simulator/simulator.config.ts +++ b/src/modules/simulator/simulator.config.ts @@ -1,5 +1,5 @@ import process from "node:process"; -import { parseNum, parseSeconds } from "@/common"; +import { parseNum } from "@/common"; import { registerConfigAs } from "@/core/config"; import { type JobOptions, parseJobBackoffType } from "@/core/queue"; @@ -18,8 +18,8 @@ export const simulatorConfig = registerConfigAs<{ process.env.SIMULATOR_QUEUE_JOB_BACKOFF_TYPE, "fixed", ), - delay: parseSeconds(process.env.SIMULATOR_QUEUE_JOB_BACKOFF_DELAY, 1, { - min: 1, + delay: parseNum(process.env.SIMULATOR_QUEUE_JOB_BACKOFF_DELAY, 75, { + min: 75, }), }, }, diff --git a/src/modules/simulator/simulator.processor.ts b/src/modules/simulator/simulator.processor.ts index 03431a4e..5b79827f 100644 --- a/src/modules/simulator/simulator.processor.ts +++ b/src/modules/simulator/simulator.processor.ts @@ -34,31 +34,7 @@ export class SimulatorProcessor implements Processor { const { chainId } = this.chainsService; // Custom user defined retry delay for long standing transactions if specified - const retryDelay = meeUserOp.executionSimulationRetryDelay || 250; - - const hasQuote = await withTrace( - "simulatorPhase.hasQuote", - async () => await this.storageService.hasUserOp(meeUserOpHash), - { - chainId, - meeUserOpHash, - }, - )(); - - if (!hasQuote) { - this.logger.error( - { - meeUserOpHash, - userOpHash, - chainId, - }, - "Failed to fetch meeUserOp quote information. Unknown meeUserOp, simulation validation is skipped", - ); - - // Direct failure without retries - // This should never happen. If it happens, there is something wrong in exec API storage mechanism - throw new UnrecoverableError(); - } + const retryDelay = meeUserOp.executionSimulationRetryDelay || 75; // Updating userOp in background to improve latency this.storageService.createUserOpCustomField( @@ -251,9 +227,47 @@ export class SimulatorProcessor implements Processor { "Simulation started", ); + let execSimulationResult: { + isError: boolean; + errorMessage: string; + } | null = null; + // Simulation validation will be attempted only once. Execution validation will be tried until the userOp timestamps expires if (simulationAttempts === 0 && !isRetryJob) { - await this.simulateValidation(job); + // During the first attempt, both ethCall and debugTraceCall simulations happens concurrently to save some latency here + // Further attempts, this block of code will be skipped and debugTraceCall will happen individually down the line + const [hasQuote, , execSimulationResultInfo] = await Promise.all([ + withTrace( + "simulatorPhase.hasQuote", + async () => await this.storageService.hasUserOp(meeUserOpHash), + { + chainId, + meeUserOpHash, + }, + )(), + this.simulateValidation(job), + this.entryPointService.simulateHandleOps(meeUserOp, { + retries: 0, + useStorage: true, + }), + ]); + + if (!hasQuote) { + this.logger.error( + { + meeUserOpHash, + userOpHash, + chainId, + }, + "Failed to fetch meeUserOp quote information. Unknown meeUserOp, simulation validation is skipped", + ); + + // Direct failure without retries + // This should never happen. 
If it happens, there is something wrong in exec API storage mechanism + throw new UnrecoverableError(); + } + + execSimulationResult = execSimulationResultInfo; } const now = unixTimestamp(); @@ -307,15 +321,18 @@ export class SimulatorProcessor implements Processor { } } - const execFailed = await this.entryPointService.simulateHandleOps( - meeUserOp, - { - retries: 0, - useStorage: true, - }, - ); + // If the execution simulation (debugTraceCall) is null, the simulation doesn't happen and we need to simulate here + if (execSimulationResult === null) { + execSimulationResult = await this.entryPointService.simulateHandleOps( + meeUserOp, + { + retries: 0, + useStorage: true, + }, + ); + } - if (execFailed.isError) { + if (execSimulationResult.isError) { const { chainSettings: { simulator: { traceCallRetryDelay }, @@ -327,7 +344,7 @@ export class SimulatorProcessor implements Processor { meeUserOpHash, userOpHash, chainId, - error: execFailed.errorMessage, + error: execSimulationResult.errorMessage, }, "Simulation execution failed, adding to the queue", ); diff --git a/src/modules/user-ops/userop.service.ts b/src/modules/user-ops/userop.service.ts index 381f3f06..02deab1b 100644 --- a/src/modules/user-ops/userop.service.ts +++ b/src/modules/user-ops/userop.service.ts @@ -3,7 +3,7 @@ import { type ChainIdLike, ChainsService, } from "@/chains"; -import { unixTimestamp } from "@/common"; +import { unixTimestamp, withTrace } from "@/common"; import { validateTimestamps } from "@/common/utils/timestamp"; import { ContractsService } from "@/contracts"; import { type ConfigType, InjectConfig } from "@/core/config"; @@ -246,7 +246,6 @@ export class UserOpService { chainId, false, true, - true, )); const maxFeePerGas = feeData.maxFeePerGas; @@ -435,10 +434,14 @@ export class UserOpService { zeroAddress, ); - const [paymentTokenPrice, paymentTokenDecimals] = await Promise.all([ - this.priceFeedsService.getPaymentTokenPrice(nativeToken), - this.priceFeedsService.getPaymentTokenDecimals(nativeToken), - ]); + const [paymentTokenPrice, paymentTokenDecimals] = await withTrace( + "nativeToken.getTokenPriceAndDecimals", + async () => + await Promise.all([ + this.priceFeedsService.getPaymentTokenPrice(nativeToken), + this.priceFeedsService.getPaymentTokenDecimals(nativeToken), + ]), + )(); this.logger.trace( { maxGasLimit, maxFeePerGas, chainId, l1Gas }, @@ -447,6 +450,7 @@ export class UserOpService { const maxGasCost = maxFeePerGas * (maxGasLimit - l1Gas.l1GasUsedInTermsOfL2) + l1Gas.l1Fee; + this.logger.trace( { maxGasCost: maxGasCost.toString(), diff --git a/src/modules/workers/interfaces.ts b/src/modules/workers/interfaces.ts index 4e33a132..e177a43b 100644 --- a/src/modules/workers/interfaces.ts +++ b/src/modules/workers/interfaces.ts @@ -1,6 +1,7 @@ import type * as cluster from "node:cluster"; import type * as workerThreads from "node:worker_threads"; import { type ChainSettings } from "@/chains"; +import { type GasManagerEvents } from "@/gas-manager"; import { type HealthCheckDataWithChains, type HealthCheckResult, @@ -51,7 +52,8 @@ export type ClusterWorkerMessage = | WorkerMessage< "rpcProviderSyncConfig", { chainId: string; rpcProviderSyncConfigs: RpcProviderSyncConfig[] } - >; + > + | WorkerMessage<"gasInfoSync", GasManagerEvents["sync"]>; export type ThreadWorkerMessage = | WorkerMessage< @@ -65,7 +67,8 @@ export type ThreadWorkerMessage = | WorkerMessage< "rpcProviderSyncConfig", { chainId: string; rpcProviderSyncConfigs: RpcProviderSyncConfig[] } - >; + > + | WorkerMessage<"gasInfoSync", 
GasManagerEvents["sync"]>; // health check diff --git a/src/workers/executor/bootstrap.ts b/src/workers/executor/bootstrap.ts index eb5558a9..a874f284 100644 --- a/src/workers/executor/bootstrap.ts +++ b/src/workers/executor/bootstrap.ts @@ -3,6 +3,7 @@ import { ChainsService } from "@/chains"; import { Logger } from "@/core/logger"; import { type Worker } from "@/core/queue"; import { ExecutorProcessor, ExecutorService } from "@/executor"; +import { GasManagerService } from "@/gas-manager"; import { NODE_ACCOUNT_TOKEN, NodeService } from "@/node"; import { RpcManagerService } from "@/rpc-manager"; import { MeeUserOpBatch } from "@/user-ops"; @@ -32,6 +33,7 @@ export async function bootstrap(options: ThreadWorkerData) { const executorService = Container.get(ExecutorService); const nodeService = Container.get(NodeService); const rpcManagerService = Container.get(RpcManagerService); + const gasManagerService = Container.get(GasManagerService); chainsService.setChainSettings(chainSettings); @@ -93,6 +95,11 @@ export async function bootstrap(options: ThreadWorkerData) { data.rpcProviderSyncConfigs, ); break; + + case "gasInfoSync": { + gasManagerService.syncGasInfo(data.chainId, data.gasInfo); + break; + } } }); diff --git a/src/workers/simulator/bootstrap.ts b/src/workers/simulator/bootstrap.ts index cd891a09..09b4946c 100644 --- a/src/workers/simulator/bootstrap.ts +++ b/src/workers/simulator/bootstrap.ts @@ -1,6 +1,7 @@ import { parentPort } from "node:worker_threads"; import { ChainsService } from "@/chains"; import { Logger } from "@/core/logger"; +import { GasManagerService } from "@/gas-manager"; import { RpcManagerService } from "@/rpc-manager"; import { SimulatorProcessor, SimulatorService } from "@/simulator"; import { @@ -31,6 +32,7 @@ export async function bootstrap(options: ThreadWorkerData) { const simulatorProcessor = Container.get(SimulatorProcessor); const simulatorService = Container.get(SimulatorService); const rpcManagerService = Container.get(RpcManagerService); + const gasManagerService = Container.get(GasManagerService); chainsService.setChainSettings(chainSettings); @@ -48,6 +50,11 @@ export async function bootstrap(options: ThreadWorkerData) { data.rpcProviderSyncConfigs, ); break; + + case "gasInfoSync": { + gasManagerService.syncGasInfo(data.chainId, data.gasInfo); + break; + } } });