diff --git a/indexer/services/vulcan/src/config.ts b/indexer/services/vulcan/src/config.ts
index 9755e5032a..cab0e73976 100644
--- a/indexer/services/vulcan/src/config.ts
+++ b/indexer/services/vulcan/src/config.ts
@@ -21,6 +21,7 @@ export const configSchema = {
   ...redisConfigSchema,
 
   BATCH_PROCESSING_ENABLED: parseBoolean({ default: true }),
+  PROCESS_FROM_BEGINNING: parseBoolean({ default: true }),
   KAFKA_BATCH_PROCESSING_COMMIT_FREQUENCY_MS: parseNumber({
     default: 3_000,
   }),
diff --git a/indexer/services/vulcan/src/helpers/kafka/kafka-controller.ts b/indexer/services/vulcan/src/helpers/kafka/kafka-controller.ts
index 7c71f72af8..5482d4bae4 100644
--- a/indexer/services/vulcan/src/helpers/kafka/kafka-controller.ts
+++ b/indexer/services/vulcan/src/helpers/kafka/kafka-controller.ts
@@ -19,7 +19,7 @@ export async function connect(): Promise<void> {
     // https://kafka.js.org/docs/consuming#a-name-from-beginning-a-frombeginning
     // Need to set fromBeginning to true, so when vulcan restarts, it will consume all messages
     // rather than ignoring the messages in queue that were produced before ender was started.
-    fromBeginning: true,
+    fromBeginning: config.PROCESS_FROM_BEGINNING,
   });
 
   if (config.BATCH_PROCESSING_ENABLED) {
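
For context on what the new `PROCESS_FROM_BEGINNING` flag controls, below is a minimal standalone sketch (not part of this PR) of how KafkaJS treats `fromBeginning`: the flag only applies when the consumer group has no committed offsets, so toggling it changes whether a fresh group starts from the earliest or the latest offset. The broker address, group id, topic name, and the `start` helper are placeholders for illustration, not values from this repository.

```ts
import { Kafka } from 'kafkajs';

// Placeholder client/group/topic names; the real service wires these from its own config.
const kafka = new Kafka({ clientId: 'vulcan-example', brokers: ['localhost:9092'] });
const consumer = kafka.consumer({ groupId: 'vulcan-example-group' });

async function start(processFromBeginning: boolean): Promise<void> {
  await consumer.connect();
  // If the group already has committed offsets, consumption resumes from them and
  // this flag is ignored. Without committed offsets:
  //   true  => start from the earliest available offset
  //   false => start from the latest offset (skip the backlog)
  await consumer.subscribe({ topic: 'example-topic', fromBeginning: processFromBeginning });
  await consumer.run({
    eachMessage: async ({ topic, partition, message }) => {
      console.log(`${topic}[${partition}] offset=${message.offset}`);
    },
  });
}

// Mirrors the intent of the new config flag: default to reading from the beginning
// unless the environment explicitly disables it.
start(process.env.PROCESS_FROM_BEGINNING !== 'false').catch(console.error);
```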