From 77a5255dd070d6a2423e9c89a4297b2662ced9de Mon Sep 17 00:00:00 2001 From: Lukasz Cwik Date: Tue, 28 Nov 2023 14:03:32 -0800 Subject: [PATCH] [IND-481] Update handlers to use SQL function only removing the Knex option This is towards updating the block processor to be executed as a SQL function. --- .../__tests__/handlers/asset-handler.test.ts | 128 +--- .../handlers/funding-handler.test.ts | 424 +++++------ .../handlers/liquidity-tier-handler.test.ts | 169 ++--- .../markets/market-create-handler.test.ts | 137 ++-- .../markets/market-modify-handler.test.ts | 113 ++- .../market-price-update-handler.test.ts | 246 +++---- .../order-fills/liquidation-handler.test.ts | 388 ++++------ .../order-fills/order-handler.test.ts | 215 +----- .../handlers/perpetual-market-handler.test.ts | 175 ++--- ...onditional-order-placement-handler.test.ts | 39 +- ...onditional-order-triggered-handler.test.ts | 36 +- .../stateful-order-placement-handler.test.ts | 37 +- .../stateful-order-removal-handler.test.ts | 36 +- .../subaccount-update-handler.test.ts | 102 +-- .../handlers/transfer-handler.test.ts | 687 +++++++----------- .../handlers/update-clob-pair-handler.test.ts | 95 +-- .../handlers/update-perpetual-handler.test.ts | 91 +-- indexer/services/ender/src/config.ts | 42 -- .../abstract-stateful-order-handler.ts | 81 --- .../ender/src/handlers/asset-handler.ts | 34 - .../ender/src/handlers/funding-handler.ts | 111 --- .../src/handlers/liquidity-tier-handler.ts | 44 -- .../handlers/markets/market-create-handler.ts | 39 - .../handlers/markets/market-modify-handler.ts | 59 +- .../markets/market-price-update-handler.ts | 98 --- .../abstract-order-fill-handler.ts | 331 +-------- .../order-fills/liquidation-handler.ts | 111 +-- .../src/handlers/order-fills/order-handler.ts | 124 +--- .../src/handlers/perpetual-market-handler.ts | 63 +- .../conditional-order-placement-handler.ts | 45 +- .../conditional-order-triggered-handler.ts | 40 - .../stateful-order-placement-handler.ts | 42 -- .../stateful-order-removal-handler.ts | 19 - .../src/handlers/subaccount-update-handler.ts | 415 +---------- .../ender/src/handlers/transfer-handler.ts | 124 +--- .../src/handlers/update-clob-pair-handler.ts | 53 -- .../src/handlers/update-perpetual-handler.ts | 47 -- 37 files changed, 1047 insertions(+), 3993 deletions(-) diff --git a/indexer/services/ender/__tests__/handlers/asset-handler.test.ts b/indexer/services/ender/__tests__/handlers/asset-handler.test.ts index 32557975cb..0eeadb7df2 100644 --- a/indexer/services/ender/__tests__/handlers/asset-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/asset-handler.test.ts @@ -1,4 +1,4 @@ -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { AssetCreateEventV1, IndexerTendermintBlock, @@ -36,7 +36,6 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('assetHandler', () => { beforeAll(async () => { @@ -100,99 +99,50 @@ describe('assetHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when market doesnt exist for asset (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = 
createKafkaMessageFromAssetEvent({ - assetEvent: defaultAssetCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - 'Unable to find market with id: 0', - ); + it('fails when market doesnt exist for asset', async () => { + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ + assetEvent: defaultAssetCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new asset (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; - await MarketTable.create(testConstants.defaultMarket); - await marketRefresher.updateMarkets(); - const transactionIndex: number = 0; + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + 'Unable to find market with id: 0', + ); + }); - const assetEvent: AssetCreateEventV1 = defaultAssetCreateEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ - assetEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - // Confirm there is no existing asset to or from the sender subaccount - await expectNoExistingAssets(); - - await onMessage(kafkaMessage); - - const newAssets: AssetFromDatabase[] = await AssetTable.findAll( - {}, - [], { - orderBy: [[AssetColumns.id, Ordering.ASC]], - }); - expect(newAssets.length).toEqual(1); - expectAssetMatchesEvent(assetEvent, newAssets[0]); - if (!useSqlFunction) { - expectTimingStats(); - } - const asset: AssetFromDatabase = assetRefresher.getAssetFromId('0'); - expect(asset).toBeDefined(); + it('creates new asset', async () => { + await MarketTable.create(testConstants.defaultMarket); + await marketRefresher.updateMarkets(); + const transactionIndex: number = 0; + + const assetEvent: AssetCreateEventV1 = defaultAssetCreateEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromAssetEvent({ + assetEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); -}); + // Confirm there is no existing asset to or from the sender subaccount + await expectNoExistingAssets(); -function expectTimingStats() { - expectTimingStat('create_asset'); -} + await onMessage(kafkaMessage); -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'AssetCreationHandler', - eventType: 'AssetCreateEvent', - fnName, - }, - ); -} + const newAssets: AssetFromDatabase[] = await AssetTable.findAll( + {}, + [], { + orderBy: [[AssetColumns.id, Ordering.ASC]], + }); + expect(newAssets.length).toEqual(1); + expectAssetMatchesEvent(assetEvent, newAssets[0]); + const asset: AssetFromDatabase = assetRefresher.getAssetFromId('0'); + expect(asset).toBeDefined(); + }); +}); function expectAssetMatchesEvent( event: AssetCreateEventV1, diff --git a/indexer/services/ender/__tests__/handlers/funding-handler.test.ts b/indexer/services/ender/__tests__/handlers/funding-handler.test.ts index 5ad391ead2..f3a6e3b118 100644 --- a/indexer/services/ender/__tests__/handlers/funding-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/funding-handler.test.ts @@ -1,4 +1,4 @@ -import { stats, STATS_FUNCTION_NAME } from 
'@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { FundingEventV1, FundingEventV1_Type, @@ -42,7 +42,6 @@ import { redisClient } from '../../src/helpers/redis/redis-controller'; import { bigIntToBytes } from '@dydxprotocol-indexer/v4-proto-parser'; import { startPriceCache } from '../../src/caches/price-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('fundingHandler', () => { beforeAll(async () => { @@ -115,281 +114,196 @@ describe('fundingHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully processes single premium sample event (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent], - height: defaultHeight, - time: defaultTime, - }); - - await onMessage(kafkaMessage); - - await expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, - )), - ); - if (!useSqlFunction) { - expectTimingStat('handle_premium_sample'); - } + it('successfully processes single premium sample event', async () => { + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent], + height: defaultHeight, + time: defaultTime, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully processes multiple premium sample event for different markets (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; - const fundingUpdateSampleEvent2: FundingEventV1 = { - type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - { - perpetualId: 1, - fundingValuePpm: 50, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - ], - }; + await onMessage(kafkaMessage); - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent, fundingUpdateSampleEvent2], - height: defaultHeight, - time: defaultTime, - }); + await expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, + )), + ); + }); - await onMessage(kafkaMessage); + it('successfully processes multiple premium sample event for different markets', async () => { + const fundingUpdateSampleEvent2: FundingEventV1 = { + type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, + updates: [ + { + perpetualId: 0, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + { + perpetualId: 1, + fundingValuePpm: 50, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + ], + }; - await expectNextFundingRate( - 'BTC-USD', - new Big('0.000006875'), - ); - await expectNextFundingRate( - 'ETH-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingUpdateSampleEvent2.updates[1].fundingValuePpm, - )), - ); - if (!useSqlFunction) { - expectTimingStat('handle_premium_sample'); - } + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent, 
fundingUpdateSampleEvent2], + height: defaultHeight, + time: defaultTime, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully processes and clears cache for a new funding rate (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingUpdateSampleEvent], - height: defaultHeight, - time: defaultTime, - }); + await onMessage(kafkaMessage); - await onMessage(kafkaMessage); + await expectNextFundingRate( + 'BTC-USD', + new Big('0.000006875'), + ); + await expectNextFundingRate( + 'ETH-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingUpdateSampleEvent2.updates[1].fundingValuePpm, + )), + ); + }); - await expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, - )), - ); - if (!useSqlFunction) { - expectTimingStat('handle_premium_sample'); - } + it('successfully processes and clears cache for a new funding rate', async () => { + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingUpdateSampleEvent], + height: defaultHeight, + time: defaultTime, + }); - const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [defaultFundingRateEvent], - height: 4, - time: defaultTime, - }); + await onMessage(kafkaMessage); - await onMessage(kafkaMessage2); - await expectNextFundingRate( - 'BTC-USD', - undefined, - ); - const fundingIndices: FundingIndexUpdatesFromDatabase[] = await - FundingIndexUpdatesTable.findAll({}, [], {}); + await expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + defaultFundingUpdateSampleEvent.updates[0].fundingValuePpm, + )), + ); - expect(fundingIndices.length).toEqual(1); - expect(fundingIndices[0]).toEqual(expect.objectContaining({ - perpetualId: '0', - rate: '0.00000125', - oraclePrice: '10000', - fundingIndex: '0.1', - })); - if (!useSqlFunction) { - expectTimingStat('handle_funding_rate'); - } + const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [defaultFundingRateEvent], + height: 4, + time: defaultTime, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully processes and clears cache for multiple new funding rates (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_FUNDING_HANDLER_SQL_FUNCTION = useSqlFunction; - const fundingSampleEvent: FundingEventV1 = { - type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - { - perpetualId: 1, - fundingValuePpm: 50, - fundingIndex: bigIntToBytes(BigInt(0)), - }, - ], - }; - const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [fundingSampleEvent], - height: defaultHeight, - time: defaultTime, - }); + await onMessage(kafkaMessage2); + await expectNextFundingRate( + 'BTC-USD', + undefined, + ); + const fundingIndices: FundingIndexUpdatesFromDatabase[] = await + FundingIndexUpdatesTable.findAll({}, [], {}); - await onMessage(kafkaMessage); + expect(fundingIndices.length).toEqual(1); + expect(fundingIndices[0]).toEqual(expect.objectContaining({ + perpetualId: '0', + rate: '0.00000125', + 
oraclePrice: '10000', + fundingIndex: '0.1', + })); + }); - await Promise.all([ - expectNextFundingRate( - 'BTC-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingSampleEvent.updates[0].fundingValuePpm, - )), - ), - expectNextFundingRate( - 'ETH-USD', - new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( - fundingSampleEvent.updates[1].fundingValuePpm, - )), - ), - ]); - if (!useSqlFunction) { - expectTimingStat('handle_premium_sample'); - } + it('successfully processes and clears cache for multiple new funding rates', async () => { + const fundingSampleEvent: FundingEventV1 = { + type: FundingEventV1_Type.TYPE_PREMIUM_SAMPLE, + updates: [ + { + perpetualId: 0, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + { + perpetualId: 1, + fundingValuePpm: 50, + fundingIndex: bigIntToBytes(BigInt(0)), + }, + ], + }; + const kafkaMessage: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [fundingSampleEvent], + height: defaultHeight, + time: defaultTime, + }); - const fundingRateEvent: FundingEventMessage = { - type: FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX, - updates: [ - { - perpetualId: 0, - fundingValuePpm: 10, - fundingIndex: bigIntToBytes(BigInt(10)), - }, - { - perpetualId: 1, - fundingValuePpm: 100, - fundingIndex: bigIntToBytes(BigInt(100)), - }, - ], - }; - const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ - fundingEvents: [fundingRateEvent], - height: 4, - time: defaultTime, - }); + await onMessage(kafkaMessage); - await onMessage(kafkaMessage2); - await Promise.all([ - expectNextFundingRate( - 'BTC-USD', - undefined, - ), - expectNextFundingRate( - 'ETH-USD', - undefined, - ), - ]); - const fundingIndices: FundingIndexUpdatesFromDatabase[] = await - FundingIndexUpdatesTable.findAll( - {}, - [], + await Promise.all([ + expectNextFundingRate( + 'BTC-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingSampleEvent.updates[0].fundingValuePpm, + )), + ), + expectNextFundingRate( + 'ETH-USD', + new Big(protocolTranslations.funding8HourValuePpmTo1HourRate( + fundingSampleEvent.updates[1].fundingValuePpm, + )), + ), + ]); + + const fundingRateEvent: FundingEventMessage = { + type: FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX, + updates: [ { - orderBy: [[FundingIndexUpdatesColumns.perpetualId, Ordering.ASC]], + perpetualId: 0, + fundingValuePpm: 10, + fundingIndex: bigIntToBytes(BigInt(10)), }, - ); - - expect(fundingIndices.length).toEqual(2); - expect(fundingIndices[0]).toEqual(expect.objectContaining({ - perpetualId: '0', - rate: '0.00000125', - oraclePrice: '10000', - // 1e1 * 1e-6 * 1e-6 / 1e-10 = 1e-1 - fundingIndex: '0.1', - })); - expect(fundingIndices[1]).toEqual(expect.objectContaining({ - perpetualId: '1', - rate: '0.0000125', - oraclePrice: '500', - // 1e2 * 1e-6 * 1e-6 / 1e-18 = 1e8 - fundingIndex: '100000000', - })); - if (!useSqlFunction) { - expectTimingStat('handle_funding_rate'); - } + { + perpetualId: 1, + fundingValuePpm: 100, + fundingIndex: bigIntToBytes(BigInt(100)), + }, + ], + }; + const kafkaMessage2: KafkaMessage = createKafkaMessageFromFundingEvents({ + fundingEvents: [fundingRateEvent], + height: 4, + time: defaultTime, }); -}); -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'FundingHandler', eventType: 'FundingEvent', fnName }); -} + await onMessage(kafkaMessage2); + await Promise.all([ + 
expectNextFundingRate( + 'BTC-USD', + undefined, + ), + expectNextFundingRate( + 'ETH-USD', + undefined, + ), + ]); + const fundingIndices: FundingIndexUpdatesFromDatabase[] = await + FundingIndexUpdatesTable.findAll( + {}, + [], + { + orderBy: [[FundingIndexUpdatesColumns.perpetualId, Ordering.ASC]], + }, + ); + + expect(fundingIndices.length).toEqual(2); + expect(fundingIndices[0]).toEqual(expect.objectContaining({ + perpetualId: '0', + rate: '0.00000125', + oraclePrice: '10000', + // 1e1 * 1e-6 * 1e-6 / 1e-10 = 1e-1 + fundingIndex: '0.1', + })); + expect(fundingIndices[1]).toEqual(expect.objectContaining({ + perpetualId: '1', + rate: '0.0000125', + oraclePrice: '500', + // 1e2 * 1e-6 * 1e-6 / 1e-18 = 1e8 + fundingIndex: '100000000', + })); + }); +}); function createKafkaMessageFromFundingEvents({ fundingEvents, diff --git a/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts b/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts index bdeeae8322..631f4fb4a8 100644 --- a/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/liquidity-tier-handler.test.ts @@ -1,4 +1,4 @@ -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { IndexerTendermintBlock, IndexerTendermintEvent, @@ -39,7 +39,6 @@ import { updateBlockCache } from '../../src/caches/block-cache'; import { defaultLiquidityTier } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; import _ from 'lodash'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('liquidityTierHandler', () => { beforeAll(async () => { @@ -104,125 +103,73 @@ describe('liquidityTierHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new liquidity tier (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ - liquidityTierEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + it('creates new liquidity tier', async () => { + const transactionIndex: number = 0; + const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ + liquidityTierEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); // Confirm there is no existing liquidity tier - await expectNoExistingLiquidityTiers(); - await perpetualMarketRefresher.updatePerpetualMarkets(); - - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + await expectNoExistingLiquidityTiers(); + await perpetualMarketRefresher.updatePerpetualMarkets(); - const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( - {}, - [], { - orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], - }); - expect(newLiquidityTiers.length).toEqual(1); - expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); - if (!useSqlFunction) { - expectTimingStats(); - } - 
validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); - expectKafkaMessages(producerSendMock, liquidityTierEvent, 0); - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'updates existing liquidity tier (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ - liquidityTierEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, + const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( + {}, + [], { + orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], }); + expect(newLiquidityTiers.length).toEqual(1); + expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); + validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); + expectKafkaMessages(producerSendMock, liquidityTierEvent, 0); + }); + + it('updates existing liquidity tier', async () => { + const transactionIndex: number = 0; + const liquidityTierEvent: LiquidityTierUpsertEventV1 = defaultLiquidityTierUpsertEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromLiquidityTiersEvent({ + liquidityTierEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); // Create existing liquidity tier - await LiquidityTiersTable.upsert(defaultLiquidityTier); + await LiquidityTiersTable.upsert(defaultLiquidityTier); - // create perpetual market with existing liquidity tier to test websockets - await Promise.all([ - MarketTable.create(testConstants.defaultMarket), - MarketTable.create(testConstants.defaultMarket2), - ]); - await Promise.all([ - PerpetualMarketTable.create(testConstants.defaultPerpetualMarket), - PerpetualMarketTable.create(testConstants.defaultPerpetualMarket2), - ]); - await perpetualMarketRefresher.updatePerpetualMarkets(); + // create perpetual market with existing liquidity tier to test websockets + await Promise.all([ + MarketTable.create(testConstants.defaultMarket), + MarketTable.create(testConstants.defaultMarket2), + ]); + await Promise.all([ + PerpetualMarketTable.create(testConstants.defaultPerpetualMarket), + PerpetualMarketTable.create(testConstants.defaultPerpetualMarket2), + ]); + await perpetualMarketRefresher.updatePerpetualMarkets(); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( - {}, - [], { - orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], - }); - expect(newLiquidityTiers.length).toEqual(1); - expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); - if (!useSqlFunction) { - expectTimingStats(); - } - validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); - expectKafkaMessages(producerSendMock, liquidityTierEvent, 2); - }); + const newLiquidityTiers: LiquidityTiersFromDatabase[] = await LiquidityTiersTable.findAll( + {}, + [], { + orderBy: [[LiquidityTiersColumns.id, Ordering.ASC]], + }); + expect(newLiquidityTiers.length).toEqual(1); + 
expectLiquidityTier(newLiquidityTiers[0], liquidityTierEvent); + validateLiquidityTierRefresher(defaultLiquidityTierUpsertEvent); + expectKafkaMessages(producerSendMock, liquidityTierEvent, 2); + }); }); -function expectTimingStats() { - expectTimingStat('upsert_liquidity_tier'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'LiquidityTierHandler', - eventType: 'LiquidityTierUpsertEvent', - fnName, - }, - ); -} - export function expectLiquidityTier( liquidityTierFromDb: LiquidityTiersFromDatabase, event: LiquidityTierUpsertEventV1, diff --git a/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts index 3495d574a5..839626cdd5 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-create-handler.test.ts @@ -23,7 +23,6 @@ import { } from '../../helpers/indexer-proto-helpers'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('marketCreateHandler', () => { beforeAll(async () => { @@ -87,97 +86,67 @@ describe('marketCreateHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + it('creates new market', async () => { + const transactionIndex: number = 0; - const marketCreate: MarketEventV1 = { - marketId: 3, - marketCreate: { - base: { - pair: 'DYDX-USD', - minPriceChangePpm: 500, - }, - exponent: -5, + const marketCreate: MarketEventV1 = { + marketId: 3, + marketCreate: { + base: { + pair: 'DYDX-USD', + minPriceChangePpm: 500, }, - }; - - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketCreate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - await onMessage(kafkaMessage); + exponent: -5, + }, + }; + + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketCreate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const market: MarketFromDatabase = await MarketTable.findById( - marketCreate.marketId, - ) as MarketFromDatabase; + await onMessage(kafkaMessage); - expectMarketMatchesEvent(marketCreate as MarketCreateEventMessage, market); - }); + const market: MarketFromDatabase = await MarketTable.findById( + marketCreate.marketId, + ) as MarketFromDatabase; - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'errors when attempting to create an existing market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + expectMarketMatchesEvent(marketCreate as MarketCreateEventMessage, market); + }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketCreate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - await 
expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('Market in MarketCreate already exists'), - ); + it('errors when attempting to create an existing market', async () => { + const transactionIndex: number = 0; - // Check that market in database is the old market. - const market: MarketFromDatabase = await MarketTable.findById( - defaultMarketCreate.marketId, - ) as MarketFromDatabase; - expect(market.minPriceChangePpm).toEqual(50); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketCreateHandler#logAndThrowParseMessageError', - message: 'Market in MarketCreate already exists', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketCreate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('Market in MarketCreate already exists'), + ); + + // Check that market in database is the old market. + const market: MarketFromDatabase = await MarketTable.findById( + defaultMarketCreate.marketId, + ) as MarketFromDatabase; + expect(market.minPriceChangePpm).toEqual(50); + + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketCreateHandler#logAndThrowParseMessageError', + message: 'Market in MarketCreate already exists', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); + }); }); function expectMarketMatchesEvent( diff --git a/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts index ae33c3d228..c80d4a1a7f 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-modify-handler.test.ts @@ -16,7 +16,6 @@ import { createIndexerTendermintBlock, createIndexerTendermintEvent } from '../. 
import { MarketModifyHandler } from '../../../src/handlers/markets/market-modify-handler'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('marketModifyHandler', () => { @@ -81,84 +80,54 @@ describe('marketModifyHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'modifies existing market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketModify], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + it('modifies existing market', async () => { + const transactionIndex: number = 0; - await onMessage(kafkaMessage); - - const market: MarketFromDatabase = await MarketTable.findById( - defaultMarketModify.marketId, - ) as MarketFromDatabase; - - expectMarketMatchesEvent(defaultMarketModify as MarketModifyEventMessage, market); + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketModify], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'modifies non-existent market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + await onMessage(kafkaMessage); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [{ - ...defaultMarketModify, - marketId: 5, - }], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + const market: MarketFromDatabase = await MarketTable.findById( + defaultMarketModify.marketId, + ) as MarketFromDatabase; - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('Market in MarketModify doesn\'t exist'), - ); + expectMarketMatchesEvent(defaultMarketModify as MarketModifyEventMessage, market); + }); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketModifyHandler#logAndThrowParseMessageError', - message: 'Market in MarketModify doesn\'t exist', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); + it('modifies non-existent market', async () => { + const transactionIndex: number = 0; + + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [{ + ...defaultMarketModify, + marketId: 5, + }], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); + + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('Market in MarketModify doesn\'t exist'), + ); + + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketModifyHandler#logAndThrowParseMessageError', + message: 'Market in MarketModify doesn\'t exist', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a 
bug in V4 node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); + }); }); function expectMarketMatchesEvent( diff --git a/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts b/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts index 05edfd648d..9a56111afd 100644 --- a/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/markets/market-price-update-handler.test.ts @@ -33,7 +33,6 @@ import { MarketPriceUpdateHandler } from '../../../src/handlers/markets/market-p import Long from 'long'; import { getPrice } from '../../../src/caches/price-cache'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('marketPriceUpdateHandler', () => { beforeAll(async () => { @@ -96,170 +95,125 @@ describe('marketPriceUpdateHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when no market exists (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const marketPriceUpdate: MarketEventV1 = { - marketId: 5, - priceUpdate: { - priceWithExponent: Long.fromValue(50000000, true), - }, - }; - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError('MarketPriceUpdateEvent contains a non-existent market id'), - ); - - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'MarketPriceUpdateHandler#logAndThrowParseMessageError', - message: 'MarketPriceUpdateEvent contains a non-existent market id', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - expect(producerSendMock.mock.calls.length).toEqual(0); + it('fails when no market exists', async () => { + const transactionIndex: number = 0; + const marketPriceUpdate: MarketEventV1 = { + marketId: 5, + priceUpdate: { + priceWithExponent: Long.fromValue(50000000, true), + }, + }; + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully inserts new oracle price for existing market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError('MarketPriceUpdateEvent contains a non-existent market id'), + ); + + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'MarketPriceUpdateHandler#logAndThrowParseMessageError', + message: 'MarketPriceUpdateEvent contains a non-existent market id', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 
node', + })); + expect(producerSendMock.mock.calls.length).toEqual(0); + }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [defaultMarketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + it('successfully inserts new oracle price for existing market', async () => { + const transactionIndex: number = 0; - await onMessage(kafkaMessage); + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [defaultMarketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const { market, oraclePrice } = await getDbState(defaultMarketPriceUpdate); + await onMessage(kafkaMessage); - expectOraclePriceMatchesEvent( - defaultMarketPriceUpdate as MarketPriceUpdateEventMessage, - oraclePrice, - market, - defaultHeight, - ); + const { market, oraclePrice } = await getDbState(defaultMarketPriceUpdate); - expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); + expectOraclePriceMatchesEvent( + defaultMarketPriceUpdate as MarketPriceUpdateEventMessage, + oraclePrice, + market, + defaultHeight, + ); - const contents: MarketMessageContents = generateOraclePriceContents( - oraclePrice, - market.pair, - ); + expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); - expectMarketKafkaMessage({ - producerSendMock, - contents: JSON.stringify(contents), - }); + const contents: MarketMessageContents = generateOraclePriceContents( + oraclePrice, + market.pair, + ); + + expectMarketKafkaMessage({ + producerSendMock, + contents: JSON.stringify(contents), }); + }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'successfully inserts new oracle price for market created in same block (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const newMarketId: number = 3000; - - // Include an event to create the market - const marketCreate: MarketEventV1 = { - marketId: newMarketId, - marketCreate: { - base: { - pair: 'NEWTOKEN-USD', - minPriceChangePpm: 500, - }, - exponent: -5, + it('successfully inserts new oracle price for market created in same block', async () => { + const transactionIndex: number = 0; + const newMarketId: number = 3000; + + // Include an event to create the market + const marketCreate: MarketEventV1 = { + marketId: newMarketId, + marketCreate: { + base: { + pair: 'NEWTOKEN-USD', + minPriceChangePpm: 500, }, - }; - const marketPriceUpdate: MarketEventV1 = { - marketId: newMarketId, - priceUpdate: { - priceWithExponent: Long.fromValue(50000000), - }, - }; - - const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ - marketEvents: [marketCreate, marketPriceUpdate], - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + exponent: -5, + }, + }; + const marketPriceUpdate: MarketEventV1 = { + marketId: newMarketId, + priceUpdate: { + priceWithExponent: Long.fromValue(50000000), + }, + }; + + const kafkaMessage: KafkaMessage = createKafkaMessageFromMarketEvent({ + marketEvents: [marketCreate, marketPriceUpdate], + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - await onMessage(kafkaMessage); + await onMessage(kafkaMessage); - const { market, oraclePrice } = await getDbState(marketPriceUpdate); + const { market, oraclePrice } = 
await getDbState(marketPriceUpdate); - expectOraclePriceMatchesEvent( - marketPriceUpdate as MarketPriceUpdateEventMessage, - oraclePrice, - market, - defaultHeight, - ); + expectOraclePriceMatchesEvent( + marketPriceUpdate as MarketPriceUpdateEventMessage, + oraclePrice, + market, + defaultHeight, + ); - expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); + expect(getPrice(oraclePrice.marketId)).toEqual(oraclePrice.price); - const contents: MarketMessageContents = generateOraclePriceContents( - oraclePrice, - market.pair, - ); + const contents: MarketMessageContents = generateOraclePriceContents( + oraclePrice, + market.pair, + ); - expectMarketKafkaMessage({ - producerSendMock, - contents: JSON.stringify(contents), - }); + expectMarketKafkaMessage({ + producerSendMock, + contents: JSON.stringify(contents), }); + }); }); async function getDbState(marketPriceUpdate: MarketEventV1): Promise { diff --git a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts index dc5018c3e1..16959eaf8b 100644 --- a/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/order-fills/liquidation-handler.test.ts @@ -1,5 +1,5 @@ import { - logger, stats, STATS_FUNCTION_NAME, + logger, stats, } from '@dydxprotocol-indexer/base'; import { IndexerTendermintBlock, IndexerTendermintEvent, Timestamp, @@ -73,7 +73,6 @@ import { LiquidationHandler } from '../../../src/handlers/order-fills/liquidatio import { clearCandlesMap } from '../../../src/caches/candle-cache'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; import { expectStateFilledQuantums } from '../../helpers/redis-helpers'; const defaultClobPairId: string = testConstants.defaultPerpetualMarket.clobPairId; @@ -205,32 +204,16 @@ describe('LiquidationHandler', () => { it.each([ [ - 'goodTilBlock via knex', + 'goodTilBlock', { goodTilBlock: 10, }, - false, ], [ - 'goodTilBlock via SQL function', - { - goodTilBlock: 10, - }, - true, - ], - [ - 'goodTilBlockTime via knex', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000_000, }, - false, - ], - [ - 'goodTilBlockTime via SQL function', - { - goodTilBlockTime: 1_000_000_000, - }, - true, ], ])( 'creates fills and orders (with %s), sends vulcan message for maker order update and updates ' + @@ -238,9 +221,7 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, - useSqlFunction: boolean, ) => { - config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 10_000_000; @@ -430,46 +411,22 @@ describe('LiquidationHandler', () => { ), expectCandlesUpdated(), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ [ - 'goodTilBlock via knex', - { - goodTilBlock: 10, - }, - false, - '5', - undefined, - ], - [ - 'goodTilBlock via SQL function', + 'goodTilBlock', { goodTilBlock: 10, }, - true, '5', undefined, ], [ - 'goodTilBlockTime via knex', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000, }, - false, - undefined, - '1970-01-11T13:46:40.000Z', - ], - [ - 'goodTilBlockTime via SQL function', - { - goodTilBlockTime: 1_000_000, - }, - true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -479,11 +436,9 @@ describe('LiquidationHandler', () => { async ( _name: string, 
goodTilOneof: Partial, - useSqlFunction: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; // create initial orders const existingMakerOrder: OrderCreateObject = { @@ -675,46 +630,22 @@ describe('LiquidationHandler', () => { orderFillEvent.totalFilledMaker.toString(), ), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ [ - 'goodTilBlock via knex', + 'goodTilBlock', { goodTilBlock: 10, }, - false, '5', undefined, ], [ - 'goodTilBlock via SQL function', - { - goodTilBlock: 10, - }, - true, - '5', - undefined, - ], - [ - 'goodTilBlockTime via knex', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000, }, - false, - undefined, - '1970-01-11T13:46:40.000Z', - ], - [ - 'goodTilBlockTime via SQL function', - { - goodTilBlockTime: 1_000_000, - }, - true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -723,11 +654,9 @@ describe('LiquidationHandler', () => { async ( _name: string, goodTilOneof: Partial, - useSqlFunction: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; // create initial orders const existingMakerOrder: OrderCreateObject = { @@ -906,172 +835,153 @@ describe('LiquidationHandler', () => { orderFillEvent.totalFilledMaker.toString(), ), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates fills and orders (%s) with fixed-point notation quoteAmount', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const eventIndex: number = 0; - const makerQuantums: number = 100; - const makerSubticks: number = 1_000_000; + it('creates fills and orders with fixed-point notation quoteAmount', async () => { + const transactionIndex: number = 0; + const eventIndex: number = 0; + const makerQuantums: number = 100; + const makerSubticks: number = 1_000_000; - const makerOrderProto: IndexerOrder = createOrder({ - subaccountId: defaultSubaccountId, - clientId: 0, - side: IndexerOrder_Side.SIDE_BUY, - quantums: makerQuantums, - subticks: makerSubticks, - goodTilOneof: { goodTilBlock: 10 }, - clobPairId: defaultClobPairId, - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, - reduceOnly: false, - clientMetadata: 0, - }); + const makerOrderProto: IndexerOrder = createOrder({ + subaccountId: defaultSubaccountId, + clientId: 0, + side: IndexerOrder_Side.SIDE_BUY, + quantums: makerQuantums, + subticks: makerSubticks, + goodTilOneof: { goodTilBlock: 10 }, + clobPairId: defaultClobPairId, + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + timeInForce: IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, + reduceOnly: false, + clientMetadata: 0, + }); - const takerSubticks: number = 150_000; - const takerQuantums: number = 10; - const liquidationOrder: LiquidationOrderV1 = createLiquidationOrder({ - subaccountId: defaultSubaccountId2, - clobPairId: defaultClobPairId, - perpetualId: defaultPerpetualPosition.perpetualId, - quantums: takerQuantums, - isBuy: false, - subticks: takerSubticks, - }); + const takerSubticks: number = 150_000; + const takerQuantums: number = 10; + const liquidationOrder: LiquidationOrderV1 = createLiquidationOrder({ + subaccountId: defaultSubaccountId2, + clobPairId: defaultClobPairId, + perpetualId: 
defaultPerpetualPosition.perpetualId, + quantums: takerQuantums, + isBuy: false, + subticks: takerSubticks, + }); - const fillAmount: number = 10; - const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( - makerOrderProto, - liquidationOrder, - fillAmount, - fillAmount, - ); - const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ - orderFillEvent, - transactionIndex, - eventIndex, - height: parseInt(defaultHeight, 10), - time: defaultTime, - txHash: defaultTxHash, - }); + const fillAmount: number = 10; + const orderFillEvent: OrderFillEventV1 = createLiquidationOrderFillEvent( + makerOrderProto, + liquidationOrder, + fillAmount, + fillAmount, + ); + const kafkaMessage: KafkaMessage = createKafkaMessageFromOrderFillEvent({ + orderFillEvent, + transactionIndex, + eventIndex, + height: parseInt(defaultHeight, 10), + time: defaultTime, + txHash: defaultTxHash, + }); - // create initial PerpetualPositions - await Promise.all([ - PerpetualPositionTable.create(defaultPerpetualPosition), - PerpetualPositionTable.create({ - ...defaultPerpetualPosition, - subaccountId: testConstants.defaultSubaccountId2, - }), - ]); + // create initial PerpetualPositions + await Promise.all([ + PerpetualPositionTable.create(defaultPerpetualPosition), + PerpetualPositionTable.create({ + ...defaultPerpetualPosition, + subaccountId: testConstants.defaultSubaccountId2, + }), + ]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); + + // This size should be in fixed-point notation rather than exponential notation (1e-8) + const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 + const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 + const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 1e-9 + await expectOrderInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + size: makerOrderSize, + totalFilled, + price: makerPrice, + status: OrderStatus.OPEN, // orderSize > totalFilled so status is open + clobPairId: defaultClobPairId, + side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, + orderFlags: makerOrderProto.orderId!.orderFlags.toString(), + timeInForce: TimeInForce.GTT, + reduceOnly: false, + goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), + goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), + clientMetadata: makerOrderProto.clientMetadata.toString(), + updatedAt: defaultDateTime.toISO(), + updatedAtHeight: defaultHeight.toString(), + }); - // This size should be in fixed-point notation rather than exponential notation (1e-8) - const makerOrderSize: string = '0.00000001'; // quantums in human = 1e2 * 1e-10 = 1e-8 - const makerPrice: string = '100'; // quote currency / base currency = 1e6 * 1e-8 * 1e-6 / 1e-10 = 1e2 - const totalFilled: string = '0.000000001'; // fillAmount in human = 1e1 * 1e-10 = 1e-9 - await expectOrderInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - size: makerOrderSize, - totalFilled, - price: makerPrice, - status: OrderStatus.OPEN, // orderSize > totalFilled so status is open - clobPairId: defaultClobPairId, - side: makerOrderProto.side === IndexerOrder_Side.SIDE_BUY ? 
OrderSide.BUY : OrderSide.SELL, - orderFlags: makerOrderProto.orderId!.orderFlags.toString(), - timeInForce: TimeInForce.GTT, - reduceOnly: false, - goodTilBlock: protocolTranslations.getGoodTilBlock(makerOrderProto)?.toString(), - goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(makerOrderProto), - clientMetadata: makerOrderProto.clientMetadata.toString(), - updatedAt: defaultDateTime.toISO(), - updatedAtHeight: defaultHeight.toString(), - }); + const eventId: Buffer = TendermintEventTable.createEventId( + defaultHeight, + transactionIndex, + eventIndex, + ); - const eventId: Buffer = TendermintEventTable.createEventId( - defaultHeight, - transactionIndex, - eventIndex, - ); + // This size should be in fixed-point notation rather than exponential notation (1e-5) + const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId, + clientId: '0', + liquidity: Liquidity.MAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, + eventId, + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATION, + clobPairId: defaultClobPairId, + side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: makerOrderProto.clientMetadata.toString(), + fee: defaultMakerFee, + }); + await expectFillInDatabase({ + subaccountId: testConstants.defaultSubaccountId2, + clientId: '0', + liquidity: Liquidity.TAKER, + size: totalFilled, + price: makerPrice, + quoteAmount, + eventId, + transactionHash: defaultTxHash, + createdAt: defaultDateTime.toISO(), + createdAtHeight: defaultHeight, + type: FillType.LIQUIDATED, + clobPairId: defaultClobPairId, + side: liquidationOrderToOrderSide(liquidationOrder), + orderFlags: ORDER_FLAG_SHORT_TERM.toString(), + clientMetadata: null, + fee: defaultTakerFee, + hasOrderId: false, + }); - // This size should be in fixed-point notation rather than exponential notation (1e-5) - const quoteAmount: string = '0.0000001'; // quote amount is price * fillAmount = 1e2 * 1e-9 = 1e-7 - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId, - clientId: '0', - liquidity: Liquidity.MAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, + await Promise.all([ + expectDefaultOrderFillAndPositionSubaccountKafkaMessages( + producerSendMock, eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATION, - clobPairId: defaultClobPairId, - side: protocolTranslations.protocolOrderSideToOrderSide(makerOrderProto.side), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: makerOrderProto.clientMetadata.toString(), - fee: defaultMakerFee, - }); - await expectFillInDatabase({ - subaccountId: testConstants.defaultSubaccountId2, - clientId: '0', - liquidity: Liquidity.TAKER, - size: totalFilled, - price: makerPrice, - quoteAmount, + ORDER_FLAG_SHORT_TERM, + ), + expectDefaultTradeKafkaMessageFromTakerFillId( + producerSendMock, eventId, - transactionHash: defaultTxHash, - createdAt: defaultDateTime.toISO(), - createdAtHeight: defaultHeight, - type: FillType.LIQUIDATED, - clobPairId: defaultClobPairId, - side: liquidationOrderToOrderSide(liquidationOrder), - orderFlags: ORDER_FLAG_SHORT_TERM.toString(), - clientMetadata: null, - fee: defaultTakerFee, - hasOrderId: false, - }); - - await 
Promise.all([ - expectDefaultOrderFillAndPositionSubaccountKafkaMessages( - producerSendMock, - eventId, - ORDER_FLAG_SHORT_TERM, - ), - expectDefaultTradeKafkaMessageFromTakerFillId( - producerSendMock, - eventId, - ), - expectCandlesUpdated(), - expectStateFilledQuantums( - OrderTable.orderIdToUuid(makerOrderProto.orderId!), - orderFillEvent.totalFilledMaker.toString(), - ), - ]); - }); + ), + expectCandlesUpdated(), + expectStateFilledQuantums( + OrderTable.orderIdToUuid(makerOrderProto.orderId!), + orderFillEvent.totalFilledMaker.toString(), + ), + ]); + }); it('LiquidationOrderFillEvent fails liquidationOrder validation', async () => { const makerQuantums: number = 10_000_000; @@ -1281,20 +1191,6 @@ function createLiquidationOrderFillEvent( } as OrderFillEventV1; } -function expectTimingStats() { - expectTimingStat('upsert_maker_order'); - expectTimingStat('create_fill'); - expectTimingStat('update_perpetual_position'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'LiquidationHandler', eventType: 'OrderFillEvent', fnName }, - ); -} - async function expectCandlesUpdated() { const candles: CandleFromDatabase[] = await CandleTable.findAll({}, []); expect(candles.length).toBeGreaterThan(0); diff --git a/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts b/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts index 82fcbc45d3..548d863c73 100644 --- a/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/order-fills/order-handler.test.ts @@ -1,4 +1,4 @@ -import { logger, stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { logger, stats } from '@dydxprotocol-indexer/base'; import { IndexerOrder, IndexerOrder_Side, @@ -73,7 +73,6 @@ import { OrderHandler } from '../../../src/handlers/order-fills/order-handler'; import { clearCandlesMap } from '../../../src/caches/candle-cache'; import Long from 'long'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; import { redisClient } from '../../../src/helpers/redis/redis-controller'; import { expectStateFilledQuantums } from '../../helpers/redis-helpers'; @@ -203,44 +202,22 @@ describe('OrderHandler', () => { it.each([ [ - 'goodTilBlock via knex', + 'goodTilBlock', { goodTilBlock: 10, }, { goodTilBlock: 15, }, - false, - ], - [ - 'goodTilBlock via SQL function', - { - goodTilBlock: 10, - }, - { - goodTilBlock: 15, - }, - true, ], [ - 'goodTilBlockTime via knex', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000_000, }, { goodTilBlockTime: 1_000_005_000, }, - false, - ], - [ - 'goodTilBlockTime via SQL function', - { - goodTilBlockTime: 1_000_000_000, - }, - { - goodTilBlockTime: 1_000_005_000, - }, - false, ], ])( 'creates fills and orders (with %s), sends vulcan messages for order updates and order ' + @@ -249,9 +226,7 @@ describe('OrderHandler', () => { _name: string, makerGoodTilOneof: Partial, takerGoodTilOneof: Partial, - useSqlFunction: boolean, ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 1_000_000; @@ -488,15 +463,11 @@ describe('OrderHandler', () => { orderFillEvent.totalFilledTaker.toString(), ), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ [ - 
'goodTilBlock via knex', + 'goodTilBlock', { goodTilBlock: 10, }, @@ -504,25 +475,11 @@ describe('OrderHandler', () => { goodTilBlock: 15, }, false, - false, - '5', - undefined, - ], - [ - 'goodTilBlock via SQL function', - { - goodTilBlock: 10, - }, - { - goodTilBlock: 15, - }, - true, - false, '5', undefined, ], [ - 'goodTilBlockTime via knex', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000_000, }, @@ -530,38 +487,11 @@ describe('OrderHandler', () => { goodTilBlockTime: 1_000_005_000, }, false, - false, - undefined, - '1970-01-11T13:46:40.000Z', - ], - [ - 'goodTilBlockTime via SQL function', - { - goodTilBlockTime: 1_000_000_000, - }, - { - goodTilBlockTime: 1_000_005_000, - }, - true, - false, - undefined, - '1970-01-11T13:46:40.000Z', - ], - [ - 'goodTilBlockTime w/ cancelled maker order via knex', - { - goodTilBlockTime: 1_000_000_000, - }, - { - goodTilBlockTime: 1_000_005_000, - }, - false, - true, undefined, '1970-01-11T13:46:40.000Z', ], [ - 'goodTilBlockTime w/ cancelled maker order via SQL function', + 'goodTilBlockTime w/ cancelled maker order', { goodTilBlockTime: 1_000_000_000, }, @@ -569,7 +499,6 @@ describe('OrderHandler', () => { goodTilBlockTime: 1_000_005_000, }, true, - true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -580,12 +509,10 @@ describe('OrderHandler', () => { _name: string, makerGoodTilOneof: Partial, takerGoodTilOneof: Partial, - useSqlFunction: boolean, isOrderCanceled: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; if (isOrderCanceled) { await CanceledOrdersCache.addCanceledOrderId( OrderTable.uuid( @@ -850,58 +777,28 @@ describe('OrderHandler', () => { orderFillEvent.totalFilledTaker.toString(), ), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ [ - 'goodTilBlock via knex', + 'goodTilBlock', { goodTilBlock: 10, }, { goodTilBlock: 15, }, - false, '5', undefined, ], [ - 'goodTilBlock via SQL function', - { - goodTilBlock: 10, - }, - { - goodTilBlock: 15, - }, - true, - '5', - undefined, - ], - [ - 'goodTilBlockTime via knex', - { - goodTilBlockTime: 1_000_000_000, - }, - { - goodTilBlockTime: 1_000_005_000, - }, - false, - undefined, - '1970-01-11T13:46:40.000Z', - ], - [ - 'goodTilBlockTime via SQL function', + 'goodTilBlockTime', { goodTilBlockTime: 1_000_000_000, }, { goodTilBlockTime: 1_000_005_000, }, - true, undefined, '1970-01-11T13:46:40.000Z', ], @@ -911,11 +808,9 @@ describe('OrderHandler', () => { _name: string, makerGoodTilOneof: Partial, takerGoodTilOneof: Partial, - useSqlFunction: boolean, existingGoodTilBlock?: string, existingGoodTilBlockTime?: string, ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; // create initial orders await Promise.all([ // maker order @@ -1154,26 +1049,9 @@ describe('OrderHandler', () => { orderFillEvent.totalFilledTaker.toString(), ), ]); - - if (!useSqlFunction) { - expectTimingStats(); - } }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])('creates fills and orders with fixed-point notation quoteAmount (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('creates fills and orders with fixed-point notation quoteAmount', async () => { const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 100; @@ -1371,20 +1249,7 @@ describe('OrderHandler', () => { ]); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL 
function', - true, - ], - ])('creates fills and orders with fixed-point notation price (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('creates fills and orders with fixed-point notation price', async () => { const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 100; @@ -1692,20 +1557,7 @@ describe('OrderHandler', () => { await expectNoCandles(); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])('correctly sets status for short term IOC orders (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('correctly sets status for short term IOC orders', async () => { const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 100; @@ -1801,51 +1653,23 @@ describe('OrderHandler', () => { it.each([ [ 'limit', - 'via knex', - false, - IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, - ], - [ - 'limit', - 'via SQL function', - true, IndexerOrder_TimeInForce.TIME_IN_FORCE_UNSPECIFIED, ], [ 'post-only best effort canceled', - 'via knex', - false, - IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY, - ], - [ - 'post-only best effort canceled', - 'via SQL function', - true, IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY, ], [ 'post-only canceled', - 'via knex', - false, IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY, OrderStatus.CANCELED, ], - [ - 'post-only canceled', - 'via SQL function', - true, - IndexerOrder_TimeInForce.TIME_IN_FORCE_POST_ONLY, - OrderStatus.CANCELED, - ], - ])('correctly sets status for short term %s orders (%s)', async ( + ])('correctly sets status for short term %s orders', async ( _orderType: string, - _name: string, - useSqlFunction: boolean, timeInForce: IndexerOrder_TimeInForce, // either BEST_EFFORT_CANCELED or CANCELED status: OrderStatus = OrderStatus.BEST_EFFORT_CANCELED, ) => { - config.USE_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const makerQuantums: number = 100; @@ -2012,21 +1836,6 @@ function createOrderFillEvent( } as OrderFillEventV1; } -function expectTimingStats() { - expectTimingStat('upsert_orders'); - expectTimingStat('create_fill'); - expectTimingStat('update_perpetual_position'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'OrderHandler', eventType: 'OrderFillEvent', fnName }, - ); - -} - async function expectCandlesUpdated() { const candles: CandleFromDatabase[] = await CandleTable.findAll({}, []); expect(candles.length).toBeGreaterThan(0); diff --git a/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts b/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts index 02235fd182..e0bb411c8c 100644 --- a/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/perpetual-market-handler.test.ts @@ -1,4 +1,4 @@ -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { PerpetualMarketCreateEventV1, IndexerTendermintBlock, @@ -40,7 +40,6 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from 
'../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('perpetualMarketHandler', () => { beforeAll(async () => { @@ -105,135 +104,71 @@ describe('perpetualMarketHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when market doesnt exist for perpetual market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent: defaultPerpetualMarketCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - - await expect(onMessage(kafkaMessage)).rejects.toThrowError(); + it('fails when market doesnt exist for perpetual market', async () => { + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent: defaultPerpetualMarketCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when liquidity tier doesnt exist for perpetual market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; - await MarketTable.create(testConstants.defaultMarket); - await marketRefresher.updateMarkets(); - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent: defaultPerpetualMarketCreateEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + await expect(onMessage(kafkaMessage)).rejects.toThrowError(); + }); - await expect(onMessage(kafkaMessage)).rejects.toThrowError(); + it('fails when liquidity tier doesnt exist for perpetual market', async () => { + await MarketTable.create(testConstants.defaultMarket); + await marketRefresher.updateMarkets(); + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent: defaultPerpetualMarketCreateEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new perpetual market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION = useSqlFunction; - await Promise.all([ - MarketTable.create(testConstants.defaultMarket), - LiquidityTiersTable.create(testConstants.defaultLiquidityTier), - ]); - await liquidityTierRefresher.updateLiquidityTiers(); - await marketRefresher.updateMarkets(); - - const transactionIndex: number = 0; + await expect(onMessage(kafkaMessage)).rejects.toThrowError(); + }); - const perpetualMarketEvent: PerpetualMarketCreateEventV1 = defaultPerpetualMarketCreateEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ - perpetualMarketEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - // Confirm there is no existing perpetualMarket. 
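The consolidation applied here repeats across every handler test in this patch: an it.each that ran each case twice, toggling a USE_*_SQL_FUNCTION config flag between the Knex and SQL-function paths, collapses into a single unparameterized it, and the flag assignment together with the Knex-only expectTimingStats() assertion drops out. A minimal sketch of the shape, with a hypothetical flag name and helper (Jest's it/it.each are globals; expectHandlerOutcome stands in for the real assertions):

// Before: each case enumerated both execution paths.
it.each([
  ['via knex', false],
  ['via SQL function', true],
])('handles event (%s)', async (_name: string, useSqlFunction: boolean) => {
  config.USE_EXAMPLE_HANDLER_SQL_FUNCTION = useSqlFunction; // hypothetical flag
  await expectHandlerOutcome();
  if (!useSqlFunction) {
    expectTimingStats(); // per-function timing stats existed only on the Knex path
  }
});

// After: the SQL function is the only code path, so one test suffices
// and the timing-stat helpers are deleted outright.
it('handles event', async () => {
  await expectHandlerOutcome();
});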
- await expectNoExistingPerpetualMarkets(); + it('creates new perpetual market', async () => { + await Promise.all([ + MarketTable.create(testConstants.defaultMarket), + LiquidityTiersTable.create(testConstants.defaultLiquidityTier), + ]); + await liquidityTierRefresher.updateLiquidityTiers(); + await marketRefresher.updateMarkets(); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + const transactionIndex: number = 0; - const newPerpetualMarkets: PerpetualMarketFromDatabase[] = await PerpetualMarketTable.findAll( - {}, - [], { - orderBy: [[PerpetualMarketColumns.id, Ordering.ASC]], - }); - expect(newPerpetualMarkets.length).toEqual(1); - expectPerpetualMarketMatchesEvent(perpetualMarketEvent, newPerpetualMarkets[0]); - if (!useSqlFunction) { - expectTimingStats(); - } - const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId('0'); - expect(perpetualMarket).toBeDefined(); - expectPerpetualMarket(perpetualMarket!, perpetualMarketEvent); - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + const perpetualMarketEvent: PerpetualMarketCreateEventV1 = defaultPerpetualMarketCreateEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromPerpetualMarketEvent({ + perpetualMarketEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); -}); + // Confirm there is no existing perpetualMarket. + await expectNoExistingPerpetualMarkets(); -function expectTimingStats() { - expectTimingStat('create_perpetual_market'); -} + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'PerpetualMarketCreationHandler', - eventType: 'PerpetualMarketCreateEvent', - fnName, - }, - ); -} + const newPerpetualMarkets: PerpetualMarketFromDatabase[] = await PerpetualMarketTable.findAll( + {}, + [], { + orderBy: [[PerpetualMarketColumns.id, Ordering.ASC]], + }); + expect(newPerpetualMarkets.length).toEqual(1); + expectPerpetualMarketMatchesEvent(perpetualMarketEvent, newPerpetualMarkets[0]); + const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId('0'); + expect(perpetualMarket).toBeDefined(); + expectPerpetualMarket(perpetualMarket!, perpetualMarketEvent); + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); +}); function expectPerpetualMarketMatchesEvent( perpetual: PerpetualMarketCreateEventV1, diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts index c995bee8a0..ead0876f1a 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-placement-handler.test.ts @@ -38,14 +38,13 @@ import { expectOrderSubaccountKafkaMessage, } from '../../helpers/indexer-proto-helpers'; import { getPrice, getSize, getTriggerPrice } from '../../../src/lib/helper'; -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from 
'../../../src/constants'; import { ORDER_FLAG_CONDITIONAL } from '@dydxprotocol-indexer/v4-proto-parser'; import Long from 'long'; import { producer } from '@dydxprotocol-indexer/kafka'; import { ConditionalOrderPlacementHandler } from '../../../src/handlers/stateful-order/conditional-order-placement-handler'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('conditionalOrderPlacementHandler', () => { beforeAll(async () => { @@ -126,14 +125,7 @@ describe('conditionalOrderPlacementHandler', () => { }); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully places order (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('successfully places order', async () => { const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); @@ -162,9 +154,6 @@ describe('conditionalOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - if (!useSqlFunction) { - expectTimingStats(); - } expectOrderSubaccountKafkaMessage( producerSendMock, defaultOrder.orderId!.subaccountId!, @@ -172,14 +161,7 @@ describe('conditionalOrderPlacementHandler', () => { ); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully upserts order (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('successfully upserts order', async () => { const subaccountId: string = SubaccountTable.subaccountIdToUuid( defaultOrder.orderId!.subaccountId!, ); @@ -232,9 +214,6 @@ describe('conditionalOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - if (!useSqlFunction) { - expectTimingStats(); - } expectOrderSubaccountKafkaMessage( producerSendMock, defaultOrder.orderId!.subaccountId!, @@ -242,15 +221,3 @@ describe('conditionalOrderPlacementHandler', () => { ); }); }); - -function expectTimingStats() { - expectTimingStat('upsert_order'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'ConditionalOrderPlacementHandler', eventType: 'StatefulOrderEvent', fnName }, - ); -} diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts index 395301a856..47e2ea051f 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/conditional-order-triggered-handler.test.ts @@ -32,14 +32,13 @@ import { createIndexerTendermintEvent, expectVulcanKafkaMessage, } from '../../helpers/indexer-proto-helpers'; -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../../../src/constants'; import { producer } from '@dydxprotocol-indexer/kafka'; import { ORDER_FLAG_CONDITIONAL } from '@dydxprotocol-indexer/v4-proto-parser'; import { ConditionalOrderTriggeredHandler } from '../../../src/handlers/stateful-order/conditional-order-triggered-handler'; import { 
defaultPerpetualMarket } from '@dydxprotocol-indexer/postgres/build/__tests__/helpers/constants'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('conditionalOrderTriggeredHandler', () => { beforeAll(async () => { @@ -111,14 +110,7 @@ describe('conditionalOrderTriggeredHandler', () => { }); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully triggers order and sends to vulcan (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('successfully triggers order and sends to vulcan', async () => { await OrderTable.create({ ...testConstants.defaultOrderGoodTilBlockTime, orderFlags: conditionalOrderId.orderFlags.toString(), @@ -155,19 +147,9 @@ describe('conditionalOrderTriggeredHandler', () => { orderId: conditionalOrderId, offchainUpdate: expectedOffchainUpdate, }); - if (!useSqlFunction) { - expectTimingStats(); - } }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('throws error when attempting to trigger an order that does not exist (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('throws error when attempting to trigger an order that does not exist', async () => { const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); @@ -177,15 +159,3 @@ describe('conditionalOrderTriggeredHandler', () => { ); }); }); - -function expectTimingStats() { - expectTimingStat('trigger_order'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'ConditionalOrderTriggeredHandler', eventType: 'StatefulOrderEvent', fnName }, - ); -} diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts index 62cdce5817..cb02a7f39a 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-placement-handler.test.ts @@ -40,12 +40,11 @@ import { } from '../../helpers/indexer-proto-helpers'; import { StatefulOrderPlacementHandler } from '../../../src/handlers/stateful-order/stateful-order-placement-handler'; import { getPrice, getSize } from '../../../src/lib/helper'; -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../../../src/constants'; import { producer } from '@dydxprotocol-indexer/kafka'; import { ORDER_FLAG_LONG_TERM } from '@dydxprotocol-indexer/v4-proto-parser'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('statefulOrderPlacementHandler', () => { beforeAll(async () => { @@ -139,16 +138,12 @@ describe('statefulOrderPlacementHandler', () => { it.each([ // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent - ['stateful order placement (via knex)', defaultStatefulOrderEvent, false], - ['stateful order placement (via SQL function)', defaultStatefulOrderEvent, true], - ['stateful long term order placement (via 
knex)', defaultStatefulOrderLongTermEvent, false], - ['stateful long term order placement (via SQL function)', defaultStatefulOrderLongTermEvent, true], + ['stateful order placement', defaultStatefulOrderEvent], + ['stateful long term order placement', defaultStatefulOrderLongTermEvent], ])('successfully places order with %s', async ( _name: string, statefulOrderEvent: StatefulOrderEventV1, - useSqlFunction: boolean, ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( statefulOrderEvent, ); @@ -177,9 +172,6 @@ describe('statefulOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - if (!useSqlFunction) { - expectTimingStats(); - } const expectedOffchainUpdate: OffChainUpdateV1 = { orderPlace: { @@ -196,16 +188,12 @@ describe('statefulOrderPlacementHandler', () => { it.each([ // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent - ['stateful order placement (via knex)', defaultStatefulOrderEvent, false], - ['stateful order placement (via SQL function)', defaultStatefulOrderEvent, true], - ['stateful long term order placement (via knex)', defaultStatefulOrderLongTermEvent, false], - ['stateful long term order placement (via SQL function)', defaultStatefulOrderLongTermEvent, true], + ['stateful order placement', defaultStatefulOrderEvent], + ['stateful long term order placement', defaultStatefulOrderLongTermEvent], ])('successfully upserts order with %s', async ( _name: string, statefulOrderEvent: StatefulOrderEventV1, - useSqlFunction: boolean, ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; const subaccountId: string = SubaccountTable.subaccountIdToUuid( defaultOrder.orderId!.subaccountId!, ); @@ -258,21 +246,6 @@ describe('statefulOrderPlacementHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), }); - if (!useSqlFunction) { - expectTimingStats(); - } // TODO[IND-20]: Add tests for vulcan messages }); }); - -function expectTimingStats() { - expectTimingStat('upsert_order'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'StatefulOrderPlacementHandler', eventType: 'StatefulOrderEvent', fnName }, - ); -} diff --git a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts index c47f935af1..aada9ed686 100644 --- a/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/stateful-order/stateful-order-removal-handler.test.ts @@ -31,11 +31,10 @@ import { expectVulcanKafkaMessage, } from '../../helpers/indexer-proto-helpers'; import { StatefulOrderRemovalHandler } from '../../../src/handlers/stateful-order/stateful-order-removal-handler'; -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../../../src/constants'; import { producer } from '@dydxprotocol-indexer/kafka'; import { createPostgresFunctions } from '../../../src/helpers/postgres/postgres-functions'; -import config from '../../../src/config'; describe('statefulOrderRemovalHandler', () => { beforeAll(async () => { @@ -105,14 
+104,7 @@ describe('statefulOrderRemovalHandler', () => { }); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully cancels and removes order (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('successfully cancels and removes order', async () => { await OrderTable.create({ ...testConstants.defaultOrder, clientId: '0', @@ -129,9 +121,6 @@ describe('statefulOrderRemovalHandler', () => { updatedAt: defaultDateTime.toISO(), updatedAtHeight: defaultHeight.toString(), })); - if (!useSqlFunction) { - expectTimingStats(); - } const expectedOffchainUpdate: OffChainUpdateV1 = { orderRemove: { @@ -147,14 +136,7 @@ describe('statefulOrderRemovalHandler', () => { }); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('throws error when attempting to cancel an order that does not exist (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION = useSqlFunction; + it('throws error when attempting to cancel an order that does not exist', async () => { const kafkaMessage: KafkaMessage = createKafkaMessageFromStatefulOrderEvent( defaultStatefulOrderEvent, ); @@ -164,15 +146,3 @@ describe('statefulOrderRemovalHandler', () => { ); }); }); - -function expectTimingStats() { - expectTimingStat('cancel_order'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'StatefulOrderRemovalHandler', eventType: 'StatefulOrderEvent', fnName }, - ); -} diff --git a/indexer/services/ender/__tests__/handlers/subaccount-update-handler.test.ts b/indexer/services/ender/__tests__/handlers/subaccount-update-handler.test.ts index a83af20a19..e4e05c90ba 100644 --- a/indexer/services/ender/__tests__/handlers/subaccount-update-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/subaccount-update-handler.test.ts @@ -1,4 +1,4 @@ -import { stats, STATS_FUNCTION_NAME } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { IndexerTendermintBlock, IndexerTendermintEvent, @@ -58,7 +58,6 @@ import { defaultTxHash, } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; -import config from '../../src/config'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; describe('subaccountUpdateHandler', () => { @@ -144,14 +143,7 @@ describe('subaccountUpdateHandler', () => { }); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully creates subaccount (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; + it('successfully creates subaccount', async () => { const transactionIndex: number = 0; const address: string = 'cosmosnewaddress'; const subaccountId: string = SubaccountTable.uuid( @@ -188,24 +180,16 @@ describe('subaccountUpdateHandler', () => { [], [], ); - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ - ['via knex', 'positive', 200000, '-0.2', false], - ['via SQL function', 'positive', 200000, '-0.2', true], - ['via knex', 'negative', -200000, '0.2', false], - ['via SQL function', 'negative', -200000, '0.2', true], - ])('successfully upserts perpetual position (%s) with %s funding payment', async ( - _method: string, + ['positive', 200000, '-0.2'], + ['negative', 
-200000, '0.2'], + ])('successfully upserts perpetual position with %s funding payment', async ( _name: string, fundingPayment: number, settledFunding: string, - useSqlFunction: boolean, ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const eventIndex: number = 0; const sizeInQuantums: number = 1_000_000; @@ -275,24 +259,16 @@ describe('subaccountUpdateHandler', () => { [updatedPerpetualPositionSubaccountKafkaObject], [], ); - if (!useSqlFunction) { - expectTimingStats(); - } }); it.each([ - ['via knex', 'positive', 2_000_000, '-200002', false], - ['via SQL function', 'positive', 2_000_000, '-200002', true], - ['via knex', 'negative', -2_000_000, '-199998', false], - ['via SQL function', 'negative', -2_000_000, '-199998', true], + ['positive', 2_000_000, '-200002'], + ['negative', -2_000_000, '-199998'], ])('successfully updates existing perpetual position with %s funding payment', async ( - _method: string, _name: string, fundingPayment: number, settledFunding: string, - useSqlFunction: boolean, ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const sizeInQuantums: number = 1_000_000; const fundingIndex: number = 200; @@ -346,19 +322,9 @@ describe('subaccountUpdateHandler', () => { [updatedPerpetualPositionSubaccountKafkaObject], [], ); - if (!useSqlFunction) { - expectTimingStats(); - } }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('closes and creates new position when when side is opposing (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; + it('closes and creates new position when when side is opposing', async () => { const transactionIndex: number = 0; const sizeInQuantums: number = 1_000_000; const fundingIndex: number = 200; @@ -440,14 +406,7 @@ describe('subaccountUpdateHandler', () => { ); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('updates existing asset position (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; + it('updates existing asset position', async () => { const transactionIndex: number = 0; const sizeInQuantums: number = 1_000_000; const subaccountUpdateEvent: SubaccountUpdateEventV1 = SubaccountUpdateEventV1.fromPartial({ @@ -495,16 +454,11 @@ describe('subaccountUpdateHandler', () => { }); it.each([ - ['via knex', 1_000_000, false], - ['via SQL function', 1_000_000, true], - ['via knex', -2_000_000, false], - ['via SQL function', -2_000_000, true], - ])('creates new asset position (%s), size = %d', async ( - _name: string, + [1_000_000], + [-2_000_000], + ])('creates new asset position, size = %d', async ( sizeInQuantums: number, - useSqlFunction: boolean, ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; const transactionIndex: number = 0; const subaccountUpdateEvent: SubaccountUpdateEventV1 = SubaccountUpdateEventV1.fromPartial({ // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -567,14 +521,7 @@ describe('subaccountUpdateHandler', () => { ); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('closes existing position when size is 0 (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; + it('closes existing position when size is 0', async () => { const transactionIndex: number = 0; const sizeInQuantums: number 
= 1_000_000; const fundingIndex: number = 200; @@ -640,14 +587,7 @@ describe('subaccountUpdateHandler', () => { ).toBeUndefined(); }); - it.each([ - ['via knex', false], - ['via SQL function', true], - ])('successfully upserts perpetual and asset position with fixed-point notation sizes (%s)', async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION = useSqlFunction; + it('successfully upserts perpetual and asset position with fixed-point notation sizes', async () => { const transactionIndex: number = 0; const eventIndex: number = 0; const sizeInQuantums: number = 10; @@ -740,20 +680,6 @@ describe('subaccountUpdateHandler', () => { }); }); -function expectTimingStats() { - expectTimingStat('upsert_subaccount'); - expectTimingStat('get_existing_perpetual_positions'); - expectTimingStat('update_perpetual_positions'); - expectTimingStat('update_asset_positions'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { className: 'SubaccountUpdateHandler', eventType: 'SubaccountUpdateEvent', fnName }); -} - function createKafkaMessageFromSubaccountUpdateEvent({ subaccountUpdateEvent, transactionIndex, diff --git a/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts b/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts index fe05ed7ff9..0b526d9617 100644 --- a/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/transfer-handler.test.ts @@ -2,7 +2,6 @@ import { logger, ParseMessageError, stats, - STATS_FUNCTION_NAME, } from '@dydxprotocol-indexer/base'; import { IndexerTendermintBlock, @@ -52,7 +51,6 @@ import { } from '../helpers/constants'; import { updateBlockCache } from '../../src/caches/block-cache'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('transferHandler', () => { beforeAll(async () => { @@ -135,452 +133,332 @@ describe('transferHandler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when TransferEvent does not contain sender subaccountId (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ - recipient: { - subaccountId: { - owner: '', - number: 0, - }, + it('fails when TransferEvent does not contain sender subaccountId', async () => { + const transactionIndex: number = 0; + const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ + recipient: { + subaccountId: { + owner: '', + number: 0, }, - assetId: 0, - amount: 100, - }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + }, + assetId: 0, + amount: 100, + }); + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const loggerCrit = jest.spyOn(logger, 'crit'); - const loggerError = jest.spyOn(logger, 'error'); - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError( - 'TransferEvent must have either a sender subaccount id or sender 
wallet address', - ), - ); + const loggerCrit = jest.spyOn(logger, 'crit'); + const loggerError = jest.spyOn(logger, 'error'); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError( + 'TransferEvent must have either a sender subaccount id or sender wallet address', + ), + ); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'TransferValidator#logAndThrowParseMessageError', - message: 'TransferEvent must have either a sender subaccount id or sender wallet address', - })); - expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); - }); + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'TransferValidator#logAndThrowParseMessageError', + message: 'TransferEvent must have either a sender subaccount id or sender wallet address', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'fails when TransferEvent does not contain recipient subaccountId (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ - sender: { - subaccountId: { - owner: '', - number: 0, - }, + it('fails when TransferEvent does not contain recipient subaccountId', async () => { + const transactionIndex: number = 0; + const transferEvent: TransferEventV1 = TransferEventV1.fromPartial({ + sender: { + subaccountId: { + owner: '', + number: 0, }, - assetId: 0, - amount: 100, - }); - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + }, + assetId: 0, + amount: 100, + }); + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, + }); - const loggerCrit = jest.spyOn(logger, 'crit'); - const loggerError = jest.spyOn(logger, 'error'); - await expect(onMessage(kafkaMessage)).rejects.toThrowError( - new ParseMessageError( - 'TransferEvent must have either a recipient subaccount id or recipient wallet address', - ), - ); + const loggerCrit = jest.spyOn(logger, 'crit'); + const loggerError = jest.spyOn(logger, 'error'); + await expect(onMessage(kafkaMessage)).rejects.toThrowError( + new ParseMessageError( + 'TransferEvent must have either a recipient subaccount id or recipient wallet address', + ), + ); + + expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ + at: 'TransferValidator#logAndThrowParseMessageError', + message: 'TransferEvent must have either a recipient subaccount id or recipient wallet address', + })); + expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ + at: 'onMessage#onMessage', + message: 'Error: Unable to parse message, this must be due to a bug in V4 node', + })); + }); - expect(loggerError).toHaveBeenCalledWith(expect.objectContaining({ - at: 'TransferValidator#logAndThrowParseMessageError', - message: 'TransferEvent must have either a recipient subaccount id or recipient wallet address', - })); - 
expect(loggerCrit).toHaveBeenCalledWith(expect.objectContaining({ - at: 'onMessage#onMessage', - message: 'Error: Unable to parse message, this must be due to a bug in V4 node', - })); + it('creates new transfer for existing subaccounts', async () => { + const transactionIndex: number = 0; + + const transferEvent: TransferEventV1 = defaultTransferEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new transfer for existing subaccounts (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultSenderSubaccount), + SubaccountTable.upsert(defaultRecipientSubaccount), + ]); + + // Confirm there are subaccounts + const subaccountIds: string[] = [defaultSenderSubaccountId, defaultRecipientSubaccountId]; + _.each(subaccountIds, async (subaccountId) => { + const existingSubaccount: + SubaccountFromDatabase | undefined = await SubaccountTable.findById( + subaccountId, + ); + expect(existingSubaccount).toBeDefined(); + }); - const transferEvent: TransferEventV1 = defaultTransferEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + // Confirm there is no existing transfer to or from the recipient/sender subaccounts + await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultSenderSubaccount), - SubaccountTable.upsert(defaultRecipientSubaccount), - ]); - - // Confirm there are subaccounts - const subaccountIds: string[] = [defaultSenderSubaccountId, defaultRecipientSubaccountId]; - _.each(subaccountIds, async (subaccountId) => { - const existingSubaccount: - SubaccountFromDatabase | undefined = await SubaccountTable.findById( - subaccountId, - ); - expect(existingSubaccount).toBeDefined(); - }); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - // Confirm there is no existing transfer to or from the recipient/sender subaccounts - await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer({ + recipientSubaccountId: defaultRecipientSubaccountId, + senderSubaccountId: defaultSenderSubaccountId, + }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + expectTransferMatchesEvent(transferEvent, newTransfer, asset); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer({ - recipientSubaccountId: defaultRecipientSubaccountId, - senderSubaccountId: defaultSenderSubaccountId, - }); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + transferEvent, + newTransfer, + asset, + ); + }); - expectTransferMatchesEvent(transferEvent, newTransfer, asset); + it('creates new deposit for existing subaccount', async () => { + const transactionIndex: number = 0; - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - transferEvent, - newTransfer, - asset, - ); - if 
(!useSqlFunction) { - expectTimingStats(); - } + const depositEvent: TransferEventV1 = defaultDepositEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: depositEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new deposit for existing subaccount (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultRecipientSubaccount), + ]); - const depositEvent: TransferEventV1 = defaultDepositEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: depositEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + // Confirm there is a recipient subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingSubaccount).toBeDefined(); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultRecipientSubaccount), - ]); + // Confirm there is no existing transfer to or from the recipient subaccount + await expectNoExistingTransfers([defaultRecipientSubaccountId]); - // Confirm there is a recipient subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingSubaccount).toBeDefined(); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - // Confirm there is no existing transfer to or from the recipient subaccount - await expectNoExistingTransfers([defaultRecipientSubaccountId]); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + }, + ); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + expectTransferMatchesEvent(depositEvent, newTransfer, asset); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - }, - ); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + depositEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + expect(wallet).toBeDefined(); + }); - expectTransferMatchesEvent(depositEvent, newTransfer, asset); + it('creates new deposit for previously non-existent subaccount', async () => { + const transactionIndex: number = 0; - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - depositEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - expect(wallet).toBeDefined(); - if (!useSqlFunction) { - expectTimingStats(); - } + const depositEvent: TransferEventV1 = defaultDepositEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: depositEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', 
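On the src/ side of this patch (the config.ts and handler diffs below), removing the Knex branch leaves each handler with a single round trip: decode the event, invoke the corresponding Postgres function through storeHelpers.rawQuery, and hydrate model objects from the returned JSON row. A minimal sketch of that shape for the transfer handler; the function name dydx_transfer_handler, its argument list, the result shape, and the TransferModel import are assumptions for illustration, not the patch's exact signature:

import { storeHelpers, TransferFromDatabase, TransferModel } from '@dydxprotocol-indexer/postgres';
import { TransferEventV1 } from '@dydxprotocol-indexer/v4-protos';
import * as pg from 'pg';

// Illustrative sketch: the SQL function performs the wallet/subaccount
// upserts and the transfer insert in one transaction and returns the
// affected rows as JSON.
async function handleTransferViaSqlFunction(
  event: TransferEventV1,
  height: string,
  txId: number | undefined,
): Promise<TransferFromDatabase> {
  const eventJson: string = JSON.stringify(TransferEventV1.toJSON(event));
  const result: pg.QueryResult = await storeHelpers.rawQuery(
    `SELECT dydx_transfer_handler(${height}, '${eventJson}') AS result;`,
    { txId },
  );
  return TransferModel.fromJson(result.rows[0].result.transfer) as TransferFromDatabase;
}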
- true, - ], - ])( - 'creates new deposit for previously non-existent subaccount (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; + // Confirm there is no recipient subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingSubaccount).toBeUndefined(); - const depositEvent: TransferEventV1 = defaultDepositEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: depositEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + // Confirm there is no existing transfer to or from the recipient subaccount + await expectNoExistingTransfers([defaultRecipientSubaccountId]); - // Confirm there is no recipient subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingSubaccount).toBeUndefined(); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - // Confirm there is no existing transfer to or from the recipient subaccount - await expectNoExistingTransfers([defaultRecipientSubaccountId]); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + }, + ); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + expectTransferMatchesEvent(depositEvent, newTransfer, asset); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + depositEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + const newRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(newRecipientSubaccount).toBeDefined(); + expect(wallet).toBeDefined(); + }); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - }, - ); + it('creates new withdrawal for existing subaccount', async () => { + const transactionIndex: number = 0; - expectTransferMatchesEvent(depositEvent, newTransfer, asset); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - depositEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - const newRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(newRecipientSubaccount).toBeDefined(); - expect(wallet).toBeDefined(); - if (!useSqlFunction) { - expectTimingStats(); - } + const withdrawalEvent: TransferEventV1 = defaultWithdrawalEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent: withdrawalEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new withdrawal for existing subaccount (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - 
const transactionIndex: number = 0; + // Create the subaccounts + await Promise.all([ + SubaccountTable.upsert(defaultSenderSubaccount), + ]); - const withdrawalEvent: TransferEventV1 = defaultWithdrawalEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent: withdrawalEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + // Confirm there is a sender subaccount + const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( + defaultSenderSubaccountId, + ); + expect(existingSubaccount).toBeDefined(); - // Create the subaccounts - await Promise.all([ - SubaccountTable.upsert(defaultSenderSubaccount), - ]); + // Confirm there is no existing transfer to or from the sender subaccount + await expectNoExistingTransfers([defaultSenderSubaccountId]); - // Confirm there is a sender subaccount - const existingSubaccount: SubaccountFromDatabase | undefined = await SubaccountTable.findById( - defaultSenderSubaccountId, - ); - expect(existingSubaccount).toBeDefined(); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - // Confirm there is no existing transfer to or from the sender subaccount - await expectNoExistingTransfers([defaultSenderSubaccountId]); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + senderSubaccountId: defaultSenderSubaccountId, + }, + ); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + expectTransferMatchesEvent(withdrawalEvent, newTransfer, asset); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - senderSubaccountId: defaultSenderSubaccountId, - }, - ); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + withdrawalEvent, + newTransfer, + asset, + ); + // Confirm the wallet was created + const wallet: WalletFromDatabase | undefined = await WalletTable.findById( + defaultWalletAddress, + ); + expect(wallet).toBeDefined(); + }); - expectTransferMatchesEvent(withdrawalEvent, newTransfer, asset); + it('creates new transfer and the recipient subaccount', async () => { + const transactionIndex: number = 0; - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - withdrawalEvent, - newTransfer, - asset, - ); - // Confirm the wallet was created - const wallet: WalletFromDatabase | undefined = await WalletTable.findById( - defaultWalletAddress, - ); - expect(wallet).toBeDefined(); - if (!useSqlFunction) { - expectTimingStats(); - } + const transferEvent: TransferEventV1 = defaultTransferEvent; + const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ + transferEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'creates new transfer and the recipient subaccount (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_TRANSFER_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - - const transferEvent: TransferEventV1 = defaultTransferEvent; - const kafkaMessage: KafkaMessage = createKafkaMessageFromTransferEvent({ - transferEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); + await SubaccountTable.upsert(defaultSenderSubaccount); - await SubaccountTable.upsert(defaultSenderSubaccount); - - // 
Confirm there is 1 subaccount - const existingSenderSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultSenderSubaccountId, - ); - expect(existingSenderSubaccount).toBeDefined(); - const existingRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(existingRecipientSubaccount).toBeUndefined(); + // Confirm there is 1 subaccount + const existingSenderSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultSenderSubaccountId, + ); + expect(existingSenderSubaccount).toBeDefined(); + const existingRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(existingRecipientSubaccount).toBeUndefined(); - // Confirm there is no existing transfers - await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); + // Confirm there is no existing transfers + await expectNoExistingTransfers([defaultRecipientSubaccountId, defaultSenderSubaccountId]); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); - const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( - { - recipientSubaccountId: defaultRecipientSubaccountId, - senderSubaccountId: defaultSenderSubaccountId, - }); + const newTransfer: TransferFromDatabase = await expectAndReturnNewTransfer( + { + recipientSubaccountId: defaultRecipientSubaccountId, + senderSubaccountId: defaultSenderSubaccountId, + }); - expectTransferMatchesEvent(transferEvent, newTransfer, asset); - const newRecipientSubaccount: SubaccountFromDatabase | undefined = await - SubaccountTable.findById( - defaultRecipientSubaccountId, - ); - expect(newRecipientSubaccount).toBeDefined(); + expectTransferMatchesEvent(transferEvent, newTransfer, asset); + const newRecipientSubaccount: SubaccountFromDatabase | undefined = await + SubaccountTable.findById( + defaultRecipientSubaccountId, + ); + expect(newRecipientSubaccount).toBeDefined(); - await expectTransfersSubaccountKafkaMessage( - producerSendMock, - transferEvent, - newTransfer, - asset, - ); - if (!useSqlFunction) { - expectTimingStats(); - } - }); + await expectTransfersSubaccountKafkaMessage( + producerSendMock, + transferEvent, + newTransfer, + asset, + ); + }); }); function createKafkaMessageFromTransferEvent({ @@ -621,23 +499,6 @@ function createKafkaMessageFromTransferEvent({ return createKafkaMessage(Buffer.from(binaryBlock)); } -function expectTimingStats() { - expectTimingStat('upsert_recipient_subaccount_and_wallets'); - expectTimingStat('create_transfer_and_get_asset'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'TransferHandler', - eventType: 'TransferEvent', - fnName, - }, - ); -} - function expectTransferMatchesEvent( event: TransferEventV1, transfer: TransferFromDatabase, diff --git a/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts b/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts index 23b0fbd255..bf57b1ff43 100644 --- a/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/update-clob-pair-handler.test.ts @@ -1,4 +1,4 
@@ -import { STATS_FUNCTION_NAME, stats } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { PerpetualMarketFromDatabase, PerpetualMarketTable, @@ -33,7 +33,6 @@ import { createKafkaMessage, producer } from '@dydxprotocol-indexer/kafka'; import { KafkaMessage } from 'kafkajs'; import { onMessage } from '../../src/lib/on-message'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('update-clob-pair-handler', () => { beforeAll(async () => { @@ -92,71 +91,37 @@ describe('update-clob-pair-handler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'updates an existing perpetual market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdateClobPairEvent({ - updatePerpetualEvent: defaultUpdateClobPairEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); - - const perpetualMarketId: string = perpetualMarketRefresher.getPerpetualMarketFromClobPairId( - defaultUpdateClobPairEvent.clobPairId.toString(), - )!.id; - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( - perpetualMarketId, - ); - expect(perpetualMarket).toEqual(expect.objectContaining({ - clobPairId: defaultUpdateClobPairEvent.clobPairId.toString(), - status: protocolTranslations.clobStatusToMarketStatus(defaultUpdateClobPairEvent.status), - quantumConversionExponent: defaultUpdateClobPairEvent.quantumConversionExponent, - subticksPerTick: defaultUpdateClobPairEvent.subticksPerTick, - stepBaseQuantums: defaultUpdateClobPairEvent.stepBaseQuantums.toNumber(), - })); - expect(perpetualMarket).toEqual( - perpetualMarketRefresher.getPerpetualMarketFromId(perpetualMarketId)); - if (!useSqlFunction) { - expectTimingStats(); - } - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + it('updates an existing perpetual market', async () => { + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdateClobPairEvent({ + updatePerpetualEvent: defaultUpdateClobPairEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); -}); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); -function expectTimingStats() { - expectTimingStat('update_clob_pair'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - `ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'UpdateClobPairHandler', - eventType: 'UpdateClobPairEventV1', - fnName, - }, - ); -} + const perpetualMarketId: string = perpetualMarketRefresher.getPerpetualMarketFromClobPairId( + defaultUpdateClobPairEvent.clobPairId.toString(), + )!.id; + const perpetualMarket: + PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( + perpetualMarketId, + ); + expect(perpetualMarket).toEqual(expect.objectContaining({ + clobPairId: defaultUpdateClobPairEvent.clobPairId.toString(), + status: protocolTranslations.clobStatusToMarketStatus(defaultUpdateClobPairEvent.status), + 
quantumConversionExponent: defaultUpdateClobPairEvent.quantumConversionExponent, + subticksPerTick: defaultUpdateClobPairEvent.subticksPerTick, + stepBaseQuantums: defaultUpdateClobPairEvent.stepBaseQuantums.toNumber(), + })); + expect(perpetualMarket).toEqual( + perpetualMarketRefresher.getPerpetualMarketFromId(perpetualMarketId)); + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); +}); function createKafkaMessageFromUpdateClobPairEvent({ updatePerpetualEvent, diff --git a/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts b/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts index dcc3103023..a4c655d916 100644 --- a/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts +++ b/indexer/services/ender/__tests__/handlers/update-perpetual-handler.test.ts @@ -1,4 +1,4 @@ -import { STATS_FUNCTION_NAME, stats } from '@dydxprotocol-indexer/base'; +import { stats } from '@dydxprotocol-indexer/base'; import { PerpetualMarketFromDatabase, PerpetualMarketTable, @@ -32,7 +32,6 @@ import { createKafkaMessage, producer } from '@dydxprotocol-indexer/kafka'; import { KafkaMessage } from 'kafkajs'; import { onMessage } from '../../src/lib/on-message'; import { createPostgresFunctions } from '../../src/helpers/postgres/postgres-functions'; -import config from '../../src/config'; describe('update-perpetual-handler', () => { beforeAll(async () => { @@ -91,69 +90,35 @@ describe('update-perpetual-handler', () => { }); }); - it.each([ - [ - 'via knex', - false, - ], - [ - 'via SQL function', - true, - ], - ])( - 'updates an existing perpetual market (%s)', - async ( - _name: string, - useSqlFunction: boolean, - ) => { - config.USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION = useSqlFunction; - const transactionIndex: number = 0; - const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdatePerpetualEvent({ - updatePerpetualEvent: defaultUpdatePerpetualEvent, - transactionIndex, - height: defaultHeight, - time: defaultTime, - txHash: defaultTxHash, - }); - const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); - await onMessage(kafkaMessage); - - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( - defaultUpdatePerpetualEvent.id.toString(), - ); - expect(perpetualMarket).toEqual(expect.objectContaining({ - id: defaultUpdatePerpetualEvent.id.toString(), - ticker: defaultUpdatePerpetualEvent.ticker, - marketId: defaultUpdatePerpetualEvent.marketId, - atomicResolution: defaultUpdatePerpetualEvent.atomicResolution, - liquidityTierId: defaultUpdatePerpetualEvent.liquidityTier, - })); - expect(perpetualMarket).toEqual( - perpetualMarketRefresher.getPerpetualMarketFromId( - defaultUpdatePerpetualEvent.id.toString())); - if (!useSqlFunction) { - expectTimingStats(); - } - expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + it('updates an existing perpetual market', async () => { + const transactionIndex: number = 0; + const kafkaMessage: KafkaMessage = createKafkaMessageFromUpdatePerpetualEvent({ + updatePerpetualEvent: defaultUpdatePerpetualEvent, + transactionIndex, + height: defaultHeight, + time: defaultTime, + txHash: defaultTxHash, }); -}); + const producerSendMock: jest.SpyInstance = jest.spyOn(producer, 'send'); + await onMessage(kafkaMessage); -function expectTimingStats() { - expectTimingStat('update_perpetual'); -} - -function expectTimingStat(fnName: string) { - expect(stats.timing).toHaveBeenCalledWith( - 
`ender.${STATS_FUNCTION_NAME}.timing`, - expect.any(Number), - { - className: 'UpdatePerpetualHandler', - eventType: 'UpdatePerpetualEventV1', - fnName, - }, - ); -} + const perpetualMarket: + PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.findById( + defaultUpdatePerpetualEvent.id.toString(), + ); + expect(perpetualMarket).toEqual(expect.objectContaining({ + id: defaultUpdatePerpetualEvent.id.toString(), + ticker: defaultUpdatePerpetualEvent.ticker, + marketId: defaultUpdatePerpetualEvent.marketId, + atomicResolution: defaultUpdatePerpetualEvent.atomicResolution, + liquidityTierId: defaultUpdatePerpetualEvent.liquidityTier, + })); + expect(perpetualMarket).toEqual( + perpetualMarketRefresher.getPerpetualMarketFromId( + defaultUpdatePerpetualEvent.id.toString())); + expectPerpetualMarketKafkaMessage(producerSendMock, [perpetualMarket!]); + }); +}); function createKafkaMessageFromUpdatePerpetualEvent({ updatePerpetualEvent, diff --git a/indexer/services/ender/src/config.ts b/indexer/services/ender/src/config.ts index e129dace57..21f170ae73 100644 --- a/indexer/services/ender/src/config.ts +++ b/indexer/services/ender/src/config.ts @@ -23,48 +23,6 @@ export const configSchema = { SEND_WEBSOCKET_MESSAGES: parseBoolean({ default: true, }), - USE_ASSET_CREATE_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_FUNDING_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_LIQUIDATION_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_MARKET_CREATE_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_MARKET_PRICE_UPDATE_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_ORDER_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_SUBACCOUNT_UPDATE_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_TRANSFER_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), - USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION: parseBoolean({ - default: true, - }), USE_SQL_FUNCTION_TO_CREATE_INITIAL_ROWS: parseBoolean({ default: true, }), diff --git a/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts b/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts index 2f1ea7dcfd..e2dfb35283 100644 --- a/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts +++ b/indexer/services/ender/src/handlers/abstract-stateful-order-handler.ts @@ -1,14 +1,6 @@ import { logger } from '@dydxprotocol-indexer/base'; import { OrderFromDatabase, - OrderStatus, - OrderTable, - OrderUpdateObject, - OrderCreateObject, - SubaccountTable, - OrderSide, - OrderType, - protocolTranslations, PerpetualMarketFromDatabase, storeHelpers, OrderModel, @@ -17,16 +9,11 @@ import { } from '@dydxprotocol-indexer/postgres'; import SubaccountModel from '@dydxprotocol-indexer/postgres/build/src/models/subaccount-model'; import { - IndexerOrderId, - IndexerOrder, - IndexerOrder_Side, StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; -import { DateTime } from 'luxon'; import * as pg from 'pg'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE } from '../constants'; -import { getPrice, getSize 
} from '../lib/helper'; import { Handler } from './handler'; export abstract class AbstractStatefulOrderHandler extends Handler { @@ -69,72 +56,4 @@ export abstract class AbstractStatefulOrderHandler extends Handler { : undefined, ]; } - - protected async updateOrderStatus( - orderIdProto: IndexerOrderId, - status: OrderStatus, - ): Promise { - const orderId = OrderTable.orderIdToUuid(orderIdProto); - const orderUpdateObject: OrderUpdateObject = { - id: orderId, - status, - updatedAt: DateTime.fromJSDate(this.block.time!).toISO(), - updatedAtHeight: this.block.height.toString(), - }; - - const order: OrderFromDatabase | undefined = await OrderTable.update( - orderUpdateObject, - { txId: this.txId }, - ); - if (order === undefined) { - const message: string = `Unable to update order status with orderId: ${orderId}`; - logger.error({ - at: 'AbstractStatefulOrderHandler#cancelOrder', - message, - status, - }); - throw new Error(message); - } - return order; - } - - /** - * Upsert order to database, because there may be an existing order with the orderId in the - * database. - */ - // eslint-disable-next-line @typescript-eslint/require-await - protected async upsertOrder( - perpetualMarket: PerpetualMarketFromDatabase, - order: IndexerOrder, - type: OrderType, - status: OrderStatus, - triggerPrice?: string, - ): Promise { - const size: string = getSize(order, perpetualMarket); - const price: string = getPrice(order, perpetualMarket); - - const orderToCreate: OrderCreateObject = { - subaccountId: SubaccountTable.subaccountIdToUuid(order.orderId!.subaccountId!), - clientId: order.orderId!.clientId.toString(), - clobPairId: order.orderId!.clobPairId.toString(), - side: order.side === IndexerOrder_Side.SIDE_BUY ? OrderSide.BUY : OrderSide.SELL, - size, - totalFilled: '0', - price, - type, - status, - timeInForce: protocolTranslations.protocolOrderTIFToTIF(order.timeInForce), - reduceOnly: order.reduceOnly, - orderFlags: order.orderId!.orderFlags.toString(), - // On chain orders must have a goodTilBlockTime rather than a goodTilBlock - goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(order), - createdAtHeight: this.block.height.toString(), - clientMetadata: order.clientMetadata.toString(), - triggerPrice, - updatedAt: DateTime.fromJSDate(this.block.time!).toISO(), - updatedAtHeight: this.block.height.toString(), - }; - - return OrderTable.upsert(orderToCreate, { txId: this.txId }); - } } diff --git a/indexer/services/ender/src/handlers/asset-handler.ts b/indexer/services/ender/src/handlers/asset-handler.ts index 1287952cc7..19a4314754 100644 --- a/indexer/services/ender/src/handlers/asset-handler.ts +++ b/indexer/services/ender/src/handlers/asset-handler.ts @@ -2,15 +2,12 @@ import { logger } from '@dydxprotocol-indexer/base'; import { AssetFromDatabase, AssetModel, - AssetTable, assetRefresher, - marketRefresher, storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { AssetCreateEventV1 } from '@dydxprotocol-indexer/v4-protos'; import * as pg from 'pg'; -import config from '../config'; import { ConsolidatedKafkaEvent } from '../lib/types'; import { Handler } from './handler'; @@ -23,13 +20,6 @@ export class AssetCreationHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { - if (config.USE_ASSET_CREATE_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnex(); - } - - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = 
this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_asset_create_handler( @@ -51,28 +41,4 @@ export class AssetCreationHandler extends Handler { assetRefresher.addAsset(asset); return []; } - - private async handleViaKnex(): Promise { - await this.runFuncWithTimingStatAndErrorLogging( - this.createAsset(), - this.generateTimingStatsOptions('create_asset'), - ); - return []; - } - - private async createAsset(): Promise { - if (this.event.hasMarket) { - marketRefresher.getMarketFromId( - this.event.marketId, - ); - } - const asset: AssetFromDatabase = await AssetTable.create({ - id: this.event.id.toString(), - symbol: this.event.symbol, - atomicResolution: this.event.atomicResolution, - hasMarket: this.event.hasMarket, - marketId: this.event.marketId, - }, { txId: this.txId }); - assetRefresher.addAsset(asset); - } } diff --git a/indexer/services/ender/src/handlers/funding-handler.ts b/indexer/services/ender/src/handlers/funding-handler.ts index 28baf619ec..c64e33651c 100644 --- a/indexer/services/ender/src/handlers/funding-handler.ts +++ b/indexer/services/ender/src/handlers/funding-handler.ts @@ -2,16 +2,12 @@ import { logger } from '@dydxprotocol-indexer/base'; import { FundingIndexUpdatesTable, PerpetualMarketFromDatabase, - perpetualMarketRefresher, TendermintEventTable, - FundingIndexUpdatesCreateObject, - FundingIndexUpdatesFromDatabase, protocolTranslations, storeHelpers, PerpetualMarketModel, } from '@dydxprotocol-indexer/postgres'; import { NextFundingCache } from '@dydxprotocol-indexer/redis'; -import { bytesToBigInt } from '@dydxprotocol-indexer/v4-proto-parser'; import { FundingEventV1, FundingEventV1_Type, @@ -21,8 +17,6 @@ import Big from 'big.js'; import _ from 'lodash'; import * as pg from 'pg'; -import { getPrice } from '../caches/price-cache'; -import config from '../config'; import { redisClient } from '../helpers/redis/redis-controller'; import { indexerTendermintEventToTransactionIndex } from '../lib/helper'; import { ConsolidatedKafkaEvent, FundingEventMessage } from '../lib/types'; @@ -56,13 +50,6 @@ export class FundingHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { - if (config.USE_FUNDING_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnex(); - } - - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const transactionIndex: number = indexerTendermintEventToTransactionIndex( this.indexerTendermintEvent, @@ -143,102 +130,4 @@ export class FundingHandler extends Handler { await Promise.all(promises); return []; } - - private async handleViaKnex(): Promise { - logger.info({ - at: 'FundingHandler#handle', - message: 'Received FundingEvent.', - event: this.event, - }); - const castedFundingEvent: FundingEventMessage = this.event as FundingEventMessage; - switch (castedFundingEvent.type) { - case FundingEventV1_Type.TYPE_PREMIUM_SAMPLE: - await this.runFuncWithTimingStatAndErrorLogging( - this.handleFundingSample(castedFundingEvent.updates), - this.generateTimingStatsOptions('handle_premium_sample'), - ); - break; - case FundingEventV1_Type.TYPE_FUNDING_RATE_AND_INDEX: - await this.runFuncWithTimingStatAndErrorLogging( - this.handleFundingRate(castedFundingEvent.updates), - this.generateTimingStatsOptions('handle_funding_rate'), - ); - break; - default: - logger.error({ - at: 'FundingHandler#handle', - message: 'Received 
unknown FundingEvent type.', - event: this.event, - }); - } - return []; - } - - private async handleFundingSample(samples: FundingUpdateV1[]): Promise { - await Promise.all( - _.map(samples, (sample: FundingUpdateV1) => { - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId( - sample.perpetualId.toString(), - ); - if (perpetualMarket === undefined) { - logger.error({ - at: 'FundingHandler#handleFundingSample', - message: 'Received FundingUpdate with unknown perpetualId.', - sample, - }); - return; - } - const ticker: string = perpetualMarket.ticker; - const rate: string = protocolTranslations.funding8HourValuePpmTo1HourRate( - sample.fundingValuePpm, - ); - return NextFundingCache.addFundingSample(ticker, new Big(rate), redisClient); - }), - ); - } - - private async handleFundingRate(updates: FundingUpdateV1[]): Promise { - // clear the cache for the predicted next funding rate - await Promise.all( - _.map(updates, (update: FundingUpdateV1) => { - const perpetualMarket: - PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher.getPerpetualMarketFromId( - update.perpetualId.toString(), - ); - if (perpetualMarket === undefined) { - logger.error({ - at: 'FundingHandler#handleFundingRate', - message: 'Received FundingUpdate with unknown perpetualId.', - update, - }); - return; - } - const ticker: string = perpetualMarket.ticker; - const numCleared: - Promise = NextFundingCache.clearFundingSamples(ticker, redisClient); - const fundingIndexUpdatesCreateObject: FundingIndexUpdatesCreateObject = { - perpetualId: update.perpetualId.toString(), - eventId: this.eventId, - rate: protocolTranslations.funding8HourValuePpmTo1HourRate(update.fundingValuePpm), - oraclePrice: getPrice(perpetualMarket.marketId), - fundingIndex: protocolTranslations.fundingIndexToHumanFixedString( - bytesToBigInt(update.fundingIndex).toString(), - perpetualMarket, - ), - effectiveAt: this.timestamp.toISO(), - effectiveAtHeight: this.block.height.toString(), - }; - const fundingIndexUpdatesFromDatabase: - Promise = FundingIndexUpdatesTable - .create( - fundingIndexUpdatesCreateObject, - { txId: this.txId }, - ); - return [numCleared, fundingIndexUpdatesFromDatabase]; - }) - // flatten nested promise arrays - .map(Promise.all, Promise), - ); - } } diff --git a/indexer/services/ender/src/handlers/liquidity-tier-handler.ts b/indexer/services/ender/src/handlers/liquidity-tier-handler.ts index 708440023b..04910c442e 100644 --- a/indexer/services/ender/src/handlers/liquidity-tier-handler.ts +++ b/indexer/services/ender/src/handlers/liquidity-tier-handler.ts @@ -1,21 +1,16 @@ import { logger } from '@dydxprotocol-indexer/base'; import { - LiquidityTiersCreateObject, LiquidityTiersFromDatabase, LiquidityTiersModel, - LiquidityTiersTable, PerpetualMarketFromDatabase, liquidityTierRefresher, perpetualMarketRefresher, - protocolTranslations, storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { LiquidityTierUpsertEventV1 } from '@dydxprotocol-indexer/v4-protos'; import _ from 'lodash'; import * as pg from 'pg'; -import config from '../config'; -import { QUOTE_CURRENCY_ATOMIC_RESOLUTION } from '../constants'; import { generatePerpetualMarketMessage } from '../helpers/kafka-helper'; import { ConsolidatedKafkaEvent } from '../lib/types'; import { Handler } from './handler'; @@ -29,13 +24,6 @@ export class LiquidityTierHandler extends Handler { // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { - if 
(config.USE_LIQUIDITY_TIER_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnex(); - } - - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_liquidity_tier_handler( @@ -58,38 +46,6 @@ export class LiquidityTierHandler extends Handler { return this.generateWebsocketEventsForLiquidityTier(liquidityTier); } - private async handleViaKnex(): Promise { - const liquidityTier: - LiquidityTiersFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.upsertLiquidityTier(), - this.generateTimingStatsOptions('upsert_liquidity_tier'), - ); - return this.generateWebsocketEventsForLiquidityTier(liquidityTier); - } - - private async upsertLiquidityTier(): Promise { - const liquidityTier: LiquidityTiersFromDatabase = await LiquidityTiersTable.upsert( - this.getLiquidityTiersCreateObject(this.event), - { txId: this.txId }, - ); - liquidityTierRefresher.upsertLiquidityTier(liquidityTier); - return liquidityTier; - } - - private getLiquidityTiersCreateObject(liquidityTier: LiquidityTierUpsertEventV1): - LiquidityTiersCreateObject { - return { - id: liquidityTier.id, - name: liquidityTier.name, - initialMarginPpm: liquidityTier.initialMarginPpm.toString(), - maintenanceFractionPpm: liquidityTier.maintenanceFractionPpm.toString(), - basePositionNotional: protocolTranslations.quantumsToHuman( - liquidityTier.basePositionNotional.toString(), - QUOTE_CURRENCY_ATOMIC_RESOLUTION, - ).toFixed(), - }; - } - private generateWebsocketEventsForLiquidityTier(liquidityTier: LiquidityTiersFromDatabase): ConsolidatedKafkaEvent[] { const perpetualMarkets: PerpetualMarketFromDatabase[] = _.filter( diff --git a/indexer/services/ender/src/handlers/markets/market-create-handler.ts b/indexer/services/ender/src/handlers/markets/market-create-handler.ts index 8c56eb1880..7ee57ce94e 100644 --- a/indexer/services/ender/src/handlers/markets/market-create-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-create-handler.ts @@ -2,14 +2,12 @@ import { logger } from '@dydxprotocol-indexer/base'; import { MarketFromDatabase, MarketModel, - MarketTable, marketRefresher, storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; import * as pg from 'pg'; -import config from '../../config'; import { ConsolidatedKafkaEvent, MarketCreateEventMessage } from '../../lib/types'; import { Handler } from '../handler'; @@ -27,34 +25,7 @@ export class MarketCreateHandler extends Handler { message: 'Received MarketEvent with MarketCreate.', event: this.event, }); - if (config.USE_MARKET_CREATE_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnexQueries(); - } - - // eslint-disable-next-line @typescript-eslint/require-await - public async handleViaKnexQueries(): Promise { - // MarketHandler already makes sure the event has 'marketCreate' as the oneofKind. 
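
The replacement bodies across these handlers share one shape: re-encode the event proto to bytes, call the matching dydx_*_handler Postgres function through storeHelpers.rawQuery, and hydrate the returned row back into the in-memory model. A minimal sketch of that shape follows; the hex bytea argument encoding, the result-row layout, and the omitted rawQuery options are assumptions for illustration, since the patch truncates those details.

import {
  MarketFromDatabase,
  MarketModel,
  marketRefresher,
  storeHelpers,
} from '@dydxprotocol-indexer/postgres';
import * as pg from 'pg';

async function handleViaSqlFunction(eventDataBinary: Uint8Array): Promise<void> {
  // Hand the raw protobuf bytes to the SQL function; decoding, validation, and
  // the table write all happen inside Postgres in a single round trip.
  // The hex bytea encoding here is an assumed transport, not quoted from the patch.
  const result: pg.QueryResult = await storeHelpers.rawQuery(
    `SELECT dydx_market_create_handler(
      '\\x${Buffer.from(eventDataBinary).toString('hex')}'::bytea
    ) AS result;`,
  );

  // Hydrate the returned JSON into the model and refresh the in-memory cache,
  // mirroring what the deleted Knex path did in TypeScript. The row layout
  // (result.rows[0].result.market) is assumed for illustration.
  const market: MarketFromDatabase = MarketModel.fromJson(
    result.rows[0].result.market,
  ) as MarketFromDatabase;
  marketRefresher.updateMarket(market);
}
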
- const marketCreate: MarketCreateEventMessage = this.event as MarketCreateEventMessage; - const market: MarketFromDatabase | undefined = await MarketTable.findById( - marketCreate.marketId, - ); - if (market !== undefined) { - this.logAndThrowParseMessageError( - 'Market in MarketCreate already exists', - { marketCreate }, - ); - } - await this.runFuncWithTimingStatAndErrorLogging( - this.createMarket(marketCreate), - this.generateTimingStatsOptions('create_market'), - ); - return []; - } - - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_market_create_handler( @@ -84,14 +55,4 @@ export class MarketCreateHandler extends Handler { marketRefresher.updateMarket(market); return []; } - - private async createMarket(marketCreate: MarketCreateEventMessage): Promise { - await MarketTable.create({ - id: marketCreate.marketId, - pair: marketCreate.marketCreate.base!.pair, - exponent: marketCreate.marketCreate.exponent, - minPriceChangePpm: marketCreate.marketCreate.base!.minPriceChangePpm, - }, { txId: this.txId }); - await marketRefresher.updateMarkets({ txId: this.txId }); - } } diff --git a/indexer/services/ender/src/handlers/markets/market-modify-handler.ts b/indexer/services/ender/src/handlers/markets/market-modify-handler.ts index 147cd4b651..2230e9cfff 100644 --- a/indexer/services/ender/src/handlers/markets/market-modify-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-modify-handler.ts @@ -1,11 +1,10 @@ import { logger } from '@dydxprotocol-indexer/base'; import { - MarketFromDatabase, MarketUpdateObject, MarketTable, marketRefresher, storeHelpers, MarketModel, + MarketFromDatabase, marketRefresher, storeHelpers, MarketModel, } from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; import * as pg from 'pg'; -import config from '../../config'; import { ConsolidatedKafkaEvent, MarketModifyEventMessage } from '../../lib/types'; import { Handler } from '../handler'; @@ -23,30 +22,7 @@ export class MarketModifyHandler extends Handler { message: 'Received MarketEvent with MarketCreate.', event: this.event, }); - if (config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnexQueries(); - } - - private async handleViaKnexQueries(): Promise { - logger.info({ - at: 'MarketModifyHandler#handle', - message: 'Received MarketEvent with MarketModify.', - event: this.event, - }); - // MarketHandler already makes sure the event has 'marketModify' as the oneofKind. 
- const castedMarketModifyMessage: - MarketModifyEventMessage = this.event as MarketModifyEventMessage; - await this.runFuncWithTimingStatAndErrorLogging( - this.updateMarketFromEvent(castedMarketModifyMessage), - this.generateTimingStatsOptions('update_market'), - ); - return []; - } - - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_market_modify_handler( @@ -81,37 +57,4 @@ export class MarketModifyHandler extends Handler { marketRefresher.updateMarket(market); return []; } - - protected async updateMarketFromEvent( - castedMarketModifyMessage: MarketModifyEventMessage, - ): Promise { - - const market: MarketFromDatabase | undefined = await MarketTable.findById( - castedMarketModifyMessage.marketId, - ); - if (market === undefined) { - this.logAndThrowParseMessageError( - 'Market in MarketModify doesn\'t exist', - { castedMarketModifyMessage }, - ); - } - - const updateObject: MarketUpdateObject = { - id: castedMarketModifyMessage.marketId, - pair: castedMarketModifyMessage.marketModify.base!.pair!, - minPriceChangePpm: castedMarketModifyMessage.marketModify.base!.minPriceChangePpm!, - }; - - const updatedMarket: - MarketFromDatabase | undefined = await MarketTable - .update(updateObject, { txId: this.txId }); - if (updatedMarket === undefined) { - this.logAndThrowParseMessageError( - 'Failed to update market in markets table', - { castedMarketModifyMessage }, - ); - } - await marketRefresher.updateMarkets({ txId: this.txId }); - return updatedMarket as MarketFromDatabase; - } } diff --git a/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts b/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts index 4efdbfc6b7..71d7cc2b6a 100644 --- a/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts +++ b/indexer/services/ender/src/handlers/markets/market-price-update-handler.ts @@ -1,20 +1,14 @@ import { logger } from '@dydxprotocol-indexer/base'; import { MarketFromDatabase, - MarketUpdateObject, - MarketTable, - OraclePriceCreateObject, OraclePriceFromDatabase, OraclePriceModel, - OraclePriceTable, - protocolTranslations, MarketMessageContents, storeHelpers, MarketModel, marketRefresher, } from '@dydxprotocol-indexer/postgres'; import { MarketEventV1 } from '@dydxprotocol-indexer/v4-protos'; import * as pg from 'pg'; import { updatePriceCacheWithPrice } from '../../caches/price-cache'; -import config from '../../config'; import { generateOraclePriceContents } from '../../helpers/kafka-helper'; import { ConsolidatedKafkaEvent, @@ -22,11 +16,6 @@ import { } from '../../lib/types'; import { Handler } from '../handler'; -type OraclePriceWithTicker = { - oraclePrice: OraclePriceFromDatabase, - pair: string, -}; - export class MarketPriceUpdateHandler extends Handler { eventType: string = 'MarketEvent'; @@ -41,30 +30,7 @@ export class MarketPriceUpdateHandler extends Handler { message: 'Received MarketEvent with MarketPriceUpdate.', event: this.event, }); - if (config.USE_MARKET_MODIFY_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnexQueries(); - } - - private async handleViaKnexQueries(): Promise { - // MarketHandler already makes sure the event has 'priceUpdate' as the oneofKind. 
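
The Knex path removed here computed the human-readable price before writing the oracle price row. A sketch of that conversion, assuming protocolPriceToHuman simply scales the protocol integer by 10^exponent (the helper's body is not shown in this patch):

import Big from 'big.js';

// Assumed semantics of protocolTranslations.protocolPriceToHuman:
// humanPrice = priceWithExponent * 10^exponent, with a typically negative
// market exponent. The real helper's body is not part of this patch.
function protocolPriceToHumanSketch(priceWithExponent: string, exponent: number): string {
  return Big(priceWithExponent).times(Big(10).pow(exponent)).toFixed();
}

// Worked example with hypothetical values:
// protocolPriceToHumanSketch('2868819524', -9) === '2.868819524'
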
- const castedMarketPriceUpdateMessage: - MarketPriceUpdateEventMessage = this.event as MarketPriceUpdateEventMessage; - - const { oraclePrice, pair }: - OraclePriceWithTicker = await this.runFuncWithTimingStatAndErrorLogging( - this.createOraclePriceAndUpdateMarketFromEvent(castedMarketPriceUpdateMessage), - this.generateTimingStatsOptions('create_and_update_oracle_prices'), - ); - return [ - this.generateKafkaEvent( - oraclePrice, pair, - ), - ]; - } - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_market_price_update_handler( @@ -107,70 +73,6 @@ export class MarketPriceUpdateHandler extends Handler { ]; } - protected async updateMarketFromEvent( - castedMarketPriceUpdateMessage: MarketPriceUpdateEventMessage, - humanPrice: string, - ): Promise { - - const market: MarketFromDatabase | undefined = await MarketTable.findById( - castedMarketPriceUpdateMessage.marketId, - { txId: this.txId }, - ); - - if (market === undefined) { - this.logAndThrowParseMessageError( - 'Market in MarketPriceUpdateEventMessage doesn\'t exist', - { castedMarketModifyMessage: castedMarketPriceUpdateMessage }, - ); - } - - const updateObject: MarketUpdateObject = { - id: castedMarketPriceUpdateMessage.marketId, - oraclePrice: humanPrice, - }; - - const updatedMarket: - MarketFromDatabase | undefined = await MarketTable - .update(updateObject, { txId: this.txId }); - if (updatedMarket === undefined) { - this.logAndThrowParseMessageError( - 'Failed to update market in markets table', - { castedMarketModifyMessage: castedMarketPriceUpdateMessage }, - ); - } - return updatedMarket as MarketFromDatabase; - } - - protected async createOraclePriceAndUpdateMarketFromEvent( - castedMarketPriceUpdateMessage: MarketPriceUpdateEventMessage, - ): Promise<{oraclePrice: OraclePriceFromDatabase, pair: string}> { - const market: MarketFromDatabase | undefined = await MarketTable - .findById(castedMarketPriceUpdateMessage.marketId, { txId: this.txId }); - if (market === undefined) { - this.logAndThrowParseMessageError( - 'MarketPriceUpdateEvent contains a non-existent market id', - { castedMarketPriceUpdateMessage }, - ); - } - const humanPrice: string = protocolTranslations.protocolPriceToHuman( - castedMarketPriceUpdateMessage.priceUpdate.priceWithExponent.toString(), - market!.exponent, - ); - await this.updateMarketFromEvent(castedMarketPriceUpdateMessage, humanPrice); - const oraclePriceToCreate: OraclePriceCreateObject = { - marketId: castedMarketPriceUpdateMessage.marketId, - price: humanPrice, - effectiveAt: this.timestamp.toISO(), - effectiveAtHeight: this.block.height.toString(), - }; - const oraclePriceFromDatabase: OraclePriceFromDatabase = await OraclePriceTable.create( - oraclePriceToCreate, - { txId: this.txId }, - ); - updatePriceCacheWithPrice(oraclePriceFromDatabase); - return { oraclePrice: oraclePriceFromDatabase, pair: market!.pair }; - } - protected generateKafkaEvent( oraclePrice: OraclePriceFromDatabase, pair: string, diff --git a/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts b/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts index b3fefe9531..5bd5394d4a 100644 --- a/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/abstract-order-fill-handler.ts @@ -1,65 +1,33 @@ -import { logger } from '@dydxprotocol-indexer/base'; 
import { - AssetFromDatabase, - assetRefresher, - FillCreateObject, FillFromDatabase, - FillTable, FillType, fillTypeToTradeType, Liquidity, - OrderCreateObject, OrderFromDatabase, OrderSide, - OrderStatus, - OrderTable, - OrderType, PerpetualMarketFromDatabase, perpetualMarketRefresher, - PerpetualPositionColumns, - PerpetualPositionFromDatabase, - PerpetualPositionTable, - protocolTranslations, SubaccountMessageContents, - SubaccountTable, - TendermintEventTable, - TimeInForce, TradeMessageContents, UpdatedPerpetualPositionSubaccountKafkaObject, - USDC_ASSET_ID, } from '@dydxprotocol-indexer/postgres'; -import { CanceledOrderStatus } from '@dydxprotocol-indexer/redis'; -import { getOrderIdHash, ORDER_FLAG_LONG_TERM } from '@dydxprotocol-indexer/v4-proto-parser'; +import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser'; import { IndexerOrder, - IndexerOrder_Side, IndexerSubaccountId, - LiquidationOrderV1, IndexerOrderId, OffChainUpdateV1, OrderRemovalReason, OrderRemoveV1_OrderRemovalStatus, } from '@dydxprotocol-indexer/v4-protos'; -import Big from 'big.js'; import Long from 'long'; -import { DateTime } from 'luxon'; import { generateFillSubaccountMessage, generateOrderSubaccountMessage, generatePerpetualPositionsContents, } from '../../helpers/kafka-helper'; -import { - getPrice, - getSize, - getWeightedAverage, - indexerTendermintEventToTransactionIndex, - perpetualPositionAndOrderSideMatching, -} from '../../lib/helper'; import { ConsolidatedKafkaEvent, - OrderFillEventWithLiquidation, OrderFillEventWithOrder, - PriceFields, - SumFields, } from '../../lib/types'; import { Handler } from '../handler'; @@ -85,303 +53,6 @@ export abstract class AbstractOrderFillHandler extends Handler { : castedOrderFillEventMessage.order; } - protected createEventBase( - castedOrderFillEventMessage: OrderFillEventWithOrder, - liquidity: Liquidity, - ): OrderFillEventBase { - // event is validated before calling this method, so all fields on the order must exist - const order: IndexerOrder = this.liquidityToOrder(castedOrderFillEventMessage, liquidity)!; - return this.createEventBaseFromOrder( - order, - castedOrderFillEventMessage.makerOrder, - castedOrderFillEventMessage.fillAmount, - liquidity, - FillType.LIMIT, - liquidity === Liquidity.MAKER - ? castedOrderFillEventMessage.makerFee - : castedOrderFillEventMessage.takerFee, - ); - } - - protected createEventBaseFromOrder( - order: IndexerOrder, - makerOrder: IndexerOrder, - fillAmount: Long, - liquidity: Liquidity, - fillType: FillType, - fee: Long, - ): OrderFillEventBase { - return { - subaccountId: SubaccountTable.subaccountIdToUuid(order.orderId!.subaccountId!), - orderId: OrderTable.orderIdToUuid(order.orderId!), - fillType, - clobPairId: order.orderId!.clobPairId.toString(), - side: protocolTranslations.protocolOrderSideToOrderSide(order.side), - makerOrder, - fillAmount, - liquidity, - clientMetadata: order.clientMetadata.toString(), - fee, - }; - } - - protected createEventBaseFromLiquidation( - castedLiquidationFillEventMessage: OrderFillEventWithLiquidation, - liquidity: Liquidity, - ): OrderFillEventBase { - // event is validated before calling this method, so all fields on the order must exist - if (liquidity === Liquidity.TAKER) { - const order: LiquidationOrderV1 = castedLiquidationFillEventMessage.liquidationOrder; - return { - subaccountId: SubaccountTable.subaccountIdToUuid(order.liquidated!), - orderId: undefined, - fillType: FillType.LIQUIDATED, - clobPairId: order.clobPairId.toString(), - side: order.isBuy ? 
OrderSide.BUY : OrderSide.SELL, - makerOrder: castedLiquidationFillEventMessage.makerOrder, - fillAmount: castedLiquidationFillEventMessage.fillAmount, - liquidity, - fee: castedLiquidationFillEventMessage.takerFee, - }; - } else { - return this.createEventBaseFromOrder( - castedLiquidationFillEventMessage.makerOrder, - castedLiquidationFillEventMessage.makerOrder, - castedLiquidationFillEventMessage.fillAmount, - liquidity, - FillType.LIQUIDATION, - castedLiquidationFillEventMessage.makerFee, - ); - } - } - - protected createFillFromEvent( - perpetualMarket: PerpetualMarketFromDatabase, - event: OrderFillEventBase, - ): Promise { - // event is validated before calling this method, so all fields on the order must exist - const eventId: Buffer = TendermintEventTable.createEventId( - this.block.height.toString(), - indexerTendermintEventToTransactionIndex(this.indexerTendermintEvent), - this.indexerTendermintEvent.eventIndex, - ); - const size: string = protocolTranslations.quantumsToHumanFixedString( - event.fillAmount.toString(), - perpetualMarket.atomicResolution, - ); - const price: string = getPrice( - event.makerOrder, - perpetualMarket, - ); - const transactionIndex: number = indexerTendermintEventToTransactionIndex( - this.indexerTendermintEvent, - ); - const asset: AssetFromDatabase = assetRefresher.getAssetFromId(USDC_ASSET_ID); - const fee: string = protocolTranslations.quantumsToHumanFixedString( - event.fee.toString(), - asset.atomicResolution, - ); - - const fillToCreate: FillCreateObject = { - subaccountId: event.subaccountId, - side: event.side, - liquidity: event.liquidity, - type: event.fillType, - clobPairId: event.clobPairId, - orderId: event.orderId, - size, - price, - quoteAmount: Big(size).times(price).toFixed(), - eventId, - transactionHash: this.block.txHashes[transactionIndex], - createdAt: this.timestamp.toISO(), - createdAtHeight: this.block.height.toString(), - clientMetadata: event.clientMetadata, - fee, - }; - - return FillTable.create(fillToCreate, { txId: this.txId }); - } - - protected async getLatestPerpetualPosition( - perpetualMarket: PerpetualMarketFromDatabase, - event: OrderFillEventBase, - ): Promise { - const latestPerpetualPositions: - PerpetualPositionFromDatabase[] = await PerpetualPositionTable.findAll( - { - subaccountId: [event.subaccountId], - perpetualId: [perpetualMarket.id], - limit: 1, - }, - [], - { txId: this.txId }, - ); - - if (latestPerpetualPositions.length === 0) { - logger.error({ - at: 'orderFillHandler#getLatestPerpetualPosition', - message: 'Unable to find existing perpetual position.', - blockHeight: this.block.height, - clobPairId: event.clobPairId, - subaccountId: event.subaccountId, - orderId: event.orderId, - }); - throw new Error(`Unable to find existing perpetual position. 
blockHeight: ${this.block.height}, clobPairId: ${event.clobPairId}, subaccountId: ${event.subaccountId}, orderId: ${event.orderId}`); - } - - return latestPerpetualPositions[0]; - } - - protected async updatePerpetualPosition( - perpetualMarket: PerpetualMarketFromDatabase, - orderFillEventBase: OrderFillEventBase, - ): Promise { - const latestPerpetualPosition: - PerpetualPositionFromDatabase = await this.getLatestPerpetualPosition( - perpetualMarket, - orderFillEventBase, - ); - - // update (sumOpen and entryPrice) or (sumClose and exitPrice) - let sumField: SumFields; - let priceField: PriceFields; - if (perpetualPositionAndOrderSideMatching( - latestPerpetualPosition.side, orderFillEventBase.side, - )) { - sumField = PerpetualPositionColumns.sumOpen; - priceField = PerpetualPositionColumns.entryPrice; - } else { - sumField = PerpetualPositionColumns.sumClose; - priceField = PerpetualPositionColumns.exitPrice; - } - - const size: string = protocolTranslations.quantumsToHumanFixedString( - orderFillEventBase.fillAmount.toString(), - perpetualMarket.atomicResolution, - ); - const price: string = getPrice( - orderFillEventBase.makerOrder, - perpetualMarket, - ); - - const updatedPerpetualPosition: PerpetualPositionFromDatabase | undefined = await - PerpetualPositionTable.update( - { - id: latestPerpetualPosition.id, - [sumField]: Big(latestPerpetualPosition[sumField]).plus(size).toFixed(), - [priceField]: getWeightedAverage( - latestPerpetualPosition[priceField] ?? '0', - latestPerpetualPosition[sumField], - price, - size, - ), - }, - { txId: this.txId }, - ); - if (updatedPerpetualPosition === undefined) { - logger.error({ - at: 'orderFillHandler#handle', - message: 'Unable to update perpetual position', - latestPerpetualPositionId: latestPerpetualPosition.id, - orderFillEventBase, - }); - throw new Error(`Unable to update perpetual position with id: ${latestPerpetualPosition.id}`); - } - return updatedPerpetualPosition; - } - - /** - * Upsert the an order based on the event processed by the handler - * @param canceledOrderStatus - Status of the order in the CanceledOrderCache, always - * NOT_CANCELED for liquidation orders - */ - protected upsertOrderFromEvent( - perpetualMarket: PerpetualMarketFromDatabase, - order: IndexerOrder, - totalFilledFromProto: Long, - canceledOrderStatus: CanceledOrderStatus, - ): Promise { - const size: string = getSize(order, perpetualMarket); - const price: string = getPrice(order, perpetualMarket); - const totalFilled: string = protocolTranslations.quantumsToHumanFixedString( - totalFilledFromProto.toString(10), - perpetualMarket.atomicResolution, - ); - const timeInForce: TimeInForce = protocolTranslations.protocolOrderTIFToTIF(order.timeInForce); - const status: OrderStatus = this.getOrderStatus( - canceledOrderStatus, - size, - totalFilled, - order.orderId!.orderFlags, - timeInForce, - ); - - const orderToCreate: OrderCreateObject = { - subaccountId: SubaccountTable.subaccountIdToUuid(order.orderId!.subaccountId!), - clientId: order.orderId!.clientId.toString(), - clobPairId: order.orderId!.clobPairId.toString(), - side: order.side === IndexerOrder_Side.SIDE_BUY ? 
OrderSide.BUY : OrderSide.SELL, - size, - totalFilled, - price, - type: OrderType.LIMIT, // TODO: Add additional order types once we support - status, - timeInForce, - reduceOnly: order.reduceOnly, - orderFlags: order.orderId!.orderFlags.toString(), - goodTilBlock: protocolTranslations.getGoodTilBlock(order)?.toString(), - goodTilBlockTime: protocolTranslations.getGoodTilBlockTime(order), - clientMetadata: order.clientMetadata.toString(), - updatedAt: DateTime.fromJSDate(this.block.time!).toISO(), - updatedAtHeight: this.block.height.toString(), - }; - - return OrderTable.upsert(orderToCreate, { txId: this.txId }); - } - - /** - * The obvious case is if totalFilled >= size, then the order status should always be `FILLED`. - * The difficult case is if totalFilled < size after a fill, then we need to keep the following - * cases in mind: - * 1. Stateful Orders - All cancelations are on-chain events, so the will be `OPEN`. The - * CanceledOrdersCache does not store any stateful orders and we never send - * BEST_EFFORT_CANCELED notifications for stateful orders. - * 2. Short-term FOK - FOK orders can never be `OPEN`, since they don't rest on the orderbook, so - * totalFilled cannot be < size. By the end of the block, the order will be filled, so we mark - * it as `FILLED`. - * 3. Short-term IOC - Protocol guarantees that an IOC order will only ever be filled in a single - * block, so status should be `CANCELED`. - * 4. Short-term Limit & Post-only - If the order is in the CanceledOrdersCache, then it should be - * set to the corresponding CanceledOrderStatus, otherwise `OPEN`. - * @param isCanceled - if the order is in the CanceledOrderCache, always false for liquidiation - * orders - */ - protected getOrderStatus( - canceledOrderStatus: CanceledOrderStatus, - size: string, - totalFilled: string, - orderFlags: number, - timeInForce: TimeInForce, - ): OrderStatus { - if (Big(totalFilled).gte(size)) { - return OrderStatus.FILLED; - } else if (orderFlags === ORDER_FLAG_LONG_TERM) { // 1. Stateful Order - return OrderStatus.OPEN; - } else if (timeInForce === TimeInForce.FOK) { // 2. Short-term FOK - return OrderStatus.FILLED; - } else if (timeInForce === TimeInForce.IOC) { // 3. Short-term IOC - return OrderStatus.CANCELED; - } - // 4. 
Short-term Limit & Post-only - if (canceledOrderStatus === CanceledOrderStatus.BEST_EFFORT_CANCELED) { - return OrderStatus.BEST_EFFORT_CANCELED; - } else if (canceledOrderStatus === CanceledOrderStatus.CANCELED) { - return OrderStatus.CANCELED; - } - return OrderStatus.OPEN; - } - /** * @param order - order may be undefined if the fill is a liquidation and this is the TAKER */ diff --git a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts index 203dac7ee8..f8a9945c5d 100644 --- a/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/liquidation-handler.ts @@ -8,7 +8,6 @@ import { OrderTable, PerpetualMarketFromDatabase, PerpetualMarketModel, - perpetualMarketRefresher, PerpetualPositionFromDatabase, PerpetualPositionModel, storeHelpers, @@ -16,7 +15,7 @@ import { USDC_ASSET_ID, OrderStatus, FillType, } from '@dydxprotocol-indexer/postgres'; -import { CanceledOrderStatus, StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis'; +import { StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis'; import { isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser'; import { LiquidationOrderV1, IndexerOrderId, OrderFillEventV1, @@ -24,7 +23,6 @@ import { import Long from 'long'; import * as pg from 'pg'; -import config from '../../config'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE, SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../../constants'; import { convertPerpetualPosition } from '../../helpers/kafka-helper'; import { redisClient } from '../../helpers/redis/redis-controller'; @@ -37,7 +35,7 @@ import { ConsolidatedKafkaEvent, OrderFillEventWithLiquidation, } from '../../lib/types'; -import { AbstractOrderFillHandler, OrderFillEventBase } from './abstract-order-fill-handler'; +import { AbstractOrderFillHandler } from './abstract-order-fill-handler'; export class LiquidationHandler extends AbstractOrderFillHandler { eventType: string = 'OrderFillEvent'; @@ -86,7 +84,7 @@ export class LiquidationHandler extends AbstractOrderFillHandler { + public async internalHandle(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const transactionIndex: number = indexerTendermintEventToTransactionIndex( this.indexerTendermintEvent, @@ -183,107 +181,4 @@ export class LiquidationHandler extends AbstractOrderFillHandler { - const castedLiquidationFillEventMessage: - OrderFillEventWithLiquidation = orderFillWithLiquidityToOrderFillEventWithLiquidation( - this.event, - ); - const clobPairId: - string = castedLiquidationFillEventMessage.makerOrder.orderId!.clobPairId.toString(); - const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher - .getPerpetualMarketFromClobPairId(clobPairId); - if (perpetualMarket === undefined) { - logger.error({ - at: 'liquidationHandler#internalHandle', - message: 'Unable to find perpetual market', - clobPairId, - castedLiquidationFillEventMessage, - }); - throw new Error(`Unable to find perpetual market with clobPairId: ${clobPairId}`); - } - - const orderFillBaseEventBase: OrderFillEventBase = this.createEventBaseFromLiquidation( - castedLiquidationFillEventMessage, - this.event.liquidity, - ); - - // Must be done in this order, because fills refer to an order - // We do not create a taker order for liquidations. 
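
Since this patch deletes the only TypeScript statement of the fill-status rules documented above, they are worth restating: the SQL function is now their sole source of truth. A reference sketch mirroring the deleted getOrderStatus logic (a restatement for reviewers, not the SQL function's actual body):

import { OrderStatus, TimeInForce } from '@dydxprotocol-indexer/postgres';
import { CanceledOrderStatus } from '@dydxprotocol-indexer/redis';
import { ORDER_FLAG_LONG_TERM } from '@dydxprotocol-indexer/v4-proto-parser';
import Big from 'big.js';

// Restates the deleted logic: FILLED when fully filled; stateful orders stay
// OPEN (their cancelations are on-chain events); short-term FOK fills by end
// of block; short-term IOC cancels after its single block; short-term
// limit/post-only defer to the CanceledOrdersCache status.
function orderStatusSketch(
  canceledOrderStatus: CanceledOrderStatus,
  size: string,
  totalFilled: string,
  orderFlags: number,
  timeInForce: TimeInForce,
): OrderStatus {
  if (Big(totalFilled).gte(size)) {
    return OrderStatus.FILLED;
  }
  if (orderFlags === ORDER_FLAG_LONG_TERM) {
    return OrderStatus.OPEN;
  }
  if (timeInForce === TimeInForce.FOK) {
    return OrderStatus.FILLED;
  }
  if (timeInForce === TimeInForce.IOC) {
    return OrderStatus.CANCELED;
  }
  if (canceledOrderStatus === CanceledOrderStatus.BEST_EFFORT_CANCELED) {
    return OrderStatus.BEST_EFFORT_CANCELED;
  }
  if (canceledOrderStatus === CanceledOrderStatus.CANCELED) {
    return OrderStatus.CANCELED;
  }
  return OrderStatus.OPEN;
}
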
- let makerOrder: OrderFromDatabase | undefined; - if (this.event.liquidity === Liquidity.MAKER) { - makerOrder = await this.runFuncWithTimingStatAndErrorLogging( - this.upsertOrderFromEvent( - perpetualMarket, - castedLiquidationFillEventMessage.makerOrder, - this.getTotalFilled(castedLiquidationFillEventMessage), - CanceledOrderStatus.NOT_CANCELED, - ), this.generateTimingStatsOptions('upsert_maker_order')); - } - - const fill: FillFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.createFillFromEvent(perpetualMarket, orderFillBaseEventBase), - this.generateTimingStatsOptions('create_fill'), - ); - - const position: PerpetualPositionFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.updatePerpetualPosition(perpetualMarket, orderFillBaseEventBase), - this.generateTimingStatsOptions('update_perpetual_position'), - ); - - if (this.event.liquidity === Liquidity.MAKER) { - // Update the cache tracking the state-filled amount per order for use in vulcan - await StateFilledQuantumsCache.updateStateFilledQuantums( - makerOrder!.id, - this.getTotalFilled(castedLiquidationFillEventMessage).toString(), - redisClient, - ); - - const kafkaEvents: ConsolidatedKafkaEvent[] = [ - this.generateConsolidatedKafkaEvent( - castedLiquidationFillEventMessage.makerOrder.orderId!.subaccountId!, - makerOrder, - convertPerpetualPosition(position), - fill, - perpetualMarket, - ), - // Update vulcan with the total filled amount of the maker order. - this.getOrderUpdateKafkaEvent( - castedLiquidationFillEventMessage.makerOrder!.orderId!, - castedLiquidationFillEventMessage.totalFilledMaker, - ), - ]; - - // If the order is stateful and fully-filled, send an order removal to vulcan. We only do this - // for stateful orders as we are guaranteed a stateful order cannot be replaced until the next - // block. 
- if (makerOrder?.status === OrderStatus.FILLED && isStatefulOrder(makerOrder?.orderFlags)) { - kafkaEvents.push( - this.getOrderRemoveKafkaEvent(castedLiquidationFillEventMessage.makerOrder!.orderId!), - ); - } - return kafkaEvents; - } else { - return [ - this.generateConsolidatedKafkaEvent( - castedLiquidationFillEventMessage.liquidationOrder.liquidated!, - undefined, - convertPerpetualPosition(position), - fill, - perpetualMarket, - ), - this.generateTradeKafkaEventFromTakerOrderFill( - fill, - ), - ]; - } - } - - public async internalHandle(): Promise { - if (config.USE_LIQUIDATION_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnexQueries(); - } } diff --git a/indexer/services/ender/src/handlers/order-fills/order-handler.ts b/indexer/services/ender/src/handlers/order-fills/order-handler.ts index 7e2cb4ef94..dd465afd4f 100644 --- a/indexer/services/ender/src/handlers/order-fills/order-handler.ts +++ b/indexer/services/ender/src/handlers/order-fills/order-handler.ts @@ -8,7 +8,6 @@ import { OrderTable, PerpetualMarketFromDatabase, PerpetualMarketModel, - perpetualMarketRefresher, PerpetualPositionFromDatabase, PerpetualPositionModel, storeHelpers, @@ -24,7 +23,6 @@ import { import Long from 'long'; import * as pg from 'pg'; -import config from '../../config'; import { STATEFUL_ORDER_ORDER_FILL_EVENT_TYPE, SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../../constants'; import { convertPerpetualPosition } from '../../helpers/kafka-helper'; import { redisClient } from '../../helpers/redis/redis-controller'; @@ -32,7 +30,7 @@ import { orderFillWithLiquidityToOrderFillEventWithOrder } from '../../helpers/t import { indexerTendermintEventToTransactionIndex } from '../../lib/helper'; import { OrderFillWithLiquidity } from '../../lib/translated-types'; import { ConsolidatedKafkaEvent, OrderFillEventWithOrder } from '../../lib/types'; -import { AbstractOrderFillHandler, OrderFillEventBase } from './abstract-order-fill-handler'; +import { AbstractOrderFillHandler } from './abstract-order-fill-handler'; export class OrderHandler extends AbstractOrderFillHandler { eventType: string = 'OrderFillEvent'; @@ -60,7 +58,13 @@ export class OrderHandler extends AbstractOrderFillHandler { + protected getTotalFilled(castedOrderFillEventMessage: OrderFillEventWithOrder): Long { + return this.event.liquidity === Liquidity.TAKER + ? castedOrderFillEventMessage.totalFilledTaker + : castedOrderFillEventMessage.totalFilledMaker; + } + + public async internalHandle(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const transactionIndex: number = indexerTendermintEventToTransactionIndex( this.indexerTendermintEvent, @@ -158,116 +162,4 @@ export class OrderHandler extends AbstractOrderFillHandler { - // OrderFillHandler already makes sure the event has 'takerOrder' as the oneofKind. 
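
Both fill handlers preserve the post-fill bookkeeping described in the comments above. A condensed sketch of the two steps that survive the migration, using only calls shown in this patch (the free-function wiring is illustrative):

import { OrderFromDatabase, OrderStatus } from '@dydxprotocol-indexer/postgres';
import { StateFilledQuantumsCache } from '@dydxprotocol-indexer/redis';
import { isStatefulOrder } from '@dydxprotocol-indexer/v4-proto-parser';
import Long from 'long';

import { redisClient } from '../../helpers/redis/redis-controller';

// After a fill: record the state-filled amount for vulcan, then report whether
// a fully-filled stateful order should also be removed from vulcan (a stateful
// order cannot be replaced until the next block, so removal is safe).
async function postFillBookkeeping(
  order: OrderFromDatabase,
  totalFilled: Long,
): Promise<boolean> {
  await StateFilledQuantumsCache.updateStateFilledQuantums(
    order.id,
    totalFilled.toString(),
    redisClient,
  );
  return order.status === OrderStatus.FILLED && isStatefulOrder(order.orderFlags);
}
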
- const castedOrderFillEventMessage: - OrderFillEventWithOrder = orderFillWithLiquidityToOrderFillEventWithOrder(this.event); - const kafkaEvents: ConsolidatedKafkaEvent[] = []; - - const clobPairId: - string = castedOrderFillEventMessage.makerOrder.orderId!.clobPairId.toString(); - const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher - .getPerpetualMarketFromClobPairId(clobPairId); - if (perpetualMarket === undefined) { - logger.error({ - at: 'orderHandler#handleViaKnexQueries', - message: 'Unable to find perpetual market', - clobPairId, - castedOrderFillEventMessage, - }); - throw new Error(`Unable to find perpetual market with clobPairId: ${clobPairId}`); - } - - const orderFillBaseEvent: OrderFillEventBase = this.createEventBase( - castedOrderFillEventMessage, - this.event.liquidity, - ); - const orderProto: IndexerOrder = this.liquidityToOrder( - castedOrderFillEventMessage, - this.event.liquidity, - ); - const orderUuid = OrderTable.orderIdToUuid(orderProto.orderId!); - const canceledOrderStatus: - CanceledOrderStatus = await CanceledOrdersCache.getOrderCanceledStatus( - orderUuid, - redisClient, - ); - - // Must be done in this order, because fills refer to an order - const order: OrderFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.upsertOrderFromEvent( - perpetualMarket, - orderProto, - this.getTotalFilled(castedOrderFillEventMessage), - canceledOrderStatus, - ), - this.generateTimingStatsOptions('upsert_orders')); - - const fill: FillFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.createFillFromEvent(perpetualMarket, orderFillBaseEvent), - this.generateTimingStatsOptions('create_fill')); - - const position: PerpetualPositionFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.updatePerpetualPosition(perpetualMarket, orderFillBaseEvent), - this.generateTimingStatsOptions('update_perpetual_position')); - - let subaccountId: IndexerSubaccountId; - if (this.event.liquidity === Liquidity.MAKER) { - subaccountId = castedOrderFillEventMessage.makerOrder.orderId!.subaccountId!; - } else { - subaccountId = castedOrderFillEventMessage.order.orderId!.subaccountId!; - } - kafkaEvents.push( - this.generateConsolidatedKafkaEvent( - subaccountId, - order, - convertPerpetualPosition(position), - fill, - perpetualMarket, - ), - ); - - // Update vulcan with the total filled amount of the order. - kafkaEvents.push( - this.getOrderUpdateKafkaEvent( - orderProto.orderId!, - this.getTotalFilled(castedOrderFillEventMessage), - ), - ); - - // Update the cache tracking the state-filled amount per order for use in vulcan - await StateFilledQuantumsCache.updateStateFilledQuantums( - order.id, - this.getTotalFilled(castedOrderFillEventMessage).toString(), - redisClient, - ); - - // If the order is stateful and fully-filled, send an order removal to vulcan. We only do this - // for stateful orders as we are guaranteed a stateful order cannot be replaced until the next - // block. - if (order.status === OrderStatus.FILLED && isStatefulOrder(order.orderFlags)) { - kafkaEvents.push(this.getOrderRemoveKafkaEvent(orderProto.orderId!)); - } - - if (this.event.liquidity === Liquidity.TAKER) { - kafkaEvents.push(this.generateTradeKafkaEventFromTakerOrderFill(fill)); - return kafkaEvents; - } - - return kafkaEvents; - } - - protected getTotalFilled(castedOrderFillEventMessage: OrderFillEventWithOrder): Long { - return this.event.liquidity === Liquidity.TAKER - ? 
castedOrderFillEventMessage.totalFilledTaker - : castedOrderFillEventMessage.totalFilledMaker; - } - - public async internalHandle(): Promise { - if (config.USE_ORDER_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnexQueries(); - } } diff --git a/indexer/services/ender/src/handlers/perpetual-market-handler.ts b/indexer/services/ender/src/handlers/perpetual-market-handler.ts index 3d08f78638..f30afe4a66 100644 --- a/indexer/services/ender/src/handlers/perpetual-market-handler.ts +++ b/indexer/services/ender/src/handlers/perpetual-market-handler.ts @@ -1,15 +1,12 @@ import { logger } from '@dydxprotocol-indexer/base'; import { - PerpetualMarketCreateObject, PerpetualMarketFromDatabase, PerpetualMarketModel, perpetualMarketRefresher, - PerpetualMarketTable, - protocolTranslations, storeHelpers, + storeHelpers, } from '@dydxprotocol-indexer/postgres'; import { PerpetualMarketCreateEventV1 } from '@dydxprotocol-indexer/v4-protos'; import * as pg from 'pg'; -import config from '../config'; import { generatePerpetualMarketMessage } from '../helpers/kafka-helper'; import { ConsolidatedKafkaEvent } from '../lib/types'; import { Handler } from './handler'; @@ -23,14 +20,6 @@ export class PerpetualMarketCreationHandler extends Handler { - if (config.USE_PERPETUAL_MARKET_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnex(); - } - - // eslint-disable-next-line @typescript-eslint/require-await - private async handleViaSqlFunction(): Promise { const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes; const result: pg.QueryResult = await storeHelpers.rawQuery( `SELECT dydx_perpetual_market_handler( @@ -57,54 +46,4 @@ export class PerpetualMarketCreationHandler extends Handler { - const perpetualMarket: - PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.createPerpetualMarket(), - this.generateTimingStatsOptions('create_perpetual_market'), - ); - return [ - this.generateConsolidatedMarketKafkaEvent( - JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])), - ), - ]; - } - - private async createPerpetualMarket(): Promise { - const perpetualMarket: PerpetualMarketFromDatabase = await PerpetualMarketTable.create( - this.getPerpetualMarketCreateObject(this.event), - { txId: this.txId }, - ); - perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket); - return perpetualMarket; - } - - /** - * @description Given a PerpetualMarketCreateEventV1 event, generate the `PerpetualMarket` - * to create. 
- */ - private getPerpetualMarketCreateObject( - perpetualMarketCreateEventV1: PerpetualMarketCreateEventV1, - ): PerpetualMarketCreateObject { - return { - id: perpetualMarketCreateEventV1.id.toString(), - clobPairId: perpetualMarketCreateEventV1.clobPairId.toString(), - ticker: perpetualMarketCreateEventV1.ticker, - marketId: perpetualMarketCreateEventV1.marketId, - status: protocolTranslations.clobStatusToMarketStatus(perpetualMarketCreateEventV1.status), - lastPrice: '0', - priceChange24H: '0', - trades24H: 0, - volume24H: '0', - nextFundingRate: '0', - openInterest: '0', - quantumConversionExponent: perpetualMarketCreateEventV1.quantumConversionExponent, - atomicResolution: perpetualMarketCreateEventV1.atomicResolution, - subticksPerTick: perpetualMarketCreateEventV1.subticksPerTick, - stepBaseQuantums: Number(perpetualMarketCreateEventV1.stepBaseQuantums), - liquidityTierId: perpetualMarketCreateEventV1.liquidityTier, - }; - } } diff --git a/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts b/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts index ba5acdbde6..dadc145cd9 100644 --- a/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts +++ b/indexer/services/ender/src/handlers/stateful-order/conditional-order-placement-handler.ts @@ -1,22 +1,16 @@ -import { logger } from '@dydxprotocol-indexer/base'; import { OrderFromDatabase, - OrderStatus, OrderTable, PerpetualMarketFromDatabase, - perpetualMarketRefresher, - protocolTranslations, SubaccountFromDatabase, + SubaccountFromDatabase, SubaccountMessageContents, } from '@dydxprotocol-indexer/postgres'; import { - IndexerOrder, IndexerSubaccountId, StatefulOrderEventV1, } from '@dydxprotocol-indexer/v4-protos'; -import config from '../../config'; import { generateOrderSubaccountMessage } from '../../helpers/kafka-helper'; -import { getTriggerPrice } from '../../lib/helper'; import { ConsolidatedKafkaEvent } from '../../lib/types'; import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler'; @@ -33,13 +27,6 @@ export class ConditionalOrderPlacementHandler extends // eslint-disable-next-line @typescript-eslint/require-await public async internalHandle(): Promise { - if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) { - return this.handleViaSqlFunction(); - } - return this.handleViaKnex(); - } - - private async handleViaSqlFunction(): Promise { const result: [OrderFromDatabase, PerpetualMarketFromDatabase, @@ -50,36 +37,6 @@ export class ConditionalOrderPlacementHandler extends return this.createKafkaEvents(subaccountId, result[0], result[1]); } - private async handleViaKnex(): Promise { - const order: IndexerOrder = this.event.conditionalOrderPlacement!.order!; - const subaccountId: IndexerSubaccountId = order.orderId!.subaccountId!; - const clobPairId: string = order.orderId!.clobPairId.toString(); - const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher - .getPerpetualMarketFromClobPairId(clobPairId); - if (perpetualMarket === undefined) { - logger.error({ - at: 'conditionalOrderPlacementHandler#internalHandle', - message: 'Unable to find perpetual market', - clobPairId, - order, - }); - throw new Error(`Unable to find perpetual market with clobPairId: ${clobPairId}`); - } - - const conditionalOrder: OrderFromDatabase = await this.runFuncWithTimingStatAndErrorLogging( - this.upsertOrder( - perpetualMarket!, - order, - 
-        protocolTranslations.protocolConditionTypeToOrderType(order.conditionType),
-        OrderStatus.UNTRIGGERED,
-        getTriggerPrice(order, perpetualMarket),
-      ),
-      this.generateTimingStatsOptions('upsert_order'),
-    );
-
-    return this.createKafkaEvents(subaccountId, conditionalOrder, perpetualMarket);
-  }
-
   private createKafkaEvents(
     subaccountId: IndexerSubaccountId,
     conditionalOrder: OrderFromDatabase,
diff --git a/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts b/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts
index 5bc85e8885..99a3521a08 100644
--- a/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts
+++ b/indexer/services/ender/src/handlers/stateful-order/conditional-order-triggered-handler.ts
@@ -1,23 +1,18 @@
-import { logger } from '@dydxprotocol-indexer/base';
 import {
   OrderFromDatabase,
-  OrderStatus,
   OrderTable,
   PerpetualMarketFromDatabase,
   orderTranslations,
-  perpetualMarketRefresher,
   SubaccountFromDatabase,
 } from '@dydxprotocol-indexer/postgres';
 import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser';
 import {
   IndexerOrder,
-  IndexerOrderId,
   OffChainUpdateV1,
   OrderPlaceV1_OrderPlacementStatus,
   StatefulOrderEventV1,
 } from '@dydxprotocol-indexer/v4-protos';
 
-import config from '../../config';
 import { ConsolidatedKafkaEvent } from '../../lib/types';
 import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler';
 
@@ -34,13 +29,6 @@ export class ConditionalOrderTriggeredHandler extends
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const result: [OrderFromDatabase,
       PerpetualMarketFromDatabase,
@@ -51,34 +39,6 @@ export class ConditionalOrderTriggeredHandler extends
     return this.createKafkaEvents(order);
   }
 
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    const orderIdProto: IndexerOrderId = this.event.conditionalOrderTriggered!.triggeredOrderId!;
-    const orderFromDatabase: OrderFromDatabase = await this.runFuncWithTimingStatAndErrorLogging(
-      this.updateOrderStatus(orderIdProto, OrderStatus.OPEN),
-      this.generateTimingStatsOptions('trigger_order'),
-    );
-
-    const clobPairId: string = orderIdProto.clobPairId.toString();
-    const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher
-      .getPerpetualMarketFromClobPairId(clobPairId);
-    if (perpetualMarket === undefined) {
-      logger.error({
-        at: 'statefulOrderPlacementHandler#internalHandle',
-        message: 'Unable to find perpetual market',
-        clobPairId,
-        orderIdProto,
-      });
-      throw new Error(`Unable to find perpetual market with clobPairId: ${clobPairId}`);
-    }
-
-    // The conditional order was triggered, so send a message to vulcan to place the order
-    const order: IndexerOrder = await orderTranslations.convertToIndexerOrder(
-      orderFromDatabase,
-      perpetualMarket,
-    );
-    return this.createKafkaEvents(order);
-  }
-
   private createKafkaEvents(order: IndexerOrder): ConsolidatedKafkaEvent[] {
     const offChainUpdate: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({
       orderPlace: {
diff --git a/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts b/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts
index e941dee2f9..aea4b59152 100644
--- a/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts
+++ b/indexer/services/ender/src/handlers/stateful-order/stateful-order-placement-handler.ts
@@ -1,10 +1,5 @@
-import { logger } from '@dydxprotocol-indexer/base';
 import {
   OrderTable,
-  OrderType,
-  PerpetualMarketFromDatabase,
-  perpetualMarketRefresher,
-  OrderStatus,
 } from '@dydxprotocol-indexer/postgres';
 import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser';
 import {
@@ -14,7 +9,6 @@ import {
   StatefulOrderEventV1,
 } from '@dydxprotocol-indexer/v4-protos';
 
-import config from '../../config';
 import { ConsolidatedKafkaEvent } from '../../lib/types';
 import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler';
 
@@ -37,13 +31,6 @@ export class StatefulOrderPlacementHandler extends
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     await this.handleEventViaSqlFunction();
 
     let order: IndexerOrder;
@@ -56,35 +43,6 @@ export class StatefulOrderPlacementHandler extends
     return this.createKafkaEvents(order);
   }
 
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    let order: IndexerOrder;
-    // TODO(IND-334): Remove after deprecating StatefulOrderPlacementEvent
-    if (this.event.orderPlace !== undefined) {
-      order = this.event.orderPlace!.order!;
-    } else {
-      order = this.event.longTermOrderPlacement!.order!;
-    }
-    const clobPairId: string = order.orderId!.clobPairId.toString();
-    const perpetualMarket: PerpetualMarketFromDatabase | undefined = perpetualMarketRefresher
-      .getPerpetualMarketFromClobPairId(clobPairId);
-    if (perpetualMarket === undefined) {
-      logger.error({
-        at: 'statefulOrderPlacementHandler#internalHandle',
-        message: 'Unable to find perpetual market',
-        clobPairId,
-        order,
-      });
-      throw new Error(`Unable to find perpetual market with clobPairId: ${clobPairId}`);
-    }
-
-    await this.runFuncWithTimingStatAndErrorLogging(
-      this.upsertOrder(perpetualMarket!, order, OrderType.LIMIT, OrderStatus.OPEN),
-      this.generateTimingStatsOptions('upsert_order'),
-    );
-
-    return this.createKafkaEvents(order);
-  }
-
   private createKafkaEvents(order: IndexerOrder): ConsolidatedKafkaEvent[] {
     const kafakEvents: ConsolidatedKafkaEvent[] = [];
 
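[Editor's note, not part of the patch] For the stateful-order handlers, only persistence moved into SQL; the Kafka plumbing that forwards the placed order to vulcan is unchanged. Both the deleted Knex path and the surviving SQL path build the same off-chain update, roughly as sketched below; the OPENED placement status and the Buffer key type are assumptions here, and the exact values are in the surrounding hunks.

    import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser';
    import {
      IndexerOrder,
      OffChainUpdateV1,
      OrderPlaceV1_OrderPlacementStatus,
    } from '@dydxprotocol-indexer/v4-protos';

    // Sketch: wrap a placed order in an OffChainUpdateV1 keyed by its hashed order id.
    function buildOrderPlaceUpdate(
      order: IndexerOrder,
    ): { key: Buffer, update: OffChainUpdateV1 } {
      const update: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({
        orderPlace: {
          order,
          placementStatus: OrderPlaceV1_OrderPlacementStatus.ORDER_PLACEMENT_STATUS_OPENED,
        },
      });
      return { key: getOrderIdHash(order.orderId!), update };
    }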
diff --git a/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts b/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts
index a835e74cba..69a11bdded 100644
--- a/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts
+++ b/indexer/services/ender/src/handlers/stateful-order/stateful-order-removal-handler.ts
@@ -1,6 +1,5 @@
 import {
   OrderTable,
-  OrderStatus,
 } from '@dydxprotocol-indexer/postgres';
 import { getOrderIdHash } from '@dydxprotocol-indexer/v4-proto-parser';
 import {
@@ -10,7 +9,6 @@ import {
   StatefulOrderEventV1,
 } from '@dydxprotocol-indexer/v4-protos';
 
-import config from '../../config';
 import { ConsolidatedKafkaEvent } from '../../lib/types';
 import { AbstractStatefulOrderHandler } from '../abstract-stateful-order-handler';
 
@@ -26,28 +24,11 @@ export class StatefulOrderRemovalHandler extends
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_STATEFUL_ORDER_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const orderIdProto: IndexerOrderId = this.event.orderRemoval!.removedOrderId!;
     await this.handleEventViaSqlFunction();
     return this.createKafkaEvents(orderIdProto);
   }
 
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    const orderIdProto: IndexerOrderId = this.event.orderRemoval!.removedOrderId!;
-    await this.runFuncWithTimingStatAndErrorLogging(
-      this.updateOrderStatus(orderIdProto, OrderStatus.CANCELED),
-      this.generateTimingStatsOptions('cancel_order'),
-    );
-
-    return this.createKafkaEvents(orderIdProto);
-  }
-
   private createKafkaEvents(orderIdProto: IndexerOrderId): ConsolidatedKafkaEvent[] {
     const offChainUpdate: OffChainUpdateV1 = OffChainUpdateV1.fromPartial({
       orderRemove: {
diff --git a/indexer/services/ender/src/handlers/subaccount-update-handler.ts b/indexer/services/ender/src/handlers/subaccount-update-handler.ts
index 279c3546cf..cd070f0d94 100644
--- a/indexer/services/ender/src/handlers/subaccount-update-handler.ts
+++ b/indexer/services/ender/src/handlers/subaccount-update-handler.ts
@@ -1,9 +1,7 @@
 import { logger } from '@dydxprotocol-indexer/base';
 import {
-  AssetFromDatabase,
   AssetPositionFromDatabase,
   AssetPositionModel,
-  AssetPositionTable,
   assetRefresher,
   AssetsMap,
   MarketColumns,
@@ -12,32 +10,17 @@ import {
   MarketTable,
   perpetualMarketRefresher,
   PerpetualMarketsMap,
-  PerpetualPositionColumns,
-  PerpetualPositionCreateObject,
-  PerpetualPositionFromDatabase,
   PerpetualPositionModel,
-  PerpetualPositionsMap,
-  PerpetualPositionStatus,
-  PerpetualPositionSubaccountUpdateObject,
-  PerpetualPositionTable,
-  PositionSide,
-  protocolTranslations,
   storeHelpers,
   SubaccountMessageContents,
   SubaccountTable,
-  TendermintEventTable,
   UpdatedPerpetualPositionSubaccountKafkaObject,
 } from '@dydxprotocol-indexer/postgres';
-import { bytesToBigInt, getPositionIsLong } from '@dydxprotocol-indexer/v4-proto-parser';
-import { IndexerAssetPosition, IndexerPerpetualPosition } from '@dydxprotocol-indexer/v4-protos';
-import Big from 'big.js';
 import _ from 'lodash';
-import { DateTime } from 'luxon';
 import * as pg from 'pg';
 
-import config from '../config';
-import { QUOTE_CURRENCY_ATOMIC_RESOLUTION, SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../constants';
-import { addPositionsToContents, annotateWithPnl, convertPerpetualPosition } from '../helpers/kafka-helper';
+import { SUBACCOUNT_ORDER_FILL_EVENT_TYPE } from '../constants';
+import { addPositionsToContents, annotateWithPnl } from '../helpers/kafka-helper';
 import { indexerTendermintEventToTransactionIndex } from '../lib/helper';
 import { SubaccountUpdate } from '../lib/translated-types';
 import { ConsolidatedKafkaEvent } from '../lib/types';
@@ -57,13 +40,6 @@ export class SubaccountUpdateHandler extends Handler<SubaccountUpdate> {
   }
 
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_SUBACCOUNT_UPDATE_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnexQueries();
-  }
-
-  public async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const transactionIndex: number = indexerTendermintEventToTransactionIndex(
       this.indexerTendermintEvent,
     );
@@ -119,393 +95,6 @@ export class SubaccountUpdateHandler extends Handler<SubaccountUpdate> {
     ];
   }
 
-  public async handleViaKnexQueries(): Promise<ConsolidatedKafkaEvent[]> {
-    const subaccountId: string = SubaccountTable.subaccountIdToUuid(this.event.subaccountId!);
-
-    await this.runFuncWithTimingStatAndErrorLogging(
-      this.upsertSubaccount(),
-      this.generateTimingStatsOptions('upsert_subaccount'),
-    );
-
-    const perpetualMarketsMapping:
-    PerpetualMarketsMap = perpetualMarketRefresher.getPerpetualMarketsMap();
-
-    const perpetualPositionsMap: PerpetualPositionsMap = await
-    this.runFuncWithTimingStatAndErrorLogging(
-      this.getPerpetualPositionsMapFromEvent(subaccountId, perpetualMarketsMapping),
-      this.generateTimingStatsOptions('get_existing_perpetual_positions'),
-    );
-    const marketIdToMarket: MarketsMap = await this.runFuncWithTimingStatAndErrorLogging(
-      MarketTable.getMarketsMap(),
-      this.generateTimingStatsOptions('get_markets'),
-    );
-
-    const updateObjects: UpdatedPerpetualPositionSubaccountKafkaObject[] = await
-    this.runFuncWithTimingStatAndErrorLogging(
-      this.updatePerpetualPositionsFromEvent(
-        subaccountId,
-        perpetualMarketsMapping,
-        perpetualPositionsMap,
-        marketIdToMarket,
-      ),
-      this.generateTimingStatsOptions('update_perpetual_positions'),
-    );
-
-    const assetsMap: AssetsMap = assetRefresher.getAssetsMap();
-    const updatedAssetPositions: AssetPositionFromDatabase[] = await
-    this.runFuncWithTimingStatAndErrorLogging(
-      this.updateAssetPositionsFromEvent(subaccountId, assetsMap),
-      this.generateTimingStatsOptions('update_asset_positions'),
-    );
-
-    // TODO: Update perpetual_assets once protocol supports assets
-    return [
-      this.generateConsolidatedKafkaEvent(
-        updateObjects,
-        perpetualMarketsMapping,
-        updatedAssetPositions,
-        assetsMap,
-      ),
-    ];
-  }
-
-  /**
-   * If subaccount does not exist, creates one.
-   */
-  protected async upsertSubaccount(): Promise<void> {
-    await SubaccountTable.upsert({
-      address: this.event.subaccountId!.owner,
-      subaccountNumber: this.event.subaccountId!.number,
-      updatedAt: this.timestamp.toISO(),
-      updatedAtHeight: this.block.height.toString(),
-    }, { txId: this.txId });
-  }
-
-  /**
-   * Returns a list of asset ids that are missing from assets
-   *
-   * @param assetIds
-   * @param assets
-   * @protected
-   */
-  protected findMissingAssets(assetIds: string[], assets: AssetFromDatabase[]): string[] {
-    const presentAssets: string[] = assets.map((asset) => {
-      return asset.id;
-    });
-    return assetIds.filter((element) => !presentAssets.includes(element));
-  }
-
-  /**
-   * Updates all asset positions in postgres for each 'updatedAssetPosition'
-   * in the SubaccountUpdateEvent.
-   * @param subaccountId
-   * @param assetsMap
-   * @protected
-   */
-  protected async updateAssetPositionsFromEvent(
-    subaccountId: string,
-    assetsMap: AssetsMap,
-  ): Promise<AssetPositionFromDatabase[]> {
-    const assetPositions: AssetPositionFromDatabase[] = await Promise.all(
-      _.map(
-        this.event.updatedAssetPositions,
-        async (assetPositionProto: IndexerAssetPosition) => {
-          return this.upsertAssetPositionFromAssetPositionProto(
-            subaccountId,
-            assetPositionProto,
-            assetsMap[assetPositionProto.assetId.toString()],
-          );
-        },
-      ),
-    );
-    return assetPositions;
-  }
-
-  protected async getPerpetualPositionsMapFromEvent(
-    subaccountId: string,
-    perpetualMarketsMapping: PerpetualMarketsMap,
-  ): Promise<PerpetualPositionsMap> {
-    const perpetualPositions = await PerpetualPositionTable.findAll({
-      subaccountId: [subaccountId],
-      perpetualId: _.map(
-        this.event.updatedPerpetualPositions,
-        (perpetualPositionProto: IndexerPerpetualPosition) => {
-          return perpetualMarketsMapping[perpetualPositionProto.perpetualId].id;
-        },
-      ),
-      status: [PerpetualPositionStatus.OPEN],
-    }, [], { txId: this.txId });
-    return _.keyBy(perpetualPositions, PerpetualPositionColumns.perpetualId);
-  }
-
-  /**
-   * Updates all perpetual positions in postgres for each 'updatedPerpetualPosition'
-   * in the SubaccountUpdateEvent.
-   * @returns a list of UpdatedPerpetualPositionSubaccountKafkaObject
-   */
-  protected async updatePerpetualPositionsFromEvent(
-    subaccountId: string,
-    perpetualMarketsMapping: PerpetualMarketsMap,
-    perpetualPositionsMap: PerpetualPositionsMap,
-    marketsMap: MarketsMap,
-  ): Promise<UpdatedPerpetualPositionSubaccountKafkaObject[]> {
-    const positionUpdateObjects: UpdatedPerpetualPositionSubaccountKafkaObject[] = [];
-    const positionCreateObjects: PerpetualPositionCreateObject[] = [];
-
-    _.forEach(
-      this.event.updatedPerpetualPositions,
-      (perpetualPositionProto: IndexerPerpetualPosition) => {
-        const [
-          updateObject,
-          createObject,
-        ]: [
-          UpdatedPerpetualPositionSubaccountKafkaObject | null,
-          PerpetualPositionCreateObject | null,
-        ] = this.generateUpdateAndCreateFromPerpetualPositionProto(
-          subaccountId,
-          perpetualPositionProto,
-          perpetualMarketsMapping,
-          perpetualPositionsMap[perpetualPositionProto.perpetualId],
-          marketsMap,
-        );
-
-        if (updateObject !== null) {
-          positionUpdateObjects.push(updateObject);
-        }
-        if (createObject !== null) {
-          positionCreateObjects.push(createObject);
-        }
-      },
-    );
-
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    const [createdPositions, _ignore]: [PerpetualPositionFromDatabase[], void] = await Promise.all([
-      PerpetualPositionTable.bulkCreate(positionCreateObjects, { txId: this.txId }),
-      PerpetualPositionTable.bulkUpdateSubaccountFields(
-        _.map(
-          positionUpdateObjects,
-          this.getPerpetualPositionSubaccountUpdateObject,
-        ),
-        { txId: this.txId },
-      ),
-    ]);
-
-    const createdPositionsWithPnl:
-    UpdatedPerpetualPositionSubaccountKafkaObject[] = createdPositions
-      .map(
-        (position) => {
-          return annotateWithPnl(
-            convertPerpetualPosition(position),
-            perpetualMarketRefresher.getPerpetualMarketsMap(),
-            marketsMap,
-          );
-        });
-    // We can combine the two arrays because PerpetualPositionFromDatabase extends
-    // UpdatedPerpetualPositionSubaccountKafkaObject.
-    return _.flatten([positionUpdateObjects, createdPositionsWithPnl]);
-  }
-
-  /**
-   * Generates a PerpetualPositionSubaccountUpdateObject from an
-   * UpdatedPerpetualPositionSubaccountKafkaObject by picking the relevant fields.
-   */
-  protected getPerpetualPositionSubaccountUpdateObject(
-    kafkaObject: UpdatedPerpetualPositionSubaccountKafkaObject,
-  ): PerpetualPositionSubaccountUpdateObject {
-    return _.pick(kafkaObject, [
-      PerpetualPositionColumns.id,
-      PerpetualPositionColumns.closedAt,
-      PerpetualPositionColumns.closedAtHeight,
-      PerpetualPositionColumns.closeEventId,
-      PerpetualPositionColumns.lastEventId,
-      PerpetualPositionColumns.settledFunding,
-      PerpetualPositionColumns.status,
-      PerpetualPositionColumns.size,
-    ]);
-  }
-
-  /**
-   * Makes postgres updates for the asset position based on the 'assetPositionProto'.
-   * If there is an existing position, update the existing asset position.
-   * Else create a new asset position.
-   * @param subaccountId
-   * @param assetPositionProto
-   * @param assetFromDatabase
-   * @returns
-   */
-  // eslint-disable-next-line @typescript-eslint/require-await
-  protected async upsertAssetPositionFromAssetPositionProto(
-    subaccountId: string,
-    assetPositionProto: IndexerAssetPosition,
-    assetFromDatabase: AssetFromDatabase,
-  ): Promise<AssetPositionFromDatabase> {
-    return AssetPositionTable.upsert({
-      subaccountId,
-      assetId: assetFromDatabase.id,
-      // TODO(DEC-1597): deprecate `isLong` in asset and perpetual position tables.
-      isLong: getPositionIsLong(assetPositionProto),
-      // TODO(DEC-1597): use signed instead of absolute value after deprecating `isLong`.
-      size: protocolTranslations.serializedQuantumsToAbsHumanFixedString(
-        assetPositionProto.quantums,
-        assetFromDatabase.atomicResolution,
-      ),
-    }, { txId: this.txId });
-  }
-
-  /**
-   * Returns the SubaccountUpdate and Create objects for PerpetualPositions based on the
-   * 'perpetualPositionProto'.
-   * If there is no existing position, create the perpetual position.
-   * If the updated position has size 0, close the existing position.
-   * If the updated position has the same side as the existing position,
-   * update the existing position.
-   * If the updated position has the opposite side as the existing position,
-   * close the existing position, and create a new perpetual position.
-   */
-  protected generateUpdateAndCreateFromPerpetualPositionProto(
-    subaccountId: string,
-    perpetualPositionProto: IndexerPerpetualPosition,
-    perpetualMarketMap: PerpetualMarketsMap,
-    existingPosition: PerpetualPositionFromDatabase | undefined,
-    marketIdToMarket: MarketsMap,
-  ): [
-    UpdatedPerpetualPositionSubaccountKafkaObject | null,
-    PerpetualPositionCreateObject | null,
-  ] {
-    let updateObject: UpdatedPerpetualPositionSubaccountKafkaObject | null = null;
-    const size: string = protocolTranslations.serializedQuantumsToAbsHumanFixedString(
-      perpetualPositionProto.quantums,
-      perpetualMarketMap[perpetualPositionProto.perpetualId].atomicResolution,
-    );
-    const side: PositionSide = getPositionIsLong(perpetualPositionProto)
-      ? PositionSide.LONG
-      : PositionSide.SHORT;
-    const eventId: Buffer = TendermintEventTable.createEventId(
-      this.block.height.toString(),
-      indexerTendermintEventToTransactionIndex(this.indexerTendermintEvent),
-      this.indexerTendermintEvent.eventIndex,
-    );
-    const blockTime: string = DateTime.fromJSDate(this.block.time!).toISO();
-    const latestFundingQuantums: string = bytesToBigInt(
-      perpetualPositionProto.fundingPayment,
-    ).toString();
-    const latestSettledFunding: Big = protocolTranslations.quantumsToHuman(
-      latestFundingQuantums,
-      QUOTE_CURRENCY_ATOMIC_RESOLUTION,
-    ).times(-1);
-    let priorSettledFunding: Big = new Big(0);
-    if (existingPosition !== undefined) {
-      priorSettledFunding = new Big(existingPosition.settledFunding);
-    }
-    const settledFunding: string = priorSettledFunding.plus(latestSettledFunding).toString();
-
-    // Close existing position and do not create another if incoming size is 0.
-    if (existingPosition !== undefined && size === '0') {
-      return [
-        annotateWithPnl(
-          {
-            ...PerpetualPositionTable.closePositionUpdateObject(
-              existingPosition,
-              {
-                id: existingPosition.id,
-                closedAt: blockTime,
-                closedAtHeight: this.block.height.toString(),
-                closeEventId: eventId,
-                settledFunding,
-              },
-            ),
-            perpetualId: perpetualPositionProto.perpetualId.toString(),
-            maxSize: existingPosition.maxSize,
-            side: existingPosition.side,
-            entryPrice: existingPosition.entryPrice,
-            exitPrice: existingPosition.exitPrice,
-            sumOpen: existingPosition.sumOpen,
-            sumClose: existingPosition.sumClose,
-          },
-          perpetualMarketMap,
-          marketIdToMarket,
-        ),
-        null,
-      ];
-    }
-
-    if (existingPosition !== undefined) {
-      if (existingPosition.side === side) {
-        return [
-          annotateWithPnl(
-            {
-              id: existingPosition.id,
-              size,
-              status: PerpetualPositionStatus.OPEN,
-              lastEventId: eventId,
-              settledFunding,
-              perpetualId: existingPosition.perpetualId,
-              maxSize: Big(existingPosition.maxSize).gte(size) ? existingPosition.maxSize : size,
-              side: existingPosition.side,
-              entryPrice: existingPosition.entryPrice,
-              exitPrice: existingPosition.exitPrice,
-              sumOpen: existingPosition.sumOpen,
-              sumClose: existingPosition.sumClose,
-            },
-            perpetualMarketMap,
-            marketIdToMarket,
-          ),
-          null,
-        ];
-      } else {
-        // Close the existing position if the existing position is of the opposite side of the
-        // new position. New position will be created below.
-        updateObject = annotateWithPnl(
-          {
-            ...PerpetualPositionTable.closePositionUpdateObject(
-              existingPosition,
-              {
-                id: existingPosition.id,
-                closedAt: blockTime,
-                closedAtHeight: this.block.height.toString(),
-                closeEventId: eventId,
-                settledFunding,
-              },
-            ),
-            perpetualId: existingPosition.perpetualId,
-            maxSize: existingPosition.maxSize,
-            side: existingPosition.side,
-            entryPrice: existingPosition.entryPrice,
-            exitPrice: existingPosition.exitPrice,
-            sumOpen: existingPosition.sumOpen,
-            sumClose: existingPosition.sumClose,
-          },
-          perpetualMarketMap,
-          marketIdToMarket,
-        );
-      }
-    }
-
-    // Should create a new perpetual position if none exists, or if the previous position
-    // changed side and the new size is not 0.
-    // If the previous position changed sides, the last funding payment is applied to the
-    // settled funding of the closed position and the new position is created with 0 settled
-    // funding.
-    return [
-      updateObject,
-      {
-        subaccountId,
-        perpetualId: perpetualPositionProto.perpetualId.toString(),
-        side,
-        status: PerpetualPositionStatus.OPEN,
-        size,
-        maxSize: size,
-        createdAt: blockTime,
-        createdAtHeight: this.block.height.toString(),
-        openEventId: eventId,
-        lastEventId: eventId,
-        settledFunding: updateObject === null ? settledFunding : '0',
-      },
-    ];
-  }
-
   /**
    * Generate the ConsolidatedKafkaEvent generated from this event.
   * @param updatedPerpetualPositions
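[Editor's note, not part of the patch] The subaccount-update deletion above is the largest behavioral transfer in this patch: position create/close/flip logic and settled-funding accounting now live in dydx_subaccount_update_handler. The funding arithmetic the deleted Knex path performed is worth restating, since the SQL function must reproduce it exactly. A sketch, assuming the quote currency's atomic resolution of -6 (QUOTE_CURRENCY_ATOMIC_RESOLUTION in src/constants.ts):

    import Big from 'big.js';

    // Sketch of the deleted settled-funding arithmetic. A positive funding payment
    // reduces settled funding, hence the negation.
    function nextSettledFunding(
      priorSettledFunding: string, // settledFunding of the existing position, or '0'
      fundingPaymentQuantums: string, // bytesToBigInt(proto.fundingPayment).toString()
    ): string {
      const latest: Big = new Big(fundingPaymentQuantums)
        .times(new Big(10).pow(-6)) // quantums -> human units; exponent assumed
        .times(-1);
      return new Big(priorSettledFunding).plus(latest).toString();
    }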
diff --git a/indexer/services/ender/src/handlers/transfer-handler.ts b/indexer/services/ender/src/handlers/transfer-handler.ts
index b3b3118517..e9a82269d8 100644
--- a/indexer/services/ender/src/handlers/transfer-handler.ts
+++ b/indexer/services/ender/src/handlers/transfer-handler.ts
@@ -2,25 +2,17 @@ import { logger } from '@dydxprotocol-indexer/base';
 import {
   AssetFromDatabase,
   AssetModel,
-  assetRefresher,
-  protocolTranslations,
   storeHelpers,
   SubaccountMessageContents,
-  SubaccountTable,
-  TendermintEventTable,
-  TransferCreateObject,
   TransferFromDatabase,
   TransferModel,
-  TransferTable,
-  WalletTable,
 } from '@dydxprotocol-indexer/postgres';
 import { TransferEventV1 } from '@dydxprotocol-indexer/v4-protos';
 import * as pg from 'pg';
 
-import config from '../config';
 import { generateTransferContents } from '../helpers/kafka-helper';
 import { indexerTendermintEventToTransactionIndex } from '../lib/helper';
-import { ConsolidatedKafkaEvent, TransferEventType } from '../lib/types';
+import { ConsolidatedKafkaEvent } from '../lib/types';
 import { Handler } from './handler';
 
 export class TransferHandler extends Handler<TransferEventV1> {
@@ -33,13 +25,6 @@ export class TransferHandler extends Handler<TransferEventV1> {
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_TRANSFER_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const transactionIndex: number = indexerTendermintEventToTransactionIndex(
       this.indexerTendermintEvent,
     );
@@ -74,113 +59,6 @@ export class TransferHandler extends Handler<TransferEventV1> {
     );
   }
 
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    await this.runFuncWithTimingStatAndErrorLogging(
-      Promise.all([
-        this.upsertRecipientSubaccount(),
-        this.upsertWallets(),
-      ]),
-      this.generateTimingStatsOptions('upsert_recipient_subaccount_and_wallets'),
-    );
-
-    const asset: AssetFromDatabase = assetRefresher.getAssetFromId(
-      this.event.assetId.toString(),
-    );
-    const transfer: TransferFromDatabase = await this.runFuncWithTimingStatAndErrorLogging(
-      this.createTransferFromEvent(asset),
-      this.generateTimingStatsOptions('create_transfer_and_get_asset'),
-    );
-
-    return this.generateKafkaEvents(
-      transfer,
-      asset,
-    );
-  }
-
-  protected async createTransferFromEvent(asset: AssetFromDatabase): Promise<TransferFromDatabase> {
-    const eventId: Buffer = TendermintEventTable.createEventId(
-      this.block.height.toString(),
-      indexerTendermintEventToTransactionIndex(this.indexerTendermintEvent),
-      this.indexerTendermintEvent.eventIndex,
-    );
-    const senderWalletAddress: string | undefined = this.event.sender!.address;
-    const recipientWalletAddress: string | undefined = this.event.recipient!.address;
-    const senderSubaccountId: string | undefined = this.event.sender!.subaccountId
-      ? SubaccountTable.subaccountIdToUuid(this.event.sender!.subaccountId!)
-      : undefined;
-    const recipientSubaccountId: string | undefined = this.event.recipient!.subaccountId
-      ? SubaccountTable.subaccountIdToUuid(this.event.recipient!.subaccountId!)
-      : undefined;
-
-    const size: string = protocolTranslations.quantumsToHumanFixedString(
-      this.event.amount.toString(),
-      asset.atomicResolution,
-    );
-    const transactionIndex: number = indexerTendermintEventToTransactionIndex(
-      this.indexerTendermintEvent,
-    );
-
-    const transferToCreate: TransferCreateObject = {
-      senderSubaccountId,
-      recipientSubaccountId,
-      senderWalletAddress,
-      recipientWalletAddress,
-      assetId: this.event.assetId.toString(),
-      size,
-      eventId,
-      transactionHash: this.block.txHashes[transactionIndex],
-      createdAt: this.timestamp.toISO(),
-      createdAtHeight: this.block.height.toString(),
-    };
-
-    const transferFromDatabase: TransferFromDatabase = await TransferTable.create(
-      transferToCreate,
-      { txId: this.txId },
-    );
-
-    return transferFromDatabase;
-  }
-
-  protected async upsertRecipientSubaccount(): Promise<void> {
-    if (this.event!.recipient!.subaccountId) {
-      await SubaccountTable.upsert({
-        address: this.event!.recipient!.subaccountId!.owner,
-        subaccountNumber: this.event!.recipient!.subaccountId!.number,
-        updatedAt: this.timestamp.toISO(),
-        updatedAtHeight: this.block.height.toString(),
-      }, { txId: this.txId });
-    }
-  }
-
-  protected async upsertWallets(): Promise<void> {
-    const promises = [];
-    if (this.event!.sender!.address) {
-      promises.push(
-        WalletTable.upsert({
-          address: this.event!.sender!.address,
-        }, { txId: this.txId }),
-      );
-    }
-    if (this.event!.recipient!.address) {
-      promises.push(
-        WalletTable.upsert({
-          address: this.event!.recipient!.address,
-        }, { txId: this.txId }),
-      );
-    }
-    await Promise.all(promises);
-  }
-
-  protected getTransferType(): TransferEventType {
-    if (this.event!.sender!.address) {
-      return TransferEventType.DEPOSIT;
-    }
-    if (this.event!.recipient!.address) {
-      return TransferEventType.WITHDRAWAL;
-    }
-    return TransferEventType.TRANSFER;
-  }
-
   /** Generates a kafka websocket event for each subaccount involved in the transfer.
    *
    * If the transfer is between 2 subaccounts, 1 event for the sender subaccount and another
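[Editor's note, not part of the patch] getTransferType() is deleted here without a TypeScript replacement, so the classification it encoded is restated below for reference: a plain wallet address on the sender side marks a deposit, on the recipient side a withdrawal, and otherwise the event is a subaccount-to-subaccount transfer. The import path is illustrative.

    import { TransferEventV1 } from '@dydxprotocol-indexer/v4-protos';
    import { TransferEventType } from '../lib/types'; // path assumed, as in this service

    // Sketch of the deleted classification rule.
    function classifyTransfer(event: TransferEventV1): TransferEventType {
      if (event.sender!.address) {
        return TransferEventType.DEPOSIT;
      }
      if (event.recipient!.address) {
        return TransferEventType.WITHDRAWAL;
      }
      return TransferEventType.TRANSFER;
    }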
diff --git a/indexer/services/ender/src/handlers/update-clob-pair-handler.ts b/indexer/services/ender/src/handlers/update-clob-pair-handler.ts
index 90662029f0..04c642eaf9 100644
--- a/indexer/services/ender/src/handlers/update-clob-pair-handler.ts
+++ b/indexer/services/ender/src/handlers/update-clob-pair-handler.ts
@@ -1,18 +1,13 @@
-import assert from 'assert';
-
 import { logger } from '@dydxprotocol-indexer/base';
 import {
   PerpetualMarketFromDatabase,
   PerpetualMarketModel,
-  PerpetualMarketTable,
   perpetualMarketRefresher,
-  protocolTranslations, storeHelpers,
+  storeHelpers,
 } from '@dydxprotocol-indexer/postgres';
 import { UpdateClobPairEventV1 } from '@dydxprotocol-indexer/v4-protos';
 import * as pg from 'pg';
 
-import config from '../config';
 import { generatePerpetualMarketMessage } from '../helpers/kafka-helper';
 import { ConsolidatedKafkaEvent } from '../lib/types';
 import { Handler } from './handler';
@@ -26,13 +21,6 @@ export class UpdateClobPairHandler extends Handler<UpdateClobPairEventV1> {
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_UPDATE_CLOB_PAIR_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes;
     const result: pg.QueryResult = await storeHelpers.rawQuery(
       `SELECT dydx_update_clob_pair_handler(
@@ -60,45 +48,4 @@ export class UpdateClobPairHandler extends Handler<UpdateClobPairEventV1> {
       ),
     ];
   }
-
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    const perpetualMarket:
-    PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging(
-      this.updateClobPair(),
-      this.generateTimingStatsOptions('update_clob_pair'),
-    );
-    return [
-      this.generateConsolidatedMarketKafkaEvent(
-        JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])),
-      ),
-    ];
-  }
-
-  private async updateClobPair(): Promise<PerpetualMarketFromDatabase> {
-    // perpetualMarketRefresher.getPerpetualMarketFromClobPairId() cannot be undefined because it
-    // is validated by UpdateClobPairValidator.
-    const perpetualMarketId: string = perpetualMarketRefresher.getPerpetualMarketFromClobPairId(
-      this.event.clobPairId.toString(),
-    )!.id;
-    const perpetualMarket:
-    PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.update({
-      id: perpetualMarketId,
-      status: protocolTranslations.clobStatusToMarketStatus(this.event.status),
-      quantumConversionExponent: this.event.quantumConversionExponent,
-      subticksPerTick: this.event.subticksPerTick,
-      stepBaseQuantums: Number(this.event.stepBaseQuantums),
-    }, { txId: this.txId });
-
-    if (perpetualMarket === undefined) {
-      this.logAndThrowParseMessageError(
-        'Could not find perpetual market with corresponding clobPairId',
-        { event: this.event },
-      );
-      // This assert should never be hit because a ParseMessageError should be thrown above.
-      assert(false);
-    }
-
-    await perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket);
-    return perpetualMarket;
-  }
 }
diff --git a/indexer/services/ender/src/handlers/update-perpetual-handler.ts b/indexer/services/ender/src/handlers/update-perpetual-handler.ts
index d7787fff28..0fc54665c5 100644
--- a/indexer/services/ender/src/handlers/update-perpetual-handler.ts
+++ b/indexer/services/ender/src/handlers/update-perpetual-handler.ts
@@ -1,9 +1,6 @@
-import assert from 'assert';
-
 import { logger } from '@dydxprotocol-indexer/base';
 import {
   PerpetualMarketFromDatabase,
-  PerpetualMarketTable,
   perpetualMarketRefresher,
   storeHelpers,
   PerpetualMarketModel,
@@ -11,7 +8,6 @@ import {
 import { UpdatePerpetualEventV1 } from '@dydxprotocol-indexer/v4-protos';
 import * as pg from 'pg';
 
-import config from '../config';
 import { generatePerpetualMarketMessage } from '../helpers/kafka-helper';
 import { ConsolidatedKafkaEvent } from '../lib/types';
 import { Handler } from './handler';
@@ -25,13 +21,6 @@ export class UpdatePerpetualHandler extends Handler<UpdatePerpetualEventV1> {
   // eslint-disable-next-line @typescript-eslint/require-await
   public async internalHandle(): Promise<ConsolidatedKafkaEvent[]> {
-    if (config.USE_UPDATE_PERPETUAL_HANDLER_SQL_FUNCTION) {
-      return this.handleViaSqlFunction();
-    }
-    return this.handleViaKnex();
-  }
-
-  private async handleViaSqlFunction(): Promise<ConsolidatedKafkaEvent[]> {
     const eventDataBinary: Uint8Array = this.indexerTendermintEvent.dataBytes;
     const result: pg.QueryResult = await storeHelpers.rawQuery(
       `SELECT dydx_update_perpetual_handler(
@@ -59,40 +48,4 @@ export class UpdatePerpetualHandler extends Handler<UpdatePerpetualEventV1> {
       ),
     ];
   }
-
-  private async handleViaKnex(): Promise<ConsolidatedKafkaEvent[]> {
-    const perpetualMarket:
-    PerpetualMarketFromDatabase = await this.runFuncWithTimingStatAndErrorLogging(
-      this.updatePerpetual(),
-      this.generateTimingStatsOptions('update_perpetual'),
-    );
-    return [
-      this.generateConsolidatedMarketKafkaEvent(
-        JSON.stringify(generatePerpetualMarketMessage([perpetualMarket])),
-      ),
-    ];
-  }
-
-  private async updatePerpetual(): Promise<PerpetualMarketFromDatabase> {
-    const perpetualMarket:
-    PerpetualMarketFromDatabase | undefined = await PerpetualMarketTable.update({
-      id: this.event.id.toString(),
-      ticker: this.event.ticker,
-      marketId: this.event.marketId,
-      atomicResolution: this.event.atomicResolution,
-      liquidityTierId: this.event.liquidityTier,
-    }, { txId: this.txId });
-
-    if (perpetualMarket === undefined) {
-      this.logAndThrowParseMessageError(
-        'Could not find perpetual market with corresponding updatePerpetualEvent.id',
-        { event: this.event },
-      );
-      // This assert should never be hit because a ParseMessageError should be thrown above.
-      assert(false);
-    }
-
-    await perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket);
-    return perpetualMarket;
-  }
 }
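[Editor's note, not part of the patch] For both update handlers above, the TypeScript work left after the SQL call is rehydrating the returned row, refreshing the in-process perpetual-market cache, and emitting the market message. A condensed sketch of that common tail (the real code also logs and throws when the function reports a missing market; the result key is an assumption):

    import * as pg from 'pg';
    import {
      PerpetualMarketFromDatabase,
      PerpetualMarketModel,
      perpetualMarketRefresher,
    } from '@dydxprotocol-indexer/postgres';

    // Sketch: rebuild the row returned by the SQL function, then keep the cache
    // in sync so later events in the same block observe the update.
    async function applySqlHandlerResult(
      result: pg.QueryResult,
    ): Promise<PerpetualMarketFromDatabase> {
      const perpetualMarket: PerpetualMarketFromDatabase = PerpetualMarketModel.fromJson(
        result.rows[0].result.perpetual_market,
      ) as unknown as PerpetualMarketFromDatabase;
      await perpetualMarketRefresher.upsertPerpetualMarket(perpetualMarket);
      return perpetualMarket;
    }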