Iterate all chains when fetching all Safes (#2002)
Adds a new `IChainsRepository['getAllChains']` method that follows subsequent `next` pages and concatenates their `results`, and calls it from the `getAllSafesByOwner` method (a sketch of the pagination loop follows the list below):

- Add `IChainsRepository['getAllChains']` implementation
- Call `getAllChains` instead of `getChains` in `getAllSafesByOwner`
- Add appropriate test coverage
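
For orientation, here is a minimal, self-contained sketch of what such a pagination loop can look like, assuming pages are requested with a fixed `limit` and an `offset` that advances by the page size. The names `getAllChainsSketch`, `ChainLike`, `PageLike`, `pageSize` and `onLimitReached` are illustrative placeholders, not the actual `ChainsRepository` internals; only the stop conditions and the error message mirror the spec added in this commit.

```typescript
// Illustrative sketch only; placeholder types and names, not the real ChainsRepository.
type ChainLike = { chainId: string };
type PageLike<T> = { results: Array<T>; next: string | null };

async function getAllChainsSketch(
  getChains: (args: { limit: number; offset: number }) => Promise<PageLike<ChainLike>>,
  maxSequentialPages: number,
  pageSize: number,
  onLimitReached: (message: string) => void,
): Promise<Array<ChainLike>> {
  const chains: Array<ChainLike> = [];
  for (let page = 0; page < maxSequentialPages; page++) {
    // Offsets advance by the page size: 0, pageSize, 2 * pageSize, ...
    const { results, next } = await getChains({
      limit: pageSize,
      offset: page * pageSize,
    });
    chains.push(...results);
    if (next === null) {
      return chains; // Last page reached before hitting the sequential-page limit
    }
  }
  // Pages remain beyond the configured limit; the spec expects an error log in this case
  onLimitReached('More chains available despite request limit reached');
  return chains;
}
```

With the defaults introduced here (3 sequential pages of up to 40 chains each), a single call is capped at 120 chains.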
iamacook authored Oct 16, 2024
1 parent b706542 commit 36d6cfa
Showing 7 changed files with 468 additions and 9 deletions.
3 changes: 3 additions & 0 deletions src/config/entities/__tests__/configuration.ts
@@ -195,6 +195,9 @@ export default (): ReturnType<typeof configuration> => ({
  },
  safeConfig: {
    baseUri: faker.internet.url({ appendSlash: false }),
    chains: {
      maxSequentialPages: faker.number.int(),
    },
  },
  safeTransaction: {
    useVpcUrl: false,
5 changes: 5 additions & 0 deletions src/config/entities/configuration.ts
@@ -298,6 +298,11 @@ export default () => ({
  safeConfig: {
    baseUri:
      process.env.SAFE_CONFIG_BASE_URI || 'https://safe-config.safe.global/',
    chains: {
      maxSequentialPages: parseInt(
        process.env.SAFE_CONFIG_CHAINS_MAX_SEQUENTIAL_PAGES ?? `${3}`,
      ),
    },
  },
  safeTransaction: {
    useVpcUrl: process.env.USE_TX_SERVICE_VPC_URL?.toLowerCase() === 'true',
5 changes: 5 additions & 0 deletions src/domain/chains/chains.repository.interface.ts
@@ -18,6 +18,11 @@ export interface IChainsRepository {
   */
  getChains(limit?: number, offset?: number): Promise<Page<Chain>>;

  /**
   * Gets all the {@link Chain} available across pages
   */
  getAllChains(): Promise<Array<Chain>>;

  /**
   * Gets the {@link Chain} associated with {@link chainId}
   *
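The `getAllSafesByOwner` change mentioned in the second bullet of the commit message is not shown in this excerpt. Purely as an illustration of how a caller might consume `getAllChains`, here is a hedged sketch; `fetchSafesOnChain`, the owner type and the `Record` return shape are hypothetical stand-ins, not the real repository API.

```typescript
// Hypothetical caller sketch; not the actual getAllSafesByOwner implementation.
type OwnerAddress = `0x${string}`;

async function getAllSafesByOwnerSketch(
  chainsRepository: { getAllChains: () => Promise<Array<{ chainId: string }>> },
  fetchSafesOnChain: (chainId: string, owner: OwnerAddress) => Promise<Array<string>>,
  owner: OwnerAddress,
): Promise<Record<string, Array<string>>> {
  // Every chain across all Config Service pages is considered, not just the first page
  const chains = await chainsRepository.getAllChains();
  const entries = await Promise.all(
    chains.map(async (chain) => {
      const safes = await fetchSafesOnChain(chain.chainId, owner);
      return [chain.chainId, safes] as const;
    }),
  );
  return Object.fromEntries(entries);
}
```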
303 changes: 303 additions & 0 deletions src/domain/chains/chains.repository.spec.ts
@@ -0,0 +1,303 @@
import { faker } from '@faker-js/faker';
import { chunk } from 'lodash';
import { getAddress } from 'viem';
import { chainBuilder } from '@/domain/chains/entities/__tests__/chain.builder';
import {
  limitAndOffsetUrlFactory,
  pageBuilder,
} from '@/domain/entities/__tests__/page.builder';
import { ChainsRepository } from '@/domain/chains/chains.repository';
import type { IConfigurationService } from '@/config/configuration.service.interface';
import type { Chain } from '@/domain/chains/entities/chain.entity';
import type { IConfigApi } from '@/domain/interfaces/config-api.interface';
import type { ITransactionApiManager } from '@/domain/interfaces/transaction-api.manager.interface';
import type { Page } from '@/domain/entities/page.entity';
import type { ILoggingService } from '@/logging/logging.interface';

const mockLoggingService = {
  error: jest.fn(),
} as jest.MockedObjectDeep<ILoggingService>;
const mockConfigApi = {
  getChains: jest.fn(),
} as jest.MockedObjectDeep<IConfigApi>;
const mockTransactionApiManager =
  {} as jest.MockedObjectDeep<ITransactionApiManager>;
const mockConfigurationService = jest.mocked({
  getOrThrow: jest.fn(),
} as jest.MockedObjectDeep<IConfigurationService>);

/**
 * Note: all other methods of the repository are tested in situ.
 * Whilst `getAllChains` is to some extent as well, the following
 * tests are required to cover all edge cases.
 */
describe('ChainsRepository', () => {
  // According to the limits of the Config Service
  // @see https://github.com/safe-global/safe-config-service/blob/main/src/chains/views.py#L14-L16
  const OFFSET = 40;
  const MAX_LIMIT = 40;

  let target: ChainsRepository;
  const maxSequentialPages = 3;

  beforeEach(() => {
    jest.resetAllMocks();

    mockConfigurationService.getOrThrow.mockImplementation((key) => {
      if (key === 'safeConfig.chains.maxSequentialPages')
        return maxSequentialPages;
    });

    target = new ChainsRepository(
      mockLoggingService,
      mockConfigApi,
      mockTransactionApiManager,
      mockConfigurationService,
    );
  });

  it('should return all chains across pages below request limit', async () => {
    const url = faker.internet.url({ appendSlash: true });
    const chains = Array.from(
      {
        length: ChainsRepository.MAX_LIMIT * (maxSequentialPages - 1), // One page less than request limit
      },
      (_, i) => chainBuilder().with('chainId', i.toString()).build(),
    );
    const pages = chunk(chains, ChainsRepository.MAX_LIMIT).map(
      (results, i, arr) => {
        const pageOffset = (i + 1) * OFFSET;

        const previous = ((): string | null => {
          if (i === 0) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();
        const next = ((): string | null => {
          if (i === arr.length - 1) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();

        return pageBuilder<Chain>()
          .with('results', results)
          .with('count', chains.length)
          .with('previous', previous)
          .with('next', next)
          .build();
      },
    );
    mockConfigApi.getChains.mockImplementation(
      ({ offset }): Promise<Page<Chain>> => {
        if (offset === 0) {
          return Promise.resolve(pages[0]);
        }
        if (offset === OFFSET) {
          return Promise.resolve(pages[1]);
        }
        if (offset === OFFSET * 2) {
          return Promise.resolve(pages[2]);
        }
        return Promise.reject(new Error('Invalid offset'));
      },
    );

    const result = await target.getAllChains();

    expect(result).toStrictEqual(
      chains.map((chain) => {
        return {
          ...chain,
          ensRegistryAddress: getAddress(chain.ensRegistryAddress!),
        };
      }),
    );
    expect(mockConfigApi.getChains).toHaveBeenCalledTimes(2);
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(1, {
      limit: MAX_LIMIT,
      offset: 0,
    });
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(2, {
      limit: MAX_LIMIT,
      offset: OFFSET,
    });
  });

  it('should return all chains across pages up to request limit', async () => {
    const url = faker.internet.url({ appendSlash: true });
    const chains = Array.from(
      {
        length: ChainsRepository.MAX_LIMIT * maxSequentialPages, // Exactly request limit
      },
      (_, i) => chainBuilder().with('chainId', i.toString()).build(),
    );
    const pages = chunk(chains, ChainsRepository.MAX_LIMIT).map(
      (results, i, arr) => {
        const pageOffset = (i + 1) * OFFSET;

        const previous = ((): string | null => {
          if (i === 0) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();
        const next = ((): string | null => {
          if (i === arr.length - 1) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();

        return pageBuilder<Chain>()
          .with('results', results)
          .with('count', chains.length)
          .with('previous', previous)
          .with('next', next)
          .build();
      },
    );
    mockConfigApi.getChains.mockImplementation(
      ({ offset }): Promise<Page<Chain>> => {
        if (offset === 0) {
          return Promise.resolve(pages[0]);
        }
        if (offset === OFFSET) {
          return Promise.resolve(pages[1]);
        }
        if (offset === OFFSET * 2) {
          return Promise.resolve(pages[2]);
        }
        return Promise.reject(new Error('Invalid offset'));
      },
    );

    const result = await target.getAllChains();

    expect(result).toStrictEqual(
      chains.map((chain) => {
        return {
          ...chain,
          ensRegistryAddress: getAddress(chain.ensRegistryAddress!),
        };
      }),
    );
    expect(mockConfigApi.getChains).toHaveBeenCalledTimes(3);
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(1, {
      limit: MAX_LIMIT,
      offset: 0,
    });
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(2, {
      limit: MAX_LIMIT,
      offset: OFFSET,
    });
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(3, {
      limit: MAX_LIMIT,
      offset: OFFSET * 2,
    });
  });

  it('should return all chains across pages up to request limit and notify if there are more', async () => {
    const url = faker.internet.url({ appendSlash: true });
    const chains = Array.from(
      {
        length: ChainsRepository.MAX_LIMIT * (maxSequentialPages + 1), // One page more than request limit
      },
      (_, i) => chainBuilder().with('chainId', i.toString()).build(),
    );
    const pages = chunk(chains, ChainsRepository.MAX_LIMIT).map(
      (results, i, arr) => {
        const pageOffset = (i + 1) * OFFSET;

        const previous = ((): string | null => {
          if (i === 0) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();
        const next = ((): string | null => {
          if (i === arr.length - 1) {
            return null;
          }
          return limitAndOffsetUrlFactory(
            ChainsRepository.MAX_LIMIT,
            pageOffset,
            url,
          );
        })();

        return pageBuilder<Chain>()
          .with('results', results)
          .with('count', chains.length)
          .with('previous', previous)
          .with('next', next)
          .build();
      },
    );
    mockConfigApi.getChains.mockImplementation(
      ({ offset }): Promise<Page<Chain>> => {
        if (offset === 0) {
          return Promise.resolve(pages[0]);
        }
        if (offset === OFFSET) {
          return Promise.resolve(pages[1]);
        }
        if (offset === OFFSET * 2) {
          return Promise.resolve(pages[2]);
        }
        return Promise.reject(new Error('Invalid offset'));
      },
    );

    const result = await target.getAllChains();

    expect(result).toStrictEqual(
      chains
        .slice(0, ChainsRepository.MAX_LIMIT * maxSequentialPages)
        .map((chain) => {
          return {
            ...chain,
            ensRegistryAddress: getAddress(chain.ensRegistryAddress!),
          };
        }),
    );
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(1, {
      limit: MAX_LIMIT,
      offset: 0,
    });
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(2, {
      limit: MAX_LIMIT,
      offset: OFFSET,
    });
    expect(mockConfigApi.getChains).toHaveBeenNthCalledWith(3, {
      limit: MAX_LIMIT,
      offset: OFFSET * 2,
    });
    expect(mockLoggingService.error).toHaveBeenCalledTimes(1);
    expect(mockLoggingService.error).toHaveBeenNthCalledWith(
      1,
      'More chains available despite request limit reached',
    );
  });
});