Diffstat (limited to 'packages/pipeline/src/scripts')
-rw-r--r--  packages/pipeline/src/scripts/pull_competing_dex_trades.ts        |  4
-rw-r--r--  packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts    |  4
-rw-r--r--  packages/pipeline/src/scripts/pull_erc20_events.ts                | 66
-rw-r--r--  packages/pipeline/src/scripts/pull_exchange_events.ts (renamed from packages/pipeline/src/scripts/pull_missing_events.ts) | 28
-rw-r--r--  packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts    | 63
-rw-r--r--  packages/pipeline/src/scripts/pull_missing_blocks.ts              | 29
-rw-r--r--  packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts   | 58
-rw-r--r--  packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts |  4
-rw-r--r--  packages/pipeline/src/scripts/pull_trusted_tokens.ts              |  8
-rw-r--r--  packages/pipeline/src/scripts/update_relayer_info.ts              |  4
10 files changed, 239 insertions(+), 29 deletions(-)
diff --git a/packages/pipeline/src/scripts/pull_competing_dex_trades.ts b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts
index 4e4c12dd0..1478d5615 100644
--- a/packages/pipeline/src/scripts/pull_competing_dex_trades.ts
+++ b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts
@@ -15,11 +15,11 @@ let connection: Connection;
(async () => {
connection = await createConnection(ormConfig as ConnectionOptions);
- await getAndSaveTrades();
+ await getAndSaveTradesAsync();
process.exit(0);
})().catch(handleError);
-async function getAndSaveTrades(): Promise<void> {
+async function getAndSaveTradesAsync(): Promise<void> {
const apiKey = process.env.BLOXY_API_KEY;
if (apiKey === undefined) {
throw new Error('Missing required env var: BLOXY_API_KEY');
diff --git a/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts
index 7868e9c5a..4e00f258f 100644
--- a/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts
+++ b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts
@@ -25,7 +25,7 @@ let connection: Connection;
const markets = await ddexSource.getActiveMarketsAsync();
for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) {
await Promise.all(
- marketsChunk.map(async (market: DdexMarket) => getAndSaveMarketOrderbook(ddexSource, market)),
+ marketsChunk.map(async (market: DdexMarket) => getAndSaveMarketOrderbookAsync(ddexSource, market)),
);
await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY));
}
@@ -38,7 +38,7 @@ let connection: Connection;
* @param ddexSource Data source which can query Ddex API.
* @param market Object from Ddex API containing market data.
*/
-async function getAndSaveMarketOrderbook(ddexSource: DdexSource, market: DdexMarket): Promise<void> {
+async function getAndSaveMarketOrderbookAsync(ddexSource: DdexSource, market: DdexMarket): Promise<void> {
const orderBook = await ddexSource.getMarketOrderbookAsync(market.id);
const observedTimestamp = Date.now();
diff --git a/packages/pipeline/src/scripts/pull_erc20_events.ts b/packages/pipeline/src/scripts/pull_erc20_events.ts
new file mode 100644
index 000000000..0ad12c97a
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_erc20_events.ts
@@ -0,0 +1,66 @@
+// tslint:disable:no-console
+import { getContractAddressesForNetworkOrThrow } from '@0x/contract-addresses';
+import { web3Factory } from '@0x/dev-utils';
+import { Web3ProviderEngine } from '@0x/subproviders';
+import { Web3Wrapper } from '@0x/web3-wrapper';
+import 'reflect-metadata';
+import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm';
+
+import { ERC20EventsSource } from '../data_sources/contract-wrappers/erc20_events';
+import { ERC20ApprovalEvent } from '../entities';
+import * as ormConfig from '../ormconfig';
+import { parseERC20ApprovalEvents } from '../parsers/events';
+import { handleError, INFURA_ROOT_URL } from '../utils';
+
+const NETWORK_ID = 1;
+const START_BLOCK_OFFSET = 100; // Number of blocks before the last known block to consider when updating fill events.
+const BATCH_SAVE_SIZE = 1000; // Number of events to save at once.
+const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default endBlock.
+const WETH_START_BLOCK = 4719568; // Block number when the WETH contract was deployed.
+
+let connection: Connection;
+
+(async () => {
+ connection = await createConnection(ormConfig as ConnectionOptions);
+ const provider = web3Factory.getRpcProvider({
+ rpcUrl: INFURA_ROOT_URL,
+ });
+ const endBlock = await calculateEndBlockAsync(provider);
+ await getAndSaveWETHApprovalEventsAsync(provider, endBlock);
+ process.exit(0);
+})().catch(handleError);
+
+async function getAndSaveWETHApprovalEventsAsync(provider: Web3ProviderEngine, endBlock: number): Promise<void> {
+ console.log('Checking existing approval events...');
+ const repository = connection.getRepository(ERC20ApprovalEvent);
+ const startBlock = (await getStartBlockAsync(repository)) || WETH_START_BLOCK;
+
+ console.log(`Getting WETH approval events starting at ${startBlock}...`);
+ const wethTokenAddress = getContractAddressesForNetworkOrThrow(NETWORK_ID).etherToken;
+ const eventsSource = new ERC20EventsSource(provider, NETWORK_ID, wethTokenAddress);
+ const eventLogs = await eventsSource.getApprovalEventsAsync(startBlock, endBlock);
+
+ console.log(`Parsing ${eventLogs.length} WETH approval events...`);
+ const events = parseERC20ApprovalEvents(eventLogs);
+ console.log(`Retrieved and parsed ${events.length} total WETH approval events.`);
+ await repository.save(events, { chunk: Math.ceil(events.length / BATCH_SAVE_SIZE) });
+}
+
+async function calculateEndBlockAsync(provider: Web3ProviderEngine): Promise<number> {
+ const web3Wrapper = new Web3Wrapper(provider);
+ const currentBlock = await web3Wrapper.getBlockNumberAsync();
+ return currentBlock - BLOCK_FINALITY_THRESHOLD;
+}
+
+async function getStartBlockAsync(repository: Repository<ERC20ApprovalEvent>): Promise<number | null> {
+ const fillEventCount = await repository.count();
+ if (fillEventCount === 0) {
+ console.log(`No existing approval events found.`);
+ return null;
+ }
+ const queryResult = await connection.query(
+ `SELECT block_number FROM raw.erc20_approval_events ORDER BY block_number DESC LIMIT 1`,
+ );
+ const lastKnownBlock = queryResult[0].block_number;
+ return lastKnownBlock - START_BLOCK_OFFSET;
+}
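
The chunked repository.save call above is the pipeline's standard batching pattern. A minimal standalone sketch of that pattern follows, assuming an already-open TypeORM connection; the helper name is illustrative and not part of the diff.

import { Connection } from 'typeorm';

import { ERC20ApprovalEvent } from '../entities';

const BATCH_SAVE_SIZE = 1000; // Number of events to save at once.

// Illustrative helper: persists parsed approval events as several smaller
// INSERTs instead of one oversized statement.
async function saveApprovalEventsAsync(connection: Connection, events: ERC20ApprovalEvent[]): Promise<void> {
    if (events.length === 0) {
        return; // Avoids passing chunk = 0 to TypeORM.
    }
    const repository = connection.getRepository(ERC20ApprovalEvent);
    await repository.save(events, { chunk: Math.ceil(events.length / BATCH_SAVE_SIZE) });
}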
diff --git a/packages/pipeline/src/scripts/pull_missing_events.ts b/packages/pipeline/src/scripts/pull_exchange_events.ts
index 80abbb8b0..e98fc6629 100644
--- a/packages/pipeline/src/scripts/pull_missing_events.ts
+++ b/packages/pipeline/src/scripts/pull_exchange_events.ts
@@ -1,5 +1,7 @@
// tslint:disable:no-console
import { web3Factory } from '@0x/dev-utils';
+import { Web3ProviderEngine } from '@0x/subproviders';
+import { Web3Wrapper } from '@0x/web3-wrapper';
import R = require('ramda');
import 'reflect-metadata';
import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm';
@@ -12,6 +14,7 @@ import { EXCHANGE_START_BLOCK, handleError, INFURA_ROOT_URL } from '../utils';
const START_BLOCK_OFFSET = 100; // Number of blocks before the last known block to consider when updating fill events.
const BATCH_SAVE_SIZE = 1000; // Number of events to save at once.
+const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default endBlock.
let connection: Connection;
@@ -20,43 +23,44 @@ let connection: Connection;
const provider = web3Factory.getRpcProvider({
rpcUrl: INFURA_ROOT_URL,
});
+ const endBlock = await calculateEndBlockAsync(provider);
const eventsSource = new ExchangeEventsSource(provider, 1);
- await getFillEventsAsync(eventsSource);
- await getCancelEventsAsync(eventsSource);
- await getCancelUpToEventsAsync(eventsSource);
+ await getFillEventsAsync(eventsSource, endBlock);
+ await getCancelEventsAsync(eventsSource, endBlock);
+ await getCancelUpToEventsAsync(eventsSource, endBlock);
process.exit(0);
})().catch(handleError);
-async function getFillEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> {
+async function getFillEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> {
console.log('Checking existing fill events...');
const repository = connection.getRepository(ExchangeFillEvent);
const startBlock = await getStartBlockAsync(repository);
console.log(`Getting fill events starting at ${startBlock}...`);
- const eventLogs = await eventsSource.getFillEventsAsync(startBlock);
+ const eventLogs = await eventsSource.getFillEventsAsync(startBlock, endBlock);
console.log('Parsing fill events...');
const events = parseExchangeFillEvents(eventLogs);
console.log(`Retrieved and parsed ${events.length} total fill events.`);
await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events);
}
-async function getCancelEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> {
+async function getCancelEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> {
console.log('Checking existing cancel events...');
const repository = connection.getRepository(ExchangeCancelEvent);
const startBlock = await getStartBlockAsync(repository);
console.log(`Getting cancel events starting at ${startBlock}...`);
- const eventLogs = await eventsSource.getCancelEventsAsync(startBlock);
+ const eventLogs = await eventsSource.getCancelEventsAsync(startBlock, endBlock);
console.log('Parsing cancel events...');
const events = parseExchangeCancelEvents(eventLogs);
console.log(`Retrieved and parsed ${events.length} total cancel events.`);
await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events);
}
-async function getCancelUpToEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> {
+async function getCancelUpToEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> {
console.log('Checking existing CancelUpTo events...');
const repository = connection.getRepository(ExchangeCancelUpToEvent);
const startBlock = await getStartBlockAsync(repository);
console.log(`Getting CancelUpTo events starting at ${startBlock}...`);
- const eventLogs = await eventsSource.getCancelUpToEventsAsync(startBlock);
+ const eventLogs = await eventsSource.getCancelUpToEventsAsync(startBlock, endBlock);
console.log('Parsing CancelUpTo events...');
const events = parseExchangeCancelUpToEvents(eventLogs);
console.log(`Retrieved and parsed ${events.length} total CancelUpTo events.`);
@@ -134,3 +138,9 @@ async function saveIndividuallyWithFallbackAsync<T extends ExchangeEvent>(
}
}
}
+
+async function calculateEndBlockAsync(provider: Web3ProviderEngine): Promise<number> {
+ const web3Wrapper = new Web3Wrapper(provider);
+ const currentBlock = await web3Wrapper.getBlockNumberAsync();
+ return currentBlock - BLOCK_FINALITY_THRESHOLD;
+}
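
The START_BLOCK_OFFSET / BLOCK_FINALITY_THRESHOLD pair above defines the block window each run scans: back up 100 blocks from the last ingested block so a recent re-org gets re-read, and stop 10 blocks short of the chain head so only effectively final blocks are ingested. A minimal sketch of that window computation follows; the helper name and the lastKnownBlock argument are illustrative.

import { Web3ProviderEngine } from '@0x/subproviders';
import { Web3Wrapper } from '@0x/web3-wrapper';

const START_BLOCK_OFFSET = 100; // Blocks to re-scan before the last known block.
const BLOCK_FINALITY_THRESHOLD = 10; // Blocks behind the head treated as not yet final.

// Illustrative helper: derives the [startBlock, endBlock] range passed to the
// event sources above.
async function computeBlockWindowAsync(
    provider: Web3ProviderEngine,
    lastKnownBlock: number,
): Promise<{ startBlock: number; endBlock: number }> {
    const web3Wrapper = new Web3Wrapper(provider);
    const currentBlock = await web3Wrapper.getBlockNumberAsync();
    return {
        startBlock: lastKnownBlock - START_BLOCK_OFFSET,
        endBlock: currentBlock - BLOCK_FINALITY_THRESHOLD,
    };
}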
diff --git a/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts
new file mode 100644
index 000000000..490b17766
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts
@@ -0,0 +1,63 @@
+import { logUtils } from '@0x/utils';
+import * as R from 'ramda';
+import { Connection, ConnectionOptions, createConnection } from 'typeorm';
+
+import { IDEX_SOURCE, IdexSource } from '../data_sources/idex';
+import { TokenOrderbookSnapshot as TokenOrder } from '../entities';
+import * as ormConfig from '../ormconfig';
+import { parseIdexOrders } from '../parsers/idex_orders';
+import { handleError } from '../utils';
+
+// Number of orders to save at once.
+const BATCH_SAVE_SIZE = 1000;
+
+// Number of markets to retrieve orderbooks for at once.
+const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 100;
+
+// Delay between market orderbook requests.
+const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 2000;
+
+let connection: Connection;
+
+(async () => {
+ connection = await createConnection(ormConfig as ConnectionOptions);
+ const idexSource = new IdexSource();
+ logUtils.log('Getting all IDEX markets');
+ const markets = await idexSource.getMarketsAsync();
+ logUtils.log(`Got ${markets.length} markets.`);
+ for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) {
+ await Promise.all(
+ marketsChunk.map(async (marketId: string) => getAndSaveMarketOrderbookAsync(idexSource, marketId)),
+ );
+ await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY));
+ }
+ process.exit(0);
+})().catch(handleError);
+
+/**
+ * Retrieve orderbook from Idex API for a given market. Parse orders and insert
+ * them into our database.
+ * @param idexSource Data source which can query Idex API.
+ * @param marketId String representing market of interest, eg. 'ETH_TIC'.
+ */
+async function getAndSaveMarketOrderbookAsync(idexSource: IdexSource, marketId: string): Promise<void> {
+ logUtils.log(`${marketId}: Retrieving orderbook.`);
+ const orderBook = await idexSource.getMarketOrderbookAsync(marketId);
+ const observedTimestamp = Date.now();
+
+ if (!R.has('bids', orderBook) || !R.has('asks', orderBook)) {
+ logUtils.warn(`${marketId}: Orderbook faulty.`);
+ return;
+ }
+
+ logUtils.log(`${marketId}: Parsing orders.`);
+ const orders = parseIdexOrders(orderBook, observedTimestamp, IDEX_SOURCE);
+
+ if (orders.length > 0) {
+ logUtils.log(`${marketId}: Saving ${orders.length} orders.`);
+ const TokenOrderRepository = connection.getRepository(TokenOrder);
+ await TokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) });
+ } else {
+ logUtils.log(`${marketId}: 0 orders to save.`);
+ }
+}
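
The IDEX script (like the Ddex and Oasis scripts) throttles orderbook requests by fetching a fixed-size batch of markets concurrently and then sleeping before the next batch. A generic sketch of that throttling loop follows, with illustrative names standing in for the per-market fetch-and-save step.

import * as R from 'ramda';

const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 100; // Markets fetched concurrently per batch.
const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 2000; // Pause between batches.

// Illustrative helper: runs fetchAndSaveAsync for every market id, one batch
// at a time, so the upstream API is not flooded with simultaneous requests.
async function forEachMarketThrottledAsync(
    marketIds: string[],
    fetchAndSaveAsync: (marketId: string) => Promise<void>,
): Promise<void> {
    for (const chunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, marketIds)) {
        await Promise.all(chunk.map(async marketId => fetchAndSaveAsync(marketId)));
        await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY));
    }
}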
diff --git a/packages/pipeline/src/scripts/pull_missing_blocks.ts b/packages/pipeline/src/scripts/pull_missing_blocks.ts
index b7bd51f08..a5203824c 100644
--- a/packages/pipeline/src/scripts/pull_missing_blocks.ts
+++ b/packages/pipeline/src/scripts/pull_missing_blocks.ts
@@ -24,10 +24,10 @@ let connection: Connection;
(async () => {
connection = await createConnection(ormConfig as ConnectionOptions);
const provider = web3Factory.getRpcProvider({
- rpcUrl: `${INFURA_ROOT_URL}/${process.env.INFURA_API_KEY}`,
+ rpcUrl: INFURA_ROOT_URL,
});
const web3Source = new Web3Source(provider);
- await getAllMissingBlocks(web3Source);
+ await getAllMissingBlocksAsync(web3Source);
process.exit(0);
})().catch(handleError);
@@ -35,26 +35,39 @@ interface MissingBlocksResponse {
block_number: string;
}
-async function getAllMissingBlocks(web3Source: Web3Source): Promise<void> {
+async function getAllMissingBlocksAsync(web3Source: Web3Source): Promise<void> {
const blocksRepository = connection.getRepository(Block);
let fromBlock = EXCHANGE_START_BLOCK;
while (true) {
- const blockNumbers = await getMissingBlockNumbers(fromBlock);
+ const blockNumbers = await getMissingBlockNumbersAsync(fromBlock);
if (blockNumbers.length === 0) {
// There are no more missing blocks. We're done.
break;
}
- await getAndSaveBlocks(web3Source, blocksRepository, blockNumbers);
+ await getAndSaveBlocksAsync(web3Source, blocksRepository, blockNumbers);
fromBlock = Math.max(...blockNumbers) + 1;
}
const totalBlocks = await blocksRepository.count();
console.log(`Done saving blocks. There are now ${totalBlocks} total blocks.`);
}
-async function getMissingBlockNumbers(fromBlock: number): Promise<number[]> {
+async function getMissingBlockNumbersAsync(fromBlock: number): Promise<number[]> {
console.log(`Checking for missing blocks starting at ${fromBlock}...`);
+ // Note(albrow): The easiest way to get all the blocks we need is to
+ // consider all the events tables together in a single query. If this query
+ // gets too slow, we should consider re-architecting so that we can work on
+ // getting the blocks for one type of event at a time.
const response = (await connection.query(
- 'SELECT DISTINCT(block_number) FROM raw.exchange_fill_events WHERE block_number NOT IN (SELECT number FROM raw.blocks) AND block_number >= $1 ORDER BY block_number ASC LIMIT $2',
+ `WITH all_events AS (
+ SELECT block_number FROM raw.exchange_fill_events
+ UNION SELECT block_number FROM raw.exchange_cancel_events
+ UNION SELECT block_number FROM raw.exchange_cancel_up_to_events
+ UNION SELECT block_number FROM raw.erc20_approval_events
+ )
+ SELECT DISTINCT(block_number) FROM all_events
+ WHERE block_number NOT IN (SELECT number FROM raw.blocks)
+ AND block_number >= $1
+ ORDER BY block_number ASC LIMIT $2`,
[fromBlock, MAX_BLOCKS_PER_QUERY],
)) as MissingBlocksResponse[];
const blockNumberStrings = R.pluck('block_number', response);
@@ -63,7 +76,7 @@ async function getMissingBlockNumbers(fromBlock: number): Promise<number[]> {
return blockNumbers;
}
-async function getAndSaveBlocks(
+async function getAndSaveBlocksAsync(
web3Source: Web3Source,
blocksRepository: Repository<Block>,
blockNumbers: number[],
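
pull_missing_blocks.ts pages through missing block numbers until the UNION query above returns nothing, advancing the cursor past the highest block number handled in each pass. A minimal sketch of that paging loop follows, with illustrative callback names standing in for the database query and the block fetch/save step.

// Illustrative helper: repeatedly requests the next page of missing block
// numbers (ascending, capped at MAX_BLOCKS_PER_QUERY) and processes it, until
// no missing blocks remain.
async function pageMissingBlocksAsync(
    startBlock: number,
    getMissingBlockNumbersAsync: (fromBlock: number) => Promise<number[]>,
    getAndSaveBlocksAsync: (blockNumbers: number[]) => Promise<void>,
): Promise<void> {
    let fromBlock = startBlock;
    while (true) {
        const blockNumbers = await getMissingBlockNumbersAsync(fromBlock);
        if (blockNumbers.length === 0) {
            break; // No more missing blocks. We're done.
        }
        await getAndSaveBlocksAsync(blockNumbers);
        fromBlock = Math.max(...blockNumbers) + 1; // Skip past everything handled so far.
    }
}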
diff --git a/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts
new file mode 100644
index 000000000..c4dcf6c83
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts
@@ -0,0 +1,58 @@
+import { logUtils } from '@0x/utils';
+import * as R from 'ramda';
+import { Connection, ConnectionOptions, createConnection } from 'typeorm';
+
+import { OASIS_SOURCE, OasisMarket, OasisSource } from '../data_sources/oasis';
+import { TokenOrderbookSnapshot as TokenOrder } from '../entities';
+import * as ormConfig from '../ormconfig';
+import { parseOasisOrders } from '../parsers/oasis_orders';
+import { handleError } from '../utils';
+
+// Number of orders to save at once.
+const BATCH_SAVE_SIZE = 1000;
+
+// Number of markets to retrieve orderbooks for at once.
+const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 50;
+
+// Delay between market orderbook requests.
+const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 1000;
+
+let connection: Connection;
+
+(async () => {
+ connection = await createConnection(ormConfig as ConnectionOptions);
+ const oasisSource = new OasisSource();
+ logUtils.log('Getting all active Oasis markets');
+ const markets = await oasisSource.getActiveMarketsAsync();
+ logUtils.log(`Got ${markets.length} markets.`);
+ for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) {
+ await Promise.all(
+ marketsChunk.map(async (market: OasisMarket) => getAndSaveMarketOrderbookAsync(oasisSource, market)),
+ );
+ await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY));
+ }
+ process.exit(0);
+})().catch(handleError);
+
+/**
+ * Retrieve orderbook from Oasis API for a given market. Parse orders and insert
+ * them into our database.
+ * @param oasisSource Data source which can query Oasis API.
+ * @param market Object from the Oasis API identifying the market we want data for, e.g. 'REPAUG'.
+ */
+async function getAndSaveMarketOrderbookAsync(oasisSource: OasisSource, market: OasisMarket): Promise<void> {
+ logUtils.log(`${market.id}: Retrieving orderbook.`);
+ const orderBook = await oasisSource.getMarketOrderbookAsync(market.id);
+ const observedTimestamp = Date.now();
+
+ logUtils.log(`${market.id}: Parsing orders.`);
+ const orders = parseOasisOrders(orderBook, market, observedTimestamp, OASIS_SOURCE);
+
+ if (orders.length > 0) {
+ logUtils.log(`${market.id}: Saving ${orders.length} orders.`);
+ const TokenOrderRepository = connection.getRepository(TokenOrder);
+ await TokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) });
+ } else {
+ logUtils.log(`${market.id}: 0 orders to save.`);
+ }
+}
diff --git a/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts
index bae1fbede..34345f355 100644
--- a/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts
+++ b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts
@@ -29,7 +29,7 @@ let connection: Connection;
const tokenInfoResponse = await paradexSource.getTokenInfoAsync();
const extendedMarkets = addTokenAddresses(markets, tokenInfoResponse);
await Promise.all(
- extendedMarkets.map(async (market: ParadexMarket) => getAndSaveMarketOrderbook(paradexSource, market)),
+ extendedMarkets.map(async (market: ParadexMarket) => getAndSaveMarketOrderbookAsync(paradexSource, market)),
);
process.exit(0);
})().catch(handleError);
@@ -70,7 +70,7 @@ function addTokenAddresses(
* @param paradexSource Data source which can query the Paradex API.
* @param market Object from the Paradex API with information about the market in question.
*/
-async function getAndSaveMarketOrderbook(paradexSource: ParadexSource, market: ParadexMarket): Promise<void> {
+async function getAndSaveMarketOrderbookAsync(paradexSource: ParadexSource, market: ParadexMarket): Promise<void> {
const paradexOrderbookResponse = await paradexSource.getMarketOrderbookAsync(market.symbol);
const observedTimestamp = Date.now();
diff --git a/packages/pipeline/src/scripts/pull_trusted_tokens.ts b/packages/pipeline/src/scripts/pull_trusted_tokens.ts
index 1befc4437..5906deee6 100644
--- a/packages/pipeline/src/scripts/pull_trusted_tokens.ts
+++ b/packages/pipeline/src/scripts/pull_trusted_tokens.ts
@@ -16,12 +16,12 @@ let connection: Connection;
(async () => {
connection = await createConnection(ormConfig as ConnectionOptions);
- await getMetamaskTrustedTokens();
- await getZeroExTrustedTokens();
+ await getMetamaskTrustedTokensAsync();
+ await getZeroExTrustedTokensAsync();
process.exit(0);
})().catch(handleError);
-async function getMetamaskTrustedTokens(): Promise<void> {
+async function getMetamaskTrustedTokensAsync(): Promise<void> {
// tslint:disable-next-line:no-console
console.log('Getting latest metamask trusted tokens list ...');
const trustedTokensRepository = connection.getRepository(TokenMetadata);
@@ -37,7 +37,7 @@ async function getMetamaskTrustedTokens(): Promise<void> {
console.log('Done saving metamask trusted tokens.');
}
-async function getZeroExTrustedTokens(): Promise<void> {
+async function getZeroExTrustedTokensAsync(): Promise<void> {
// tslint:disable-next-line:no-console
console.log('Getting latest 0x trusted tokens list ...');
const trustedTokensRepository = connection.getRepository(TokenMetadata);
diff --git a/packages/pipeline/src/scripts/update_relayer_info.ts b/packages/pipeline/src/scripts/update_relayer_info.ts
index f8918728d..41d29b385 100644
--- a/packages/pipeline/src/scripts/update_relayer_info.ts
+++ b/packages/pipeline/src/scripts/update_relayer_info.ts
@@ -17,11 +17,11 @@ let connection: Connection;
(async () => {
connection = await createConnection(ormConfig as ConnectionOptions);
- await getRelayers();
+ await getRelayersAsync();
process.exit(0);
})().catch(handleError);
-async function getRelayers(): Promise<void> {
+async function getRelayersAsync(): Promise<void> {
console.log('Getting latest relayer info...');
const relayerRepository = connection.getRepository(Relayer);
const relayerSource = new RelayerRegistrySource(RELAYER_REGISTRY_URL);