Diffstat (limited to 'packages/pipeline/src')
30 files changed, 1005 insertions, 255 deletions
diff --git a/packages/pipeline/src/data_sources/contract-wrappers/erc20_events.ts b/packages/pipeline/src/data_sources/contract-wrappers/erc20_events.ts new file mode 100644 index 000000000..e0098122f --- /dev/null +++ b/packages/pipeline/src/data_sources/contract-wrappers/erc20_events.ts @@ -0,0 +1,45 @@ +import { + ContractWrappers, + ERC20TokenApprovalEventArgs, + ERC20TokenEvents, + ERC20TokenWrapper, +} from '@0x/contract-wrappers'; +import { Web3ProviderEngine } from '@0x/subproviders'; +import { LogWithDecodedArgs } from 'ethereum-types'; + +import { GetEventsFunc, getEventsWithPaginationAsync } from './utils'; + +export class ERC20EventsSource { + private readonly _erc20Wrapper: ERC20TokenWrapper; + private readonly _tokenAddress: string; + constructor(provider: Web3ProviderEngine, networkId: number, tokenAddress: string) { + const contractWrappers = new ContractWrappers(provider, { networkId }); + this._erc20Wrapper = contractWrappers.erc20Token; + this._tokenAddress = tokenAddress; + } + + public async getApprovalEventsAsync( + startBlock: number, + endBlock: number, + ): Promise<Array<LogWithDecodedArgs<ERC20TokenApprovalEventArgs>>> { + return getEventsWithPaginationAsync( + this._getApprovalEventsForRangeAsync.bind(this) as GetEventsFunc<ERC20TokenApprovalEventArgs>, + startBlock, + endBlock, + ); + } + + // Gets all approval events of for a specific sub-range. This getter + // function will be called during each step of pagination. + private async _getApprovalEventsForRangeAsync( + fromBlock: number, + toBlock: number, + ): Promise<Array<LogWithDecodedArgs<ERC20TokenApprovalEventArgs>>> { + return this._erc20Wrapper.getLogsAsync<ERC20TokenApprovalEventArgs>( + this._tokenAddress, + ERC20TokenEvents.Approval, + { fromBlock, toBlock }, + {}, + ); + } +} diff --git a/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts b/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts index 1717eb8b3..58691e2ab 100644 --- a/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts +++ b/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts @@ -8,78 +8,52 @@ import { ExchangeWrapper, } from '@0x/contract-wrappers'; import { Web3ProviderEngine } from '@0x/subproviders'; -import { Web3Wrapper } from '@0x/web3-wrapper'; import { LogWithDecodedArgs } from 'ethereum-types'; -import { EXCHANGE_START_BLOCK } from '../../utils'; - -const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default toBlock. -const NUM_BLOCKS_PER_QUERY = 20000; // Number of blocks to query for events at a time. 
+import { GetEventsFunc, getEventsWithPaginationAsync } from './utils'; export class ExchangeEventsSource { private readonly _exchangeWrapper: ExchangeWrapper; - private readonly _web3Wrapper: Web3Wrapper; constructor(provider: Web3ProviderEngine, networkId: number) { - this._web3Wrapper = new Web3Wrapper(provider); const contractWrappers = new ContractWrappers(provider, { networkId }); this._exchangeWrapper = contractWrappers.exchange; } public async getFillEventsAsync( - fromBlock?: number, - toBlock?: number, + startBlock: number, + endBlock: number, ): Promise<Array<LogWithDecodedArgs<ExchangeFillEventArgs>>> { - return this._getEventsAsync<ExchangeFillEventArgs>(ExchangeEvents.Fill, fromBlock, toBlock); + const getFillEventsForRangeAsync = this._makeGetterFuncForEventType<ExchangeFillEventArgs>(ExchangeEvents.Fill); + return getEventsWithPaginationAsync(getFillEventsForRangeAsync, startBlock, endBlock); } public async getCancelEventsAsync( - fromBlock?: number, - toBlock?: number, + startBlock: number, + endBlock: number, ): Promise<Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>> { - return this._getEventsAsync<ExchangeCancelEventArgs>(ExchangeEvents.Cancel, fromBlock, toBlock); + const getCancelEventsForRangeAsync = this._makeGetterFuncForEventType<ExchangeCancelEventArgs>( + ExchangeEvents.Cancel, + ); + return getEventsWithPaginationAsync(getCancelEventsForRangeAsync, startBlock, endBlock); } public async getCancelUpToEventsAsync( - fromBlock?: number, - toBlock?: number, + startBlock: number, + endBlock: number, ): Promise<Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>> { - return this._getEventsAsync<ExchangeCancelUpToEventArgs>(ExchangeEvents.CancelUpTo, fromBlock, toBlock); - } - - private async _getEventsAsync<ArgsType extends ExchangeEventArgs>( - eventName: ExchangeEvents, - fromBlock: number = EXCHANGE_START_BLOCK, - toBlock?: number, - ): Promise<Array<LogWithDecodedArgs<ArgsType>>> { - const calculatedToBlock = - toBlock === undefined - ? (await this._web3Wrapper.getBlockNumberAsync()) - BLOCK_FINALITY_THRESHOLD - : toBlock; - let events: Array<LogWithDecodedArgs<ArgsType>> = []; - for (let currFromBlock = fromBlock; currFromBlock <= calculatedToBlock; currFromBlock += NUM_BLOCKS_PER_QUERY) { - events = events.concat( - await this._getEventsForRangeAsync<ArgsType>( - eventName, - currFromBlock, - Math.min(currFromBlock + NUM_BLOCKS_PER_QUERY - 1, calculatedToBlock), - ), - ); - } - return events; + const getCancelUpToEventsForRangeAsync = this._makeGetterFuncForEventType<ExchangeCancelUpToEventArgs>( + ExchangeEvents.CancelUpTo, + ); + return getEventsWithPaginationAsync(getCancelUpToEventsForRangeAsync, startBlock, endBlock); } - private async _getEventsForRangeAsync<ArgsType extends ExchangeEventArgs>( - eventName: ExchangeEvents, - fromBlock: number, - toBlock: number, - ): Promise<Array<LogWithDecodedArgs<ArgsType>>> { - return this._exchangeWrapper.getLogsAsync<ArgsType>( - eventName, - { - fromBlock, - toBlock, - }, - {}, - ); + // Returns a getter function which gets all events of a specific type for a + // specific sub-range. This getter function will be called during each step + // of pagination. 
+ private _makeGetterFuncForEventType<ArgsType extends ExchangeEventArgs>( + eventType: ExchangeEvents, + ): GetEventsFunc<ArgsType> { + return async (fromBlock: number, toBlock: number) => + this._exchangeWrapper.getLogsAsync<ArgsType>(eventType, { fromBlock, toBlock }, {}); } } diff --git a/packages/pipeline/src/data_sources/contract-wrappers/utils.ts b/packages/pipeline/src/data_sources/contract-wrappers/utils.ts new file mode 100644 index 000000000..67660a37e --- /dev/null +++ b/packages/pipeline/src/data_sources/contract-wrappers/utils.ts @@ -0,0 +1,67 @@ +import { DecodedLogArgs, LogWithDecodedArgs } from 'ethereum-types'; + +const NUM_BLOCKS_PER_QUERY = 10000; // Number of blocks to query for events at a time. +const NUM_RETRIES = 3; // Number of retries if a request fails or times out. + +export type GetEventsFunc<ArgsType extends DecodedLogArgs> = ( + fromBlock: number, + toBlock: number, +) => Promise<Array<LogWithDecodedArgs<ArgsType>>>; + +/** + * Gets all events between the given startBlock and endBlock by querying for + * NUM_BLOCKS_PER_QUERY at a time. Accepts a getter function in order to + * maximize code re-use and allow for getting different types of events for + * different contracts. If the getter function throws with a retryable error, + * it will automatically be retried up to NUM_RETRIES times. + * @param getEventsAsync A getter function which will be called for each step during pagination. + * @param startBlock The start of the entire block range to get events for. + * @param endBlock The end of the entire block range to get events for. + */ +export async function getEventsWithPaginationAsync<ArgsType extends DecodedLogArgs>( + getEventsAsync: GetEventsFunc<ArgsType>, + startBlock: number, + endBlock: number, +): Promise<Array<LogWithDecodedArgs<ArgsType>>> { + let events: Array<LogWithDecodedArgs<ArgsType>> = []; + for (let fromBlock = startBlock; fromBlock <= endBlock; fromBlock += NUM_BLOCKS_PER_QUERY) { + const toBlock = Math.min(fromBlock + NUM_BLOCKS_PER_QUERY - 1, endBlock); + const eventsInRange = await _getEventsWithRetriesAsync(getEventsAsync, NUM_RETRIES, fromBlock, toBlock); + events = events.concat(eventsInRange); + } + return events; +} + +/** + * Calls the getEventsAsync function and retries up to numRetries times if it + * throws with an error that is considered retryable. + * @param getEventsAsync a function that will be called on each iteration. + * @param numRetries the maximum number times to retry getEventsAsync if it fails with a retryable error. + * @param fromBlock the start of the sub-range of blocks we are getting events for. + * @param toBlock the end of the sub-range of blocks we are getting events for. 
+ */ +export async function _getEventsWithRetriesAsync<ArgsType extends DecodedLogArgs>( + getEventsAsync: GetEventsFunc<ArgsType>, + numRetries: number, + fromBlock: number, + toBlock: number, +): Promise<Array<LogWithDecodedArgs<ArgsType>>> { + let eventsInRange: Array<LogWithDecodedArgs<ArgsType>> = []; + for (let i = 0; i <= numRetries; i++) { + try { + eventsInRange = await getEventsAsync(fromBlock, toBlock); + } catch (err) { + if (isErrorRetryable(err) && i < numRetries) { + continue; + } else { + throw err; + } + } + break; + } + return eventsInRange; +} + +function isErrorRetryable(err: Error): boolean { + return err.message.includes('network timeout'); +} diff --git a/packages/pipeline/src/data_sources/idex/index.ts b/packages/pipeline/src/data_sources/idex/index.ts new file mode 100644 index 000000000..c1e53c08d --- /dev/null +++ b/packages/pipeline/src/data_sources/idex/index.ts @@ -0,0 +1,82 @@ +import { fetchAsync } from '@0x/utils'; + +const IDEX_BASE_URL = 'https://api.idex.market'; +const MARKETS_URL = `${IDEX_BASE_URL}/returnTicker`; +const ORDERBOOK_URL = `${IDEX_BASE_URL}/returnOrderBook`; +const MAX_ORDER_COUNT = 100; // Maximum based on https://github.com/AuroraDAO/idex-api-docs#returnorderbook +export const IDEX_SOURCE = 'idex'; + +export interface IdexMarketsResponse { + [marketName: string]: IdexMarket; +} + +export interface IdexMarket { + last: string; + high: string; + low: string; + lowestAsk: string; + highestBid: string; + percentChange: string; + baseVolume: string; + quoteVolume: string; +} + +export interface IdexOrderbook { + asks: IdexOrder[]; + bids: IdexOrder[]; +} + +export interface IdexOrder { + price: string; + amount: string; + total: string; + orderHash: string; + params: IdexOrderParam; +} + +export interface IdexOrderParam { + tokenBuy: string; + buySymbol: string; + buyPrecision: number; + amountBuy: string; + tokenSell: string; + sellSymbol: string; + sellPrecision: number; + amountSell: string; + expires: number; + nonce: number; + user: string; +} + +// tslint:disable:prefer-function-over-method +// ^ Keep consistency with other sources and help logical organization +export class IdexSource { + /** + * Call Idex API to find out which markets they are maintaining orderbooks for. + */ + public async getMarketsAsync(): Promise<string[]> { + const params = { method: 'POST' }; + const resp = await fetchAsync(MARKETS_URL, params); + const respJson: IdexMarketsResponse = await resp.json(); + const markets: string[] = Object.keys(respJson); + return markets; + } + + /** + * Retrieve orderbook from Idex API for a given market. + * @param marketId String identifying the market we want data for. Eg. 
'REP_AUG' + */ + public async getMarketOrderbookAsync(marketId: string): Promise<IdexOrderbook> { + const params = { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + market: marketId, + count: MAX_ORDER_COUNT, + }), + }; + const resp = await fetchAsync(ORDERBOOK_URL, params); + const respJson: IdexOrderbook = await resp.json(); + return respJson; + } +} diff --git a/packages/pipeline/src/data_sources/oasis/index.ts b/packages/pipeline/src/data_sources/oasis/index.ts new file mode 100644 index 000000000..3b30e9dfd --- /dev/null +++ b/packages/pipeline/src/data_sources/oasis/index.ts @@ -0,0 +1,103 @@ +import { fetchAsync } from '@0x/utils'; + +const OASIS_BASE_URL = 'https://data.makerdao.com/v1'; +const OASIS_MARKET_QUERY = `query { + oasisMarkets(period: "1 week") { + nodes { + id + base + quote + buyVol + sellVol + price + high + low + } + } +}`; +const OASIS_ORDERBOOK_QUERY = `query ($market: String!) { + allOasisOrders(condition: { market: $market }) { + totalCount + nodes { + market + offerId + price + amount + act + } + } +}`; +export const OASIS_SOURCE = 'oasis'; + +export interface OasisMarket { + id: string; // market symbol e.g MKRDAI + base: string; // base symbol e.g MKR + quote: string; // quote symbol e.g DAI + buyVol: number; // total buy volume (base) + sellVol: number; // total sell volume (base) + price: number; // volume weighted price (quote) + high: number; // max sell price + low: number; // min buy price +} + +export interface OasisMarketResponse { + data: { + oasisMarkets: { + nodes: OasisMarket[]; + }; + }; +} + +export interface OasisOrder { + offerId: number; // Offer Id + market: string; // Market symbol (base/quote) + price: string; // Offer price (quote) + amount: string; // Offer amount (base) + act: string; // Action (ask|bid) +} + +export interface OasisOrderbookResponse { + data: { + allOasisOrders: { + totalCount: number; + nodes: OasisOrder[]; + }; + }; +} + +// tslint:disable:prefer-function-over-method +// ^ Keep consistency with other sources and help logical organization +export class OasisSource { + /** + * Call Ddex API to find out which markets they are maintaining orderbooks for. + */ + public async getActiveMarketsAsync(): Promise<OasisMarket[]> { + const params = { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: OASIS_MARKET_QUERY }), + }; + const resp = await fetchAsync(OASIS_BASE_URL, params); + const respJson: OasisMarketResponse = await resp.json(); + const markets = respJson.data.oasisMarkets.nodes; + return markets; + } + + /** + * Retrieve orderbook from Oasis API for a given market. + * @param marketId String identifying the market we want data for. Eg. 'REPAUG'. 
+ */ + public async getMarketOrderbookAsync(marketId: string): Promise<OasisOrder[]> { + const input = { + market: marketId, + }; + const params = { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: OASIS_ORDERBOOK_QUERY, variables: input }), + }; + const resp = await fetchAsync(OASIS_BASE_URL, params); + const respJson: OasisOrderbookResponse = await resp.json(); + return respJson.data.allOasisOrders.nodes; + } +} diff --git a/packages/pipeline/src/entities/erc20_approval_event.ts b/packages/pipeline/src/entities/erc20_approval_event.ts new file mode 100644 index 000000000..69cdfcb0b --- /dev/null +++ b/packages/pipeline/src/entities/erc20_approval_event.ts @@ -0,0 +1,26 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'erc20_approval_events', schema: 'raw' }) +export class ERC20ApprovalEvent { + @PrimaryColumn({ name: 'token_address' }) + public tokenAddress!: string; + @PrimaryColumn({ name: 'log_index' }) + public logIndex!: number; + @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer }) + public blockNumber!: number; + + @Column({ name: 'raw_data' }) + public rawData!: string; + + @Column({ name: 'transaction_hash' }) + public transactionHash!: string; + @Column({ name: 'owner_address' }) + public ownerAddress!: string; + @Column({ name: 'spender_address' }) + public spenderAddress!: string; + @Column({ name: 'amount', type: 'numeric', transformer: bigNumberTransformer }) + public amount!: BigNumber; +} diff --git a/packages/pipeline/src/entities/index.ts b/packages/pipeline/src/entities/index.ts index db0814e38..cc3de78bb 100644 --- a/packages/pipeline/src/entities/index.ts +++ b/packages/pipeline/src/entities/index.ts @@ -14,5 +14,6 @@ export { SraOrdersObservedTimeStamp, createObservedTimestampForOrder } from './s export { TokenMetadata } from './token_metadata'; export { TokenOrderbookSnapshot } from './token_order'; export { Transaction } from './transaction'; +export { ERC20ApprovalEvent } from './erc20_approval_event'; export type ExchangeEvent = ExchangeFillEvent | ExchangeCancelEvent | ExchangeCancelUpToEvent; diff --git a/packages/pipeline/src/entities/token_order.ts b/packages/pipeline/src/entities/token_order.ts index 557705767..4b8f0abc3 100644 --- a/packages/pipeline/src/entities/token_order.ts +++ b/packages/pipeline/src/entities/token_order.ts @@ -10,20 +10,20 @@ export class TokenOrderbookSnapshot { public observedTimestamp!: number; @PrimaryColumn({ name: 'source' }) public source!: string; - @Column({ name: 'order_type' }) + @PrimaryColumn({ name: 'order_type' }) public orderType!: OrderType; @PrimaryColumn({ name: 'price', type: 'numeric', transformer: bigNumberTransformer }) public price!: BigNumber; @PrimaryColumn({ name: 'base_asset_symbol' }) public baseAssetSymbol!: string; - @Column({ name: 'base_asset_address' }) - public baseAssetAddress!: string; + @Column({ nullable: true, type: String, name: 'base_asset_address' }) + public baseAssetAddress!: string | null; @Column({ name: 'base_volume', type: 'numeric', transformer: bigNumberTransformer }) public baseVolume!: BigNumber; @PrimaryColumn({ name: 'quote_asset_symbol' }) public quoteAssetSymbol!: string; - @Column({ name: 'quote_asset_address' }) - public quoteAssetAddress!: string; + @Column({ nullable: true, type: String, name: 'quote_asset_address' }) + public quoteAssetAddress!: 
string | null; @Column({ name: 'quote_volume', type: 'numeric', transformer: bigNumberTransformer }) public quoteVolume!: BigNumber; } diff --git a/packages/pipeline/src/ormconfig.ts b/packages/pipeline/src/ormconfig.ts index 9f7815b4e..fe11d81d5 100644 --- a/packages/pipeline/src/ormconfig.ts +++ b/packages/pipeline/src/ormconfig.ts @@ -3,6 +3,7 @@ import { ConnectionOptions } from 'typeorm'; import { Block, DexTrade, + ERC20ApprovalEvent, ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent, @@ -21,6 +22,7 @@ const entities = [ ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent, + ERC20ApprovalEvent, OHLCVExternal, Relayer, SraOrder, diff --git a/packages/pipeline/src/parsers/ddex_orders/index.ts b/packages/pipeline/src/parsers/ddex_orders/index.ts index 81132e8f0..d7b97efbe 100644 --- a/packages/pipeline/src/parsers/ddex_orders/index.ts +++ b/packages/pipeline/src/parsers/ddex_orders/index.ts @@ -1,7 +1,8 @@ import { BigNumber } from '@0x/utils'; -import * as R from 'ramda'; -import { DdexMarket, DdexOrder, DdexOrderbook } from '../../data_sources/ddex'; +import { aggregateOrders } from '../utils'; + +import { DdexMarket, DdexOrderbook } from '../../data_sources/ddex'; import { TokenOrderbookSnapshot as TokenOrder } from '../../entities'; import { OrderType } from '../../types'; @@ -28,19 +29,6 @@ export function parseDdexOrders( } /** - * Aggregates orders by price point for consistency with other exchanges. - * Querying the Ddex API at level 3 setting returns a breakdown of - * individual orders at each price point. Other exchanges only give total amount - * at each price point. Returns an array of <price, amount> tuples. - * @param ddexOrders A list of Ddex orders awaiting aggregation. - */ -export function aggregateOrders(ddexOrders: DdexOrder[]): Array<[string, BigNumber]> { - const sumAmount = (acc: BigNumber, order: DdexOrder): BigNumber => acc.plus(order.amount); - const aggregatedPricePoints = R.reduceBy(sumAmount, new BigNumber(0), R.prop('price'), ddexOrders); - return Object.entries(aggregatedPricePoints); -} - -/** * Parse a single aggregated Ddex order in order to form a tokenOrder entity * which can be saved into the database. * @param ddexMarket An object containing information about the market where these @@ -66,12 +54,14 @@ export function parseDdexOrder( tokenOrder.orderType = orderType; tokenOrder.price = price; - tokenOrder.baseAssetSymbol = ddexMarket.baseToken; - tokenOrder.baseAssetAddress = ddexMarket.baseTokenAddress; - tokenOrder.baseVolume = price.times(amount); + // ddex currently confuses quote and base assets. + // We switch them here to maintain our internal consistency. 
+ tokenOrder.baseAssetSymbol = ddexMarket.quoteToken; + tokenOrder.baseAssetAddress = ddexMarket.quoteTokenAddress; + tokenOrder.baseVolume = amount; - tokenOrder.quoteAssetSymbol = ddexMarket.quoteToken; - tokenOrder.quoteAssetAddress = ddexMarket.quoteTokenAddress; - tokenOrder.quoteVolume = amount; + tokenOrder.quoteAssetSymbol = ddexMarket.baseToken; + tokenOrder.quoteAssetAddress = ddexMarket.baseTokenAddress; + tokenOrder.quoteVolume = price.times(amount); return tokenOrder; } diff --git a/packages/pipeline/src/parsers/events/erc20_events.ts b/packages/pipeline/src/parsers/events/erc20_events.ts new file mode 100644 index 000000000..caf9984d0 --- /dev/null +++ b/packages/pipeline/src/parsers/events/erc20_events.ts @@ -0,0 +1,34 @@ +import { ERC20TokenApprovalEventArgs } from '@0x/contract-wrappers'; +import { LogWithDecodedArgs } from 'ethereum-types'; +import * as R from 'ramda'; + +import { ERC20ApprovalEvent } from '../../entities'; + +/** + * Parses raw event logs for an ERC20 approval event and returns an array of + * ERC20ApprovalEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseERC20ApprovalEvents: ( + eventLogs: Array<LogWithDecodedArgs<ERC20TokenApprovalEventArgs>>, +) => ERC20ApprovalEvent[] = R.map(_convertToERC20ApprovalEvent); + +/** + * Converts a raw event log for an ERC20 approval event into an + * ERC20ApprovalEvent entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). + */ +export function _convertToERC20ApprovalEvent( + eventLog: LogWithDecodedArgs<ERC20TokenApprovalEventArgs>, +): ERC20ApprovalEvent { + const erc20ApprovalEvent = new ERC20ApprovalEvent(); + erc20ApprovalEvent.tokenAddress = eventLog.address as string; + erc20ApprovalEvent.blockNumber = eventLog.blockNumber as number; + erc20ApprovalEvent.logIndex = eventLog.logIndex as number; + erc20ApprovalEvent.rawData = eventLog.data as string; + erc20ApprovalEvent.transactionHash = eventLog.transactionHash; + erc20ApprovalEvent.ownerAddress = eventLog.args._owner; + erc20ApprovalEvent.spenderAddress = eventLog.args._spender; + erc20ApprovalEvent.amount = eventLog.args._value; + return erc20ApprovalEvent; +} diff --git a/packages/pipeline/src/parsers/events/exchange_events.ts b/packages/pipeline/src/parsers/events/exchange_events.ts new file mode 100644 index 000000000..e18106c75 --- /dev/null +++ b/packages/pipeline/src/parsers/events/exchange_events.ts @@ -0,0 +1,133 @@ +import { ExchangeCancelEventArgs, ExchangeCancelUpToEventArgs, ExchangeFillEventArgs } from '@0x/contract-wrappers'; +import { assetDataUtils } from '@0x/order-utils'; +import { AssetProxyId, ERC721AssetData } from '@0x/types'; +import { LogWithDecodedArgs } from 'ethereum-types'; +import * as R from 'ramda'; + +import { ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent } from '../../entities'; +import { bigNumbertoStringOrNull } from '../../utils'; + +/** + * Parses raw event logs for a fill event and returns an array of + * ExchangeFillEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseExchangeFillEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeFillEventArgs>>, +) => ExchangeFillEvent[] = R.map(_convertToExchangeFillEvent); + +/** + * Parses raw event logs for a cancel event and returns an array of + * ExchangeCancelEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). 
+ */ +export const parseExchangeCancelEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>, +) => ExchangeCancelEvent[] = R.map(_convertToExchangeCancelEvent); + +/** + * Parses raw event logs for a CancelUpTo event and returns an array of + * ExchangeCancelUpToEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseExchangeCancelUpToEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>, +) => ExchangeCancelUpToEvent[] = R.map(_convertToExchangeCancelUpToEvent); + +/** + * Converts a raw event log for a fill event into an ExchangeFillEvent entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). + */ +export function _convertToExchangeFillEvent(eventLog: LogWithDecodedArgs<ExchangeFillEventArgs>): ExchangeFillEvent { + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); + const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); + const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const exchangeFillEvent = new ExchangeFillEvent(); + exchangeFillEvent.contractAddress = eventLog.address as string; + exchangeFillEvent.blockNumber = eventLog.blockNumber as number; + exchangeFillEvent.logIndex = eventLog.logIndex as number; + exchangeFillEvent.rawData = eventLog.data as string; + exchangeFillEvent.transactionHash = eventLog.transactionHash; + exchangeFillEvent.makerAddress = eventLog.args.makerAddress; + exchangeFillEvent.takerAddress = eventLog.args.takerAddress; + exchangeFillEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; + exchangeFillEvent.senderAddress = eventLog.args.senderAddress; + exchangeFillEvent.makerAssetFilledAmount = eventLog.args.makerAssetFilledAmount; + exchangeFillEvent.takerAssetFilledAmount = eventLog.args.takerAssetFilledAmount; + exchangeFillEvent.makerFeePaid = eventLog.args.makerFeePaid; + exchangeFillEvent.takerFeePaid = eventLog.args.takerFeePaid; + exchangeFillEvent.orderHash = eventLog.args.orderHash; + exchangeFillEvent.rawMakerAssetData = eventLog.args.makerAssetData; + exchangeFillEvent.makerAssetType = makerAssetType; + exchangeFillEvent.makerAssetProxyId = makerAssetData.assetProxyId; + exchangeFillEvent.makerTokenAddress = makerAssetData.tokenAddress; + // tslint has a false positive here. Type assertion is required. + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeFillEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); + exchangeFillEvent.rawTakerAssetData = eventLog.args.takerAssetData; + exchangeFillEvent.takerAssetType = takerAssetType; + exchangeFillEvent.takerAssetProxyId = takerAssetData.assetProxyId; + exchangeFillEvent.takerTokenAddress = takerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeFillEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); + return exchangeFillEvent; +} + +/** + * Converts a raw event log for a cancel event into an ExchangeCancelEvent + * entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). 
+ */ +export function _convertToExchangeCancelEvent( + eventLog: LogWithDecodedArgs<ExchangeCancelEventArgs>, +): ExchangeCancelEvent { + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); + const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); + const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const exchangeCancelEvent = new ExchangeCancelEvent(); + exchangeCancelEvent.contractAddress = eventLog.address as string; + exchangeCancelEvent.blockNumber = eventLog.blockNumber as number; + exchangeCancelEvent.logIndex = eventLog.logIndex as number; + exchangeCancelEvent.rawData = eventLog.data as string; + exchangeCancelEvent.transactionHash = eventLog.transactionHash; + exchangeCancelEvent.makerAddress = eventLog.args.makerAddress; + exchangeCancelEvent.takerAddress = eventLog.args.takerAddress; + exchangeCancelEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; + exchangeCancelEvent.senderAddress = eventLog.args.senderAddress; + exchangeCancelEvent.orderHash = eventLog.args.orderHash; + exchangeCancelEvent.rawMakerAssetData = eventLog.args.makerAssetData; + exchangeCancelEvent.makerAssetType = makerAssetType; + exchangeCancelEvent.makerAssetProxyId = makerAssetData.assetProxyId; + exchangeCancelEvent.makerTokenAddress = makerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeCancelEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); + exchangeCancelEvent.rawTakerAssetData = eventLog.args.takerAssetData; + exchangeCancelEvent.takerAssetType = takerAssetType; + exchangeCancelEvent.takerAssetProxyId = takerAssetData.assetProxyId; + exchangeCancelEvent.takerTokenAddress = takerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeCancelEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); + return exchangeCancelEvent; +} + +/** + * Converts a raw event log for a cancelUpTo event into an + * ExchangeCancelUpToEvent entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). 
+ */ +export function _convertToExchangeCancelUpToEvent( + eventLog: LogWithDecodedArgs<ExchangeCancelUpToEventArgs>, +): ExchangeCancelUpToEvent { + const exchangeCancelUpToEvent = new ExchangeCancelUpToEvent(); + exchangeCancelUpToEvent.contractAddress = eventLog.address as string; + exchangeCancelUpToEvent.blockNumber = eventLog.blockNumber as number; + exchangeCancelUpToEvent.logIndex = eventLog.logIndex as number; + exchangeCancelUpToEvent.rawData = eventLog.data as string; + exchangeCancelUpToEvent.transactionHash = eventLog.transactionHash; + exchangeCancelUpToEvent.makerAddress = eventLog.args.makerAddress; + exchangeCancelUpToEvent.senderAddress = eventLog.args.senderAddress; + exchangeCancelUpToEvent.orderEpoch = eventLog.args.orderEpoch; + return exchangeCancelUpToEvent; +} diff --git a/packages/pipeline/src/parsers/events/index.ts b/packages/pipeline/src/parsers/events/index.ts index e18106c75..3f9915e8b 100644 --- a/packages/pipeline/src/parsers/events/index.ts +++ b/packages/pipeline/src/parsers/events/index.ts @@ -1,133 +1,2 @@ -import { ExchangeCancelEventArgs, ExchangeCancelUpToEventArgs, ExchangeFillEventArgs } from '@0x/contract-wrappers'; -import { assetDataUtils } from '@0x/order-utils'; -import { AssetProxyId, ERC721AssetData } from '@0x/types'; -import { LogWithDecodedArgs } from 'ethereum-types'; -import * as R from 'ramda'; - -import { ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent } from '../../entities'; -import { bigNumbertoStringOrNull } from '../../utils'; - -/** - * Parses raw event logs for a fill event and returns an array of - * ExchangeFillEvent entities. - * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). - */ -export const parseExchangeFillEvents: ( - eventLogs: Array<LogWithDecodedArgs<ExchangeFillEventArgs>>, -) => ExchangeFillEvent[] = R.map(_convertToExchangeFillEvent); - -/** - * Parses raw event logs for a cancel event and returns an array of - * ExchangeCancelEvent entities. - * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). - */ -export const parseExchangeCancelEvents: ( - eventLogs: Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>, -) => ExchangeCancelEvent[] = R.map(_convertToExchangeCancelEvent); - -/** - * Parses raw event logs for a CancelUpTo event and returns an array of - * ExchangeCancelUpToEvent entities. - * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). - */ -export const parseExchangeCancelUpToEvents: ( - eventLogs: Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>, -) => ExchangeCancelUpToEvent[] = R.map(_convertToExchangeCancelUpToEvent); - -/** - * Converts a raw event log for a fill event into an ExchangeFillEvent entity. - * @param eventLog Raw event log (e.g. returned from contract-wrappers). - */ -export function _convertToExchangeFillEvent(eventLog: LogWithDecodedArgs<ExchangeFillEventArgs>): ExchangeFillEvent { - const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); - const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; - const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); - const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 
'erc20' : 'erc721'; - const exchangeFillEvent = new ExchangeFillEvent(); - exchangeFillEvent.contractAddress = eventLog.address as string; - exchangeFillEvent.blockNumber = eventLog.blockNumber as number; - exchangeFillEvent.logIndex = eventLog.logIndex as number; - exchangeFillEvent.rawData = eventLog.data as string; - exchangeFillEvent.transactionHash = eventLog.transactionHash; - exchangeFillEvent.makerAddress = eventLog.args.makerAddress; - exchangeFillEvent.takerAddress = eventLog.args.takerAddress; - exchangeFillEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; - exchangeFillEvent.senderAddress = eventLog.args.senderAddress; - exchangeFillEvent.makerAssetFilledAmount = eventLog.args.makerAssetFilledAmount; - exchangeFillEvent.takerAssetFilledAmount = eventLog.args.takerAssetFilledAmount; - exchangeFillEvent.makerFeePaid = eventLog.args.makerFeePaid; - exchangeFillEvent.takerFeePaid = eventLog.args.takerFeePaid; - exchangeFillEvent.orderHash = eventLog.args.orderHash; - exchangeFillEvent.rawMakerAssetData = eventLog.args.makerAssetData; - exchangeFillEvent.makerAssetType = makerAssetType; - exchangeFillEvent.makerAssetProxyId = makerAssetData.assetProxyId; - exchangeFillEvent.makerTokenAddress = makerAssetData.tokenAddress; - // tslint has a false positive here. Type assertion is required. - // tslint:disable-next-line:no-unnecessary-type-assertion - exchangeFillEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); - exchangeFillEvent.rawTakerAssetData = eventLog.args.takerAssetData; - exchangeFillEvent.takerAssetType = takerAssetType; - exchangeFillEvent.takerAssetProxyId = takerAssetData.assetProxyId; - exchangeFillEvent.takerTokenAddress = takerAssetData.tokenAddress; - // tslint:disable-next-line:no-unnecessary-type-assertion - exchangeFillEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); - return exchangeFillEvent; -} - -/** - * Converts a raw event log for a cancel event into an ExchangeCancelEvent - * entity. - * @param eventLog Raw event log (e.g. returned from contract-wrappers). - */ -export function _convertToExchangeCancelEvent( - eventLog: LogWithDecodedArgs<ExchangeCancelEventArgs>, -): ExchangeCancelEvent { - const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); - const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; - const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); - const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 
'erc20' : 'erc721'; - const exchangeCancelEvent = new ExchangeCancelEvent(); - exchangeCancelEvent.contractAddress = eventLog.address as string; - exchangeCancelEvent.blockNumber = eventLog.blockNumber as number; - exchangeCancelEvent.logIndex = eventLog.logIndex as number; - exchangeCancelEvent.rawData = eventLog.data as string; - exchangeCancelEvent.transactionHash = eventLog.transactionHash; - exchangeCancelEvent.makerAddress = eventLog.args.makerAddress; - exchangeCancelEvent.takerAddress = eventLog.args.takerAddress; - exchangeCancelEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; - exchangeCancelEvent.senderAddress = eventLog.args.senderAddress; - exchangeCancelEvent.orderHash = eventLog.args.orderHash; - exchangeCancelEvent.rawMakerAssetData = eventLog.args.makerAssetData; - exchangeCancelEvent.makerAssetType = makerAssetType; - exchangeCancelEvent.makerAssetProxyId = makerAssetData.assetProxyId; - exchangeCancelEvent.makerTokenAddress = makerAssetData.tokenAddress; - // tslint:disable-next-line:no-unnecessary-type-assertion - exchangeCancelEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); - exchangeCancelEvent.rawTakerAssetData = eventLog.args.takerAssetData; - exchangeCancelEvent.takerAssetType = takerAssetType; - exchangeCancelEvent.takerAssetProxyId = takerAssetData.assetProxyId; - exchangeCancelEvent.takerTokenAddress = takerAssetData.tokenAddress; - // tslint:disable-next-line:no-unnecessary-type-assertion - exchangeCancelEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); - return exchangeCancelEvent; -} - -/** - * Converts a raw event log for a cancelUpTo event into an - * ExchangeCancelUpToEvent entity. - * @param eventLog Raw event log (e.g. returned from contract-wrappers). - */ -export function _convertToExchangeCancelUpToEvent( - eventLog: LogWithDecodedArgs<ExchangeCancelUpToEventArgs>, -): ExchangeCancelUpToEvent { - const exchangeCancelUpToEvent = new ExchangeCancelUpToEvent(); - exchangeCancelUpToEvent.contractAddress = eventLog.address as string; - exchangeCancelUpToEvent.blockNumber = eventLog.blockNumber as number; - exchangeCancelUpToEvent.logIndex = eventLog.logIndex as number; - exchangeCancelUpToEvent.rawData = eventLog.data as string; - exchangeCancelUpToEvent.transactionHash = eventLog.transactionHash; - exchangeCancelUpToEvent.makerAddress = eventLog.args.makerAddress; - exchangeCancelUpToEvent.senderAddress = eventLog.args.senderAddress; - exchangeCancelUpToEvent.orderEpoch = eventLog.args.orderEpoch; - return exchangeCancelUpToEvent; -} +export { parseExchangeCancelEvents, parseExchangeCancelUpToEvents, parseExchangeFillEvents } from './exchange_events'; +export { parseERC20ApprovalEvents } from './erc20_events'; diff --git a/packages/pipeline/src/parsers/idex_orders/index.ts b/packages/pipeline/src/parsers/idex_orders/index.ts new file mode 100644 index 000000000..dfe27455c --- /dev/null +++ b/packages/pipeline/src/parsers/idex_orders/index.ts @@ -0,0 +1,77 @@ +import { BigNumber } from '@0x/utils'; + +import { aggregateOrders } from '../utils'; + +import { IdexOrder, IdexOrderbook, IdexOrderParam } from '../../data_sources/idex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../entities'; +import { OrderType } from '../../types'; + +/** + * Marque function of this file. + * 1) Takes in orders from an orderbook, + * 2) Aggregates them by price point, + * 3) Parses them into entities which are then saved into the database. 
+ * @param idexOrderbook raw orderbook that we pull from the Idex API. + * @param observedTimestamp Time at which the orders for the market were pulled. + * @param source The exchange where these orders are placed. In this case 'idex'. + */ +export function parseIdexOrders(idexOrderbook: IdexOrderbook, observedTimestamp: number, source: string): TokenOrder[] { + const aggregatedBids = aggregateOrders(idexOrderbook.bids); + // Any of the bid orders' params will work + const idexBidOrder = idexOrderbook.bids[0]; + const parsedBids = + aggregatedBids.length > 0 + ? aggregatedBids.map(order => parseIdexOrder(idexBidOrder.params, observedTimestamp, 'bid', source, order)) + : []; + + const aggregatedAsks = aggregateOrders(idexOrderbook.asks); + // Any of the ask orders' params will work + const idexAskOrder = idexOrderbook.asks[0]; + const parsedAsks = + aggregatedAsks.length > 0 + ? aggregatedAsks.map(order => parseIdexOrder(idexAskOrder.params, observedTimestamp, 'ask', source, order)) + : []; + return parsedBids.concat(parsedAsks); +} + +/** + * Parse a single aggregated Idex order in order to form a tokenOrder entity + * which can be saved into the database. + * @param idexOrderParam An object containing information about the market where these + * trades have been placed. + * @param observedTimestamp The time when the API response returned back to us. + * @param orderType 'bid' or 'ask' enum. + * @param source Exchange where these orders were placed. + * @param idexOrder A <price, amount> tuple which we will convert to volume-basis. + */ +export function parseIdexOrder( + idexOrderParam: IdexOrderParam, + observedTimestamp: number, + orderType: OrderType, + source: string, + idexOrder: [string, BigNumber], +): TokenOrder { + const tokenOrder = new TokenOrder(); + const price = new BigNumber(idexOrder[0]); + const amount = idexOrder[1]; + + tokenOrder.source = source; + tokenOrder.observedTimestamp = observedTimestamp; + tokenOrder.orderType = orderType; + tokenOrder.price = price; + tokenOrder.baseVolume = amount; + tokenOrder.quoteVolume = price.times(amount); + + if (orderType === 'bid') { + tokenOrder.baseAssetSymbol = idexOrderParam.buySymbol; + tokenOrder.baseAssetAddress = idexOrderParam.tokenBuy; + tokenOrder.quoteAssetSymbol = idexOrderParam.sellSymbol; + tokenOrder.quoteAssetAddress = idexOrderParam.tokenSell; + } else { + tokenOrder.baseAssetSymbol = idexOrderParam.sellSymbol; + tokenOrder.baseAssetAddress = idexOrderParam.tokenSell; + tokenOrder.quoteAssetSymbol = idexOrderParam.buySymbol; + tokenOrder.quoteAssetAddress = idexOrderParam.tokenBuy; + } + return tokenOrder; +} diff --git a/packages/pipeline/src/parsers/oasis_orders/index.ts b/packages/pipeline/src/parsers/oasis_orders/index.ts new file mode 100644 index 000000000..13997f31b --- /dev/null +++ b/packages/pipeline/src/parsers/oasis_orders/index.ts @@ -0,0 +1,71 @@ +import { BigNumber } from '@0x/utils'; +import * as R from 'ramda'; + +import { aggregateOrders } from '../utils'; + +import { OasisMarket, OasisOrder } from '../../data_sources/oasis'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../entities'; +import { OrderType } from '../../types'; + +/** + * Marque function of this file. + * 1) Takes in orders from an orderbook, + * 2) Aggregates them according to price point, + * 3) Builds TokenOrder entity with other information attached. + * @param oasisOrderbook A raw orderbook that we pull from the Oasis API. + * @param oasisMarket An object containing market data also directly from the API. 
+ * @param observedTimestamp Time at which the orders for the market were pulled. + * @param source The exchange where these orders are placed. In this case 'oasis'. + */ +export function parseOasisOrders( + oasisOrderbook: OasisOrder[], + oasisMarket: OasisMarket, + observedTimestamp: number, + source: string, +): TokenOrder[] { + const aggregatedBids = aggregateOrders(R.filter(R.propEq('act', 'bid'), oasisOrderbook)); + const aggregatedAsks = aggregateOrders(R.filter(R.propEq('act', 'ask'), oasisOrderbook)); + const parsedBids = aggregatedBids.map(order => + parseOasisOrder(oasisMarket, observedTimestamp, 'bid', source, order), + ); + const parsedAsks = aggregatedAsks.map(order => + parseOasisOrder(oasisMarket, observedTimestamp, 'ask', source, order), + ); + return parsedBids.concat(parsedAsks); +} + +/** + * Parse a single aggregated Oasis order to form a tokenOrder entity + * which can be saved into the database. + * @param oasisMarket An object containing information about the market where these + * trades have been placed. + * @param observedTimestamp The time when the API response returned back to us. + * @param orderType 'bid' or 'ask' enum. + * @param source Exchange where these orders were placed. + * @param oasisOrder A <price, amount> tuple which we will convert to volume-basis. + */ +export function parseOasisOrder( + oasisMarket: OasisMarket, + observedTimestamp: number, + orderType: OrderType, + source: string, + oasisOrder: [string, BigNumber], +): TokenOrder { + const tokenOrder = new TokenOrder(); + const price = new BigNumber(oasisOrder[0]); + const amount = oasisOrder[1]; + + tokenOrder.source = source; + tokenOrder.observedTimestamp = observedTimestamp; + tokenOrder.orderType = orderType; + tokenOrder.price = price; + + tokenOrder.baseAssetSymbol = oasisMarket.base; + tokenOrder.baseAssetAddress = null; // Oasis doesn't provide address information + tokenOrder.baseVolume = amount; + + tokenOrder.quoteAssetSymbol = oasisMarket.quote; + tokenOrder.quoteAssetAddress = null; // Oasis doesn't provide address information + tokenOrder.quoteVolume = price.times(amount); + return tokenOrder; +} diff --git a/packages/pipeline/src/parsers/paradex_orders/index.ts b/packages/pipeline/src/parsers/paradex_orders/index.ts index 7966658a7..5ceeb64a4 100644 --- a/packages/pipeline/src/parsers/paradex_orders/index.ts +++ b/packages/pipeline/src/parsers/paradex_orders/index.ts @@ -57,10 +57,10 @@ export function parseParadexOrder( tokenOrder.baseAssetSymbol = paradexMarket.baseToken; tokenOrder.baseAssetAddress = paradexMarket.baseTokenAddress as string; - tokenOrder.baseVolume = price.times(amount); + tokenOrder.baseVolume = amount; tokenOrder.quoteAssetSymbol = paradexMarket.quoteToken; tokenOrder.quoteAssetAddress = paradexMarket.quoteTokenAddress as string; - tokenOrder.quoteVolume = amount; + tokenOrder.quoteVolume = price.times(amount); return tokenOrder; } diff --git a/packages/pipeline/src/parsers/token_metadata/index.ts b/packages/pipeline/src/parsers/token_metadata/index.ts index f258af063..65e0aaa6e 100644 --- a/packages/pipeline/src/parsers/token_metadata/index.ts +++ b/packages/pipeline/src/parsers/token_metadata/index.ts @@ -1,9 +1,8 @@ -import { BigNumber } from '@0x/utils'; import * as R from 'ramda'; import { MetamaskTrustedTokenMeta, ZeroExTrustedTokenMeta } from '../../data_sources/trusted_tokens'; import { TokenMetadata } from '../../entities'; -import {} from '../../utils'; +import { toBigNumberOrNull } from '../../utils'; /** * Parses Metamask's trusted tokens list. 
@@ -26,7 +25,7 @@ function parseMetamaskTrustedToken(resp: MetamaskTrustedTokenMeta, address: stri const trustedToken = new TokenMetadata(); trustedToken.address = address; - trustedToken.decimals = new BigNumber(resp.decimals); + trustedToken.decimals = toBigNumberOrNull(resp.decimals); trustedToken.symbol = resp.symbol; trustedToken.name = resp.name; trustedToken.authority = 'metamask'; @@ -38,7 +37,7 @@ function parseZeroExTrustedToken(resp: ZeroExTrustedTokenMeta): TokenMetadata { const trustedToken = new TokenMetadata(); trustedToken.address = resp.address; - trustedToken.decimals = resp.decimals ? new BigNumber(resp.decimals) : null; + trustedToken.decimals = toBigNumberOrNull(resp.decimals); trustedToken.symbol = resp.symbol; trustedToken.name = resp.name; trustedToken.authority = '0x'; diff --git a/packages/pipeline/src/parsers/utils.ts b/packages/pipeline/src/parsers/utils.ts new file mode 100644 index 000000000..860729e9f --- /dev/null +++ b/packages/pipeline/src/parsers/utils.ts @@ -0,0 +1,28 @@ +import { BigNumber } from '@0x/utils'; + +export interface GenericRawOrder { + price: string; + amount: string; +} + +/** + * Aggregates individual orders by price point. Filters zero amount orders. + * @param rawOrders An array of objects that have price and amount information. + */ +export function aggregateOrders(rawOrders: GenericRawOrder[]): Array<[string, BigNumber]> { + const aggregatedOrders = new Map<string, BigNumber>(); + rawOrders.forEach(order => { + const amount = new BigNumber(order.amount); + if (amount.isZero()) { + return; + } + // Use string instead of BigNum to aggregate by value instead of variable. + // Convert to BigNumber first to consolidate different string + // representations of the same number. Eg. '0.0' and '0.00'. + const price = new BigNumber(order.price).toString(); + + const existingAmount = aggregatedOrders.get(price) || new BigNumber(0); + aggregatedOrders.set(price, amount.plus(existingAmount)); + }); + return Array.from(aggregatedOrders.entries()); +} diff --git a/packages/pipeline/src/scripts/pull_competing_dex_trades.ts b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts index 4e4c12dd0..1478d5615 100644 --- a/packages/pipeline/src/scripts/pull_competing_dex_trades.ts +++ b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts @@ -15,11 +15,11 @@ let connection: Connection; (async () => { connection = await createConnection(ormConfig as ConnectionOptions); - await getAndSaveTrades(); + await getAndSaveTradesAsync(); process.exit(0); })().catch(handleError); -async function getAndSaveTrades(): Promise<void> { +async function getAndSaveTradesAsync(): Promise<void> { const apiKey = process.env.BLOXY_API_KEY; if (apiKey === undefined) { throw new Error('Missing required env var: BLOXY_API_KEY'); diff --git a/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts index 7868e9c5a..4e00f258f 100644 --- a/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts +++ b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts @@ -25,7 +25,7 @@ let connection: Connection; const markets = await ddexSource.getActiveMarketsAsync(); for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) { await Promise.all( - marketsChunk.map(async (market: DdexMarket) => getAndSaveMarketOrderbook(ddexSource, market)), + marketsChunk.map(async (market: DdexMarket) => getAndSaveMarketOrderbookAsync(ddexSource, market)), ); await new 
Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY)); } @@ -38,7 +38,7 @@ let connection: Connection; * @param ddexSource Data source which can query Ddex API. * @param market Object from Ddex API containing market data. */ -async function getAndSaveMarketOrderbook(ddexSource: DdexSource, market: DdexMarket): Promise<void> { +async function getAndSaveMarketOrderbookAsync(ddexSource: DdexSource, market: DdexMarket): Promise<void> { const orderBook = await ddexSource.getMarketOrderbookAsync(market.id); const observedTimestamp = Date.now(); diff --git a/packages/pipeline/src/scripts/pull_erc20_events.ts b/packages/pipeline/src/scripts/pull_erc20_events.ts new file mode 100644 index 000000000..0ad12c97a --- /dev/null +++ b/packages/pipeline/src/scripts/pull_erc20_events.ts @@ -0,0 +1,66 @@ +// tslint:disable:no-console +import { getContractAddressesForNetworkOrThrow } from '@0x/contract-addresses'; +import { web3Factory } from '@0x/dev-utils'; +import { Web3ProviderEngine } from '@0x/subproviders'; +import { Web3Wrapper } from '@0x/web3-wrapper'; +import 'reflect-metadata'; +import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; + +import { ERC20EventsSource } from '../data_sources/contract-wrappers/erc20_events'; +import { ERC20ApprovalEvent } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseERC20ApprovalEvents } from '../parsers/events'; +import { handleError, INFURA_ROOT_URL } from '../utils'; + +const NETWORK_ID = 1; +const START_BLOCK_OFFSET = 100; // Number of blocks before the last known block to consider when updating fill events. +const BATCH_SAVE_SIZE = 1000; // Number of events to save at once. +const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default endBlock. +const WETH_START_BLOCK = 4719568; // Block number when the WETH contract was deployed. 
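Not part of this change, but for illustration: the approval fetch in this script ultimately goes through getEventsWithPaginationAsync from src/data_sources/contract-wrappers/utils.ts, which wraps every page in the retry helper. A minimal sketch of that retry behavior follows; flakyGetterAsync, the block numbers, and the script location (assumed to sit under src/scripts, like this file) are all invented for the example.

// Illustrative sketch only: flakyGetterAsync and the block numbers are made up.
// It exercises _getEventsWithRetriesAsync as added in src/data_sources/contract-wrappers/utils.ts.
import { DecodedLogArgs, LogWithDecodedArgs } from 'ethereum-types';

import { _getEventsWithRetriesAsync } from '../data_sources/contract-wrappers/utils';

(async () => {
    let attempts = 0;
    const flakyGetterAsync = async (
        _fromBlock: number,
        _toBlock: number,
    ): Promise<Array<LogWithDecodedArgs<DecodedLogArgs>>> => {
        attempts++;
        if (attempts === 1) {
            // Only errors whose message contains 'network timeout' are considered retryable.
            throw new Error('network timeout');
        }
        return []; // pretend the node returned no logs for this sub-range
    };
    // The first attempt throws a retryable error, the second succeeds, so this resolves to [].
    const logs = await _getEventsWithRetriesAsync(flakyGetterAsync, 3, 4719568, 4729567);
    console.log(`got ${logs.length} logs after ${attempts} attempts`); // got 0 logs after 2 attempts
})();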
+ +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const provider = web3Factory.getRpcProvider({ + rpcUrl: INFURA_ROOT_URL, + }); + const endBlock = await calculateEndBlockAsync(provider); + await getAndSaveWETHApprovalEventsAsync(provider, endBlock); + process.exit(0); +})().catch(handleError); + +async function getAndSaveWETHApprovalEventsAsync(provider: Web3ProviderEngine, endBlock: number): Promise<void> { + console.log('Checking existing approval events...'); + const repository = connection.getRepository(ERC20ApprovalEvent); + const startBlock = (await getStartBlockAsync(repository)) || WETH_START_BLOCK; + + console.log(`Getting WETH approval events starting at ${startBlock}...`); + const wethTokenAddress = getContractAddressesForNetworkOrThrow(NETWORK_ID).etherToken; + const eventsSource = new ERC20EventsSource(provider, NETWORK_ID, wethTokenAddress); + const eventLogs = await eventsSource.getApprovalEventsAsync(startBlock, endBlock); + + console.log(`Parsing ${eventLogs.length} WETH approval events...`); + const events = parseERC20ApprovalEvents(eventLogs); + console.log(`Retrieved and parsed ${events.length} total WETH approval events.`); + await repository.save(events, { chunk: Math.ceil(events.length / BATCH_SAVE_SIZE) }); +} + +async function calculateEndBlockAsync(provider: Web3ProviderEngine): Promise<number> { + const web3Wrapper = new Web3Wrapper(provider); + const currentBlock = await web3Wrapper.getBlockNumberAsync(); + return currentBlock - BLOCK_FINALITY_THRESHOLD; +} + +async function getStartBlockAsync(repository: Repository<ERC20ApprovalEvent>): Promise<number | null> { + const fillEventCount = await repository.count(); + if (fillEventCount === 0) { + console.log(`No existing approval events found.`); + return null; + } + const queryResult = await connection.query( + `SELECT block_number FROM raw.erc20_approval_events ORDER BY block_number DESC LIMIT 1`, + ); + const lastKnownBlock = queryResult[0].block_number; + return lastKnownBlock - START_BLOCK_OFFSET; +} diff --git a/packages/pipeline/src/scripts/pull_missing_events.ts b/packages/pipeline/src/scripts/pull_exchange_events.ts index 80abbb8b0..e98fc6629 100644 --- a/packages/pipeline/src/scripts/pull_missing_events.ts +++ b/packages/pipeline/src/scripts/pull_exchange_events.ts @@ -1,5 +1,7 @@ // tslint:disable:no-console import { web3Factory } from '@0x/dev-utils'; +import { Web3ProviderEngine } from '@0x/subproviders'; +import { Web3Wrapper } from '@0x/web3-wrapper'; import R = require('ramda'); import 'reflect-metadata'; import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; @@ -12,6 +14,7 @@ import { EXCHANGE_START_BLOCK, handleError, INFURA_ROOT_URL } from '../utils'; const START_BLOCK_OFFSET = 100; // Number of blocks before the last known block to consider when updating fill events. const BATCH_SAVE_SIZE = 1000; // Number of events to save at once. +const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default endBlock. 
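For orientation, a small worked example (hypothetical block numbers, not part of this change) of how the range is split once endBlock has been pinned BLOCK_FINALITY_THRESHOLD blocks behind the chain tip: getEventsWithPaginationAsync walks [startBlock, endBlock] in NUM_BLOCKS_PER_QUERY-sized sub-ranges.

// Purely illustrative arithmetic; every block number below is made up.
const NUM_BLOCKS_PER_QUERY = 10000; // mirrors the constant in data_sources/contract-wrappers/utils.ts
const BLOCK_FINALITY_THRESHOLD = 10;
const currentBlock = 6950000; // hypothetical chain tip from web3Wrapper.getBlockNumberAsync()
const endBlock = currentBlock - BLOCK_FINALITY_THRESHOLD; // 6949990
const startBlock = 6925000; // hypothetical lastKnownBlock - START_BLOCK_OFFSET
for (let fromBlock = startBlock; fromBlock <= endBlock; fromBlock += NUM_BLOCKS_PER_QUERY) {
    const toBlock = Math.min(fromBlock + NUM_BLOCKS_PER_QUERY - 1, endBlock);
    console.log([fromBlock, toBlock]); // [6925000, 6934999], [6935000, 6944999], [6945000, 6949990]
}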
let connection: Connection; @@ -20,43 +23,44 @@ let connection: Connection; const provider = web3Factory.getRpcProvider({ rpcUrl: INFURA_ROOT_URL, }); + const endBlock = await calculateEndBlockAsync(provider); const eventsSource = new ExchangeEventsSource(provider, 1); - await getFillEventsAsync(eventsSource); - await getCancelEventsAsync(eventsSource); - await getCancelUpToEventsAsync(eventsSource); + await getFillEventsAsync(eventsSource, endBlock); + await getCancelEventsAsync(eventsSource, endBlock); + await getCancelUpToEventsAsync(eventsSource, endBlock); process.exit(0); })().catch(handleError); -async function getFillEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { +async function getFillEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> { console.log('Checking existing fill events...'); const repository = connection.getRepository(ExchangeFillEvent); const startBlock = await getStartBlockAsync(repository); console.log(`Getting fill events starting at ${startBlock}...`); - const eventLogs = await eventsSource.getFillEventsAsync(startBlock); + const eventLogs = await eventsSource.getFillEventsAsync(startBlock, endBlock); console.log('Parsing fill events...'); const events = parseExchangeFillEvents(eventLogs); console.log(`Retrieved and parsed ${events.length} total fill events.`); await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events); } -async function getCancelEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { +async function getCancelEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> { console.log('Checking existing cancel events...'); const repository = connection.getRepository(ExchangeCancelEvent); const startBlock = await getStartBlockAsync(repository); console.log(`Getting cancel events starting at ${startBlock}...`); - const eventLogs = await eventsSource.getCancelEventsAsync(startBlock); + const eventLogs = await eventsSource.getCancelEventsAsync(startBlock, endBlock); console.log('Parsing cancel events...'); const events = parseExchangeCancelEvents(eventLogs); console.log(`Retrieved and parsed ${events.length} total cancel events.`); await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events); } -async function getCancelUpToEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { +async function getCancelUpToEventsAsync(eventsSource: ExchangeEventsSource, endBlock: number): Promise<void> { console.log('Checking existing CancelUpTo events...'); const repository = connection.getRepository(ExchangeCancelUpToEvent); const startBlock = await getStartBlockAsync(repository); console.log(`Getting CancelUpTo events starting at ${startBlock}...`); - const eventLogs = await eventsSource.getCancelUpToEventsAsync(startBlock); + const eventLogs = await eventsSource.getCancelUpToEventsAsync(startBlock, endBlock); console.log('Parsing CancelUpTo events...'); const events = parseExchangeCancelUpToEvents(eventLogs); console.log(`Retrieved and parsed ${events.length} total CancelUpTo events.`); @@ -134,3 +138,9 @@ async function saveIndividuallyWithFallbackAsync<T extends ExchangeEvent>( } } } + +async function calculateEndBlockAsync(provider: Web3ProviderEngine): Promise<number> { + const web3Wrapper = new Web3Wrapper(provider); + const currentBlock = await web3Wrapper.getBlockNumberAsync(); + return currentBlock - BLOCK_FINALITY_THRESHOLD; +} diff --git a/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts 
b/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts new file mode 100644 index 000000000..490b17766 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_idex_orderbook_snapshots.ts @@ -0,0 +1,63 @@ +import { logUtils } from '@0x/utils'; +import * as R from 'ramda'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import { IDEX_SOURCE, IdexSource } from '../data_sources/idex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseIdexOrders } from '../parsers/idex_orders'; +import { handleError } from '../utils'; + +// Number of orders to save at once. +const BATCH_SAVE_SIZE = 1000; + +// Number of markets to retrieve orderbooks for at once. +const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 100; + +// Delay between market orderbook requests. +const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 2000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const idexSource = new IdexSource(); + logUtils.log('Getting all IDEX markets'); + const markets = await idexSource.getMarketsAsync(); + logUtils.log(`Got ${markets.length} markets.`); + for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) { + await Promise.all( + marketsChunk.map(async (marketId: string) => getAndSaveMarketOrderbookAsync(idexSource, marketId)), + ); + await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY)); + } + process.exit(0); +})().catch(handleError); + +/** + * Retrieve orderbook from Idex API for a given market. Parse orders and insert + * them into our database. + * @param idexSource Data source which can query Idex API. + * @param marketId String representing market of interest, eg. 'ETH_TIC'. 
+ */ +async function getAndSaveMarketOrderbookAsync(idexSource: IdexSource, marketId: string): Promise<void> { + logUtils.log(`${marketId}: Retrieving orderbook.`); + const orderBook = await idexSource.getMarketOrderbookAsync(marketId); + const observedTimestamp = Date.now(); + + if (!R.has('bids', orderBook) || !R.has('asks', orderBook)) { + logUtils.warn(`${marketId}: Orderbook faulty.`); + return; + } + + logUtils.log(`${marketId}: Parsing orders.`); + const orders = parseIdexOrders(orderBook, observedTimestamp, IDEX_SOURCE); + + if (orders.length > 0) { + logUtils.log(`${marketId}: Saving ${orders.length} orders.`); + const TokenOrderRepository = connection.getRepository(TokenOrder); + await TokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) }); + } else { + logUtils.log(`${marketId}: 0 orders to save.`); + } +} diff --git a/packages/pipeline/src/scripts/pull_missing_blocks.ts b/packages/pipeline/src/scripts/pull_missing_blocks.ts index b7bd51f08..a5203824c 100644 --- a/packages/pipeline/src/scripts/pull_missing_blocks.ts +++ b/packages/pipeline/src/scripts/pull_missing_blocks.ts @@ -24,10 +24,10 @@ let connection: Connection; (async () => { connection = await createConnection(ormConfig as ConnectionOptions); const provider = web3Factory.getRpcProvider({ - rpcUrl: `${INFURA_ROOT_URL}/${process.env.INFURA_API_KEY}`, + rpcUrl: INFURA_ROOT_URL, }); const web3Source = new Web3Source(provider); - await getAllMissingBlocks(web3Source); + await getAllMissingBlocksAsync(web3Source); process.exit(0); })().catch(handleError); @@ -35,26 +35,39 @@ interface MissingBlocksResponse { block_number: string; } -async function getAllMissingBlocks(web3Source: Web3Source): Promise<void> { +async function getAllMissingBlocksAsync(web3Source: Web3Source): Promise<void> { const blocksRepository = connection.getRepository(Block); let fromBlock = EXCHANGE_START_BLOCK; while (true) { - const blockNumbers = await getMissingBlockNumbers(fromBlock); + const blockNumbers = await getMissingBlockNumbersAsync(fromBlock); if (blockNumbers.length === 0) { // There are no more missing blocks. We're done. break; } - await getAndSaveBlocks(web3Source, blocksRepository, blockNumbers); + await getAndSaveBlocksAsync(web3Source, blocksRepository, blockNumbers); fromBlock = Math.max(...blockNumbers) + 1; } const totalBlocks = await blocksRepository.count(); console.log(`Done saving blocks. There are now ${totalBlocks} total blocks.`); } -async function getMissingBlockNumbers(fromBlock: number): Promise<number[]> { +async function getMissingBlockNumbersAsync(fromBlock: number): Promise<number[]> { console.log(`Checking for missing blocks starting at ${fromBlock}...`); + // Note(albrow): The easiest way to get all the blocks we need is to + // consider all the events tables together in a single query. If this query + // gets too slow, we should consider re-architecting so that we can work on + // getting the blocks for one type of event at a time. 
const response = (await connection.query( - 'SELECT DISTINCT(block_number) FROM raw.exchange_fill_events WHERE block_number NOT IN (SELECT number FROM raw.blocks) AND block_number >= $1 ORDER BY block_number ASC LIMIT $2', + `WITH all_events AS ( + SELECT block_number FROM raw.exchange_fill_events + UNION SELECT block_number FROM raw.exchange_cancel_events + UNION SELECT block_number FROM raw.exchange_cancel_up_to_events + UNION SELECT block_number FROM raw.erc20_approval_events + ) + SELECT DISTINCT(block_number) FROM all_events + WHERE block_number NOT IN (SELECT number FROM raw.blocks) + AND block_number >= $1 + ORDER BY block_number ASC LIMIT $2`, [fromBlock, MAX_BLOCKS_PER_QUERY], )) as MissingBlocksResponse[]; const blockNumberStrings = R.pluck('block_number', response); @@ -63,7 +76,7 @@ async function getMissingBlockNumbers(fromBlock: number): Promise<number[]> { return blockNumbers; } -async function getAndSaveBlocks( +async function getAndSaveBlocksAsync( web3Source: Web3Source, blocksRepository: Repository<Block>, blockNumbers: number[], diff --git a/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts new file mode 100644 index 000000000..c4dcf6c83 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_oasis_orderbook_snapshots.ts @@ -0,0 +1,58 @@ +import { logUtils } from '@0x/utils'; +import * as R from 'ramda'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import { OASIS_SOURCE, OasisMarket, OasisSource } from '../data_sources/oasis'; +import { TokenOrderbookSnapshot as TokenOrder } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseOasisOrders } from '../parsers/oasis_orders'; +import { handleError } from '../utils'; + +// Number of orders to save at once. +const BATCH_SAVE_SIZE = 1000; + +// Number of markets to retrieve orderbooks for at once. +const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 50; + +// Delay between market orderbook requests. +const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 1000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const oasisSource = new OasisSource(); + logUtils.log('Getting all active Oasis markets'); + const markets = await oasisSource.getActiveMarketsAsync(); + logUtils.log(`Got ${markets.length} markets.`); + for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) { + await Promise.all( + marketsChunk.map(async (market: OasisMarket) => getAndSaveMarketOrderbookAsync(oasisSource, market)), + ); + await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY)); + } + process.exit(0); +})().catch(handleError); + +/** + * Retrieve orderbook from Oasis API for a given market. Parse orders and insert + * them into our database. + * @param oasisSource Data source which can query Oasis API. + * @param marketId String identifying market we want data for. eg. 'REPAUG'. 
+ */ +async function getAndSaveMarketOrderbookAsync(oasisSource: OasisSource, market: OasisMarket): Promise<void> { + logUtils.log(`${market.id}: Retrieving orderbook.`); + const orderBook = await oasisSource.getMarketOrderbookAsync(market.id); + const observedTimestamp = Date.now(); + + logUtils.log(`${market.id}: Parsing orders.`); + const orders = parseOasisOrders(orderBook, market, observedTimestamp, OASIS_SOURCE); + + if (orders.length > 0) { + logUtils.log(`${market.id}: Saving ${orders.length} orders.`); + const TokenOrderRepository = connection.getRepository(TokenOrder); + await TokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) }); + } else { + logUtils.log(`${market.id}: 0 orders to save.`); + } +} diff --git a/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts index bae1fbede..34345f355 100644 --- a/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts +++ b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts @@ -29,7 +29,7 @@ let connection: Connection; const tokenInfoResponse = await paradexSource.getTokenInfoAsync(); const extendedMarkets = addTokenAddresses(markets, tokenInfoResponse); await Promise.all( - extendedMarkets.map(async (market: ParadexMarket) => getAndSaveMarketOrderbook(paradexSource, market)), + extendedMarkets.map(async (market: ParadexMarket) => getAndSaveMarketOrderbookAsync(paradexSource, market)), ); process.exit(0); })().catch(handleError); @@ -70,7 +70,7 @@ function addTokenAddresses( * @param paradexSource Data source which can query the Paradex API. * @param market Object from the Paradex API with information about the market in question. */ -async function getAndSaveMarketOrderbook(paradexSource: ParadexSource, market: ParadexMarket): Promise<void> { +async function getAndSaveMarketOrderbookAsync(paradexSource: ParadexSource, market: ParadexMarket): Promise<void> { const paradexOrderbookResponse = await paradexSource.getMarketOrderbookAsync(market.symbol); const observedTimestamp = Date.now(); diff --git a/packages/pipeline/src/scripts/pull_trusted_tokens.ts b/packages/pipeline/src/scripts/pull_trusted_tokens.ts index 1befc4437..5906deee6 100644 --- a/packages/pipeline/src/scripts/pull_trusted_tokens.ts +++ b/packages/pipeline/src/scripts/pull_trusted_tokens.ts @@ -16,12 +16,12 @@ let connection: Connection; (async () => { connection = await createConnection(ormConfig as ConnectionOptions); - await getMetamaskTrustedTokens(); - await getZeroExTrustedTokens(); + await getMetamaskTrustedTokensAsync(); + await getZeroExTrustedTokensAsync(); process.exit(0); })().catch(handleError); -async function getMetamaskTrustedTokens(): Promise<void> { +async function getMetamaskTrustedTokensAsync(): Promise<void> { // tslint:disable-next-line:no-console console.log('Getting latest metamask trusted tokens list ...'); const trustedTokensRepository = connection.getRepository(TokenMetadata); @@ -37,7 +37,7 @@ async function getMetamaskTrustedTokens(): Promise<void> { console.log('Done saving metamask trusted tokens.'); } -async function getZeroExTrustedTokens(): Promise<void> { +async function getZeroExTrustedTokensAsync(): Promise<void> { // tslint:disable-next-line:no-console console.log('Getting latest 0x trusted tokens list ...'); const trustedTokensRepository = connection.getRepository(TokenMetadata); diff --git a/packages/pipeline/src/scripts/update_relayer_info.ts b/packages/pipeline/src/scripts/update_relayer_info.ts 
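Sketch (not part of the diff): the IDEX and Oasis snapshot scripts above share the same throttling shape — split the market list into fixed-size chunks, fetch each chunk's orderbooks in parallel, then sleep before the next chunk. A generic version of that loop follows; processInRateLimitedChunksAsync, CHUNK_SIZE, and DELAY_MS are placeholder names, not identifiers from this commit.

import * as R from 'ramda';

const CHUNK_SIZE = 50; // cf. MARKET_ORDERBOOK_REQUEST_BATCH_SIZE
const DELAY_MS = 1000; // cf. MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY

// Process `items` CHUNK_SIZE at a time, pausing DELAY_MS between chunks so the
// upstream API is not flooded with requests.
async function processInRateLimitedChunksAsync<T>(
    items: T[],
    processOneAsync: (item: T) => Promise<void>,
): Promise<void> {
    for (const chunk of R.splitEvery(CHUNK_SIZE, items)) {
        await Promise.all(chunk.map(async item => processOneAsync(item)));
        await new Promise<void>(resolve => setTimeout(resolve, DELAY_MS));
    }
}

Both scripts inline this loop directly; extracting it here is only for readability.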
index f8918728d..41d29b385 100644 --- a/packages/pipeline/src/scripts/update_relayer_info.ts +++ b/packages/pipeline/src/scripts/update_relayer_info.ts @@ -17,11 +17,11 @@ let connection: Connection; (async () => { connection = await createConnection(ormConfig as ConnectionOptions); - await getRelayers(); + await getRelayersAsync(); process.exit(0); })().catch(handleError); -async function getRelayers(): Promise<void> { +async function getRelayersAsync(): Promise<void> { console.log('Getting latest relayer info...'); const relayerRepository = connection.getRepository(Relayer); const relayerSource = new RelayerRegistrySource(RELAYER_REGISTRY_URL); diff --git a/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts b/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts index 9d3ef2fba..19f81344e 100644 --- a/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts +++ b/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts @@ -23,6 +23,18 @@ interface CryptoCompareCoin { const TO_CURRENCIES = ['USD', 'EUR', 'ETH', 'USDT']; const ETHEREUM_IDENTIFIER = '7605'; const HTTP_OK_STATUS = 200; + +interface StaticPair { + fromSymbol: string; + toSymbol: string; +} +const SPECIAL_CASES: StaticPair[] = [ + { + fromSymbol: 'ETH', + toSymbol: 'USD', + }, +]; + /** * Get trading pairs with latest scraped time for OHLCV records * @param conn a typeorm Connection to postgres @@ -44,6 +56,7 @@ export async function fetchOHLCVTradingPairsAsync( FROM raw.ohlcv_external GROUP BY from_symbol, to_symbol;`); + // build addressable index: { fromsym: { tosym: time }} const latestTradingPairsIndex: { [fromSym: string]: { [toSym: string]: number } } = {}; latestTradingPairs.forEach(pair => { const latestIndex: { [toSym: string]: number } = latestTradingPairsIndex[pair.from_symbol] || {}; @@ -51,6 +64,13 @@ export async function fetchOHLCVTradingPairsAsync( latestTradingPairsIndex[pair.from_symbol] = latestIndex; }); + // match time to special cases + const specialCases: TradingPair[] = SPECIAL_CASES.map(pair => { + const latestSavedTime = + R.path<number>([pair.fromSymbol, pair.toSymbol], latestTradingPairsIndex) || earliestBackfillTime; + return R.assoc('latestSavedTime', latestSavedTime, pair); + }); + // get token symbols used by Crypto Compare const allCoinsResp = await fetchAsync(COINLIST_API); if (allCoinsResp.status !== HTTP_OK_STATUS) { @@ -66,27 +86,31 @@ export async function fetchOHLCVTradingPairsAsync( }); // fetch all tokens that are traded on 0x - const rawTokenAddresses: Array<{ tokenaddress: string }> = await conn.query( + const rawEventTokenAddresses: Array<{ tokenaddress: string }> = await conn.query( `SELECT DISTINCT(maker_token_address) as tokenaddress FROM raw.exchange_fill_events UNION SELECT DISTINCT(taker_token_address) as tokenaddress FROM raw.exchange_fill_events`, ); - const tokenAddresses = R.pluck('tokenaddress', rawTokenAddresses); + + // tslint:disable-next-line:no-unbound-method + const eventTokenAddresses = R.pluck('tokenaddress', rawEventTokenAddresses).map(R.toLower); // join token addresses with CC symbols - const allTokenSymbols: string[] = tokenAddresses - .map(tokenAddress => erc20CoinsIndex.get(tokenAddress.toLowerCase()) || '') - .filter(x => x); + const eventTokenSymbols: string[] = eventTokenAddresses + .filter(tokenAddress => erc20CoinsIndex.has(tokenAddress)) + .map(tokenAddress => erc20CoinsIndex.get(tokenAddress) as string); - // generate list of all tokens with time of latest existing record OR default earliest time - const allTradingPairCombinations: TradingPair[] = 
R.chain(sym => { + // join traded tokens with fiat and latest backfill time + const eventTradingPairs: TradingPair[] = R.chain(sym => { return TO_CURRENCIES.map(fiat => { - return { + const pair = { fromSymbol: sym, toSymbol: fiat, latestSavedTime: R.path<number>([sym, fiat], latestTradingPairsIndex) || earliestBackfillTime, }; + return pair; }); - }, allTokenSymbols); + }, eventTokenSymbols); - return allTradingPairCombinations; + // join with special cases + return R.concat(eventTradingPairs, specialCases); } diff --git a/packages/pipeline/src/utils/index.ts b/packages/pipeline/src/utils/index.ts index 2096a0a39..094c0178e 100644 --- a/packages/pipeline/src/utils/index.ts +++ b/packages/pipeline/src/utils/index.ts @@ -15,6 +15,21 @@ export function bigNumbertoStringOrNull(n: BigNumber): string | null { } /** + * If value is null or undefined, returns null. Otherwise converts value to a + * BigNumber. + * @param value A string or number to be converted to a BigNumber + */ +export function toBigNumberOrNull(value: string | number | null): BigNumber | null { + switch (value) { + case null: + case undefined: + return null; + default: + return new BigNumber(value); + } +} + +/** * Logs an error by intelligently checking for `message` and `stack` properties. * Intended for use with top-level immediately invoked asynchronous functions. * @param e the error to log. |
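Sketch (not part of the diff): the new toBigNumberOrNull utility maps a missing value (null, and by the switch's fall-through also undefined) to null and wraps anything else in a BigNumber, so callers dealing with nullable numeric fields need no special-casing. A usage sketch, with an illustrative import path and values:

import { toBigNumberOrNull } from '../utils'; // path is an assumption

const takerAmount = toBigNumberOrNull('1000000000000000000'); // a BigNumber equal to 1e18
const missingAmount = toBigNumberOrNull(null); // null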