author | Leonid Logvinov <logvinov.leon@gmail.com> | 2018-12-11 08:10:49 +0800 |
---|---|---|
committer | Leonid Logvinov <logvinov.leon@gmail.com> | 2018-12-11 08:10:49 +0800 |
commit | e72742f1f76dd3b46976ed3d0f272b539bdfb229 (patch) | |
tree | 968debeda7e6efcabd58c7218d5dff05dc24f357 /packages | |
parent | 928b253c81971eb6f59efd09ab6a9063d4e6e4ca (diff) | |
parent | 253bf4df6a6909d8bd65361c0d115e9d1a8e489e (diff) | |
Merge branch 'development' into feature/contracts-monorepo-7
Diffstat (limited to 'packages')
195 files changed, 6128 insertions, 237 deletions
diff --git a/packages/0x.js/CHANGELOG.json b/packages/0x.js/CHANGELOG.json index 4ee1e92be..728ad5cbe 100644 --- a/packages/0x.js/CHANGELOG.json +++ b/packages/0x.js/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.0.7", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "2.0.6", "changes": [ diff --git a/packages/0x.js/CHANGELOG.md b/packages/0x.js/CHANGELOG.md index 463ff923d..32bbbd425 100644 --- a/packages/0x.js/CHANGELOG.md +++ b/packages/0x.js/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.0.7 - _December 10, 2018_ + + * Dependencies updated + ## v2.0.6 - _November 28, 2018_ * Dependencies updated diff --git a/packages/abi-gen-wrappers/CHANGELOG.json b/packages/abi-gen-wrappers/CHANGELOG.json index 6905a7537..16bc2bceb 100644 --- a/packages/abi-gen-wrappers/CHANGELOG.json +++ b/packages/abi-gen-wrappers/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.0.1", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "version": "2.0.0", "changes": [ { diff --git a/packages/abi-gen-wrappers/CHANGELOG.md b/packages/abi-gen-wrappers/CHANGELOG.md index 30a10d6bd..c1af3f91f 100644 --- a/packages/abi-gen-wrappers/CHANGELOG.md +++ b/packages/abi-gen-wrappers/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.0.1 - _December 10, 2018_ + + * Dependencies updated + ## v2.0.0 - _November 28, 2018_ * Update Exchange artifact to receive ZRX asset data as a constructor argument (#1309) diff --git a/packages/abi-gen/CHANGELOG.json b/packages/abi-gen/CHANGELOG.json index 2b4455bed..36adb27a5 100644 --- a/packages/abi-gen/CHANGELOG.json +++ b/packages/abi-gen/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.18", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1542821676, "version": "1.0.17", "changes": [ diff --git a/packages/abi-gen/CHANGELOG.md b/packages/abi-gen/CHANGELOG.md index f939199fd..868781247 100644 --- a/packages/abi-gen/CHANGELOG.md +++ b/packages/abi-gen/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.0.18 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.17 - _November 21, 2018_ * Dependencies updated diff --git a/packages/assert/CHANGELOG.json b/packages/assert/CHANGELOG.json index 2b3fc68a4..2fecfa8d1 100644 --- a/packages/assert/CHANGELOG.json +++ b/packages/assert/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.19", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1542821676, "version": "1.0.18", "changes": [ diff --git a/packages/assert/CHANGELOG.md b/packages/assert/CHANGELOG.md index 017b1c6ef..55de769d3 100644 --- a/packages/assert/CHANGELOG.md +++ b/packages/assert/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v1.0.19 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.18 - _November 21, 2018_ * Dependencies updated diff --git a/packages/asset-buyer/CHANGELOG.json b/packages/asset-buyer/CHANGELOG.json index 4ff83018e..48b774811 100644 --- a/packages/asset-buyer/CHANGELOG.json +++ b/packages/asset-buyer/CHANGELOG.json @@ -5,7 +5,8 @@ { "note": "Update SRA order provider to include Dai" } - ] + ], + "timestamp": 1544482891 }, { "timestamp": 1543401373, diff --git a/packages/asset-buyer/CHANGELOG.md b/packages/asset-buyer/CHANGELOG.md index be3ef67d1..0494d7cf8 100644 --- a/packages/asset-buyer/CHANGELOG.md +++ b/packages/asset-buyer/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v3.0.3 - _December 10, 2018_ + + * Update SRA order provider to include Dai + ## v3.0.2 - _November 28, 2018_ * Dependencies updated diff --git a/packages/base-contract/CHANGELOG.json b/packages/base-contract/CHANGELOG.json index e4dff5530..b40f2f2a5 100644 --- a/packages/base-contract/CHANGELOG.json +++ b/packages/base-contract/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "3.0.9", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "3.0.8", "changes": [ diff --git a/packages/base-contract/CHANGELOG.md b/packages/base-contract/CHANGELOG.md index f61b6c6ce..c07dca358 100644 --- a/packages/base-contract/CHANGELOG.md +++ b/packages/base-contract/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v3.0.9 - _December 10, 2018_ + + * Dependencies updated + ## v3.0.8 - _November 28, 2018_ * Dependencies updated diff --git a/packages/connect/CHANGELOG.json b/packages/connect/CHANGELOG.json index 3abb895a7..20b86e776 100644 --- a/packages/connect/CHANGELOG.json +++ b/packages/connect/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "3.0.9", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "3.0.8", "changes": [ diff --git a/packages/connect/CHANGELOG.md b/packages/connect/CHANGELOG.md index 1dfc2672d..de5c29c1f 100644 --- a/packages/connect/CHANGELOG.md +++ b/packages/connect/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v3.0.9 - _December 10, 2018_ + + * Dependencies updated + ## v3.0.8 - _November 28, 2018_ * Dependencies updated diff --git a/packages/contract-wrappers/CHANGELOG.json b/packages/contract-wrappers/CHANGELOG.json index 006a0904d..4361c890f 100644 --- a/packages/contract-wrappers/CHANGELOG.json +++ b/packages/contract-wrappers/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "4.1.2", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "4.1.1", "changes": [ diff --git a/packages/contract-wrappers/CHANGELOG.md b/packages/contract-wrappers/CHANGELOG.md index ebdcc9638..ad7df9c4b 100644 --- a/packages/contract-wrappers/CHANGELOG.md +++ b/packages/contract-wrappers/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v4.1.2 - _December 10, 2018_ + + * Dependencies updated + ## v4.1.1 - _November 28, 2018_ * Dependencies updated diff --git a/packages/dev-utils/CHANGELOG.json b/packages/dev-utils/CHANGELOG.json index 417a3c65e..a6482ac27 100644 --- a/packages/dev-utils/CHANGELOG.json +++ b/packages/dev-utils/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.20", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "1.0.19", "changes": [ diff --git a/packages/dev-utils/CHANGELOG.md b/packages/dev-utils/CHANGELOG.md index 1842c6824..49023d4f0 100644 --- a/packages/dev-utils/CHANGELOG.md +++ b/packages/dev-utils/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.0.20 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.19 - _November 28, 2018_ * Dependencies updated diff --git a/packages/ethereum-types/CHANGELOG.json b/packages/ethereum-types/CHANGELOG.json index 9db75ae9f..a421532be 100644 --- a/packages/ethereum-types/CHANGELOG.json +++ b/packages/ethereum-types/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.1.3", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "version": "1.1.2", "changes": [ { diff --git a/packages/ethereum-types/CHANGELOG.md b/packages/ethereum-types/CHANGELOG.md index 6ad7b4cc6..19948d172 100644 --- a/packages/ethereum-types/CHANGELOG.md +++ b/packages/ethereum-types/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.1.3 - _December 10, 2018_ + + * Dependencies updated + ## v1.1.2 - _November 9, 2018_ * Dependencies updated diff --git a/packages/fill-scenarios/CHANGELOG.json b/packages/fill-scenarios/CHANGELOG.json index 58ba49509..ab445058e 100644 --- a/packages/fill-scenarios/CHANGELOG.json +++ b/packages/fill-scenarios/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.15", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "1.0.14", "changes": [ diff --git a/packages/fill-scenarios/CHANGELOG.md b/packages/fill-scenarios/CHANGELOG.md index aa7df302e..132ec8fef 100644 --- a/packages/fill-scenarios/CHANGELOG.md +++ b/packages/fill-scenarios/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.0.15 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.14 - _November 28, 2018_ * Dependencies updated diff --git a/packages/instant/.DS_Store b/packages/instant/.DS_Store Binary files differindex 9a0cceca6..c86c5cbcd 100644 --- a/packages/instant/.DS_Store +++ b/packages/instant/.DS_Store diff --git a/packages/instant/README.md b/packages/instant/README.md index 2092b45d9..7f6ee7c46 100644 --- a/packages/instant/README.md +++ b/packages/instant/README.md @@ -1,5 +1,9 @@ ## @0x/instant +## Integration + +Looking to integrate 0x Instant into your web application or site? Check out the dedicated [instant documentation](https://0xproject.com/wiki#Get-Started-With-Instant) to get started. The documentation covers instant and related topics in depth. For a more "drag and drop" experience, check out our [configurator tool](https://0xproject.com/instant#configure). For on demand developer support, join our [Discord](https://discordapp.com/invite/d3FTX3M). + ## Installation The package is available as a UMD module named `zeroExInstant` at https://instant.0xproject.com/instant.js. 
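For context on the README hunk above: it points integrators at the hosted UMD bundle at https://instant.0xproject.com/instant.js. The following is a minimal usage sketch, not part of this commit, assuming the bundle exposes a global `zeroExInstant` with a `render(config, selector)` entry point as described in the instant documentation; the relayer URL and wallet name are placeholders, and `walletDisplayName` is the optional config option introduced elsewhere in this diff.

```ts
// Hypothetical integration sketch (not part of this commit). Assumes the page has already
// loaded <script src="https://instant.0xproject.com/instant.js"></script>, which exposes a
// global `zeroExInstant` with a `render(config, selector)` function.
declare const zeroExInstant: {
    render: (config: { orderSource: string; walletDisplayName?: string }, selector?: string) => void;
};

window.onload = () => {
    zeroExInstant.render(
        {
            // `orderSource` is the only required option (see ZeroExInstantRequiredBaseConfig in
            // packages/instant/src/types.ts further down); an SRA endpoint URL is one accepted form.
            orderSource: 'https://example-relayer.test/sra/v2/', // placeholder endpoint
            // Optional override for the wallet name shown in the payment method UI
            // (the `walletDisplayName` option threaded through provider_state_factory.ts below).
            walletDisplayName: 'Example Wallet',
        },
        'body', // CSS selector for the mount point
    );
};
```

Everything beyond `orderSource` is optional; the remaining knobs are enumerated in `ZeroExInstantOptionalBaseConfig` later in this diff.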
diff --git a/packages/instant/package.json b/packages/instant/package.json index f1515065e..e3452e3b8 100644 --- a/packages/instant/package.json +++ b/packages/instant/package.json @@ -10,7 +10,7 @@ "scripts": { "build": "webpack --mode production", "build:ci": "yarn build", - "dev": "webpack-dev-server --mode development", + "dev": "dotenv webpack-dev-server -- --mode development", "lint": "tslint --format stylish --project .", "test": "jest", "test:coverage": "jest --coverage", @@ -24,10 +24,7 @@ }, "config": { "postpublish": { - "assets": [ - "packages/instant/umd/instant.js", - "packages/instant/umd/instant.js.map" - ] + "assets": ["packages/instant/umd/instant.js", "packages/instant/umd/instant.js.map"] } }, "repository": { diff --git a/packages/instant/public/external.css b/packages/instant/public/external.css index cab11112a..21278577e 100644 --- a/packages/instant/public/external.css +++ b/packages/instant/public/external.css @@ -15,6 +15,10 @@ input { height: 100px; } +input::-webkit-input-placeholder { + color: #b4b4b4 !important; +} + div { padding: 3px; } diff --git a/packages/instant/src/components/erc20_token_selector.tsx b/packages/instant/src/components/erc20_token_selector.tsx index f7d5a4fe4..cb8a8c797 100644 --- a/packages/instant/src/components/erc20_token_selector.tsx +++ b/packages/instant/src/components/erc20_token_selector.tsx @@ -7,7 +7,6 @@ import { analytics } from '../util/analytics'; import { assetUtils } from '../util/asset'; import { SearchInput } from './search_input'; - import { Circle } from './ui/circle'; import { Container } from './ui/container'; import { Flex } from './ui/flex'; @@ -123,10 +122,20 @@ interface TokenSelectorRowIconProps { token: ERC20Asset; } +const getTokenIcon = (symbol: string): React.StatelessComponent | undefined => { + try { + return require(`../assets/icons/${symbol}.svg`) as React.StatelessComponent; + } catch (e) { + // Can't find icon + return undefined; + } +}; + const TokenSelectorRowIcon: React.StatelessComponent<TokenSelectorRowIconProps> = props => { const { token } = props; const iconUrlIfExists = token.metaData.iconUrl; - const TokenIcon = require(`../assets/icons/${token.metaData.symbol}.svg`); + + const TokenIcon = getTokenIcon(token.metaData.symbol); const displaySymbol = assetUtils.bestNameForAsset(token); if (!_.isUndefined(iconUrlIfExists)) { return <img src={iconUrlIfExists} />; diff --git a/packages/instant/src/components/instant_heading.tsx b/packages/instant/src/components/instant_heading.tsx index 117f9dd5f..816cc5c33 100644 --- a/packages/instant/src/components/instant_heading.tsx +++ b/packages/instant/src/components/instant_heading.tsx @@ -61,12 +61,19 @@ export class InstantHeading extends React.Component<InstantHeadingProps, {}> { } private _renderAmountsSection(): React.ReactNode { - return ( - <Container> - <Container marginBottom="5px">{this._renderPlaceholderOrAmount(this._renderEthAmount)}</Container> - <Container opacity={0.7}>{this._renderPlaceholderOrAmount(this._renderDollarAmount)}</Container> - </Container> - ); + if ( + _.isUndefined(this.props.totalEthBaseUnitAmount) && + this.props.quoteRequestState !== AsyncProcessState.Pending + ) { + return null; + } else { + return ( + <Container> + <Container marginBottom="5px">{this._renderPlaceholderOrAmount(this._renderEthAmount)}</Container> + <Container opacity={0.7}>{this._renderPlaceholderOrAmount(this._renderDollarAmount)}</Container> + </Container> + ); + } } private _renderIcon(): React.ReactNode { diff --git 
a/packages/instant/src/components/ui/input.tsx b/packages/instant/src/components/ui/input.tsx index 863c970ef..62c70f9e1 100644 --- a/packages/instant/src/components/ui/input.tsx +++ b/packages/instant/src/components/ui/input.tsx @@ -29,8 +29,8 @@ export const Input = outline: none; border: none; &::placeholder { - color: ${props => props.theme[props.fontColor || 'white']}; - opacity: 0.5; + color: ${props => props.theme[props.fontColor || 'white']} !important; + opacity: 0.5 !important; } } `; diff --git a/packages/instant/src/components/zero_ex_instant_provider.tsx b/packages/instant/src/components/zero_ex_instant_provider.tsx index dae9124c6..204115fa9 100644 --- a/packages/instant/src/components/zero_ex_instant_provider.tsx +++ b/packages/instant/src/components/zero_ex_instant_provider.tsx @@ -1,6 +1,4 @@ -import { ObjectMap } from '@0x/types'; import { BigNumber } from '@0x/utils'; -import { Provider } from 'ethereum-types'; import * as _ from 'lodash'; import * as React from 'react'; import { Provider as ReduxProvider } from 'react-redux'; @@ -11,7 +9,7 @@ import { asyncData } from '../redux/async_data'; import { DEFAULT_STATE, DefaultState, State } from '../redux/reducer'; import { store, Store } from '../redux/store'; import { fonts } from '../style/fonts'; -import { AccountState, AffiliateInfo, AssetMetaData, Network, OrderSource, QuoteFetchOrigin } from '../types'; +import { AccountState, Network, QuoteFetchOrigin, ZeroExInstantBaseConfig } from '../types'; import { analytics, disableAnalytics } from '../util/analytics'; import { assetUtils } from '../util/asset'; import { errorFlasher } from '../util/error_flasher'; @@ -21,24 +19,7 @@ import { Heartbeater } from '../util/heartbeater'; import { generateAccountHeartbeater, generateBuyQuoteHeartbeater } from '../util/heartbeater_factory'; import { providerStateFactory } from '../util/provider_state_factory'; -export type ZeroExInstantProviderProps = ZeroExInstantProviderRequiredProps & - Partial<ZeroExInstantProviderOptionalProps>; - -export interface ZeroExInstantProviderRequiredProps { - orderSource: OrderSource; -} - -export interface ZeroExInstantProviderOptionalProps { - provider: Provider; - walletDisplayName: string; - availableAssetDatas: string[]; - defaultAssetBuyAmount: number; - defaultSelectedAssetData: string; - additionalAssetMetaDataMap: ObjectMap<AssetMetaData>; - networkId: Network; - affiliateInfo: AffiliateInfo; - shouldDisableAnalyticsTracking: boolean; -} +export type ZeroExInstantProviderProps = ZeroExInstantBaseConfig; export class ZeroExInstantProvider extends React.Component<ZeroExInstantProviderProps> { private readonly _store: Store; @@ -57,10 +38,12 @@ export class ZeroExInstantProvider extends React.Component<ZeroExInstantProvider props.orderSource, networkId, props.provider, + props.walletDisplayName, ); // merge the additional additionalAssetMetaDataMap with our default map const completeAssetMetaDataMap = { - ...props.additionalAssetMetaDataMap, + // Make sure the passed in assetDatas are lower case + ..._.mapKeys(props.additionalAssetMetaDataMap || {}, (value, key) => key.toLowerCase()), ...defaultState.assetMetaDataMap, }; // construct the final state diff --git a/packages/instant/src/constants.ts b/packages/instant/src/constants.ts index 506348092..f83eb4ac7 100644 --- a/packages/instant/src/constants.ts +++ b/packages/instant/src/constants.ts @@ -15,6 +15,7 @@ export const GWEI_IN_WEI = new BigNumber(1000000000); export const ONE_SECOND_MS = 1000; export const ONE_MINUTE_MS = ONE_SECOND_MS * 60; 
export const GIT_SHA = process.env.GIT_SHA; +export const NODE_ENV = process.env.NODE_ENV; export const NPM_PACKAGE_VERSION = process.env.NPM_PACKAGE_VERSION; export const ACCOUNT_UPDATE_INTERVAL_TIME_MS = ONE_SECOND_MS * 5; export const BUY_QUOTE_UPDATE_INTERVAL_TIME_MS = ONE_SECOND_MS * 15; @@ -28,14 +29,12 @@ export const HEAP_ENABLED = process.env.HEAP_ENABLED; export const COINBASE_API_BASE_URL = 'https://api.coinbase.com/v2'; export const PROGRESS_STALL_AT_WIDTH = '95%'; export const PROGRESS_FINISH_ANIMATION_TIME_MS = 200; -export const HOST_DOMAINS = [ +export const HOST_DOMAINS_EXTERNAL = [ '0x-instant-staging.s3-website-us-east-1.amazonaws.com', '0x-instant-dogfood.s3-website-us-east-1.amazonaws.com', - 'localhost', - '127.0.0.1', - '0.0.0.0', 'instant.0xproject.com', ]; +export const HOST_DOMAINS_LOCAL = ['localhost', '127.0.0.1', '0.0.0.0']; export const ROLLBAR_CLIENT_TOKEN = process.env.ROLLBAR_CLIENT_TOKEN; export const ROLLBAR_ENABLED = process.env.ROLLBAR_ENABLED; export const INSTANT_DISCHARGE_TARGET = process.env.INSTANT_DISCHARGE_TARGET as diff --git a/packages/instant/src/containers/connected_account_payment_method.ts b/packages/instant/src/containers/connected_account_payment_method.ts index bb68fdd57..f648f0b54 100644 --- a/packages/instant/src/containers/connected_account_payment_method.ts +++ b/packages/instant/src/containers/connected_account_payment_method.ts @@ -58,7 +58,7 @@ const mergeProps = ( ...ownProps, network: connectedState.network, account: connectedState.providerState.account, - walletDisplayName: connectedState.walletDisplayName || connectedState.providerState.name, + walletDisplayName: connectedState.providerState.displayName, onUnlockWalletClick: () => connectedDispatch.unlockWalletAndDispatchToStore(connectedState.providerState), onInstallWalletClick: () => { const isMobile = envUtil.isMobileOperatingSystem(); diff --git a/packages/instant/src/types.ts b/packages/instant/src/types.ts index 2d73ba29e..1c7490e63 100644 --- a/packages/instant/src/types.ts +++ b/packages/instant/src/types.ts @@ -102,6 +102,7 @@ export interface AffiliateInfo { export interface ProviderState { name: string; + displayName: string; provider: Provider; assetBuyer: AssetBuyer; web3Wrapper: Web3Wrapper; @@ -177,3 +178,21 @@ export enum ProviderType { Cipher = 'CIPHER', Fallback = 'FALLBACK', } + +export interface ZeroExInstantRequiredBaseConfig { + orderSource: OrderSource; +} + +export interface ZeroExInstantOptionalBaseConfig { + provider: Provider; + walletDisplayName: string; + availableAssetDatas: string[]; + defaultAssetBuyAmount: number; + defaultSelectedAssetData: string; + additionalAssetMetaDataMap: ObjectMap<AssetMetaData>; + networkId: Network; + affiliateInfo: AffiliateInfo; + shouldDisableAnalyticsTracking: boolean; +} + +export type ZeroExInstantBaseConfig = ZeroExInstantRequiredBaseConfig & Partial<ZeroExInstantOptionalBaseConfig>; diff --git a/packages/instant/src/util/analytics.ts b/packages/instant/src/util/analytics.ts index 6da37bedb..e6128f857 100644 --- a/packages/instant/src/util/analytics.ts +++ b/packages/instant/src/util/analytics.ts @@ -2,7 +2,7 @@ import { BuyQuote } from '@0x/asset-buyer'; import { BigNumber } from '@0x/utils'; import * as _ from 'lodash'; -import { GIT_SHA, HEAP_ENABLED, INSTANT_DISCHARGE_TARGET, NPM_PACKAGE_VERSION } from '../constants'; +import { GIT_SHA, HEAP_ENABLED, INSTANT_DISCHARGE_TARGET, NODE_ENV, NPM_PACKAGE_VERSION } from '../constants'; import { AffiliateInfo, Asset, @@ -106,6 +106,7 @@ export interface 
AnalyticsEventOptions { ethAddress?: string; networkId?: number; providerName?: string; + providerDisplayName?: string; gitSha?: string; npmVersion?: string; instantEnvironment?: string; @@ -149,6 +150,7 @@ export const analytics = { embeddedUrl: window.location.href, networkId: network, providerName: providerState.name, + providerDisplayName: providerState.displayName, gitSha: GIT_SHA, npmVersion: NPM_PACKAGE_VERSION, orderSource: orderSourceName, @@ -156,7 +158,7 @@ export const analytics = { affiliateFeePercent, selectedAssetName: selectedAsset ? selectedAsset.metaData.name : 'none', selectedAssetData: selectedAsset ? selectedAsset.assetData : 'none', - instantEnvironment: INSTANT_DISCHARGE_TARGET || `Local ${process.env.NODE_ENV}`, + instantEnvironment: INSTANT_DISCHARGE_TARGET || `Local ${NODE_ENV}`, }; return eventOptions; }, diff --git a/packages/instant/src/util/asset.ts b/packages/instant/src/util/asset.ts index 08f3642e3..13f84ef74 100644 --- a/packages/instant/src/util/asset.ts +++ b/packages/instant/src/util/asset.ts @@ -26,7 +26,7 @@ export const assetUtils = { return; } return { - assetData, + assetData: assetData.toLowerCase(), metaData, }; }, @@ -36,7 +36,7 @@ export const assetUtils = { network: Network, ): Asset => { return { - assetData, + assetData: assetData.toLowerCase(), metaData: assetUtils.getMetaDataOrThrow(assetData, assetMetaDataMap, network), }; }, diff --git a/packages/instant/src/util/buy_quote_updater.ts b/packages/instant/src/util/buy_quote_updater.ts index 4229f2735..6191c92e3 100644 --- a/packages/instant/src/util/buy_quote_updater.ts +++ b/packages/instant/src/util/buy_quote_updater.ts @@ -38,14 +38,11 @@ export const buyQuoteUpdater = { } catch (error) { const errorMessage = assetUtils.assetBuyerErrorMessage(asset, error); - if (_.isUndefined(errorMessage)) { - // This is an unknown error, report it to rollbar - errorReporter.report(error); - } + errorReporter.report(error); + analytics.trackQuoteError(error.message ? error.message : 'other', baseUnitValue, fetchOrigin); if (options.dispatchErrors) { dispatch(actions.setQuoteRequestStateFailure()); - analytics.trackQuoteError(error.message ? 
error.message : 'other', baseUnitValue, fetchOrigin); errorFlasher.flashNewErrorMessage(dispatch, errorMessage || 'Error fetching price, please try again'); } return; diff --git a/packages/instant/src/util/env.ts b/packages/instant/src/util/env.ts index 4a32f9cb1..0fda0cc0e 100644 --- a/packages/instant/src/util/env.ts +++ b/packages/instant/src/util/env.ts @@ -62,4 +62,11 @@ export const envUtil = { } return PROVIDER_TYPE_TO_NAME[providerTypeIfExists]; }, + getProviderDisplayName(provider: Provider): string { + const providerTypeIfExists = envUtil.getProviderType(provider); + if (_.isUndefined(providerTypeIfExists)) { + return 'Wallet'; + } + return PROVIDER_TYPE_TO_NAME[providerTypeIfExists]; + }, }; diff --git a/packages/instant/src/util/error_reporter.ts b/packages/instant/src/util/error_reporter.ts index b1824eaf9..8d7481684 100644 --- a/packages/instant/src/util/error_reporter.ts +++ b/packages/instant/src/util/error_reporter.ts @@ -1,17 +1,34 @@ import { logUtils } from '@0x/utils'; import * as _ from 'lodash'; -import { GIT_SHA, HOST_DOMAINS, INSTANT_DISCHARGE_TARGET, ROLLBAR_CLIENT_TOKEN, ROLLBAR_ENABLED } from '../constants'; +import { + GIT_SHA, + HOST_DOMAINS_EXTERNAL, + HOST_DOMAINS_LOCAL, + INSTANT_DISCHARGE_TARGET, + NODE_ENV, + ROLLBAR_CLIENT_TOKEN, + ROLLBAR_ENABLED, +} from '../constants'; // Import version of Rollbar designed for embedded components // See https://docs.rollbar.com/docs/using-rollbarjs-inside-an-embedded-component // tslint:disable-next-line:no-var-requires const Rollbar = require('rollbar/dist/rollbar.noconflict.umd'); +const getRollbarHostDomains = (): string[] => { + if (NODE_ENV === 'development') { + return HOST_DOMAINS_EXTERNAL.concat(HOST_DOMAINS_LOCAL); + } else { + return HOST_DOMAINS_EXTERNAL; + } +}; + let rollbar: any; // Configures rollbar and sets up error catching export const setupRollbar = (): any => { if (_.isUndefined(rollbar) && ROLLBAR_CLIENT_TOKEN && ROLLBAR_ENABLED) { + const hostDomains = getRollbarHostDomains(); rollbar = new Rollbar({ accessToken: ROLLBAR_CLIENT_TOKEN, captureUncaught: true, @@ -20,7 +37,7 @@ export const setupRollbar = (): any => { itemsPerMinute: 10, maxItems: 500, payload: { - environment: INSTANT_DISCHARGE_TARGET || `Local ${process.env.NODE_ENV}`, + environment: INSTANT_DISCHARGE_TARGET || `Local ${NODE_ENV}`, client: { javascript: { source_map_enabled: true, @@ -29,7 +46,7 @@ export const setupRollbar = (): any => { }, }, }, - hostWhiteList: HOST_DOMAINS, + hostWhiteList: hostDomains, uncaughtErrorLevel: 'error', ignoredMessages: [ // Errors from the third-party scripts diff --git a/packages/instant/src/util/provider_state_factory.ts b/packages/instant/src/util/provider_state_factory.ts index 7c788dff2..bd2d6dad5 100644 --- a/packages/instant/src/util/provider_state_factory.ts +++ b/packages/instant/src/util/provider_state_factory.ts @@ -10,27 +10,40 @@ import { assetBuyerFactory } from './asset_buyer_factory'; import { providerFactory } from './provider_factory'; export const providerStateFactory = { - getInitialProviderState: (orderSource: OrderSource, network: Network, provider?: Provider): ProviderState => { + getInitialProviderState: ( + orderSource: OrderSource, + network: Network, + provider?: Provider, + walletDisplayName?: string, + ): ProviderState => { if (!_.isUndefined(provider)) { - return providerStateFactory.getInitialProviderStateFromProvider(orderSource, network, provider); + return providerStateFactory.getInitialProviderStateFromProvider( + orderSource, + network, + provider, + 
walletDisplayName, + ); } const providerStateFromWindowIfExits = providerStateFactory.getInitialProviderStateFromWindowIfExists( orderSource, network, + walletDisplayName, ); if (providerStateFromWindowIfExits) { return providerStateFromWindowIfExits; } else { - return providerStateFactory.getInitialProviderStateFallback(orderSource, network); + return providerStateFactory.getInitialProviderStateFallback(orderSource, network, walletDisplayName); } }, getInitialProviderStateFromProvider: ( orderSource: OrderSource, network: Network, provider: Provider, + walletDisplayName?: string, ): ProviderState => { const providerState: ProviderState = { name: envUtil.getProviderName(provider), + displayName: walletDisplayName || envUtil.getProviderDisplayName(provider), provider, web3Wrapper: new Web3Wrapper(provider), assetBuyer: assetBuyerFactory.getAssetBuyer(provider, orderSource, network), @@ -38,11 +51,16 @@ export const providerStateFactory = { }; return providerState; }, - getInitialProviderStateFromWindowIfExists: (orderSource: OrderSource, network: Network): Maybe<ProviderState> => { + getInitialProviderStateFromWindowIfExists: ( + orderSource: OrderSource, + network: Network, + walletDisplayName?: string, + ): Maybe<ProviderState> => { const injectedProviderIfExists = providerFactory.getInjectedProviderIfExists(); if (!_.isUndefined(injectedProviderIfExists)) { const providerState: ProviderState = { name: envUtil.getProviderName(injectedProviderIfExists), + displayName: walletDisplayName || envUtil.getProviderDisplayName(injectedProviderIfExists), provider: injectedProviderIfExists, web3Wrapper: new Web3Wrapper(injectedProviderIfExists), assetBuyer: assetBuyerFactory.getAssetBuyer(injectedProviderIfExists, orderSource, network), @@ -53,10 +71,15 @@ export const providerStateFactory = { return undefined; } }, - getInitialProviderStateFallback: (orderSource: OrderSource, network: Network): ProviderState => { + getInitialProviderStateFallback: ( + orderSource: OrderSource, + network: Network, + walletDisplayName?: string, + ): ProviderState => { const provider = providerFactory.getFallbackNoSigningProvider(network); const providerState: ProviderState = { name: 'Fallback', + displayName: walletDisplayName || envUtil.getProviderDisplayName(provider), provider, web3Wrapper: new Web3Wrapper(provider), assetBuyer: assetBuyerFactory.getAssetBuyer(provider, orderSource, network), diff --git a/packages/json-schemas/CHANGELOG.json b/packages/json-schemas/CHANGELOG.json index 17cabc473..aabcf7bec 100644 --- a/packages/json-schemas/CHANGELOG.json +++ b/packages/json-schemas/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.1.3", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1542821676, "version": "2.1.2", "changes": [ diff --git a/packages/json-schemas/CHANGELOG.md b/packages/json-schemas/CHANGELOG.md index 2f39c9596..2e3daa711 100644 --- a/packages/json-schemas/CHANGELOG.md +++ b/packages/json-schemas/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v2.1.3 - _December 10, 2018_ + + * Dependencies updated + ## v2.1.2 - _November 21, 2018_ * Dependencies updated diff --git a/packages/migrations/CHANGELOG.json b/packages/migrations/CHANGELOG.json index 56705fc1a..fd9c5e8a2 100644 --- a/packages/migrations/CHANGELOG.json +++ b/packages/migrations/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.2.1", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "version": "2.2.0", "changes": [ { diff --git a/packages/migrations/CHANGELOG.md b/packages/migrations/CHANGELOG.md index 3808b2d3d..eced3655b 100644 --- a/packages/migrations/CHANGELOG.md +++ b/packages/migrations/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.2.1 - _December 10, 2018_ + + * Dependencies updated + ## v2.2.0 - _November 28, 2018_ * Add CLI `0x-migrate` for running the 0x migrations in a language-agnostic way (#1324) diff --git a/packages/monorepo-scripts/src/test_installation.ts b/packages/monorepo-scripts/src/test_installation.ts index 96875d0f9..5ae13b198 100644 --- a/packages/monorepo-scripts/src/test_installation.ts +++ b/packages/monorepo-scripts/src/test_installation.ts @@ -98,7 +98,7 @@ async function testInstallPackageAsync( const lastChangelogVersion = JSON.parse(fs.readFileSync(changelogPath).toString())[0].version; const packageName = installablePackage.packageJson.name; utils.log(`Testing ${packageName}@${lastChangelogVersion}`); - const packageDirName = path.join(...(packageName + '-test').split('/')); + const packageDirName = path.join(...`${packageName}-test`.split('/')); // NOTE(fabio): The `testDirectory` needs to be somewhere **outside** the monorepo root directory. // Otherwise, it will have access to the hoisted `node_modules` directory and the Typescript missing // type errors will not be caught. diff --git a/packages/order-utils/CHANGELOG.json b/packages/order-utils/CHANGELOG.json index 6c8fd6239..989bd9397 100644 --- a/packages/order-utils/CHANGELOG.json +++ b/packages/order-utils/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "3.0.5", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "3.0.4", "changes": [ diff --git a/packages/order-utils/CHANGELOG.md b/packages/order-utils/CHANGELOG.md index 5eae590b5..f232ec63a 100644 --- a/packages/order-utils/CHANGELOG.md +++ b/packages/order-utils/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v3.0.5 - _December 10, 2018_ + + * Dependencies updated + ## v3.0.4 - _November 28, 2018_ * Dependencies updated diff --git a/packages/order-utils/test/order_hash_test.ts b/packages/order-utils/test/order_hash_test.ts index a85d4c81a..30fb15a37 100644 --- a/packages/order-utils/test/order_hash_test.ts +++ b/packages/order-utils/test/order_hash_test.ts @@ -70,7 +70,7 @@ describe('Order hashing', () => { }); it('returns true if order hash is correct', () => { const orderHashLength = 65; - const isValid = orderHashUtils.isValidOrderHash('0x' + Array(orderHashLength).join('0')); + const isValid = orderHashUtils.isValidOrderHash(`0x${Array(orderHashLength).join('0')}`); expect(isValid).to.be.true(); }); }); diff --git a/packages/order-watcher/CHANGELOG.json b/packages/order-watcher/CHANGELOG.json index 4e56dc400..4dfb86861 100644 --- a/packages/order-watcher/CHANGELOG.json +++ b/packages/order-watcher/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.2.7", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "2.2.6", "changes": [ diff --git a/packages/order-watcher/CHANGELOG.md b/packages/order-watcher/CHANGELOG.md index 37b4a7438..1871697ab 100644 --- a/packages/order-watcher/CHANGELOG.md +++ b/packages/order-watcher/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.2.7 - _December 10, 2018_ + + * Dependencies updated + ## v2.2.6 - _November 28, 2018_ * Dependencies updated diff --git a/packages/pipeline/.npmignore b/packages/pipeline/.npmignore new file mode 100644 index 000000000..89302c908 --- /dev/null +++ b/packages/pipeline/.npmignore @@ -0,0 +1,7 @@ +.* +yarn-error.log +/scripts/ +/generated_docs/ +/src/ +tsconfig.json +/lib/monorepo_scripts/ diff --git a/packages/pipeline/README.md b/packages/pipeline/README.md new file mode 100644 index 000000000..794488cac --- /dev/null +++ b/packages/pipeline/README.md @@ -0,0 +1,166 @@ +## @0xproject/pipeline + +This repository contains scripts used for scraping data from the Ethereum blockchain into SQL tables for analysis by the 0x team. + +## Contributing + +We strongly recommend that the community help us make improvements and determine the future direction of the protocol. To report bugs within this package, please create an issue in this repository. + +Please read our [contribution guidelines](../../CONTRIBUTING.md) before getting started. + +### Install dependencies: + +```bash +yarn install +``` + +### Build + +```bash +yarn build +``` + +### Clean + +```bash +yarn clean +``` + +### Lint + +```bash +yarn lint +``` + +### Migrations + +Create a new migration: `yarn migrate:create --name MigrationNameInCamelCase` +Run migrations: `yarn migrate:run` +Revert the most recent migration (CAUTION: may result in data loss!): `yarn migrate:revert` + +## Testing + +There are several test scripts in **package.json**. You can run all the tests +with `yarn test:all` or run certain tests seprately by following the +instructions below. Some tests may not work out of the box on certain platforms +or operating systems (see the "Database tests" section below). + +### Unit tests + +The unit tests can be run with `yarn test`. These tests don't depend on any +services or databases and will run in any environment that can run Node. + +### Database tests + +Database integration tests can be run with `yarn test:db`. These tests will +attempt to automatically spin up a Postgres database via Docker. 
If this doesn't +work you have two other options: + +1. Set the `DOCKER_SOCKET` environment variable to a valid socket path to use + for communicating with Docker. +2. Start Postgres manually and set the `ZEROEX_DATA_PIPELINE_TEST_DB_URL` + environment variable. If this is set, the tests will use your existing + Postgres database instead of trying to create one with Docker. + +## Running locally + +`pipeline` requires access to a PostgreSQL database. The easiest way to start +Postgres is via Docker. Depending on your platform, you may need to prepend +`sudo` to the following command: + +``` +docker run --rm -d -p 5432:5432 --name pipeline_postgres postgres:11-alpine +``` + +This will start a Postgres server with the default username and database name +(`postgres` and `postgres`). You should set the environment variable as follows: + +``` +export ZEROEX_DATA_PIPELINE_DB_URL=postgresql://postgres@localhost/postgres +``` + +First thing you will need to do is run the migrations: + +``` +yarn migrate:run +``` + +Now you can run scripts locally: + +``` +node packages/pipeline/lib/src/scripts/pull_radar_relay_orders.js +``` + +To stop the Postgres server (you may need to add `sudo`): + +``` +docker stop pipeline_postgres +``` + +This will remove all data from the database. + +If you prefer, you can also install Postgres with e.g., +[Homebrew](https://wiki.postgresql.org/wiki/Homebrew) or +[Postgress.app](https://postgresapp.com/). Keep in mind that you will need to +set the`ZEROEX_DATA_PIPELINE_DB_URL` environment variable to a valid +[PostgreSQL connection url](https://stackoverflow.com/questions/3582552/postgresql-connection-url) + +## Directory structure + +``` +. +├── lib: Code generated by the TypeScript compiler. Don't edit this directly. +├── migrations: Code for creating and updating database schemas. +├── node_modules: +├── src: All TypeScript source code. +│ ├── data_sources: Code responsible for getting raw data, typically from a third-party source. +│ ├── entities: TypeORM entities which closely mirror our database schemas. Some other ORMs call these "models". +│ ├── parsers: Code for converting raw data into entities. +│ ├── scripts: Executable scripts which put all the pieces together. +│ └── utils: Various utils used across packages/files. +├── test: All tests go here and are organized in the same way as the folder/file that they test. +``` + +## Adding new data to the pipeline + +1. Create an entity in the _entities_ directory. Entities directly mirror our + database schemas. We follow the practice of having "dumb" entities, so + entity classes should typically not have any methods. +2. Create a migration using the `yarn migrate:create` command. Create/update + tables as needed. Remember to fill in both the `up` and `down` methods. Try + to avoid data loss as much as possible in your migrations. +3. Add basic tests for your entity and migrations to the **test/entities/** + directory. +4. Create a class or function in the **data_sources/** directory for getting + raw data. This code should abstract away pagination and rate-limiting as + much as possible. +5. Create a class or function in the **parsers/** directory for converting the + raw data into an entity. Also add tests in the **tests/** directory to test + the parser. +6. Create an executable script in the **scripts/** directory for putting + everything together. Your script can accept environment variables for things + like API keys. It should pull the data, parse it, and save it to the + database. 
Scripts should be idempotent and atomic (when possible). What this + means is that your script may be responsible for determining _which_ data + needs to be updated. For example, you may need to query the database to find + the most recent block number that we have already pulled, then pull new data + starting from that block number. +7. Run the migrations and then run your new script locally and verify it works + as expected. + +#### Additional guidelines and tips: + +* Table names should be plural and separated by underscores (e.g., + `exchange_fill_events`). +* Any table which contains data which comes directly from a third-party source + should be namespaced in the `raw` PostgreSQL schema. +* Column names in the database should be separated by underscores (e.g., + `maker_asset_type`). +* Field names in entity classes (like any other fields in TypeScript) should + be camel-cased (e.g., `makerAssetType`). +* All timestamps should be stored as milliseconds since the Unix Epoch. +* Use the `BigNumber` type for TypeScript code which deals with 256-bit + numbers from smart contracts or for any case where we are dealing with large + floating point numbers. +* [TypeORM documentation](http://typeorm.io/#/) is pretty robust and can be a + helpful resource. diff --git a/packages/pipeline/coverage/.gitkeep b/packages/pipeline/coverage/.gitkeep new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/packages/pipeline/coverage/.gitkeep diff --git a/packages/pipeline/migrations/1542070840010-InitialSchema.ts b/packages/pipeline/migrations/1542070840010-InitialSchema.ts new file mode 100644 index 000000000..895f9e6c9 --- /dev/null +++ b/packages/pipeline/migrations/1542070840010-InitialSchema.ts @@ -0,0 +1,187 @@ +import { MigrationInterface, QueryRunner, Table } from 'typeorm'; + +const blocks = new Table({ + name: 'raw.blocks', + columns: [ + { name: 'number', type: 'bigint', isPrimary: true }, + { name: 'hash', type: 'varchar', isPrimary: true }, + { name: 'timestamp', type: 'bigint' }, + ], +}); + +const exchange_cancel_events = new Table({ + name: 'raw.exchange_cancel_events', + columns: [ + { name: 'contract_address', type: 'char(42)', isPrimary: true }, + { name: 'log_index', type: 'integer', isPrimary: true }, + { name: 'block_number', type: 'bigint', isPrimary: true }, + + { name: 'raw_data', type: 'varchar' }, + + { name: 'transaction_hash', type: 'varchar' }, + { name: 'maker_address', type: 'char(42)' }, + { name: 'taker_address', type: 'char(42)' }, + { name: 'fee_recipient_address', type: 'char(42)' }, + { name: 'sender_address', type: 'char(42)' }, + { name: 'order_hash', type: 'varchar' }, + + { name: 'raw_maker_asset_data', type: 'varchar' }, + { name: 'maker_asset_type', type: 'varchar' }, + { name: 'maker_asset_proxy_id', type: 'varchar' }, + { name: 'maker_token_address', type: 'char(42)' }, + { name: 'maker_token_id', type: 'varchar', isNullable: true }, + { name: 'raw_taker_asset_data', type: 'varchar' }, + { name: 'taker_asset_type', type: 'varchar' }, + { name: 'taker_asset_proxy_id', type: 'varchar' }, + { name: 'taker_token_address', type: 'char(42)' }, + { name: 'taker_token_id', type: 'varchar', isNullable: true }, + ], +}); + +const exchange_cancel_up_to_events = new Table({ + name: 'raw.exchange_cancel_up_to_events', + columns: [ + { name: 'contract_address', type: 'char(42)', isPrimary: true }, + { name: 'log_index', type: 'integer', isPrimary: true }, + { name: 'block_number', type: 'bigint', isPrimary: true }, + + { name: 'raw_data', type: 'varchar' }, + + 
{ name: 'transaction_hash', type: 'varchar' }, + { name: 'maker_address', type: 'char(42)' }, + { name: 'sender_address', type: 'char(42)' }, + { name: 'order_epoch', type: 'varchar' }, + ], +}); + +const exchange_fill_events = new Table({ + name: 'raw.exchange_fill_events', + columns: [ + { name: 'contract_address', type: 'char(42)', isPrimary: true }, + { name: 'log_index', type: 'integer', isPrimary: true }, + { name: 'block_number', type: 'bigint', isPrimary: true }, + + { name: 'raw_data', type: 'varchar' }, + + { name: 'transaction_hash', type: 'varchar' }, + { name: 'maker_address', type: 'char(42)' }, + { name: 'taker_address', type: 'char(42)' }, + { name: 'fee_recipient_address', type: 'char(42)' }, + { name: 'sender_address', type: 'char(42)' }, + { name: 'maker_asset_filled_amount', type: 'varchar' }, + { name: 'taker_asset_filled_amount', type: 'varchar' }, + { name: 'maker_fee_paid', type: 'varchar' }, + { name: 'taker_fee_paid', type: 'varchar' }, + { name: 'order_hash', type: 'varchar' }, + + { name: 'raw_maker_asset_data', type: 'varchar' }, + { name: 'maker_asset_type', type: 'varchar' }, + { name: 'maker_asset_proxy_id', type: 'varchar' }, + { name: 'maker_token_address', type: 'char(42)' }, + { name: 'maker_token_id', type: 'varchar', isNullable: true }, + { name: 'raw_taker_asset_data', type: 'varchar' }, + { name: 'taker_asset_type', type: 'varchar' }, + { name: 'taker_asset_proxy_id', type: 'varchar' }, + { name: 'taker_token_address', type: 'char(42)' }, + { name: 'taker_token_id', type: 'varchar', isNullable: true }, + ], +}); + +const relayers = new Table({ + name: 'raw.relayers', + columns: [ + { name: 'uuid', type: 'varchar', isPrimary: true }, + { name: 'name', type: 'varchar' }, + { name: 'sra_http_endpoint', type: 'varchar', isNullable: true }, + { name: 'sra_ws_endpoint', type: 'varchar', isNullable: true }, + { name: 'app_url', type: 'varchar', isNullable: true }, + { name: 'fee_recipient_addresses', type: 'char(42)', isArray: true }, + { name: 'taker_addresses', type: 'char(42)', isArray: true }, + ], +}); + +const sra_orders = new Table({ + name: 'raw.sra_orders', + columns: [ + { name: 'exchange_address', type: 'char(42)', isPrimary: true }, + { name: 'order_hash_hex', type: 'varchar', isPrimary: true }, + + { name: 'source_url', type: 'varchar' }, + { name: 'last_updated_timestamp', type: 'bigint' }, + { name: 'first_seen_timestamp', type: 'bigint' }, + + { name: 'maker_address', type: 'char(42)' }, + { name: 'taker_address', type: 'char(42)' }, + { name: 'fee_recipient_address', type: 'char(42)' }, + { name: 'sender_address', type: 'char(42)' }, + { name: 'maker_asset_filled_amount', type: 'varchar' }, + { name: 'taker_asset_filled_amount', type: 'varchar' }, + { name: 'maker_fee', type: 'varchar' }, + { name: 'taker_fee', type: 'varchar' }, + { name: 'expiration_time_seconds', type: 'int' }, + { name: 'salt', type: 'varchar' }, + { name: 'signature', type: 'varchar' }, + + { name: 'raw_maker_asset_data', type: 'varchar' }, + { name: 'maker_asset_type', type: 'varchar' }, + { name: 'maker_asset_proxy_id', type: 'varchar' }, + { name: 'maker_token_address', type: 'char(42)' }, + { name: 'maker_token_id', type: 'varchar', isNullable: true }, + { name: 'raw_taker_asset_data', type: 'varchar' }, + { name: 'taker_asset_type', type: 'varchar' }, + { name: 'taker_asset_proxy_id', type: 'varchar' }, + { name: 'taker_token_address', type: 'char(42)' }, + { name: 'taker_token_id', type: 'varchar', isNullable: true }, + + { name: 'metadata_json', type: 'varchar' 
}, + ], +}); + +const token_on_chain_metadata = new Table({ + name: 'raw.token_on_chain_metadata', + columns: [ + { name: 'address', type: 'char(42)', isPrimary: true }, + { name: 'decimals', type: 'integer' }, + { name: 'symbol', type: 'varchar' }, + { name: 'name', type: 'varchar' }, + ], +}); + +const transactions = new Table({ + name: 'raw.transactions', + columns: [ + { name: 'block_number', type: 'bigint', isPrimary: true }, + { name: 'block_hash', type: 'varchar', isPrimary: true }, + { name: 'transaction_hash', type: 'varchar', isPrimary: true }, + { name: 'gas_used', type: 'bigint' }, + { name: 'gas_price', type: 'bigint' }, + ], +}); + +export class InitialSchema1542070840010 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.createSchema('raw'); + + await queryRunner.createTable(blocks); + await queryRunner.createTable(exchange_cancel_events); + await queryRunner.createTable(exchange_cancel_up_to_events); + await queryRunner.createTable(exchange_fill_events); + await queryRunner.createTable(relayers); + await queryRunner.createTable(sra_orders); + await queryRunner.createTable(token_on_chain_metadata); + await queryRunner.createTable(transactions); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.dropTable(blocks.name); + await queryRunner.dropTable(exchange_cancel_events.name); + await queryRunner.dropTable(exchange_cancel_up_to_events.name); + await queryRunner.dropTable(exchange_fill_events.name); + await queryRunner.dropTable(relayers.name); + await queryRunner.dropTable(sra_orders.name); + await queryRunner.dropTable(token_on_chain_metadata.name); + await queryRunner.dropTable(transactions.name); + + await queryRunner.dropSchema('raw'); + } +} diff --git a/packages/pipeline/migrations/1542147915364-NewSraOrderTimestampFormat.ts b/packages/pipeline/migrations/1542147915364-NewSraOrderTimestampFormat.ts new file mode 100644 index 000000000..5a8f3fec8 --- /dev/null +++ b/packages/pipeline/migrations/1542147915364-NewSraOrderTimestampFormat.ts @@ -0,0 +1,48 @@ +import { MigrationInterface, QueryRunner, Table } from 'typeorm'; + +export class NewSraOrderTimestampFormat1542147915364 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.sra_orders + DROP CONSTRAINT "PK_09bfb9980715329563bd53d667e", + ADD PRIMARY KEY (order_hash_hex, exchange_address, source_url); + `, + ); + + await queryRunner.query( + `CREATE TABLE raw.sra_orders_observed_timestamps ( + order_hash_hex varchar NOT NULL, + exchange_address varchar NOT NULL, + source_url varchar NOT NULL, + observed_timestamp bigint NOT NULL, + FOREIGN KEY + (order_hash_hex, exchange_address, source_url) + REFERENCES raw.sra_orders (order_hash_hex, exchange_address, source_url), + PRIMARY KEY (order_hash_hex, exchange_address, source_url, observed_timestamp) + );`, + ); + + await queryRunner.query( + `ALTER TABLE raw.sra_orders + DROP COLUMN last_updated_timestamp, + DROP COLUMN first_seen_timestamp;`, + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.dropTable('raw.sra_orders_observed_timestamps'); + + await queryRunner.query( + `ALTER TABLE raw.sra_orders + ADD COLUMN last_updated_timestamp bigint NOT NULL DEFAULT 0, + ADD COLUMN first_seen_timestamp bigint NOT NULL DEFAULT 0;`, + ); + + await queryRunner.query( + `ALTER TABLE raw.sra_orders + DROP CONSTRAINT sra_orders_pkey, + ADD CONSTRAINT 
"PK_09bfb9980715329563bd53d667e" PRIMARY KEY ("exchange_address", "order_hash_hex"); + `, + ); + } +} diff --git a/packages/pipeline/migrations/1542152278484-RenameSraOrdersFilledAmounts.ts b/packages/pipeline/migrations/1542152278484-RenameSraOrdersFilledAmounts.ts new file mode 100644 index 000000000..a13e3efa5 --- /dev/null +++ b/packages/pipeline/migrations/1542152278484-RenameSraOrdersFilledAmounts.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class RenameSraOrdersFilledAmounts1542152278484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.renameColumn('raw.sra_orders', 'maker_asset_filled_amount', 'maker_asset_amount'); + await queryRunner.renameColumn('raw.sra_orders', 'taker_asset_filled_amount', 'taker_asset_amount'); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.renameColumn('raw.sra_orders', 'maker_asset_amount', 'maker_asset_filled_amount'); + await queryRunner.renameColumn('raw.sra_orders', 'taker_asset_amount', 'taker_asset_filled_amount'); + } +} diff --git a/packages/pipeline/migrations/1542234704666-ConvertBigNumberToNumeric.ts b/packages/pipeline/migrations/1542234704666-ConvertBigNumberToNumeric.ts new file mode 100644 index 000000000..5200ef7cc --- /dev/null +++ b/packages/pipeline/migrations/1542234704666-ConvertBigNumberToNumeric.ts @@ -0,0 +1,53 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ConvertBigNumberToNumeric1542234704666 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.exchange_fill_events + ALTER COLUMN maker_asset_filled_amount TYPE numeric USING maker_asset_filled_amount::numeric, + ALTER COLUMN taker_asset_filled_amount TYPE numeric USING taker_asset_filled_amount::numeric, + ALTER COLUMN maker_fee_paid TYPE numeric USING maker_fee_paid::numeric, + ALTER COLUMN taker_fee_paid TYPE numeric USING taker_fee_paid::numeric;`, + ); + + await queryRunner.query( + `ALTER TABLE raw.exchange_cancel_up_to_events + ALTER COLUMN order_epoch TYPE numeric USING order_epoch::numeric;`, + ); + + await queryRunner.query( + `ALTER TABLE raw.sra_orders + ALTER COLUMN maker_asset_amount TYPE numeric USING maker_asset_amount::numeric, + ALTER COLUMN taker_asset_amount TYPE numeric USING taker_asset_amount::numeric, + ALTER COLUMN maker_fee TYPE numeric USING maker_fee::numeric, + ALTER COLUMN taker_fee TYPE numeric USING taker_fee::numeric, + ALTER COLUMN expiration_time_seconds TYPE numeric USING expiration_time_seconds::numeric, + ALTER COLUMN salt TYPE numeric USING salt::numeric;`, + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.sra_orders + ALTER COLUMN maker_asset_amount TYPE varchar USING maker_asset_amount::varchar, + ALTER COLUMN taker_asset_amount TYPE varchar USING taker_asset_amount::varchar, + ALTER COLUMN maker_fee TYPE varchar USING maker_fee::varchar, + ALTER COLUMN taker_fee TYPE varchar USING taker_fee::varchar, + ALTER COLUMN expiration_time_seconds TYPE varchar USING expiration_time_seconds::varchar, + ALTER COLUMN salt TYPE varchar USING salt::varchar;`, + ); + + await queryRunner.query( + `ALTER TABLE raw.exchange_cancel_up_to_events + ALTER COLUMN order_epoch TYPE varchar USING order_epoch::varchar;`, + ); + + await queryRunner.query( + `ALTER TABLE raw.exchange_fill_events + ALTER COLUMN maker_asset_filled_amount TYPE 
varchar USING maker_asset_filled_amount::varchar, + ALTER COLUMN taker_asset_filled_amount TYPE varchar USING taker_asset_filled_amount::varchar, + ALTER COLUMN maker_fee_paid TYPE varchar USING maker_fee_paid::varchar, + ALTER COLUMN taker_fee_paid TYPE varchar USING taker_fee_paid::varchar;`, + ); + } +} diff --git a/packages/pipeline/migrations/1542249766882-AddHomepageUrlToRelayers.ts b/packages/pipeline/migrations/1542249766882-AddHomepageUrlToRelayers.ts new file mode 100644 index 000000000..9a4811ad5 --- /dev/null +++ b/packages/pipeline/migrations/1542249766882-AddHomepageUrlToRelayers.ts @@ -0,0 +1,14 @@ +import { MigrationInterface, QueryRunner, TableColumn } from 'typeorm'; + +export class AddHomepageUrlToRelayers1542249766882 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.addColumn( + 'raw.relayers', + new TableColumn({ name: 'homepage_url', type: 'varchar', default: `'unknown'` }), + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.dropColumn('raw.relayers', 'homepage_url'); + } +} diff --git a/packages/pipeline/migrations/1542401122477-MakeTakerAddressNullable.ts b/packages/pipeline/migrations/1542401122477-MakeTakerAddressNullable.ts new file mode 100644 index 000000000..957c85a36 --- /dev/null +++ b/packages/pipeline/migrations/1542401122477-MakeTakerAddressNullable.ts @@ -0,0 +1,17 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class MakeTakerAddressNullable1542401122477 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.exchange_cancel_events + ALTER COLUMN taker_address DROP NOT NULL;`, + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.exchange_cancel_events + ALTER COLUMN taker_address SET NOT NULL;`, + ); + } +} diff --git a/packages/pipeline/migrations/1542655823221-NewMetadataAndOHLCVTables.ts b/packages/pipeline/migrations/1542655823221-NewMetadataAndOHLCVTables.ts new file mode 100644 index 000000000..838f5ba9c --- /dev/null +++ b/packages/pipeline/migrations/1542655823221-NewMetadataAndOHLCVTables.ts @@ -0,0 +1,60 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class NewMetadataAndOHLCVTables1542655823221 implements MigrationInterface { + // tslint:disable-next-line + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query(` + CREATE TABLE raw.token_metadata ( + address VARCHAR NOT NULL, + authority VARCHAR NOT NULL, + decimals INT NULL, + symbol VARCHAR NULL, + name VARCHAR NULL, + + PRIMARY KEY (address, authority) + ); + `); + + await queryRunner.dropTable('raw.token_on_chain_metadata'); + + await queryRunner.query(` + CREATE TABLE raw.ohlcv_external ( + exchange VARCHAR NOT NULL, + from_symbol VARCHAR NOT NULL, + to_symbol VARCHAR NOT NULL, + start_time BIGINT NOT NULL, + end_time BIGINT NOT NULL, + + open DOUBLE PRECISION NOT NULL, + close DOUBLE PRECISION NOT NULL, + low DOUBLE PRECISION NOT NULL, + high DOUBLE PRECISION NOT NULL, + volume_from DOUBLE PRECISION NOT NULL, + volume_to DOUBLE PRECISION NOT NULL, + + source VARCHAR NOT NULL, + observed_timestamp BIGINT NOT NULL, + + PRIMARY KEY (exchange, from_symbol, to_symbol, start_time, end_time, source, observed_timestamp) + ); + `); + } + + // tslint:disable-next-line + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query(` + CREATE TABLE 
raw.token_on_chain_metadata ( + address VARCHAR NOT NULL, + decimals INT NULL, + symbol VARCHAR NULL, + name VARCHAR NULL, + + PRIMARY KEY (address) + ); + `); + + await queryRunner.dropTable('raw.token_metadata'); + + await queryRunner.dropTable('raw.ohlcv_external'); + } +} diff --git a/packages/pipeline/migrations/1543434472116-TokenOrderbookSnapshots.ts b/packages/pipeline/migrations/1543434472116-TokenOrderbookSnapshots.ts new file mode 100644 index 000000000..a7117c753 --- /dev/null +++ b/packages/pipeline/migrations/1543434472116-TokenOrderbookSnapshots.ts @@ -0,0 +1,30 @@ +import { MigrationInterface, QueryRunner, Table } from 'typeorm'; + +const tokenOrderbookSnapshots = new Table({ + name: 'raw.token_orderbook_snapshots', + columns: [ + { name: 'observed_timestamp', type: 'bigint', isPrimary: true }, + { name: 'source', type: 'varchar', isPrimary: true }, + { name: 'order_type', type: 'order_t' }, + { name: 'price', type: 'numeric', isPrimary: true }, + + { name: 'base_asset_symbol', type: 'varchar', isPrimary: true }, + { name: 'base_asset_address', type: 'char(42)' }, + { name: 'base_volume', type: 'numeric' }, + + { name: 'quote_asset_symbol', type: 'varchar', isPrimary: true }, + { name: 'quote_asset_address', type: 'char(42)' }, + { name: 'quote_volume', type: 'numeric' }, + ], +}); + +export class TokenOrderbookSnapshots1543434472116 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query(`CREATE TYPE order_t AS enum('bid', 'ask');`); + await queryRunner.createTable(tokenOrderbookSnapshots); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.dropTable(tokenOrderbookSnapshots.name); + } +} diff --git a/packages/pipeline/migrations/1543446690436-CreateDexTrades.ts b/packages/pipeline/migrations/1543446690436-CreateDexTrades.ts new file mode 100644 index 000000000..267cf144b --- /dev/null +++ b/packages/pipeline/migrations/1543446690436-CreateDexTrades.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner, Table } from 'typeorm'; + +const dexTrades = new Table({ + name: 'raw.dex_trades', + columns: [ + { name: 'source_url', type: 'varchar', isPrimary: true }, + { name: 'tx_hash', type: 'varchar', isPrimary: true }, + + { name: 'tx_timestamp', type: 'bigint' }, + { name: 'tx_date', type: 'varchar' }, + { name: 'tx_sender', type: 'varchar(42)' }, + { name: 'smart_contract_id', type: 'bigint' }, + { name: 'smart_contract_address', type: 'varchar(42)' }, + { name: 'contract_type', type: 'varchar' }, + { name: 'maker', type: 'varchar(42)' }, + { name: 'taker', type: 'varchar(42)' }, + { name: 'amount_buy', type: 'numeric' }, + { name: 'maker_fee_amount', type: 'numeric' }, + { name: 'buy_currency_id', type: 'bigint' }, + { name: 'buy_symbol', type: 'varchar' }, + { name: 'amount_sell', type: 'numeric' }, + { name: 'taker_fee_amount', type: 'numeric' }, + { name: 'sell_currency_id', type: 'bigint' }, + { name: 'sell_symbol', type: 'varchar' }, + { name: 'maker_annotation', type: 'varchar' }, + { name: 'taker_annotation', type: 'varchar' }, + { name: 'protocol', type: 'varchar' }, + { name: 'buy_address', type: 'varchar(42)', isNullable: true }, + { name: 'sell_address', type: 'varchar(42)', isNullable: true }, + ], +}); + +export class CreateDexTrades1543446690436 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.createTable(dexTrades); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await 
queryRunner.dropTable(dexTrades); + } +} diff --git a/packages/pipeline/migrations/1543980079179-ConvertTokenMetadataDecimalsToBigNumber.ts b/packages/pipeline/migrations/1543980079179-ConvertTokenMetadataDecimalsToBigNumber.ts new file mode 100644 index 000000000..351bc7eb8 --- /dev/null +++ b/packages/pipeline/migrations/1543980079179-ConvertTokenMetadataDecimalsToBigNumber.ts @@ -0,0 +1,17 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ConvertTokenMetadataDecimalsToBigNumber1543980079179 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.token_metadata + ALTER COLUMN decimals TYPE numeric USING decimals::numeric;`, + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.token_metadata + ALTER COLUMN decimals TYPE numeric USING decimals::integer;`, + ); + } +} diff --git a/packages/pipeline/migrations/1543983324954-ConvertTransactionGasPriceToBigNumber.ts b/packages/pipeline/migrations/1543983324954-ConvertTransactionGasPriceToBigNumber.ts new file mode 100644 index 000000000..dcb0fd727 --- /dev/null +++ b/packages/pipeline/migrations/1543983324954-ConvertTransactionGasPriceToBigNumber.ts @@ -0,0 +1,19 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ConvertTransactionGasPriceToBigNumber1543983324954 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.transactions + ALTER COLUMN gas_price TYPE numeric USING gas_price::numeric, + ALTER COLUMN gas_used TYPE numeric USING gas_used::numeric;`, + ); + } + + public async down(queryRunner: QueryRunner): Promise<any> { + await queryRunner.query( + `ALTER TABLE raw.transactions + ALTER COLUMN gas_price TYPE numeric USING gas_price::bigint, + ALTER COLUMN gas_used TYPE numeric USING gas_used::bigint;`, + ); + } +} diff --git a/packages/pipeline/package.json b/packages/pipeline/package.json new file mode 100644 index 000000000..4fde906b8 --- /dev/null +++ b/packages/pipeline/package.json @@ -0,0 +1,65 @@ +{ + "name": "@0x/pipeline", + "version": "1.0.0", + "private": true, + "description": "Data pipeline for offline analysis", + "scripts": { + "build": "yarn tsc -b", + "build:ci": "yarn build", + "test": "yarn run_mocha", + "rebuild_and_test": "run-s build test:all", + "test:db": "yarn run_mocha:db", + "test:all": "run-s test test:db", + "test:circleci": "yarn test:coverage", + "run_mocha": "mocha --require source-map-support/register --require make-promises-safe 'lib/test/!(entities)/**/*_test.js' --bail --exit", + "run_mocha:db": "mocha --require source-map-support/register --require make-promises-safe lib/test/db_global_hooks.js 'lib/test/entities/*_test.js' --bail --exit --timeout 60000", + "test:coverage": "nyc npm run test:all --all && yarn coverage:report:lcov", + "coverage:report:lcov": "nyc report --reporter=text-lcov > coverage/lcov.info", + "clean": "shx rm -rf lib", + "lint": "tslint --project . 
--format stylish --exclude ./migrations/**/*", + "migrate:run": "yarn typeorm migration:run --config ./lib/src/ormconfig", + "migrate:revert": "yarn typeorm migration:revert --config ./lib/src/ormconfig", + "migrate:create": "yarn typeorm migration:create --config ./lib/src/ormconfig --dir migrations" + }, + "repository": { + "type": "git", + "url": "https://github.com/0xProject/0x-monorepo" + }, + "license": "Apache-2.0", + "devDependencies": { + "@0x/tslint-config": "^1.0.9", + "@types/axios": "^0.14.0", + "@types/ramda": "^0.25.38", + "chai": "^4.1.2", + "chai-as-promised": "^7.1.1", + "chai-bignumber": "^2.0.2", + "dirty-chai": "^2.0.1", + "mocha": "^5.2.0", + "tslint": "5.11.0", + "typescript": "3.0.1" + }, + "dependencies": { + "@0x/connect": "^3.0.2", + "@0x/contract-artifacts": "^1.0.1", + "@0x/contract-wrappers": "^3.0.0", + "@0x/dev-utils": "^1.0.13", + "@0x/order-utils": "^2.0.0", + "@0x/subproviders": "^2.1.0", + "@0x/types": "^1.2.0", + "@0x/utils": "^2.0.3", + "@0x/web3-wrapper": "^3.1.0", + "@types/dockerode": "^2.5.9", + "@types/p-limit": "^2.0.0", + "async-parallel": "^1.2.3", + "axios": "^0.18.0", + "bottleneck": "^2.13.2", + "dockerode": "^2.5.7", + "ethereum-types": "^1.0.6", + "pg": "^7.5.0", + "prettier": "^1.15.3", + "ramda": "^0.25.0", + "reflect-metadata": "^0.1.12", + "sqlite3": "^4.0.2", + "typeorm": "^0.2.7" + } +} diff --git a/packages/pipeline/src/data_sources/bloxy/index.ts b/packages/pipeline/src/data_sources/bloxy/index.ts new file mode 100644 index 000000000..94468d25a --- /dev/null +++ b/packages/pipeline/src/data_sources/bloxy/index.ts @@ -0,0 +1,133 @@ +import axios from 'axios'; +import * as R from 'ramda'; + +// URL to use for getting dex trades from Bloxy. +export const BLOXY_DEX_TRADES_URL = 'https://bloxy.info/api/dex/trades'; +// Number of trades to get at once. Must be less than or equal to MAX_OFFSET. +const TRADES_PER_QUERY = 10000; +// Maximum offset supported by the Bloxy API. +const MAX_OFFSET = 100000; +// Buffer to subtract from offset. This means we will request some trades twice +// but we have less chance on missing out on any data. +const OFFSET_BUFFER = 1000; +// Maximum number of days supported by the Bloxy API. +const MAX_DAYS = 30; +// Buffer used for comparing the last seen timestamp to the last returned +// timestamp. Increasing this reduces chances of data loss but also creates more +// redundancy and can impact performance. 
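// Illustrative aside, not part of the patch: a minimal sketch of how the constants
// above interact in getDexTradesAsync below. Each request advances the offset by
// TRADES_PER_QUERY - OFFSET_BUFFER, so consecutive pages overlap by OFFSET_BUFFER
// trades and a trade sitting on a page boundary is still picked up by the next
// request. The helper name below is hypothetical and only mirrors that stepping.
function bloxyRequestsUntilMaxOffset(tradesPerQuery: number, maxOffset: number, offsetBuffer: number): number {
    const step = tradesPerQuery - offsetBuffer; // e.g. 10000 - 1000 = 9000 previously unseen trades per page
    return Math.floor(maxOffset / step) + 1; // offsets 0, step, 2*step, ... while offset <= maxOffset
}
// bloxyRequestsUntilMaxOffset(10000, 100000, 1000) === 12 requests at most per run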
+// tslint:disable-next-line:custom-no-magic-numbers +const LAST_SEEN_TIMESTAMP_BUFFER_MS = 1000 * 60 * 30; // 30 minutes + +// tslint:disable-next-line:custom-no-magic-numbers +const millisecondsPerDay = 1000 * 60 * 60 * 24; // ms/d = ms/s * s/m * m/h * h/d + +export interface BloxyTrade { + tx_hash: string; + tx_time: string; + tx_date: string; + tx_sender: string; + smart_contract_id: number; + smart_contract_address: string; + contract_type: string; + maker: string; + taker: string; + amountBuy: number; + makerFee: number; + buyCurrencyId: number; + buySymbol: string; + amountSell: number; + takerFee: number; + sellCurrencyId: number; + sellSymbol: string; + maker_annotation: string; + taker_annotation: string; + protocol: string; + buyAddress: string | null; + sellAddress: string | null; +} + +interface BloxyError { + error: string; +} + +type BloxyResponse<T> = T | BloxyError; +type BloxyTradeResponse = BloxyResponse<BloxyTrade[]>; + +function isError<T>(response: BloxyResponse<T>): response is BloxyError { + return (response as BloxyError).error !== undefined; +} + +export class BloxySource { + private readonly _apiKey: string; + + constructor(apiKey: string) { + this._apiKey = apiKey; + } + + /** + * Gets all latest trades between the lastSeenTimestamp (minus some buffer) + * and the current time. Note that because the Bloxy API has some hard + * limits it might not always be possible to get *all* the trades in the + * desired time range. + * @param lastSeenTimestamp The latest timestamp for trades that have + * already been seen. + */ + public async getDexTradesAsync(lastSeenTimestamp: number): Promise<BloxyTrade[]> { + let allTrades: BloxyTrade[] = []; + + // Clamp numberOfDays so that it is always between 1 and MAX_DAYS (inclusive) + const numberOfDays = R.clamp(1, MAX_DAYS, getDaysSinceTimestamp(lastSeenTimestamp)); + + // Keep getting trades until we hit one of the following conditions: + // + // 1. Offset hits MAX_OFFSET (we can't go back any further). + // 2. There are no more trades in the response. + // 3. We see a tx_time equal to or earlier than lastSeenTimestamp (plus + // some buffer). + // + for (let offset = 0; offset <= MAX_OFFSET; offset += TRADES_PER_QUERY - OFFSET_BUFFER) { + const trades = await this._getTradesWithOffsetAsync(numberOfDays, offset); + if (trades.length === 0) { + // There are no more trades left for the days we are querying. + // This means we are done. + return filterDuplicateTrades(allTrades); + } + const sortedTrades = R.reverse(R.sortBy(trade => trade.tx_time, trades)); + allTrades = allTrades.concat(sortedTrades); + + // Check if lastReturnedTimestamp < lastSeenTimestamp + const lastReturnedTimestamp = new Date(sortedTrades[0].tx_time).getTime(); + if (lastReturnedTimestamp < lastSeenTimestamp - LAST_SEEN_TIMESTAMP_BUFFER_MS) { + // We are at the point where we have already seen trades for the + // timestamp range that is being returned. We're done. 
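// Illustrative aside, not part of the patch: because consecutive pages overlap by
// OFFSET_BUFFER trades, the same tx_hash can legitimately appear more than once in
// allTrades. filterDuplicateTrades, defined at the bottom of this file as
// R.uniqBy(trade => trade.tx_hash), keeps only the first occurrence it sees; the
// dedupeByTxHash name below is for illustration only.
import * as R from 'ramda';
const dedupeByTxHash = R.uniqBy((trade: { tx_hash: string }) => trade.tx_hash);
// dedupeByTxHash([{ tx_hash: '0xa' }, { tx_hash: '0xa' }, { tx_hash: '0xb' }]).length === 2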
+ return filterDuplicateTrades(allTrades); + } + } + return filterDuplicateTrades(allTrades); + } + + private async _getTradesWithOffsetAsync(numberOfDays: number, offset: number): Promise<BloxyTrade[]> { + const resp = await axios.get<BloxyTradeResponse>(BLOXY_DEX_TRADES_URL, { + params: { + key: this._apiKey, + days: numberOfDays, + limit: TRADES_PER_QUERY, + offset, + }, + }); + if (isError(resp.data)) { + throw new Error(`Error in Bloxy API response: ${resp.data.error}`); + } + return resp.data; + } +} + +// Computes the number of days between the given timestamp and the current +// timestamp (rounded up). +function getDaysSinceTimestamp(timestamp: number): number { + const msSinceTimestamp = Date.now() - timestamp; + const daysSinceTimestamp = msSinceTimestamp / millisecondsPerDay; + return Math.ceil(daysSinceTimestamp); +} + +const filterDuplicateTrades = R.uniqBy((trade: BloxyTrade) => trade.tx_hash); diff --git a/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts b/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts new file mode 100644 index 000000000..1717eb8b3 --- /dev/null +++ b/packages/pipeline/src/data_sources/contract-wrappers/exchange_events.ts @@ -0,0 +1,85 @@ +import { + ContractWrappers, + ExchangeCancelEventArgs, + ExchangeCancelUpToEventArgs, + ExchangeEventArgs, + ExchangeEvents, + ExchangeFillEventArgs, + ExchangeWrapper, +} from '@0x/contract-wrappers'; +import { Web3ProviderEngine } from '@0x/subproviders'; +import { Web3Wrapper } from '@0x/web3-wrapper'; +import { LogWithDecodedArgs } from 'ethereum-types'; + +import { EXCHANGE_START_BLOCK } from '../../utils'; + +const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default toBlock. +const NUM_BLOCKS_PER_QUERY = 20000; // Number of blocks to query for events at a time. + +export class ExchangeEventsSource { + private readonly _exchangeWrapper: ExchangeWrapper; + private readonly _web3Wrapper: Web3Wrapper; + constructor(provider: Web3ProviderEngine, networkId: number) { + this._web3Wrapper = new Web3Wrapper(provider); + const contractWrappers = new ContractWrappers(provider, { networkId }); + this._exchangeWrapper = contractWrappers.exchange; + } + + public async getFillEventsAsync( + fromBlock?: number, + toBlock?: number, + ): Promise<Array<LogWithDecodedArgs<ExchangeFillEventArgs>>> { + return this._getEventsAsync<ExchangeFillEventArgs>(ExchangeEvents.Fill, fromBlock, toBlock); + } + + public async getCancelEventsAsync( + fromBlock?: number, + toBlock?: number, + ): Promise<Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>> { + return this._getEventsAsync<ExchangeCancelEventArgs>(ExchangeEvents.Cancel, fromBlock, toBlock); + } + + public async getCancelUpToEventsAsync( + fromBlock?: number, + toBlock?: number, + ): Promise<Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>> { + return this._getEventsAsync<ExchangeCancelUpToEventArgs>(ExchangeEvents.CancelUpTo, fromBlock, toBlock); + } + + private async _getEventsAsync<ArgsType extends ExchangeEventArgs>( + eventName: ExchangeEvents, + fromBlock: number = EXCHANGE_START_BLOCK, + toBlock?: number, + ): Promise<Array<LogWithDecodedArgs<ArgsType>>> { + const calculatedToBlock = + toBlock === undefined + ? 
(await this._web3Wrapper.getBlockNumberAsync()) - BLOCK_FINALITY_THRESHOLD + : toBlock; + let events: Array<LogWithDecodedArgs<ArgsType>> = []; + for (let currFromBlock = fromBlock; currFromBlock <= calculatedToBlock; currFromBlock += NUM_BLOCKS_PER_QUERY) { + events = events.concat( + await this._getEventsForRangeAsync<ArgsType>( + eventName, + currFromBlock, + Math.min(currFromBlock + NUM_BLOCKS_PER_QUERY - 1, calculatedToBlock), + ), + ); + } + return events; + } + + private async _getEventsForRangeAsync<ArgsType extends ExchangeEventArgs>( + eventName: ExchangeEvents, + fromBlock: number, + toBlock: number, + ): Promise<Array<LogWithDecodedArgs<ArgsType>>> { + return this._exchangeWrapper.getLogsAsync<ArgsType>( + eventName, + { + fromBlock, + toBlock, + }, + {}, + ); + } +} diff --git a/packages/pipeline/src/data_sources/ddex/index.ts b/packages/pipeline/src/data_sources/ddex/index.ts new file mode 100644 index 000000000..2bbd8c29b --- /dev/null +++ b/packages/pipeline/src/data_sources/ddex/index.ts @@ -0,0 +1,78 @@ +import { fetchAsync, logUtils } from '@0x/utils'; + +const DDEX_BASE_URL = 'https://api.ddex.io/v2'; +const ACTIVE_MARKETS_URL = `${DDEX_BASE_URL}/markets`; +const NO_AGGREGATION_LEVEL = 3; // See https://docs.ddex.io/#get-orderbook +const ORDERBOOK_ENDPOINT = `/orderbook?level=${NO_AGGREGATION_LEVEL}`; +export const DDEX_SOURCE = 'ddex'; + +export interface DdexActiveMarketsResponse { + status: number; + desc: string; + data: { + markets: DdexMarket[]; + }; +} + +export interface DdexMarket { + id: string; + quoteToken: string; + quoteTokenDecimals: number; + quoteTokenAddress: string; + baseToken: string; + baseTokenDecimals: number; + baseTokenAddress: string; + minOrderSize: string; + maxOrderSize: string; + pricePrecision: number; + priceDecimals: number; + amountDecimals: number; +} + +export interface DdexOrderbookResponse { + status: number; + desc: string; + data: { + orderBook: DdexOrderbook; + }; +} + +export interface DdexOrderbook { + marketId: string; + bids: DdexOrder[]; + asks: DdexOrder[]; +} + +export interface DdexOrder { + price: string; + amount: string; + orderId: string; +} + +// tslint:disable:prefer-function-over-method +// ^ Keep consistency with other sources and help logical organization +export class DdexSource { + /** + * Call Ddex API to find out which markets they are maintaining orderbooks for. + */ + public async getActiveMarketsAsync(): Promise<DdexMarket[]> { + logUtils.log('Getting all active DDEX markets'); + const resp = await fetchAsync(ACTIVE_MARKETS_URL); + const respJson: DdexActiveMarketsResponse = await resp.json(); + const markets = respJson.data.markets; + logUtils.log(`Got ${markets.length} markets.`); + return markets; + } + + /** + * Retrieve orderbook from Ddex API for a given market. + * @param marketId String identifying the market we want data for. Eg. 
'REP/AUG' + */ + public async getMarketOrderbookAsync(marketId: string): Promise<DdexOrderbook> { + logUtils.log(`${marketId}: Retrieving orderbook.`); + const marketOrderbookUrl = `${ACTIVE_MARKETS_URL}/${marketId}${ORDERBOOK_ENDPOINT}`; + const resp = await fetchAsync(marketOrderbookUrl); + const respJson: DdexOrderbookResponse = await resp.json(); + return respJson.data.orderBook; + } +} diff --git a/packages/pipeline/src/data_sources/ohlcv_external/crypto_compare.ts b/packages/pipeline/src/data_sources/ohlcv_external/crypto_compare.ts new file mode 100644 index 000000000..85042501b --- /dev/null +++ b/packages/pipeline/src/data_sources/ohlcv_external/crypto_compare.ts @@ -0,0 +1,110 @@ +// tslint:disable:no-duplicate-imports +import { fetchAsync } from '@0x/utils'; +import Bottleneck from 'bottleneck'; +import { stringify } from 'querystring'; +import * as R from 'ramda'; + +import { TradingPair } from '../../utils/get_ohlcv_trading_pairs'; + +export interface CryptoCompareOHLCVResponse { + Data: CryptoCompareOHLCVRecord[]; + Response: string; + Message: string; + Type: number; +} + +export interface CryptoCompareOHLCVRecord { + time: number; // in seconds, not milliseconds + close: number; + high: number; + low: number; + open: number; + volumefrom: number; + volumeto: number; +} + +export interface CryptoCompareOHLCVParams { + fsym: string; + tsym: string; + e?: string; + aggregate?: string; + aggregatePredictableTimePeriods?: boolean; + limit?: number; + toTs?: number; +} + +const ONE_HOUR = 60 * 60 * 1000; // tslint:disable-line:custom-no-magic-numbers +const ONE_SECOND = 1000; +const ONE_HOUR_AGO = new Date().getTime() - ONE_HOUR; +const HTTP_OK_STATUS = 200; +const CRYPTO_COMPARE_VALID_EMPTY_RESPONSE_TYPE = 96; +const MAX_PAGE_SIZE = 2000; + +export class CryptoCompareOHLCVSource { + public readonly intervalBetweenRecords = ONE_HOUR; + public readonly defaultExchange = 'CCCAGG'; + public readonly interval = this.intervalBetweenRecords * MAX_PAGE_SIZE; // the hourly API returns data for one interval at a time + private readonly _url: string = 'https://min-api.cryptocompare.com/data/histohour?'; + + // rate-limit for all API calls through this class instance + private readonly _limiter: Bottleneck; + constructor(maxReqsPerSecond: number) { + this._limiter = new Bottleneck({ + minTime: ONE_SECOND / maxReqsPerSecond, + reservoir: 30, + reservoirRefreshAmount: 30, + reservoirRefreshInterval: ONE_SECOND, + }); + } + + // gets OHLCV records starting from pair.latest + public async getHourlyOHLCVAsync(pair: TradingPair): Promise<CryptoCompareOHLCVRecord[]> { + const params = { + e: this.defaultExchange, + fsym: pair.fromSymbol, + tsym: pair.toSymbol, + limit: MAX_PAGE_SIZE, + toTs: Math.floor((pair.latestSavedTime + this.interval) / ONE_SECOND), // CryptoCompare uses timestamp in seconds. 
not ms + }; + const url = this._url + stringify(params); + const response = await this._limiter.schedule(() => fetchAsync(url)); + if (response.status !== HTTP_OK_STATUS) { + throw new Error(`HTTP error while scraping Crypto Compare: [${response}]`); + } + const json: CryptoCompareOHLCVResponse = await response.json(); + if ( + (json.Response === 'Error' || json.Data.length === 0) && + json.Type !== CRYPTO_COMPARE_VALID_EMPTY_RESPONSE_TYPE + ) { + throw new Error(JSON.stringify(json)); + } + return json.Data.filter(rec => { + return ( + // Crypto Compare takes ~30 mins to finalise records + rec.time * ONE_SECOND < ONE_HOUR_AGO && rec.time * ONE_SECOND > pair.latestSavedTime && hasData(rec) + ); + }); + } + public generateBackfillIntervals(pair: TradingPair): TradingPair[] { + const now = new Date().getTime(); + const f = (p: TradingPair): false | [TradingPair, TradingPair] => { + if (p.latestSavedTime > now) { + return false; + } else { + return [p, R.merge(p, { latestSavedTime: p.latestSavedTime + this.interval })]; + } + }; + return R.unfold(f, pair); + } +} + +function hasData(record: CryptoCompareOHLCVRecord): boolean { + return ( + record.close !== 0 || + record.open !== 0 || + record.high !== 0 || + record.low !== 0 || + record.volumefrom !== 0 || + record.volumeto !== 0 + ); +} diff --git a/packages/pipeline/src/data_sources/paradex/index.ts b/packages/pipeline/src/data_sources/paradex/index.ts new file mode 100644 index 000000000..46d448f4b --- /dev/null +++ b/packages/pipeline/src/data_sources/paradex/index.ts @@ -0,0 +1,92 @@ +import { fetchAsync, logUtils } from '@0x/utils'; + +const PARADEX_BASE_URL = 'https://api.paradex.io/consumer/v0'; +const ACTIVE_MARKETS_URL = `${PARADEX_BASE_URL}/markets`; +const ORDERBOOK_ENDPOINT = `${PARADEX_BASE_URL}/orderbook`; +const TOKEN_INFO_ENDPOINT = `${PARADEX_BASE_URL}/tokens`; +export const PARADEX_SOURCE = 'paradex'; + +export type ParadexActiveMarketsResponse = ParadexMarket[]; + +export interface ParadexMarket { + id: string; + symbol: string; + baseToken: string; + quoteToken: string; + minOrderSize: string; + maxOrderSize: string; + priceMaxDecimals: number; + amountMaxDecimals: number; + // These are not native to the Paradex API response. We tag them on later + // by calling the token endpoint and joining on symbol. + baseTokenAddress?: string; + quoteTokenAddress?: string; +} + +export interface ParadexOrderbookResponse { + marketId: number; + marketSymbol: string; + bids: ParadexOrder[]; + asks: ParadexOrder[]; +} + +export interface ParadexOrder { + amount: string; + price: string; +} + +export type ParadexTokenInfoResponse = ParadexTokenInfo[]; + +export interface ParadexTokenInfo { + name: string; + symbol: string; + address: string; +} + +export class ParadexSource { + private readonly _apiKey: string; + + constructor(apiKey: string) { + this._apiKey = apiKey; + } + + /** + * Call Paradex API to find out which markets they are maintaining orderbooks for. + */ + public async getActiveMarketsAsync(): Promise<ParadexActiveMarketsResponse> { + logUtils.log('Getting all active Paradex markets.'); + const resp = await fetchAsync(ACTIVE_MARKETS_URL, { + headers: { 'API-KEY': this._apiKey }, + }); + const markets: ParadexActiveMarketsResponse = await resp.json(); + logUtils.log(`Got ${markets.length} markets.`); + return markets; + } + + /** + * Call Paradex API to find out their token information. 
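// Illustrative aside, not part of the patch: generateBackfillIntervals in the
// CryptoCompare source above uses R.unfold to emit one TradingPair per `interval`
// (MAX_PAGE_SIZE hourly candles), starting at the pair's latestSavedTime and
// stopping once the timestamp passes `now`; each element then corresponds to one
// paginated call to getHourlyOHLCVAsync. The plain-number helper below is
// hypothetical and only mirrors that stepping.
function backfillStartTimes(latestSavedTime: number, intervalMs: number, now: number): number[] {
    const starts: number[] = [];
    for (let t = latestSavedTime; t <= now; t += intervalMs) {
        starts.push(t); // one entry per page of historical OHLCV data to request
    }
    return starts;
}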
+ */ + public async getTokenInfoAsync(): Promise<ParadexTokenInfoResponse> { + logUtils.log('Getting token information from Paradex.'); + const resp = await fetchAsync(TOKEN_INFO_ENDPOINT, { + headers: { 'API-KEY': this._apiKey }, + }); + const tokens: ParadexTokenInfoResponse = await resp.json(); + logUtils.log(`Got information for ${tokens.length} tokens.`); + return tokens; + } + + /** + * Retrieve orderbook from Paradex API for a given market. + * @param marketSymbol String representing the market we want data for. + */ + public async getMarketOrderbookAsync(marketSymbol: string): Promise<ParadexOrderbookResponse> { + logUtils.log(`${marketSymbol}: Retrieving orderbook.`); + const marketOrderbookUrl = `${ORDERBOOK_ENDPOINT}?market=${marketSymbol}`; + const resp = await fetchAsync(marketOrderbookUrl, { + headers: { 'API-KEY': this._apiKey }, + }); + const orderbookResponse: ParadexOrderbookResponse = await resp.json(); + return orderbookResponse; + } +} diff --git a/packages/pipeline/src/data_sources/relayer-registry/index.ts b/packages/pipeline/src/data_sources/relayer-registry/index.ts new file mode 100644 index 000000000..8133f5eae --- /dev/null +++ b/packages/pipeline/src/data_sources/relayer-registry/index.ts @@ -0,0 +1,33 @@ +import axios from 'axios'; + +export interface RelayerResponse { + name: string; + homepage_url: string; + app_url: string; + header_img: string; + logo_img: string; + networks: RelayerResponseNetwork[]; +} + +export interface RelayerResponseNetwork { + networkId: number; + sra_http_endpoint?: string; + sra_ws_endpoint?: string; + static_order_fields?: { + fee_recipient_addresses?: string[]; + taker_addresses?: string[]; + }; +} + +export class RelayerRegistrySource { + private readonly _url: string; + + constructor(url: string) { + this._url = url; + } + + public async getRelayerInfoAsync(): Promise<Map<string, RelayerResponse>> { + const resp = await axios.get<Map<string, RelayerResponse>>(this._url); + return resp.data; + } +} diff --git a/packages/pipeline/src/data_sources/trusted_tokens/index.ts b/packages/pipeline/src/data_sources/trusted_tokens/index.ts new file mode 100644 index 000000000..552739fb9 --- /dev/null +++ b/packages/pipeline/src/data_sources/trusted_tokens/index.ts @@ -0,0 +1,29 @@ +import axios from 'axios'; + +export interface ZeroExTrustedTokenMeta { + address: string; + name: string; + symbol: string; + decimals: number; +} + +export interface MetamaskTrustedTokenMeta { + address: string; + name: string; + erc20: boolean; + symbol: string; + decimals: number; +} + +export class TrustedTokenSource<T> { + private readonly _url: string; + + constructor(url: string) { + this._url = url; + } + + public async getTrustedTokenMetaAsync(): Promise<T> { + const resp = await axios.get<T>(this._url); + return resp.data; + } +} diff --git a/packages/pipeline/src/data_sources/web3/index.ts b/packages/pipeline/src/data_sources/web3/index.ts new file mode 100644 index 000000000..45a9ea161 --- /dev/null +++ b/packages/pipeline/src/data_sources/web3/index.ts @@ -0,0 +1,22 @@ +import { Web3ProviderEngine } from '@0x/subproviders'; +import { Web3Wrapper } from '@0x/web3-wrapper'; +import { BlockWithoutTransactionData, Transaction } from 'ethereum-types'; + +export class Web3Source { + private readonly _web3Wrapper: Web3Wrapper; + constructor(provider: Web3ProviderEngine) { + this._web3Wrapper = new Web3Wrapper(provider); + } + + public async getBlockInfoAsync(blockNumber: number): Promise<BlockWithoutTransactionData> { + const block = await 
this._web3Wrapper.getBlockIfExistsAsync(blockNumber); + if (block == null) { + return Promise.reject(new Error(`Could not find block for given block number: ${blockNumber}`)); + } + return block; + } + + public async getTransactionInfoAsync(txHash: string): Promise<Transaction> { + return this._web3Wrapper.getTransactionByHashAsync(txHash); + } +} diff --git a/packages/pipeline/src/entities/block.ts b/packages/pipeline/src/entities/block.ts new file mode 100644 index 000000000..398946622 --- /dev/null +++ b/packages/pipeline/src/entities/block.ts @@ -0,0 +1,13 @@ +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'blocks', schema: 'raw' }) +export class Block { + @PrimaryColumn() public hash!: string; + @PrimaryColumn({ transformer: numberToBigIntTransformer }) + public number!: number; + + @Column({ name: 'timestamp', transformer: numberToBigIntTransformer }) + public timestamp!: number; +} diff --git a/packages/pipeline/src/entities/dex_trade.ts b/packages/pipeline/src/entities/dex_trade.ts new file mode 100644 index 000000000..9d288cb51 --- /dev/null +++ b/packages/pipeline/src/entities/dex_trade.ts @@ -0,0 +1,54 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'dex_trades', schema: 'raw' }) +export class DexTrade { + @PrimaryColumn({ name: 'source_url' }) + public sourceUrl!: string; + @PrimaryColumn({ name: 'tx_hash' }) + public txHash!: string; + + @Column({ name: 'tx_timestamp', type: 'bigint', transformer: numberToBigIntTransformer }) + public txTimestamp!: number; + @Column({ name: 'tx_date' }) + public txDate!: string; + @Column({ name: 'tx_sender' }) + public txSender!: string; + @Column({ name: 'smart_contract_id', type: 'bigint', transformer: numberToBigIntTransformer }) + public smartContractId!: number; + @Column({ name: 'smart_contract_address' }) + public smartContractAddress!: string; + @Column({ name: 'contract_type' }) + public contractType!: string; + @Column({ type: 'varchar' }) + public maker!: string; + @Column({ type: 'varchar' }) + public taker!: string; + @Column({ name: 'amount_buy', type: 'numeric', transformer: bigNumberTransformer }) + public amountBuy!: BigNumber; + @Column({ name: 'maker_fee_amount', type: 'numeric', transformer: bigNumberTransformer }) + public makerFeeAmount!: BigNumber; + @Column({ name: 'buy_currency_id', type: 'bigint', transformer: numberToBigIntTransformer }) + public buyCurrencyId!: number; + @Column({ name: 'buy_symbol' }) + public buySymbol!: string; + @Column({ name: 'amount_sell', type: 'numeric', transformer: bigNumberTransformer }) + public amountSell!: BigNumber; + @Column({ name: 'taker_fee_amount', type: 'numeric', transformer: bigNumberTransformer }) + public takerFeeAmount!: BigNumber; + @Column({ name: 'sell_currency_id', type: 'bigint', transformer: numberToBigIntTransformer }) + public sellCurrencyId!: number; + @Column({ name: 'sell_symbol' }) + public sellSymbol!: string; + @Column({ name: 'maker_annotation' }) + public makerAnnotation!: string; + @Column({ name: 'taker_annotation' }) + public takerAnnotation!: string; + @Column() public protocol!: string; + @Column({ name: 'buy_address', type: 'varchar', nullable: true }) + public buyAddress!: string | null; + @Column({ name: 'sell_address', type: 'varchar', nullable: true }) + public sellAddress!: string | null; +} diff --git 
a/packages/pipeline/src/entities/exchange_cancel_event.ts b/packages/pipeline/src/entities/exchange_cancel_event.ts new file mode 100644 index 000000000..38f99c903 --- /dev/null +++ b/packages/pipeline/src/entities/exchange_cancel_event.ts @@ -0,0 +1,51 @@ +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { AssetType } from '../types'; +import { numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'exchange_cancel_events', schema: 'raw' }) +export class ExchangeCancelEvent { + @PrimaryColumn({ name: 'contract_address' }) + public contractAddress!: string; + @PrimaryColumn({ name: 'log_index' }) + public logIndex!: number; + @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer }) + public blockNumber!: number; + + @Column({ name: 'raw_data' }) + public rawData!: string; + + @Column({ name: 'transaction_hash' }) + public transactionHash!: string; + @Column({ name: 'maker_address' }) + public makerAddress!: string; + @Column({ nullable: true, type: String, name: 'taker_address' }) + public takerAddress!: string; + @Column({ name: 'fee_recipient_address' }) + public feeRecipientAddress!: string; + @Column({ name: 'sender_address' }) + public senderAddress!: string; + @Column({ name: 'order_hash' }) + public orderHash!: string; + + @Column({ name: 'raw_maker_asset_data' }) + public rawMakerAssetData!: string; + @Column({ name: 'maker_asset_type' }) + public makerAssetType!: AssetType; + @Column({ name: 'maker_asset_proxy_id' }) + public makerAssetProxyId!: string; + @Column({ name: 'maker_token_address' }) + public makerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'maker_token_id' }) + public makerTokenId!: string | null; + @Column({ name: 'raw_taker_asset_data' }) + public rawTakerAssetData!: string; + @Column({ name: 'taker_asset_type' }) + public takerAssetType!: AssetType; + @Column({ name: 'taker_asset_proxy_id' }) + public takerAssetProxyId!: string; + @Column({ name: 'taker_token_address' }) + public takerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'taker_token_id' }) + public takerTokenId!: string | null; +} diff --git a/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts b/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts new file mode 100644 index 000000000..27580305e --- /dev/null +++ b/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts @@ -0,0 +1,26 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'exchange_cancel_up_to_events', schema: 'raw' }) +export class ExchangeCancelUpToEvent { + @PrimaryColumn({ name: 'contract_address' }) + public contractAddress!: string; + @PrimaryColumn({ name: 'log_index' }) + public logIndex!: number; + @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer }) + public blockNumber!: number; + + @Column({ name: 'raw_data' }) + public rawData!: string; + + @Column({ name: 'transaction_hash' }) + public transactionHash!: string; + @Column({ name: 'maker_address' }) + public makerAddress!: string; + @Column({ name: 'sender_address' }) + public senderAddress!: string; + @Column({ name: 'order_epoch', type: 'numeric', transformer: bigNumberTransformer }) + public orderEpoch!: BigNumber; +} diff --git a/packages/pipeline/src/entities/exchange_fill_event.ts b/packages/pipeline/src/entities/exchange_fill_event.ts new file mode 100644 index 
000000000..9b7727615 --- /dev/null +++ b/packages/pipeline/src/entities/exchange_fill_event.ts @@ -0,0 +1,60 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { AssetType } from '../types'; +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'exchange_fill_events', schema: 'raw' }) +export class ExchangeFillEvent { + @PrimaryColumn({ name: 'contract_address' }) + public contractAddress!: string; + @PrimaryColumn({ name: 'log_index' }) + public logIndex!: number; + @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer }) + public blockNumber!: number; + + @Column({ name: 'raw_data' }) + public rawData!: string; + + @Column({ name: 'transaction_hash' }) + public transactionHash!: string; + @Column({ name: 'maker_address' }) + public makerAddress!: string; + @Column({ name: 'taker_address' }) + public takerAddress!: string; + @Column({ name: 'fee_recipient_address' }) + public feeRecipientAddress!: string; + @Column({ name: 'sender_address' }) + public senderAddress!: string; + @Column({ name: 'maker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer }) + public makerAssetFilledAmount!: BigNumber; + @Column({ name: 'taker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer }) + public takerAssetFilledAmount!: BigNumber; + @Column({ name: 'maker_fee_paid', type: 'numeric', transformer: bigNumberTransformer }) + public makerFeePaid!: BigNumber; + @Column({ name: 'taker_fee_paid', type: 'numeric', transformer: bigNumberTransformer }) + public takerFeePaid!: BigNumber; + @Column({ name: 'order_hash' }) + public orderHash!: string; + + @Column({ name: 'raw_maker_asset_data' }) + public rawMakerAssetData!: string; + @Column({ name: 'maker_asset_type' }) + public makerAssetType!: AssetType; + @Column({ name: 'maker_asset_proxy_id' }) + public makerAssetProxyId!: string; + @Column({ name: 'maker_token_address' }) + public makerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'maker_token_id' }) + public makerTokenId!: string | null; + @Column({ name: 'raw_taker_asset_data' }) + public rawTakerAssetData!: string; + @Column({ name: 'taker_asset_type' }) + public takerAssetType!: AssetType; + @Column({ name: 'taker_asset_proxy_id' }) + public takerAssetProxyId!: string; + @Column({ name: 'taker_token_address' }) + public takerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'taker_token_id' }) + public takerTokenId!: string | null; +} diff --git a/packages/pipeline/src/entities/index.ts b/packages/pipeline/src/entities/index.ts new file mode 100644 index 000000000..db0814e38 --- /dev/null +++ b/packages/pipeline/src/entities/index.ts @@ -0,0 +1,18 @@ +import { ExchangeCancelEvent } from './exchange_cancel_event'; +import { ExchangeCancelUpToEvent } from './exchange_cancel_up_to_event'; +import { ExchangeFillEvent } from './exchange_fill_event'; + +export { Block } from './block'; +export { DexTrade } from './dex_trade'; +export { ExchangeCancelEvent } from './exchange_cancel_event'; +export { ExchangeCancelUpToEvent } from './exchange_cancel_up_to_event'; +export { ExchangeFillEvent } from './exchange_fill_event'; +export { OHLCVExternal } from './ohlcv_external'; +export { Relayer } from './relayer'; +export { SraOrder } from './sra_order'; +export { SraOrdersObservedTimeStamp, createObservedTimestampForOrder } from './sra_order_observed_timestamp'; +export { TokenMetadata } from 
'./token_metadata'; +export { TokenOrderbookSnapshot } from './token_order'; +export { Transaction } from './transaction'; + +export type ExchangeEvent = ExchangeFillEvent | ExchangeCancelEvent | ExchangeCancelUpToEvent; diff --git a/packages/pipeline/src/entities/ohlcv_external.ts b/packages/pipeline/src/entities/ohlcv_external.ts new file mode 100644 index 000000000..4f55dd930 --- /dev/null +++ b/packages/pipeline/src/entities/ohlcv_external.ts @@ -0,0 +1,30 @@ +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'ohlcv_external', schema: 'raw' }) +export class OHLCVExternal { + @PrimaryColumn() public exchange!: string; + + @PrimaryColumn({ name: 'from_symbol', type: 'varchar' }) + public fromSymbol!: string; + @PrimaryColumn({ name: 'to_symbol', type: 'varchar' }) + public toSymbol!: string; + @PrimaryColumn({ name: 'start_time', transformer: numberToBigIntTransformer }) + public startTime!: number; + @PrimaryColumn({ name: 'end_time', transformer: numberToBigIntTransformer }) + public endTime!: number; + + @Column() public open!: number; + @Column() public close!: number; + @Column() public low!: number; + @Column() public high!: number; + @Column({ name: 'volume_from' }) + public volumeFrom!: number; + @Column({ name: 'volume_to' }) + public volumeTo!: number; + + @PrimaryColumn() public source!: string; + @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer }) + public observedTimestamp!: number; +} diff --git a/packages/pipeline/src/entities/relayer.ts b/packages/pipeline/src/entities/relayer.ts new file mode 100644 index 000000000..5af8578b4 --- /dev/null +++ b/packages/pipeline/src/entities/relayer.ts @@ -0,0 +1,21 @@ +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +@Entity({ name: 'relayers', schema: 'raw' }) +export class Relayer { + @PrimaryColumn() public uuid!: string; + + @Column() public name!: string; + @Column({ name: 'homepage_url', type: 'varchar' }) + public homepageUrl!: string; + @Column({ name: 'sra_http_endpoint', type: 'varchar', nullable: true }) + public sraHttpEndpoint!: string | null; + @Column({ name: 'sra_ws_endpoint', type: 'varchar', nullable: true }) + public sraWsEndpoint!: string | null; + @Column({ name: 'app_url', type: 'varchar', nullable: true }) + public appUrl!: string | null; + + @Column({ name: 'fee_recipient_addresses', type: 'varchar', array: true }) + public feeRecipientAddresses!: string[]; + @Column({ name: 'taker_addresses', type: 'varchar', array: true }) + public takerAddresses!: string[]; +} diff --git a/packages/pipeline/src/entities/sra_order.ts b/packages/pipeline/src/entities/sra_order.ts new file mode 100644 index 000000000..9c730a0bb --- /dev/null +++ b/packages/pipeline/src/entities/sra_order.ts @@ -0,0 +1,63 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { AssetType } from '../types'; +import { bigNumberTransformer } from '../utils'; + +@Entity({ name: 'sra_orders', schema: 'raw' }) +export class SraOrder { + @PrimaryColumn({ name: 'exchange_address' }) + public exchangeAddress!: string; + @PrimaryColumn({ name: 'order_hash_hex' }) + public orderHashHex!: string; + @PrimaryColumn({ name: 'source_url' }) + public sourceUrl!: string; + + @Column({ name: 'maker_address' }) + public makerAddress!: string; + @Column({ name: 'taker_address' }) + public takerAddress!: string; + @Column({ name: 'fee_recipient_address' }) + public feeRecipientAddress!: 
string; + @Column({ name: 'sender_address' }) + public senderAddress!: string; + @Column({ name: 'maker_asset_amount', type: 'numeric', transformer: bigNumberTransformer }) + public makerAssetAmount!: BigNumber; + @Column({ name: 'taker_asset_amount', type: 'numeric', transformer: bigNumberTransformer }) + public takerAssetAmount!: BigNumber; + @Column({ name: 'maker_fee', type: 'numeric', transformer: bigNumberTransformer }) + public makerFee!: BigNumber; + @Column({ name: 'taker_fee', type: 'numeric', transformer: bigNumberTransformer }) + public takerFee!: BigNumber; + @Column({ name: 'expiration_time_seconds', type: 'numeric', transformer: bigNumberTransformer }) + public expirationTimeSeconds!: BigNumber; + @Column({ name: 'salt', type: 'numeric', transformer: bigNumberTransformer }) + public salt!: BigNumber; + @Column({ name: 'signature' }) + public signature!: string; + + @Column({ name: 'raw_maker_asset_data' }) + public rawMakerAssetData!: string; + @Column({ name: 'maker_asset_type' }) + public makerAssetType!: AssetType; + @Column({ name: 'maker_asset_proxy_id' }) + public makerAssetProxyId!: string; + @Column({ name: 'maker_token_address' }) + public makerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'maker_token_id' }) + public makerTokenId!: string | null; + @Column({ name: 'raw_taker_asset_data' }) + public rawTakerAssetData!: string; + @Column({ name: 'taker_asset_type' }) + public takerAssetType!: AssetType; + @Column({ name: 'taker_asset_proxy_id' }) + public takerAssetProxyId!: string; + @Column({ name: 'taker_token_address' }) + public takerTokenAddress!: string; + @Column({ nullable: true, type: String, name: 'taker_token_id' }) + public takerTokenId!: string | null; + + // TODO(albrow): Make this optional? + @Column({ name: 'metadata_json' }) + public metadataJson!: string; +} diff --git a/packages/pipeline/src/entities/sra_order_observed_timestamp.ts b/packages/pipeline/src/entities/sra_order_observed_timestamp.ts new file mode 100644 index 000000000..cbec1c6d0 --- /dev/null +++ b/packages/pipeline/src/entities/sra_order_observed_timestamp.ts @@ -0,0 +1,35 @@ +import { Entity, PrimaryColumn } from 'typeorm'; + +import { numberToBigIntTransformer } from '../utils'; + +import { SraOrder } from './sra_order'; + +@Entity({ name: 'sra_orders_observed_timestamps', schema: 'raw' }) +export class SraOrdersObservedTimeStamp { + @PrimaryColumn({ name: 'exchange_address' }) + public exchangeAddress!: string; + @PrimaryColumn({ name: 'order_hash_hex' }) + public orderHashHex!: string; + @PrimaryColumn({ name: 'source_url' }) + public sourceUrl!: string; + + @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer }) + public observedTimestamp!: number; +} + +/** + * Returns a new SraOrdersObservedTimeStamp for the given order based on the + * current time. + * @param order The order to generate a timestamp for. 
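// Illustrative aside, not part of the patch: the numberToBigIntTransformer and
// bigNumberTransformer used by these entities live in ../utils and are not shown
// in this diff. Assuming they follow TypeORM's ValueTransformer interface, a
// BigNumber column transformer would look roughly like the sketch below; the
// actual implementation in the package may differ.
import { BigNumber } from '@0x/utils';
import { ValueTransformer } from 'typeorm';

const bigNumberTransformerSketch: ValueTransformer = {
    // serialize to a string for the numeric column on write
    to: (value: BigNumber | null): string | null => (value === null ? null : value.toString()),
    // rehydrate the database string into a BigNumber on read
    from: (value: string | null): BigNumber | null => (value === null ? null : new BigNumber(value)),
};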
+ */ +export function createObservedTimestampForOrder( + order: SraOrder, + observedTimestamp: number, +): SraOrdersObservedTimeStamp { + const observed = new SraOrdersObservedTimeStamp(); + observed.exchangeAddress = order.exchangeAddress; + observed.orderHashHex = order.orderHashHex; + observed.sourceUrl = order.sourceUrl; + observed.observedTimestamp = observedTimestamp; + return observed; +} diff --git a/packages/pipeline/src/entities/token_metadata.ts b/packages/pipeline/src/entities/token_metadata.ts new file mode 100644 index 000000000..911b53972 --- /dev/null +++ b/packages/pipeline/src/entities/token_metadata.ts @@ -0,0 +1,22 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { bigNumberTransformer } from '../utils/transformers'; + +@Entity({ name: 'token_metadata', schema: 'raw' }) +export class TokenMetadata { + @PrimaryColumn({ type: 'varchar', nullable: false }) + public address!: string; + + @PrimaryColumn({ type: 'varchar', nullable: false }) + public authority!: string; + + @Column({ type: 'numeric', transformer: bigNumberTransformer, nullable: true }) + public decimals!: BigNumber | null; + + @Column({ type: 'varchar', nullable: true }) + public symbol!: string | null; + + @Column({ type: 'varchar', nullable: true }) + public name!: string | null; +} diff --git a/packages/pipeline/src/entities/token_order.ts b/packages/pipeline/src/entities/token_order.ts new file mode 100644 index 000000000..557705767 --- /dev/null +++ b/packages/pipeline/src/entities/token_order.ts @@ -0,0 +1,29 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { OrderType } from '../types'; +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'token_orderbook_snapshots', schema: 'raw' }) +export class TokenOrderbookSnapshot { + @PrimaryColumn({ name: 'observed_timestamp', type: 'bigint', transformer: numberToBigIntTransformer }) + public observedTimestamp!: number; + @PrimaryColumn({ name: 'source' }) + public source!: string; + @Column({ name: 'order_type' }) + public orderType!: OrderType; + @PrimaryColumn({ name: 'price', type: 'numeric', transformer: bigNumberTransformer }) + public price!: BigNumber; + @PrimaryColumn({ name: 'base_asset_symbol' }) + public baseAssetSymbol!: string; + @Column({ name: 'base_asset_address' }) + public baseAssetAddress!: string; + @Column({ name: 'base_volume', type: 'numeric', transformer: bigNumberTransformer }) + public baseVolume!: BigNumber; + @PrimaryColumn({ name: 'quote_asset_symbol' }) + public quoteAssetSymbol!: string; + @Column({ name: 'quote_asset_address' }) + public quoteAssetAddress!: string; + @Column({ name: 'quote_volume', type: 'numeric', transformer: bigNumberTransformer }) + public quoteVolume!: BigNumber; +} diff --git a/packages/pipeline/src/entities/transaction.ts b/packages/pipeline/src/entities/transaction.ts new file mode 100644 index 000000000..742050177 --- /dev/null +++ b/packages/pipeline/src/entities/transaction.ts @@ -0,0 +1,19 @@ +import { BigNumber } from '@0x/utils'; +import { Column, Entity, PrimaryColumn } from 'typeorm'; + +import { bigNumberTransformer, numberToBigIntTransformer } from '../utils'; + +@Entity({ name: 'transactions', schema: 'raw' }) +export class Transaction { + @PrimaryColumn({ name: 'transaction_hash' }) + public transactionHash!: string; + @PrimaryColumn({ name: 'block_hash' }) + public blockHash!: string; + @PrimaryColumn({ name: 'block_number', 
transformer: numberToBigIntTransformer }) + public blockNumber!: number; + + @Column({ type: 'numeric', name: 'gas_used', transformer: bigNumberTransformer }) + public gasUsed!: BigNumber; + @Column({ type: 'numeric', name: 'gas_price', transformer: bigNumberTransformer }) + public gasPrice!: BigNumber; +} diff --git a/packages/pipeline/src/ormconfig.ts b/packages/pipeline/src/ormconfig.ts new file mode 100644 index 000000000..9f7815b4e --- /dev/null +++ b/packages/pipeline/src/ormconfig.ts @@ -0,0 +1,42 @@ +import { ConnectionOptions } from 'typeorm'; + +import { + Block, + DexTrade, + ExchangeCancelEvent, + ExchangeCancelUpToEvent, + ExchangeFillEvent, + OHLCVExternal, + Relayer, + SraOrder, + SraOrdersObservedTimeStamp, + TokenMetadata, + TokenOrderbookSnapshot, + Transaction, +} from './entities'; + +const entities = [ + Block, + DexTrade, + ExchangeCancelEvent, + ExchangeCancelUpToEvent, + ExchangeFillEvent, + OHLCVExternal, + Relayer, + SraOrder, + SraOrdersObservedTimeStamp, + TokenMetadata, + TokenOrderbookSnapshot, + Transaction, +]; + +const config: ConnectionOptions = { + type: 'postgres', + url: process.env.ZEROEX_DATA_PIPELINE_DB_URL, + synchronize: false, + logging: ['error'], + entities, + migrations: ['./lib/migrations/**/*.js'], +}; + +module.exports = config; diff --git a/packages/pipeline/src/parsers/bloxy/index.ts b/packages/pipeline/src/parsers/bloxy/index.ts new file mode 100644 index 000000000..caa55d289 --- /dev/null +++ b/packages/pipeline/src/parsers/bloxy/index.ts @@ -0,0 +1,53 @@ +import { BigNumber } from '@0x/utils'; +import * as R from 'ramda'; + +import { BLOXY_DEX_TRADES_URL, BloxyTrade } from '../../data_sources/bloxy'; +import { DexTrade } from '../../entities'; + +/** + * Parses a raw trades response from the Bloxy Dex API and returns an array of + * DexTrade entities. + * @param rawTrades A raw order response from an SRA endpoint. + */ +export function parseBloxyTrades(rawTrades: BloxyTrade[]): DexTrade[] { + return R.map(_parseBloxyTrade, rawTrades); +} + +/** + * Converts a single Bloxy trade into a DexTrade entity. + * @param rawTrade A single trade from the response from the Bloxy API. + */ +export function _parseBloxyTrade(rawTrade: BloxyTrade): DexTrade { + const dexTrade = new DexTrade(); + dexTrade.sourceUrl = BLOXY_DEX_TRADES_URL; + dexTrade.txHash = rawTrade.tx_hash; + dexTrade.txTimestamp = new Date(rawTrade.tx_time).getTime(); + dexTrade.txDate = rawTrade.tx_date; + dexTrade.txSender = rawTrade.tx_sender; + dexTrade.smartContractId = rawTrade.smart_contract_id; + dexTrade.smartContractAddress = rawTrade.smart_contract_address; + dexTrade.contractType = rawTrade.contract_type; + dexTrade.maker = rawTrade.maker; + dexTrade.taker = rawTrade.taker; + // TODO(albrow): The Bloxy API returns amounts and fees as a `number` type + // but some of their values have too many significant digits to be + // represented that way. Ideally they will switch to using strings and then + // we can update this code. 
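// Illustrative aside, not part of the patch: why the TODO above matters. Token
// amounts in base units routinely exceed Number.MAX_SAFE_INTEGER (2^53 - 1), so
// precision is lost the moment the API's JSON is parsed into a JS number; the
// toString() round-trip below can only preserve what survived that loss. The
// variable names are for illustration only.
import { BigNumber } from '@0x/utils';
const reportedAmount = 1000000000000000000000001; // silently rounded by the JS number type
const recovered = new BigNumber(reportedAmount.toString()); // trailing digits are already gone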
+ dexTrade.amountBuy = new BigNumber(rawTrade.amountBuy.toString()); + dexTrade.makerFeeAmount = new BigNumber(rawTrade.makerFee.toString()); + dexTrade.buyCurrencyId = rawTrade.buyCurrencyId; + dexTrade.buySymbol = filterNullCharacters(rawTrade.buySymbol); + dexTrade.amountSell = new BigNumber(rawTrade.amountSell.toString()); + dexTrade.takerFeeAmount = new BigNumber(rawTrade.takerFee.toString()); + dexTrade.sellCurrencyId = rawTrade.sellCurrencyId; + dexTrade.sellSymbol = filterNullCharacters(rawTrade.sellSymbol); + dexTrade.makerAnnotation = rawTrade.maker_annotation; + dexTrade.takerAnnotation = rawTrade.taker_annotation; + dexTrade.protocol = rawTrade.protocol; + dexTrade.buyAddress = rawTrade.buyAddress; + dexTrade.sellAddress = rawTrade.sellAddress; + return dexTrade; +} + +// Works with any form of escaped null character (e.g., '\0' and '\u0000'). +const filterNullCharacters = R.replace(/\0/g, ''); diff --git a/packages/pipeline/src/parsers/ddex_orders/index.ts b/packages/pipeline/src/parsers/ddex_orders/index.ts new file mode 100644 index 000000000..81132e8f0 --- /dev/null +++ b/packages/pipeline/src/parsers/ddex_orders/index.ts @@ -0,0 +1,77 @@ +import { BigNumber } from '@0x/utils'; +import * as R from 'ramda'; + +import { DdexMarket, DdexOrder, DdexOrderbook } from '../../data_sources/ddex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../entities'; +import { OrderType } from '../../types'; + +/** + * Marque function of this file. + * 1) Takes in orders from an orderbook, + * other information attached. + * @param ddexOrderbook A raw orderbook that we pull from the Ddex API. + * @param ddexMarket An object containing market data also directly from the API. + * @param observedTimestamp Time at which the orders for the market were pulled. + * @param source The exchange where these orders are placed. In this case 'ddex'. + */ +export function parseDdexOrders( + ddexOrderbook: DdexOrderbook, + ddexMarket: DdexMarket, + observedTimestamp: number, + source: string, +): TokenOrder[] { + const aggregatedBids = aggregateOrders(ddexOrderbook.bids); + const aggregatedAsks = aggregateOrders(ddexOrderbook.asks); + const parsedBids = aggregatedBids.map(order => parseDdexOrder(ddexMarket, observedTimestamp, 'bid', source, order)); + const parsedAsks = aggregatedAsks.map(order => parseDdexOrder(ddexMarket, observedTimestamp, 'ask', source, order)); + return parsedBids.concat(parsedAsks); +} + +/** + * Aggregates orders by price point for consistency with other exchanges. + * Querying the Ddex API at level 3 setting returns a breakdown of + * individual orders at each price point. Other exchanges only give total amount + * at each price point. Returns an array of <price, amount> tuples. + * @param ddexOrders A list of Ddex orders awaiting aggregation. + */ +export function aggregateOrders(ddexOrders: DdexOrder[]): Array<[string, BigNumber]> { + const sumAmount = (acc: BigNumber, order: DdexOrder): BigNumber => acc.plus(order.amount); + const aggregatedPricePoints = R.reduceBy(sumAmount, new BigNumber(0), R.prop('price'), ddexOrders); + return Object.entries(aggregatedPricePoints); +} + +/** + * Parse a single aggregated Ddex order in order to form a tokenOrder entity + * which can be saved into the database. + * @param ddexMarket An object containing information about the market where these + * trades have been placed. + * @param observedTimestamp The time when the API response returned back to us. + * @param orderType 'bid' or 'ask' enum. 
+ * @param source Exchange where these orders were placed. + * @param ddexOrder A <price, amount> tuple which we will convert to volume-basis. + */ +export function parseDdexOrder( + ddexMarket: DdexMarket, + observedTimestamp: number, + orderType: OrderType, + source: string, + ddexOrder: [string, BigNumber], +): TokenOrder { + const tokenOrder = new TokenOrder(); + const price = new BigNumber(ddexOrder[0]); + const amount = ddexOrder[1]; + + tokenOrder.source = source; + tokenOrder.observedTimestamp = observedTimestamp; + tokenOrder.orderType = orderType; + tokenOrder.price = price; + + tokenOrder.baseAssetSymbol = ddexMarket.baseToken; + tokenOrder.baseAssetAddress = ddexMarket.baseTokenAddress; + tokenOrder.baseVolume = price.times(amount); + + tokenOrder.quoteAssetSymbol = ddexMarket.quoteToken; + tokenOrder.quoteAssetAddress = ddexMarket.quoteTokenAddress; + tokenOrder.quoteVolume = amount; + return tokenOrder; +} diff --git a/packages/pipeline/src/parsers/events/index.ts b/packages/pipeline/src/parsers/events/index.ts new file mode 100644 index 000000000..e18106c75 --- /dev/null +++ b/packages/pipeline/src/parsers/events/index.ts @@ -0,0 +1,133 @@ +import { ExchangeCancelEventArgs, ExchangeCancelUpToEventArgs, ExchangeFillEventArgs } from '@0x/contract-wrappers'; +import { assetDataUtils } from '@0x/order-utils'; +import { AssetProxyId, ERC721AssetData } from '@0x/types'; +import { LogWithDecodedArgs } from 'ethereum-types'; +import * as R from 'ramda'; + +import { ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent } from '../../entities'; +import { bigNumbertoStringOrNull } from '../../utils'; + +/** + * Parses raw event logs for a fill event and returns an array of + * ExchangeFillEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseExchangeFillEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeFillEventArgs>>, +) => ExchangeFillEvent[] = R.map(_convertToExchangeFillEvent); + +/** + * Parses raw event logs for a cancel event and returns an array of + * ExchangeCancelEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseExchangeCancelEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>, +) => ExchangeCancelEvent[] = R.map(_convertToExchangeCancelEvent); + +/** + * Parses raw event logs for a CancelUpTo event and returns an array of + * ExchangeCancelUpToEvent entities. + * @param eventLogs Raw event logs (e.g. returned from contract-wrappers). + */ +export const parseExchangeCancelUpToEvents: ( + eventLogs: Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>, +) => ExchangeCancelUpToEvent[] = R.map(_convertToExchangeCancelUpToEvent); + +/** + * Converts a raw event log for a fill event into an ExchangeFillEvent entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). + */ +export function _convertToExchangeFillEvent(eventLog: LogWithDecodedArgs<ExchangeFillEventArgs>): ExchangeFillEvent { + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); + const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); + const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 
'erc20' : 'erc721'; + const exchangeFillEvent = new ExchangeFillEvent(); + exchangeFillEvent.contractAddress = eventLog.address as string; + exchangeFillEvent.blockNumber = eventLog.blockNumber as number; + exchangeFillEvent.logIndex = eventLog.logIndex as number; + exchangeFillEvent.rawData = eventLog.data as string; + exchangeFillEvent.transactionHash = eventLog.transactionHash; + exchangeFillEvent.makerAddress = eventLog.args.makerAddress; + exchangeFillEvent.takerAddress = eventLog.args.takerAddress; + exchangeFillEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; + exchangeFillEvent.senderAddress = eventLog.args.senderAddress; + exchangeFillEvent.makerAssetFilledAmount = eventLog.args.makerAssetFilledAmount; + exchangeFillEvent.takerAssetFilledAmount = eventLog.args.takerAssetFilledAmount; + exchangeFillEvent.makerFeePaid = eventLog.args.makerFeePaid; + exchangeFillEvent.takerFeePaid = eventLog.args.takerFeePaid; + exchangeFillEvent.orderHash = eventLog.args.orderHash; + exchangeFillEvent.rawMakerAssetData = eventLog.args.makerAssetData; + exchangeFillEvent.makerAssetType = makerAssetType; + exchangeFillEvent.makerAssetProxyId = makerAssetData.assetProxyId; + exchangeFillEvent.makerTokenAddress = makerAssetData.tokenAddress; + // tslint has a false positive here. Type assertion is required. + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeFillEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); + exchangeFillEvent.rawTakerAssetData = eventLog.args.takerAssetData; + exchangeFillEvent.takerAssetType = takerAssetType; + exchangeFillEvent.takerAssetProxyId = takerAssetData.assetProxyId; + exchangeFillEvent.takerTokenAddress = takerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeFillEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); + return exchangeFillEvent; +} + +/** + * Converts a raw event log for a cancel event into an ExchangeCancelEvent + * entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). + */ +export function _convertToExchangeCancelEvent( + eventLog: LogWithDecodedArgs<ExchangeCancelEventArgs>, +): ExchangeCancelEvent { + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData); + const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData); + const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 
'erc20' : 'erc721'; + const exchangeCancelEvent = new ExchangeCancelEvent(); + exchangeCancelEvent.contractAddress = eventLog.address as string; + exchangeCancelEvent.blockNumber = eventLog.blockNumber as number; + exchangeCancelEvent.logIndex = eventLog.logIndex as number; + exchangeCancelEvent.rawData = eventLog.data as string; + exchangeCancelEvent.transactionHash = eventLog.transactionHash; + exchangeCancelEvent.makerAddress = eventLog.args.makerAddress; + exchangeCancelEvent.takerAddress = eventLog.args.takerAddress; + exchangeCancelEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress; + exchangeCancelEvent.senderAddress = eventLog.args.senderAddress; + exchangeCancelEvent.orderHash = eventLog.args.orderHash; + exchangeCancelEvent.rawMakerAssetData = eventLog.args.makerAssetData; + exchangeCancelEvent.makerAssetType = makerAssetType; + exchangeCancelEvent.makerAssetProxyId = makerAssetData.assetProxyId; + exchangeCancelEvent.makerTokenAddress = makerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeCancelEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); + exchangeCancelEvent.rawTakerAssetData = eventLog.args.takerAssetData; + exchangeCancelEvent.takerAssetType = takerAssetType; + exchangeCancelEvent.takerAssetProxyId = takerAssetData.assetProxyId; + exchangeCancelEvent.takerTokenAddress = takerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + exchangeCancelEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); + return exchangeCancelEvent; +} + +/** + * Converts a raw event log for a cancelUpTo event into an + * ExchangeCancelUpToEvent entity. + * @param eventLog Raw event log (e.g. returned from contract-wrappers). 
+ */ +export function _convertToExchangeCancelUpToEvent( + eventLog: LogWithDecodedArgs<ExchangeCancelUpToEventArgs>, +): ExchangeCancelUpToEvent { + const exchangeCancelUpToEvent = new ExchangeCancelUpToEvent(); + exchangeCancelUpToEvent.contractAddress = eventLog.address as string; + exchangeCancelUpToEvent.blockNumber = eventLog.blockNumber as number; + exchangeCancelUpToEvent.logIndex = eventLog.logIndex as number; + exchangeCancelUpToEvent.rawData = eventLog.data as string; + exchangeCancelUpToEvent.transactionHash = eventLog.transactionHash; + exchangeCancelUpToEvent.makerAddress = eventLog.args.makerAddress; + exchangeCancelUpToEvent.senderAddress = eventLog.args.senderAddress; + exchangeCancelUpToEvent.orderEpoch = eventLog.args.orderEpoch; + return exchangeCancelUpToEvent; +} diff --git a/packages/pipeline/src/parsers/ohlcv_external/crypto_compare.ts b/packages/pipeline/src/parsers/ohlcv_external/crypto_compare.ts new file mode 100644 index 000000000..3efb90384 --- /dev/null +++ b/packages/pipeline/src/parsers/ohlcv_external/crypto_compare.ts @@ -0,0 +1,38 @@ +import { CryptoCompareOHLCVRecord } from '../../data_sources/ohlcv_external/crypto_compare'; +import { OHLCVExternal } from '../../entities'; + +const ONE_SECOND = 1000; // Crypto Compare uses timestamps in seconds instead of milliseconds + +export interface OHLCVMetadata { + exchange: string; + fromSymbol: string; + toSymbol: string; + source: string; + observedTimestamp: number; + interval: number; +} +/** + * Parses OHLCV records from Crypto Compare into an array of OHLCVExternal entities + * @param rawRecords an array of OHLCV records from Crypto Compare (not the full response) + */ +export function parseRecords(rawRecords: CryptoCompareOHLCVRecord[], metadata: OHLCVMetadata): OHLCVExternal[] { + return rawRecords.map(rec => { + const ohlcvRecord = new OHLCVExternal(); + ohlcvRecord.exchange = metadata.exchange; + ohlcvRecord.fromSymbol = metadata.fromSymbol; + ohlcvRecord.toSymbol = metadata.toSymbol; + ohlcvRecord.startTime = rec.time * ONE_SECOND - metadata.interval; + ohlcvRecord.endTime = rec.time * ONE_SECOND; + + ohlcvRecord.open = rec.open; + ohlcvRecord.close = rec.close; + ohlcvRecord.low = rec.low; + ohlcvRecord.high = rec.high; + ohlcvRecord.volumeFrom = rec.volumefrom; + ohlcvRecord.volumeTo = rec.volumeto; + + ohlcvRecord.source = metadata.source; + ohlcvRecord.observedTimestamp = metadata.observedTimestamp; + return ohlcvRecord; + }); +} diff --git a/packages/pipeline/src/parsers/paradex_orders/index.ts b/packages/pipeline/src/parsers/paradex_orders/index.ts new file mode 100644 index 000000000..7966658a7 --- /dev/null +++ b/packages/pipeline/src/parsers/paradex_orders/index.ts @@ -0,0 +1,66 @@ +import { BigNumber } from '@0x/utils'; + +import { ParadexMarket, ParadexOrder, ParadexOrderbookResponse } from '../../data_sources/paradex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../entities'; +import { OrderType } from '../../types'; + +/** + * Marque function of this file. + * 1) Takes in orders from an orderbook (orders are already aggregated by price point), + * 2) For each aggregated order, forms a TokenOrder entity with market data and + * other information attached. + * @param paradexOrderbookResponse An orderbook response from the Paradex API. + * @param paradexMarket An object containing market data also directly from the API. + * @param observedTimestamp Time at which the orders for the market were pulled. + * @param source The exchange where these orders are placed. 
In this case 'paradex'.
+ */
+export function parseParadexOrders(
+ paradexOrderbookResponse: ParadexOrderbookResponse,
+ paradexMarket: ParadexMarket,
+ observedTimestamp: number,
+ source: string,
+): TokenOrder[] {
+ const parsedBids = paradexOrderbookResponse.bids.map(order =>
+ parseParadexOrder(paradexMarket, observedTimestamp, 'bid', source, order),
+ );
+ const parsedAsks = paradexOrderbookResponse.asks.map(order =>
+ parseParadexOrder(paradexMarket, observedTimestamp, 'ask', source, order),
+ );
+ return parsedBids.concat(parsedAsks);
+}
+
+/**
+ * Parse a single aggregated Paradex order in order to form a tokenOrder entity
+ * which can be saved into the database.
+ * @param paradexMarket An object containing information about the market where these
+ * orders have been placed.
+ * @param observedTimestamp The time when the API response was returned to us.
+ * @param orderType 'bid' or 'ask' enum.
+ * @param source Exchange where these orders were placed.
+ * @param paradexOrder A ParadexOrder object; essentially a <price, amount> tuple.
+ */
+export function parseParadexOrder(
+ paradexMarket: ParadexMarket,
+ observedTimestamp: number,
+ orderType: OrderType,
+ source: string,
+ paradexOrder: ParadexOrder,
+): TokenOrder {
+ const tokenOrder = new TokenOrder();
+ const price = new BigNumber(paradexOrder.price);
+ const amount = new BigNumber(paradexOrder.amount);
+
+ tokenOrder.source = source;
+ tokenOrder.observedTimestamp = observedTimestamp;
+ tokenOrder.orderType = orderType;
+ tokenOrder.price = price;
+
+ tokenOrder.baseAssetSymbol = paradexMarket.baseToken;
+ tokenOrder.baseAssetAddress = paradexMarket.baseTokenAddress as string;
+ tokenOrder.baseVolume = price.times(amount);
+
+ tokenOrder.quoteAssetSymbol = paradexMarket.quoteToken;
+ tokenOrder.quoteAssetAddress = paradexMarket.quoteTokenAddress as string;
+ tokenOrder.quoteVolume = amount;
+ return tokenOrder;
+}
diff --git a/packages/pipeline/src/parsers/relayer_registry/index.ts b/packages/pipeline/src/parsers/relayer_registry/index.ts
new file mode 100644
index 000000000..9723880a4
--- /dev/null
+++ b/packages/pipeline/src/parsers/relayer_registry/index.ts
@@ -0,0 +1,37 @@
+import * as R from 'ramda';
+
+import { RelayerResponse, RelayerResponseNetwork } from '../../data_sources/relayer-registry';
+import { Relayer } from '../../entities';
+
+/**
+ * Parses a raw relayer registry response into an array of Relayer entities.
+ * @param rawResp raw response from the relayer-registry json file.
+ */ +export function parseRelayers(rawResp: Map<string, RelayerResponse>): Relayer[] { + const parsedAsObject = R.mapObjIndexed(parseRelayer, rawResp); + return R.values(parsedAsObject); +} + +function parseRelayer(relayerResp: RelayerResponse, uuid: string): Relayer { + const relayer = new Relayer(); + relayer.uuid = uuid; + relayer.name = relayerResp.name; + relayer.homepageUrl = relayerResp.homepage_url; + relayer.appUrl = relayerResp.app_url; + const mainNetworkRelayerInfo = getMainNetwork(relayerResp); + if (mainNetworkRelayerInfo !== undefined) { + relayer.sraHttpEndpoint = mainNetworkRelayerInfo.sra_http_endpoint || null; + relayer.sraWsEndpoint = mainNetworkRelayerInfo.sra_ws_endpoint || null; + relayer.feeRecipientAddresses = + R.path(['static_order_fields', 'fee_recipient_addresses'], mainNetworkRelayerInfo) || []; + relayer.takerAddresses = R.path(['static_order_fields', 'taker_addresses'], mainNetworkRelayerInfo) || []; + } else { + relayer.feeRecipientAddresses = []; + relayer.takerAddresses = []; + } + return relayer; +} + +function getMainNetwork(relayerResp: RelayerResponse): RelayerResponseNetwork | undefined { + return R.find(network => network.networkId === 1, relayerResp.networks); +} diff --git a/packages/pipeline/src/parsers/sra_orders/index.ts b/packages/pipeline/src/parsers/sra_orders/index.ts new file mode 100644 index 000000000..ef8901e40 --- /dev/null +++ b/packages/pipeline/src/parsers/sra_orders/index.ts @@ -0,0 +1,62 @@ +import { APIOrder, OrdersResponse } from '@0x/connect'; +import { assetDataUtils, orderHashUtils } from '@0x/order-utils'; +import { AssetProxyId, ERC721AssetData } from '@0x/types'; +import * as R from 'ramda'; + +import { SraOrder } from '../../entities'; +import { bigNumbertoStringOrNull } from '../../utils'; + +/** + * Parses a raw order response from an SRA endpoint and returns an array of + * SraOrder entities. + * @param rawOrdersResponse A raw order response from an SRA endpoint. + */ +export function parseSraOrders(rawOrdersResponse: OrdersResponse): SraOrder[] { + return R.map(_convertToEntity, rawOrdersResponse.records); +} + +/** + * Converts a single APIOrder into an SraOrder entity. + * @param apiOrder A single order from the response from an SRA endpoint. + */ +export function _convertToEntity(apiOrder: APIOrder): SraOrder { + // TODO(albrow): refactor out common asset data decoding code. + const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(apiOrder.order.makerAssetData); + const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721'; + const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(apiOrder.order.takerAssetData); + const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 
'erc20' : 'erc721'; + + const sraOrder = new SraOrder(); + sraOrder.exchangeAddress = apiOrder.order.exchangeAddress; + sraOrder.orderHashHex = orderHashUtils.getOrderHashHex(apiOrder.order); + + sraOrder.makerAddress = apiOrder.order.makerAddress; + sraOrder.takerAddress = apiOrder.order.takerAddress; + sraOrder.feeRecipientAddress = apiOrder.order.feeRecipientAddress; + sraOrder.senderAddress = apiOrder.order.senderAddress; + sraOrder.makerAssetAmount = apiOrder.order.makerAssetAmount; + sraOrder.takerAssetAmount = apiOrder.order.takerAssetAmount; + sraOrder.makerFee = apiOrder.order.makerFee; + sraOrder.takerFee = apiOrder.order.takerFee; + sraOrder.expirationTimeSeconds = apiOrder.order.expirationTimeSeconds; + sraOrder.salt = apiOrder.order.salt; + sraOrder.signature = apiOrder.order.signature; + + sraOrder.rawMakerAssetData = apiOrder.order.makerAssetData; + sraOrder.makerAssetType = makerAssetType; + sraOrder.makerAssetProxyId = makerAssetData.assetProxyId; + sraOrder.makerTokenAddress = makerAssetData.tokenAddress; + // tslint has a false positive here. Type assertion is required. + // tslint:disable-next-line:no-unnecessary-type-assertion + sraOrder.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId); + sraOrder.rawTakerAssetData = apiOrder.order.takerAssetData; + sraOrder.takerAssetType = takerAssetType; + sraOrder.takerAssetProxyId = takerAssetData.assetProxyId; + sraOrder.takerTokenAddress = takerAssetData.tokenAddress; + // tslint:disable-next-line:no-unnecessary-type-assertion + sraOrder.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId); + + sraOrder.metadataJson = JSON.stringify(apiOrder.metaData); + + return sraOrder; +} diff --git a/packages/pipeline/src/parsers/token_metadata/index.ts b/packages/pipeline/src/parsers/token_metadata/index.ts new file mode 100644 index 000000000..f258af063 --- /dev/null +++ b/packages/pipeline/src/parsers/token_metadata/index.ts @@ -0,0 +1,47 @@ +import { BigNumber } from '@0x/utils'; +import * as R from 'ramda'; + +import { MetamaskTrustedTokenMeta, ZeroExTrustedTokenMeta } from '../../data_sources/trusted_tokens'; +import { TokenMetadata } from '../../entities'; +import {} from '../../utils'; + +/** + * Parses Metamask's trusted tokens list. + * @param rawResp raw response from the metamask json file. + */ +export function parseMetamaskTrustedTokens(rawResp: Map<string, MetamaskTrustedTokenMeta>): TokenMetadata[] { + const parsedAsObject = R.mapObjIndexed(parseMetamaskTrustedToken, rawResp); + return R.values(parsedAsObject); +} + +/** + * Parses 0x's trusted tokens list. + * @param rawResp raw response from the 0x trusted tokens file. + */ +export function parseZeroExTrustedTokens(rawResp: ZeroExTrustedTokenMeta[]): TokenMetadata[] { + return R.map(parseZeroExTrustedToken, rawResp); +} + +function parseMetamaskTrustedToken(resp: MetamaskTrustedTokenMeta, address: string): TokenMetadata { + const trustedToken = new TokenMetadata(); + + trustedToken.address = address; + trustedToken.decimals = new BigNumber(resp.decimals); + trustedToken.symbol = resp.symbol; + trustedToken.name = resp.name; + trustedToken.authority = 'metamask'; + + return trustedToken; +} + +function parseZeroExTrustedToken(resp: ZeroExTrustedTokenMeta): TokenMetadata { + const trustedToken = new TokenMetadata(); + + trustedToken.address = resp.address; + trustedToken.decimals = resp.decimals ? 
new BigNumber(resp.decimals) : null; + trustedToken.symbol = resp.symbol; + trustedToken.name = resp.name; + trustedToken.authority = '0x'; + + return trustedToken; +} diff --git a/packages/pipeline/src/parsers/web3/index.ts b/packages/pipeline/src/parsers/web3/index.ts new file mode 100644 index 000000000..f986efc59 --- /dev/null +++ b/packages/pipeline/src/parsers/web3/index.ts @@ -0,0 +1,49 @@ +import { BigNumber } from '@0x/utils'; +import { BlockWithoutTransactionData, Transaction as EthTransaction } from 'ethereum-types'; + +import { Block, Transaction } from '../../entities'; + +const MILLISECONDS_PER_SECOND = 1000; + +/** + * Parses a raw block and returns a Block entity. + * @param rawBlock a raw block (e.g. returned from web3-wrapper). + */ +export function parseBlock(rawBlock: BlockWithoutTransactionData): Block { + if (rawBlock.hash == null) { + throw new Error('Tried to parse raw block but hash was null'); + } + if (rawBlock.number == null) { + throw new Error('Tried to parse raw block but number was null'); + } + + const block = new Block(); + block.hash = rawBlock.hash; + block.number = rawBlock.number; + // Block timestamps are in seconds, but we use milliseconds everywhere else. + block.timestamp = rawBlock.timestamp * MILLISECONDS_PER_SECOND; + return block; +} + +/** + * Parses a raw transaction and returns a Transaction entity. + * @param rawBlock a raw transaction (e.g. returned from web3-wrapper). + */ +export function parseTransaction(rawTransaction: EthTransaction): Transaction { + if (rawTransaction.blockHash == null) { + throw new Error('Tried to parse raw transaction but blockHash was null'); + } + if (rawTransaction.blockNumber == null) { + throw new Error('Tried to parse raw transaction but blockNumber was null'); + } + + const tx = new Transaction(); + tx.transactionHash = rawTransaction.hash; + tx.blockHash = rawTransaction.blockHash; + tx.blockNumber = rawTransaction.blockNumber; + + tx.gasUsed = new BigNumber(rawTransaction.gas); + tx.gasPrice = rawTransaction.gasPrice; + + return tx; +} diff --git a/packages/pipeline/src/scripts/pull_competing_dex_trades.ts b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts new file mode 100644 index 000000000..4e4c12dd0 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_competing_dex_trades.ts @@ -0,0 +1,51 @@ +// tslint:disable:no-console +import 'reflect-metadata'; +import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; + +import { BloxySource } from '../data_sources/bloxy'; +import { DexTrade } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseBloxyTrades } from '../parsers/bloxy'; +import { handleError } from '../utils'; + +// Number of trades to save at once. +const BATCH_SAVE_SIZE = 1000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + await getAndSaveTrades(); + process.exit(0); +})().catch(handleError); + +async function getAndSaveTrades(): Promise<void> { + const apiKey = process.env.BLOXY_API_KEY; + if (apiKey === undefined) { + throw new Error('Missing required env var: BLOXY_API_KEY'); + } + const bloxySource = new BloxySource(apiKey); + const tradesRepository = connection.getRepository(DexTrade); + const lastSeenTimestamp = await getLastSeenTimestampAsync(tradesRepository); + console.log(`Last seen timestamp: ${lastSeenTimestamp === 0 ? 
'none' : lastSeenTimestamp}`); + console.log('Getting latest dex trades...'); + const rawTrades = await bloxySource.getDexTradesAsync(lastSeenTimestamp); + console.log(`Parsing ${rawTrades.length} trades...`); + const trades = parseBloxyTrades(rawTrades); + console.log(`Saving ${trades.length} trades...`); + await tradesRepository.save(trades, { chunk: Math.ceil(trades.length / BATCH_SAVE_SIZE) }); + console.log('Done saving trades.'); +} + +async function getLastSeenTimestampAsync(tradesRepository: Repository<DexTrade>): Promise<number> { + if ((await tradesRepository.count()) === 0) { + return 0; + } + const response = (await connection.query( + 'SELECT tx_timestamp FROM raw.dex_trades ORDER BY tx_timestamp DESC LIMIT 1', + )) as Array<{ tx_timestamp: number }>; + if (response.length === 0) { + return 0; + } + return response[0].tx_timestamp; +} diff --git a/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts new file mode 100644 index 000000000..7868e9c5a --- /dev/null +++ b/packages/pipeline/src/scripts/pull_ddex_orderbook_snapshots.ts @@ -0,0 +1,55 @@ +import { logUtils } from '@0x/utils'; +import * as R from 'ramda'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import { DDEX_SOURCE, DdexMarket, DdexSource } from '../data_sources/ddex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseDdexOrders } from '../parsers/ddex_orders'; +import { handleError } from '../utils'; + +// Number of orders to save at once. +const BATCH_SAVE_SIZE = 1000; + +// Number of markets to retrieve orderbooks for at once. +const MARKET_ORDERBOOK_REQUEST_BATCH_SIZE = 50; + +// Delay between market orderbook requests. +const MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY = 5000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const ddexSource = new DdexSource(); + const markets = await ddexSource.getActiveMarketsAsync(); + for (const marketsChunk of R.splitEvery(MARKET_ORDERBOOK_REQUEST_BATCH_SIZE, markets)) { + await Promise.all( + marketsChunk.map(async (market: DdexMarket) => getAndSaveMarketOrderbook(ddexSource, market)), + ); + await new Promise<void>(resolve => setTimeout(resolve, MILLISEC_MARKET_ORDERBOOK_REQUEST_DELAY)); + } + process.exit(0); +})().catch(handleError); + +/** + * Retrieve orderbook from Ddex API for a given market. Parse orders and insert + * them into our database. + * @param ddexSource Data source which can query Ddex API. + * @param market Object from Ddex API containing market data. 
+ */ +async function getAndSaveMarketOrderbook(ddexSource: DdexSource, market: DdexMarket): Promise<void> { + const orderBook = await ddexSource.getMarketOrderbookAsync(market.id); + const observedTimestamp = Date.now(); + + logUtils.log(`${market.id}: Parsing orders.`); + const orders = parseDdexOrders(orderBook, market, observedTimestamp, DDEX_SOURCE); + + if (orders.length > 0) { + logUtils.log(`${market.id}: Saving ${orders.length} orders.`); + const TokenOrderRepository = connection.getRepository(TokenOrder); + await TokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) }); + } else { + logUtils.log(`${market.id}: 0 orders to save.`); + } +} diff --git a/packages/pipeline/src/scripts/pull_missing_blocks.ts b/packages/pipeline/src/scripts/pull_missing_blocks.ts new file mode 100644 index 000000000..b7bd51f08 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_missing_blocks.ts @@ -0,0 +1,80 @@ +// tslint:disable:no-console +import { web3Factory } from '@0x/dev-utils'; +import * as Parallel from 'async-parallel'; +import R = require('ramda'); +import 'reflect-metadata'; +import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; + +import { Web3Source } from '../data_sources/web3'; +import { Block } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseBlock } from '../parsers/web3'; +import { EXCHANGE_START_BLOCK, handleError, INFURA_ROOT_URL } from '../utils'; + +// Number of blocks to save at once. +const BATCH_SAVE_SIZE = 1000; +// Maximum number of requests to send at once. +const MAX_CONCURRENCY = 10; +// Maximum number of blocks to query for at once. This is also the maximum +// number of blocks we will hold in memory prior to being saved to the database. +const MAX_BLOCKS_PER_QUERY = 1000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const provider = web3Factory.getRpcProvider({ + rpcUrl: `${INFURA_ROOT_URL}/${process.env.INFURA_API_KEY}`, + }); + const web3Source = new Web3Source(provider); + await getAllMissingBlocks(web3Source); + process.exit(0); +})().catch(handleError); + +interface MissingBlocksResponse { + block_number: string; +} + +async function getAllMissingBlocks(web3Source: Web3Source): Promise<void> { + const blocksRepository = connection.getRepository(Block); + let fromBlock = EXCHANGE_START_BLOCK; + while (true) { + const blockNumbers = await getMissingBlockNumbers(fromBlock); + if (blockNumbers.length === 0) { + // There are no more missing blocks. We're done. + break; + } + await getAndSaveBlocks(web3Source, blocksRepository, blockNumbers); + fromBlock = Math.max(...blockNumbers) + 1; + } + const totalBlocks = await blocksRepository.count(); + console.log(`Done saving blocks. 
There are now ${totalBlocks} total blocks.`); +} + +async function getMissingBlockNumbers(fromBlock: number): Promise<number[]> { + console.log(`Checking for missing blocks starting at ${fromBlock}...`); + const response = (await connection.query( + 'SELECT DISTINCT(block_number) FROM raw.exchange_fill_events WHERE block_number NOT IN (SELECT number FROM raw.blocks) AND block_number >= $1 ORDER BY block_number ASC LIMIT $2', + [fromBlock, MAX_BLOCKS_PER_QUERY], + )) as MissingBlocksResponse[]; + const blockNumberStrings = R.pluck('block_number', response); + const blockNumbers = R.map(parseInt, blockNumberStrings); + console.log(`Found ${blockNumbers.length} missing blocks in the given range.`); + return blockNumbers; +} + +async function getAndSaveBlocks( + web3Source: Web3Source, + blocksRepository: Repository<Block>, + blockNumbers: number[], +): Promise<void> { + console.log(`Getting block data for ${blockNumbers.length} blocks...`); + Parallel.setConcurrency(MAX_CONCURRENCY); + const rawBlocks = await Parallel.map(blockNumbers, async (blockNumber: number) => + web3Source.getBlockInfoAsync(blockNumber), + ); + console.log(`Parsing ${rawBlocks.length} blocks...`); + const blocks = R.map(parseBlock, rawBlocks); + console.log(`Saving ${blocks.length} blocks...`); + await blocksRepository.save(blocks, { chunk: Math.ceil(blocks.length / BATCH_SAVE_SIZE) }); +} diff --git a/packages/pipeline/src/scripts/pull_missing_events.ts b/packages/pipeline/src/scripts/pull_missing_events.ts new file mode 100644 index 000000000..80abbb8b0 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_missing_events.ts @@ -0,0 +1,136 @@ +// tslint:disable:no-console +import { web3Factory } from '@0x/dev-utils'; +import R = require('ramda'); +import 'reflect-metadata'; +import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; + +import { ExchangeEventsSource } from '../data_sources/contract-wrappers/exchange_events'; +import { ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeEvent, ExchangeFillEvent } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseExchangeCancelEvents, parseExchangeCancelUpToEvents, parseExchangeFillEvents } from '../parsers/events'; +import { EXCHANGE_START_BLOCK, handleError, INFURA_ROOT_URL } from '../utils'; + +const START_BLOCK_OFFSET = 100; // Number of blocks before the last known block to consider when updating fill events. +const BATCH_SAVE_SIZE = 1000; // Number of events to save at once. 
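+// Each run resumes START_BLOCK_OFFSET blocks before the last saved event (see getStartBlockAsync
+// below); e.g. a latest saved block of 6500000 yields a start block of 6499900. The overlapping
+// window is reconciled in saveEventsAsync, which falls back to an update when an insert collides
+// with an existing row.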
+ +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const provider = web3Factory.getRpcProvider({ + rpcUrl: INFURA_ROOT_URL, + }); + const eventsSource = new ExchangeEventsSource(provider, 1); + await getFillEventsAsync(eventsSource); + await getCancelEventsAsync(eventsSource); + await getCancelUpToEventsAsync(eventsSource); + process.exit(0); +})().catch(handleError); + +async function getFillEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { + console.log('Checking existing fill events...'); + const repository = connection.getRepository(ExchangeFillEvent); + const startBlock = await getStartBlockAsync(repository); + console.log(`Getting fill events starting at ${startBlock}...`); + const eventLogs = await eventsSource.getFillEventsAsync(startBlock); + console.log('Parsing fill events...'); + const events = parseExchangeFillEvents(eventLogs); + console.log(`Retrieved and parsed ${events.length} total fill events.`); + await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events); +} + +async function getCancelEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { + console.log('Checking existing cancel events...'); + const repository = connection.getRepository(ExchangeCancelEvent); + const startBlock = await getStartBlockAsync(repository); + console.log(`Getting cancel events starting at ${startBlock}...`); + const eventLogs = await eventsSource.getCancelEventsAsync(startBlock); + console.log('Parsing cancel events...'); + const events = parseExchangeCancelEvents(eventLogs); + console.log(`Retrieved and parsed ${events.length} total cancel events.`); + await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events); +} + +async function getCancelUpToEventsAsync(eventsSource: ExchangeEventsSource): Promise<void> { + console.log('Checking existing CancelUpTo events...'); + const repository = connection.getRepository(ExchangeCancelUpToEvent); + const startBlock = await getStartBlockAsync(repository); + console.log(`Getting CancelUpTo events starting at ${startBlock}...`); + const eventLogs = await eventsSource.getCancelUpToEventsAsync(startBlock); + console.log('Parsing CancelUpTo events...'); + const events = parseExchangeCancelUpToEvents(eventLogs); + console.log(`Retrieved and parsed ${events.length} total CancelUpTo events.`); + await saveEventsAsync(startBlock === EXCHANGE_START_BLOCK, repository, events); +} + +const tableNameRegex = /^[a-zA-Z_]*$/; + +async function getStartBlockAsync<T extends ExchangeEvent>(repository: Repository<T>): Promise<number> { + const fillEventCount = await repository.count(); + if (fillEventCount === 0) { + console.log(`No existing ${repository.metadata.name}s found.`); + return EXCHANGE_START_BLOCK; + } + const tableName = repository.metadata.tableName; + if (!tableNameRegex.test(tableName)) { + throw new Error(`Unexpected special character in table name: ${tableName}`); + } + const queryResult = await connection.query( + `SELECT block_number FROM raw.${tableName} ORDER BY block_number DESC LIMIT 1`, + ); + const lastKnownBlock = queryResult[0].block_number; + return lastKnownBlock - START_BLOCK_OFFSET; +} + +async function saveEventsAsync<T extends ExchangeEvent>( + isInitialPull: boolean, + repository: Repository<T>, + events: T[], +): Promise<void> { + console.log(`Saving ${repository.metadata.name}s...`); + if (isInitialPull) { + // Split data into numChunks pieces of maximum size BATCH_SAVE_SIZE + // each. 
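+ // (e.g. R.splitEvery(1000, events) turns 2500 events into chunks of 1000, 1000 and 500.)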
+ for (const eventsBatch of R.splitEvery(BATCH_SAVE_SIZE, events)) { + await repository.insert(eventsBatch); + } + } else { + // If we possibly have some overlap where we need to update some + // existing events, we need to use our workaround/fallback. + await saveIndividuallyWithFallbackAsync(repository, events); + } + const totalEvents = await repository.count(); + console.log(`Done saving events. There are now ${totalEvents} total ${repository.metadata.name}s.`); +} + +async function saveIndividuallyWithFallbackAsync<T extends ExchangeEvent>( + repository: Repository<T>, + events: T[], +): Promise<void> { + // Note(albrow): This is a temporary hack because `save` is not working as + // documented and is causing a foreign key constraint violation. Hopefully + // can remove later because this "poor man's upsert" implementation operates + // on one event at a time and is therefore much slower. + for (const event of events) { + try { + // First try an insert. + await repository.insert(event); + } catch { + // If it fails, assume it was a foreign key constraint error and try + // doing an update instead. + // Note(albrow): Unfortunately the `as any` hack here seems + // required. I can't figure out how to convince the type-checker + // that the criteria and the entity itself are the correct type for + // the given repository. If we can remove the `save` hack then this + // will probably no longer be necessary. + await repository.update( + { + contractAddress: event.contractAddress, + blockNumber: event.blockNumber, + logIndex: event.logIndex, + } as any, + event as any, + ); + } + } +} diff --git a/packages/pipeline/src/scripts/pull_ohlcv_cryptocompare.ts b/packages/pipeline/src/scripts/pull_ohlcv_cryptocompare.ts new file mode 100644 index 000000000..d44eb5cc6 --- /dev/null +++ b/packages/pipeline/src/scripts/pull_ohlcv_cryptocompare.ts @@ -0,0 +1,95 @@ +// tslint:disable:no-console +import { Connection, ConnectionOptions, createConnection, Repository } from 'typeorm'; + +import { CryptoCompareOHLCVSource } from '../data_sources/ohlcv_external/crypto_compare'; +import { OHLCVExternal } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { OHLCVMetadata, parseRecords } from '../parsers/ohlcv_external/crypto_compare'; +import { handleError } from '../utils'; +import { fetchOHLCVTradingPairsAsync, TradingPair } from '../utils/get_ohlcv_trading_pairs'; + +const SOURCE_NAME = 'CryptoCompare'; +const TWO_HOURS_AGO = new Date().getTime() - 2 * 60 * 60 * 1000; // tslint:disable-line:custom-no-magic-numbers + +const MAX_REQS_PER_SECOND = parseInt(process.env.CRYPTOCOMPARE_MAX_REQS_PER_SECOND || '15', 10); // tslint:disable-line:custom-no-magic-numbers +const EARLIEST_BACKFILL_DATE = process.env.OHLCV_EARLIEST_BACKFILL_DATE || '2014-06-01'; +const EARLIEST_BACKFILL_TIME = new Date(EARLIEST_BACKFILL_DATE).getTime(); + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const repository = connection.getRepository(OHLCVExternal); + const source = new CryptoCompareOHLCVSource(MAX_REQS_PER_SECOND); + + const jobTime = new Date().getTime(); + const tradingPairs = await fetchOHLCVTradingPairsAsync(connection, SOURCE_NAME, EARLIEST_BACKFILL_TIME); + console.log(`Starting ${tradingPairs.length} job(s) to scrape Crypto Compare for OHLCV records...`); + + const fetchAndSavePromises = tradingPairs.map(async pair => { + const pairs = source.generateBackfillIntervals(pair); + return fetchAndSaveAsync(source, repository, 
jobTime, pairs); + }); + await Promise.all(fetchAndSavePromises); + console.log(`Finished scraping OHLCV records from Crypto Compare, exiting...`); + process.exit(0); +})().catch(handleError); + +async function fetchAndSaveAsync( + source: CryptoCompareOHLCVSource, + repository: Repository<OHLCVExternal>, + jobTime: number, + pairs: TradingPair[], +): Promise<void> { + const sortAscTimestamp = (a: TradingPair, b: TradingPair): number => { + if (a.latestSavedTime < b.latestSavedTime) { + return -1; + } else if (a.latestSavedTime > b.latestSavedTime) { + return 1; + } else { + return 0; + } + }; + pairs.sort(sortAscTimestamp); + + let i = 0; + while (i < pairs.length) { + const pair = pairs[i]; + if (pair.latestSavedTime > TWO_HOURS_AGO) { + break; + } + try { + const records = await source.getHourlyOHLCVAsync(pair); + console.log(`Retrieved ${records.length} records for ${JSON.stringify(pair)}`); + if (records.length > 0) { + const metadata: OHLCVMetadata = { + exchange: source.defaultExchange, + fromSymbol: pair.fromSymbol, + toSymbol: pair.toSymbol, + source: SOURCE_NAME, + observedTimestamp: jobTime, + interval: source.intervalBetweenRecords, + }; + const parsedRecords = parseRecords(records, metadata); + await saveRecordsAsync(repository, parsedRecords); + } + i++; + } catch (err) { + console.log(`Error scraping OHLCVRecords, stopping task for ${JSON.stringify(pair)} [${err}]`); + break; + } + } + return Promise.resolve(); +} + +async function saveRecordsAsync(repository: Repository<OHLCVExternal>, records: OHLCVExternal[]): Promise<void> { + const metadata = [ + records[0].fromSymbol, + records[0].toSymbol, + new Date(records[0].startTime), + new Date(records[records.length - 1].endTime), + ]; + + console.log(`Saving ${records.length} records to ${repository.metadata.name}... ${JSON.stringify(metadata)}`); + await repository.save(records); +} diff --git a/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts new file mode 100644 index 000000000..bae1fbede --- /dev/null +++ b/packages/pipeline/src/scripts/pull_paradex_orderbook_snapshots.ts @@ -0,0 +1,87 @@ +import { logUtils } from '@0x/utils'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import { + PARADEX_SOURCE, + ParadexActiveMarketsResponse, + ParadexMarket, + ParadexSource, + ParadexTokenInfoResponse, +} from '../data_sources/paradex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseParadexOrders } from '../parsers/paradex_orders'; +import { handleError } from '../utils'; + +// Number of orders to save at once. +const BATCH_SAVE_SIZE = 1000; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + const apiKey = process.env.PARADEX_DATA_PIPELINE_API_KEY; + if (apiKey === undefined) { + throw new Error('Missing required env var: PARADEX_DATA_PIPELINE_API_KEY'); + } + const paradexSource = new ParadexSource(apiKey); + const markets = await paradexSource.getActiveMarketsAsync(); + const tokenInfoResponse = await paradexSource.getTokenInfoAsync(); + const extendedMarkets = addTokenAddresses(markets, tokenInfoResponse); + await Promise.all( + extendedMarkets.map(async (market: ParadexMarket) => getAndSaveMarketOrderbook(paradexSource, market)), + ); + process.exit(0); +})().catch(handleError); + +/** + * Extend the default ParadexMarket objects with token addresses. 
+ * @param markets An array of ParadexMarket objects.
+ * @param tokenInfoResponse An array of ParadexTokenInfo containing the addresses.
+ */
+function addTokenAddresses(
+ markets: ParadexActiveMarketsResponse,
+ tokenInfoResponse: ParadexTokenInfoResponse,
+): ParadexMarket[] {
+ const symbolAddressMapping = new Map<string, string>();
+ tokenInfoResponse.forEach(tokenInfo => symbolAddressMapping.set(tokenInfo.symbol, tokenInfo.address));
+
+ markets.forEach((market: ParadexMarket) => {
+ if (symbolAddressMapping.has(market.baseToken)) {
+ market.baseTokenAddress = symbolAddressMapping.get(market.baseToken);
+ } else {
+ market.baseTokenAddress = '';
+ logUtils.warn(`${market.baseToken}: No address found.`);
+ }
+
+ if (symbolAddressMapping.has(market.quoteToken)) {
+ market.quoteTokenAddress = symbolAddressMapping.get(market.quoteToken);
+ } else {
+ market.quoteTokenAddress = '';
+ logUtils.warn(`${market.quoteToken}: No address found.`);
+ }
+ });
+ return markets;
+}
+
+/**
+ * Retrieve orderbook from Paradex API for a given market. Parse orders and insert
+ * them into our database.
+ * @param paradexSource Data source which can query the Paradex API.
+ * @param market Object from the Paradex API with information about the market in question.
+ */
+async function getAndSaveMarketOrderbook(paradexSource: ParadexSource, market: ParadexMarket): Promise<void> {
+ const paradexOrderbookResponse = await paradexSource.getMarketOrderbookAsync(market.symbol);
+ const observedTimestamp = Date.now();
+
+ logUtils.log(`${market.symbol}: Parsing orders.`);
+ const orders = parseParadexOrders(paradexOrderbookResponse, market, observedTimestamp, PARADEX_SOURCE);
+
+ if (orders.length > 0) {
+ logUtils.log(`${market.symbol}: Saving ${orders.length} orders.`);
+ const tokenOrderRepository = connection.getRepository(TokenOrder);
+ await tokenOrderRepository.save(orders, { chunk: Math.ceil(orders.length / BATCH_SAVE_SIZE) });
+ } else {
+ logUtils.log(`${market.symbol}: 0 orders to save.`);
+ }
+}
diff --git a/packages/pipeline/src/scripts/pull_radar_relay_orders.ts b/packages/pipeline/src/scripts/pull_radar_relay_orders.ts
new file mode 100644
index 000000000..40bb6fc97
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_radar_relay_orders.ts
@@ -0,0 +1,54 @@
+// tslint:disable:no-console
+import { HttpClient } from '@0x/connect';
+import * as R from 'ramda';
+import 'reflect-metadata';
+import { Connection, ConnectionOptions, createConnection, EntityManager } from 'typeorm';
+
+import { createObservedTimestampForOrder, SraOrder } from '../entities';
+import * as ormConfig from '../ormconfig';
+import { parseSraOrders } from '../parsers/sra_orders';
+import { handleError } from '../utils';
+
+const RADAR_RELAY_URL = 'https://api.radarrelay.com/0x/v2';
+const ORDERS_PER_PAGE = 10000; // Number of orders to get per request.
+
+let connection: Connection;
+
+(async () => {
+ connection = await createConnection(ormConfig as ConnectionOptions);
+ await getOrderbookAsync();
+ process.exit(0);
+})().catch(handleError);
+
+async function getOrderbookAsync(): Promise<void> {
+ console.log('Getting all orders...');
+ const connectClient = new HttpClient(RADAR_RELAY_URL);
+ const rawOrders = await connectClient.getOrdersAsync({
+ perPage: ORDERS_PER_PAGE,
+ });
+ console.log(`Got ${rawOrders.records.length} orders.`);
+ console.log('Parsing orders...');
+ // Parse the SRA orders, then add the source URL to each.
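+ // (setSourceUrl is curried, so setSourceUrl(RADAR_RELAY_URL) partially applies the URL and the
+ // R.map step in the pipe below applies the resulting function to every parsed SraOrder.)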
+ const orders = R.pipe(parseSraOrders, R.map(setSourceUrl(RADAR_RELAY_URL)))(rawOrders);
+ // Save all the orders and update the observed time stamps in a single
+ // transaction.
+ console.log('Saving orders and updating timestamps...');
+ const observedTimestamp = Date.now();
+ await connection.transaction(async (manager: EntityManager): Promise<void> => {
+ for (const order of orders) {
+ await manager.save(SraOrder, order);
+ const orderObservation = createObservedTimestampForOrder(order, observedTimestamp);
+ await manager.save(orderObservation);
+ }
+ });
+}
+
+const sourceUrlProp = R.lensProp('sourceUrl');
+
+/**
+ * Sets the source URL for a single order. Returns a new order instead of
+ * mutating the given one.
+ */
+const setSourceUrl = R.curry((sourceURL: string, order: SraOrder): SraOrder => {
+ return R.set(sourceUrlProp, sourceURL, order);
+});
diff --git a/packages/pipeline/src/scripts/pull_trusted_tokens.ts b/packages/pipeline/src/scripts/pull_trusted_tokens.ts
new file mode 100644
index 000000000..1befc4437
--- /dev/null
+++ b/packages/pipeline/src/scripts/pull_trusted_tokens.ts
@@ -0,0 +1,52 @@
+import 'reflect-metadata';
+import { Connection, ConnectionOptions, createConnection } from 'typeorm';
+
+import { MetamaskTrustedTokenMeta, TrustedTokenSource, ZeroExTrustedTokenMeta } from '../data_sources/trusted_tokens';
+import { TokenMetadata } from '../entities';
+import * as ormConfig from '../ormconfig';
+import { parseMetamaskTrustedTokens, parseZeroExTrustedTokens } from '../parsers/token_metadata';
+import { handleError } from '../utils';
+
+const METAMASK_TRUSTED_TOKENS_URL =
+ 'https://raw.githubusercontent.com/MetaMask/eth-contract-metadata/d45916c533116510cc8e9e048a8b5fc3732a6b6d/contract-map.json';
+
+const ZEROEX_TRUSTED_TOKENS_URL = 'https://website-api.0xproject.com/tokens';
+
+let connection: Connection;
+
+(async () => {
+ connection = await createConnection(ormConfig as ConnectionOptions);
+ await getMetamaskTrustedTokens();
+ await getZeroExTrustedTokens();
+ process.exit(0);
+})().catch(handleError);
+
+async function getMetamaskTrustedTokens(): Promise<void> {
+ // tslint:disable-next-line:no-console
+ console.log('Getting latest metamask trusted tokens list ...');
+ const trustedTokensRepository = connection.getRepository(TokenMetadata);
+ const trustedTokensSource = new TrustedTokenSource<Map<string, MetamaskTrustedTokenMeta>>(
+ METAMASK_TRUSTED_TOKENS_URL,
+ );
+ const resp = await trustedTokensSource.getTrustedTokenMetaAsync();
+ const trustedTokens = parseMetamaskTrustedTokens(resp);
+ // tslint:disable-next-line:no-console
+ console.log('Saving metamask trusted tokens list');
+ await trustedTokensRepository.save(trustedTokens);
+ // tslint:disable-next-line:no-console
+ console.log('Done saving metamask trusted tokens.');
+}
+
+async function getZeroExTrustedTokens(): Promise<void> {
+ // tslint:disable-next-line:no-console
+ console.log('Getting latest 0x trusted tokens list ...');
+ const trustedTokensRepository = connection.getRepository(TokenMetadata);
+ const trustedTokensSource = new TrustedTokenSource<ZeroExTrustedTokenMeta[]>(ZEROEX_TRUSTED_TOKENS_URL);
+ const resp = await trustedTokensSource.getTrustedTokenMetaAsync();
+ const trustedTokens = parseZeroExTrustedTokens(resp);
+ // tslint:disable-next-line:no-console
+ console.log('Saving 0x trusted tokens list');
+ await trustedTokensRepository.save(trustedTokens);
+ // tslint:disable-next-line:no-console
+ console.log('Done saving 0x trusted tokens.');
+}
diff --git
a/packages/pipeline/src/scripts/update_relayer_info.ts b/packages/pipeline/src/scripts/update_relayer_info.ts new file mode 100644 index 000000000..f8918728d --- /dev/null +++ b/packages/pipeline/src/scripts/update_relayer_info.ts @@ -0,0 +1,33 @@ +// tslint:disable:no-console +import 'reflect-metadata'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import { RelayerRegistrySource } from '../data_sources/relayer-registry'; +import { Relayer } from '../entities'; +import * as ormConfig from '../ormconfig'; +import { parseRelayers } from '../parsers/relayer_registry'; +import { handleError } from '../utils'; + +// NOTE(albrow): We need to manually update this URL for now. Fix this when we +// have the relayer-registry behind semantic versioning. +const RELAYER_REGISTRY_URL = + 'https://raw.githubusercontent.com/0xProject/0x-relayer-registry/4701c85677d161ea729a466aebbc1826c6aa2c0b/relayers.json'; + +let connection: Connection; + +(async () => { + connection = await createConnection(ormConfig as ConnectionOptions); + await getRelayers(); + process.exit(0); +})().catch(handleError); + +async function getRelayers(): Promise<void> { + console.log('Getting latest relayer info...'); + const relayerRepository = connection.getRepository(Relayer); + const relayerSource = new RelayerRegistrySource(RELAYER_REGISTRY_URL); + const relayersResp = await relayerSource.getRelayerInfoAsync(); + const relayers = parseRelayers(relayersResp); + console.log('Saving relayer info...'); + await relayerRepository.save(relayers); + console.log('Done saving relayer info.'); +} diff --git a/packages/pipeline/src/types.ts b/packages/pipeline/src/types.ts new file mode 100644 index 000000000..e02b42a40 --- /dev/null +++ b/packages/pipeline/src/types.ts @@ -0,0 +1,2 @@ +export type AssetType = 'erc20' | 'erc721'; +export type OrderType = 'bid' | 'ask'; diff --git a/packages/pipeline/src/utils/constants.ts b/packages/pipeline/src/utils/constants.ts new file mode 100644 index 000000000..56f3e82d8 --- /dev/null +++ b/packages/pipeline/src/utils/constants.ts @@ -0,0 +1,3 @@ +// Block number when the Exchange contract was deployed to mainnet. 
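+// The pull_missing_events.ts and pull_missing_blocks.ts scripts above use this as the default lower
+// bound, so nothing earlier than the Exchange deployment is ever queried.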
+export const EXCHANGE_START_BLOCK = 6271590; +export const INFURA_ROOT_URL = 'https://mainnet.infura.io'; diff --git a/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts b/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts new file mode 100644 index 000000000..9d3ef2fba --- /dev/null +++ b/packages/pipeline/src/utils/get_ohlcv_trading_pairs.ts @@ -0,0 +1,92 @@ +import { fetchAsync } from '@0x/utils'; +import * as R from 'ramda'; +import { Connection } from 'typeorm'; + +export interface TradingPair { + fromSymbol: string; + toSymbol: string; + latestSavedTime: number; +} + +const COINLIST_API = 'https://min-api.cryptocompare.com/data/all/coinlist?BuiltOn=7605'; + +interface CryptoCompareCoinListResp { + Data: Map<string, CryptoCompareCoin>; +} + +interface CryptoCompareCoin { + Symbol: string; + BuiltOn: string; + SmartContractAddress: string; +} + +const TO_CURRENCIES = ['USD', 'EUR', 'ETH', 'USDT']; +const ETHEREUM_IDENTIFIER = '7605'; +const HTTP_OK_STATUS = 200; +/** + * Get trading pairs with latest scraped time for OHLCV records + * @param conn a typeorm Connection to postgres + */ +export async function fetchOHLCVTradingPairsAsync( + conn: Connection, + source: string, + earliestBackfillTime: number, +): Promise<TradingPair[]> { + // fetch existing ohlcv records + const latestTradingPairs: Array<{ + from_symbol: string; + to_symbol: string; + latest: string; + }> = await conn.query(`SELECT + MAX(end_time) as latest, + from_symbol, + to_symbol + FROM raw.ohlcv_external + GROUP BY from_symbol, to_symbol;`); + + const latestTradingPairsIndex: { [fromSym: string]: { [toSym: string]: number } } = {}; + latestTradingPairs.forEach(pair => { + const latestIndex: { [toSym: string]: number } = latestTradingPairsIndex[pair.from_symbol] || {}; + latestIndex[pair.to_symbol] = parseInt(pair.latest, 10); // tslint:disable-line:custom-no-magic-numbers + latestTradingPairsIndex[pair.from_symbol] = latestIndex; + }); + + // get token symbols used by Crypto Compare + const allCoinsResp = await fetchAsync(COINLIST_API); + if (allCoinsResp.status !== HTTP_OK_STATUS) { + return []; + } + const allCoins: CryptoCompareCoinListResp = await allCoinsResp.json(); + const erc20CoinsIndex: Map<string, string> = new Map(); + Object.entries(allCoins.Data).forEach(pair => { + const [symbol, coinData] = pair; + if (coinData.BuiltOn === ETHEREUM_IDENTIFIER && coinData.SmartContractAddress !== 'N/A') { + erc20CoinsIndex.set(coinData.SmartContractAddress.toLowerCase(), symbol); + } + }); + + // fetch all tokens that are traded on 0x + const rawTokenAddresses: Array<{ tokenaddress: string }> = await conn.query( + `SELECT DISTINCT(maker_token_address) as tokenaddress FROM raw.exchange_fill_events UNION + SELECT DISTINCT(taker_token_address) as tokenaddress FROM raw.exchange_fill_events`, + ); + const tokenAddresses = R.pluck('tokenaddress', rawTokenAddresses); + + // join token addresses with CC symbols + const allTokenSymbols: string[] = tokenAddresses + .map(tokenAddress => erc20CoinsIndex.get(tokenAddress.toLowerCase()) || '') + .filter(x => x); + + // generate list of all tokens with time of latest existing record OR default earliest time + const allTradingPairCombinations: TradingPair[] = R.chain(sym => { + return TO_CURRENCIES.map(fiat => { + return { + fromSymbol: sym, + toSymbol: fiat, + latestSavedTime: R.path<number>([sym, fiat], latestTradingPairsIndex) || earliestBackfillTime, + }; + }); + }, allTokenSymbols); + + return allTradingPairCombinations; +} diff --git a/packages/pipeline/src/utils/index.ts 
b/packages/pipeline/src/utils/index.ts new file mode 100644 index 000000000..2096a0a39 --- /dev/null +++ b/packages/pipeline/src/utils/index.ts @@ -0,0 +1,38 @@ +import { BigNumber } from '@0x/utils'; +export * from './transformers'; +export * from './constants'; + +/** + * If the given BigNumber is not null, returns the string representation of that + * number. Otherwise, returns null. + * @param n The number to convert. + */ +export function bigNumbertoStringOrNull(n: BigNumber): string | null { + if (n == null) { + return null; + } + return n.toString(); +} + +/** + * Logs an error by intelligently checking for `message` and `stack` properties. + * Intended for use with top-level immediately invoked asynchronous functions. + * @param e the error to log. + */ +export function handleError(e: any): void { + if (e.message != null) { + // tslint:disable-next-line:no-console + console.error(e.message); + } else { + // tslint:disable-next-line:no-console + console.error('Unknown error'); + } + if (e.stack != null) { + // tslint:disable-next-line:no-console + console.error(e.stack); + } else { + // tslint:disable-next-line:no-console + console.error('(No stack trace)'); + } + process.exit(1); +} diff --git a/packages/pipeline/src/utils/transformers/big_number.ts b/packages/pipeline/src/utils/transformers/big_number.ts new file mode 100644 index 000000000..5f2e4d565 --- /dev/null +++ b/packages/pipeline/src/utils/transformers/big_number.ts @@ -0,0 +1,16 @@ +import { BigNumber } from '@0x/utils'; +import { ValueTransformer } from 'typeorm/decorator/options/ValueTransformer'; + +export class BigNumberTransformer implements ValueTransformer { + // tslint:disable-next-line:prefer-function-over-method + public to(value: BigNumber | null): string | null { + return value === null ? null : value.toString(); + } + + // tslint:disable-next-line:prefer-function-over-method + public from(value: string | null): BigNumber | null { + return value === null ? null : new BigNumber(value); + } +} + +export const bigNumberTransformer = new BigNumberTransformer(); diff --git a/packages/pipeline/src/utils/transformers/index.ts b/packages/pipeline/src/utils/transformers/index.ts new file mode 100644 index 000000000..232c1c5de --- /dev/null +++ b/packages/pipeline/src/utils/transformers/index.ts @@ -0,0 +1,2 @@ +export * from './big_number'; +export * from './number_to_bigint'; diff --git a/packages/pipeline/src/utils/transformers/number_to_bigint.ts b/packages/pipeline/src/utils/transformers/number_to_bigint.ts new file mode 100644 index 000000000..85560c1f0 --- /dev/null +++ b/packages/pipeline/src/utils/transformers/number_to_bigint.ts @@ -0,0 +1,27 @@ +import { BigNumber } from '@0x/utils'; +import { ValueTransformer } from 'typeorm/decorator/options/ValueTransformer'; + +const decimalRadix = 10; + +// Can be used to convert a JavaScript number type to a Postgres bigint type and +// vice versa. By default TypeORM will silently convert number types to string +// if the corresponding Postgres type is bigint. See +// https://github.com/typeorm/typeorm/issues/2400 for more information. 
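+// Illustrative usage (assumed, not shown in this diff): attach the transformer through an entity's
+// column options, e.g.
+//   @Column({ name: 'block_number', type: 'bigint', transformer: numberToBigIntTransformer })
+//   public blockNumber!: number;
+// so that bigint columns surface as JavaScript numbers rather than strings.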
+export class NumberToBigIntTransformer implements ValueTransformer { + // tslint:disable-next-line:prefer-function-over-method + public to(value: number): string { + return value.toString(); + } + + // tslint:disable-next-line:prefer-function-over-method + public from(value: string): number { + if (new BigNumber(value).greaterThan(Number.MAX_SAFE_INTEGER)) { + throw new Error( + `Attempted to convert PostgreSQL bigint value (${value}) to JavaScript number type but it is too big to safely convert`, + ); + } + return Number.parseInt(value, decimalRadix); + } +} + +export const numberToBigIntTransformer = new NumberToBigIntTransformer(); diff --git a/packages/pipeline/test/data_sources/ohlcv_external/crypto_compare_test.ts b/packages/pipeline/test/data_sources/ohlcv_external/crypto_compare_test.ts new file mode 100644 index 000000000..2efe3f5ec --- /dev/null +++ b/packages/pipeline/test/data_sources/ohlcv_external/crypto_compare_test.ts @@ -0,0 +1,47 @@ +import * as chai from 'chai'; +import 'mocha'; +import * as R from 'ramda'; + +import { CryptoCompareOHLCVSource } from '../../../src/data_sources/ohlcv_external/crypto_compare'; +import { TradingPair } from '../../../src/utils/get_ohlcv_trading_pairs'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('ohlcv_external data source (Crypto Compare)', () => { + describe('generateBackfillIntervals', () => { + it('generates pairs with intervals to query', () => { + const source = new CryptoCompareOHLCVSource(20); + const pair: TradingPair = { + fromSymbol: 'ETH', + toSymbol: 'ZRX', + latestSavedTime: new Date().getTime() - source.interval * 2, + }; + + const expected = [ + pair, + R.merge(pair, { latestSavedTime: pair.latestSavedTime + source.interval }), + R.merge(pair, { latestSavedTime: pair.latestSavedTime + source.interval * 2 }), + ]; + + const actual = source.generateBackfillIntervals(pair); + expect(actual).deep.equal(expected); + }); + + it('returns single pair if no backfill is needed', () => { + const source = new CryptoCompareOHLCVSource(20); + const pair: TradingPair = { + fromSymbol: 'ETH', + toSymbol: 'ZRX', + latestSavedTime: new Date().getTime() - source.interval + 5000, + }; + + const expected = [pair]; + + const actual = source.generateBackfillIntervals(pair); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/db_global_hooks.ts b/packages/pipeline/test/db_global_hooks.ts new file mode 100644 index 000000000..dfee02c45 --- /dev/null +++ b/packages/pipeline/test/db_global_hooks.ts @@ -0,0 +1,9 @@ +import { setUpDbAsync, tearDownDbAsync } from './db_setup'; + +before('set up database', async () => { + await setUpDbAsync(); +}); + +after('tear down database', async () => { + await tearDownDbAsync(); +}); diff --git a/packages/pipeline/test/db_setup.ts b/packages/pipeline/test/db_setup.ts new file mode 100644 index 000000000..bf31d15b6 --- /dev/null +++ b/packages/pipeline/test/db_setup.ts @@ -0,0 +1,174 @@ +import * as Docker from 'dockerode'; +import * as fs from 'fs'; +import * as R from 'ramda'; +import { Connection, ConnectionOptions, createConnection } from 'typeorm'; + +import * as ormConfig from '../src/ormconfig'; + +// The name of the image to pull and use for the container. This also affects +// which version of Postgres we use. +const DOCKER_IMAGE_NAME = 'postgres:11-alpine'; +// The name to use for the Docker container which will run Postgres. 
+const DOCKER_CONTAINER_NAME = '0x_pipeline_postgres_test'; +// The port which will be exposed on the Docker container. +const POSTGRES_HOST_PORT = '15432'; +// Number of milliseconds to wait for postgres to finish initializing after +// starting the docker container. +const POSTGRES_SETUP_DELAY_MS = 5000; + +/** + * Sets up the database for testing purposes. If the + * ZEROEX_DATA_PIPELINE_TEST_DB_URL env var is specified, it will create a + * connection using that url. Otherwise it will spin up a new Docker container + * with a Postgres database and then create a connection to that database. + */ +export async function setUpDbAsync(): Promise<void> { + const connection = await createDbConnectionOnceAsync(); + await connection.runMigrations({ transaction: true }); +} + +/** + * Tears down the database used for testing. This completely destroys any data. + * If a docker container was created, it destroys that container too. + */ +export async function tearDownDbAsync(): Promise<void> { + const connection = await createDbConnectionOnceAsync(); + for (const _ of connection.migrations) { + await connection.undoLastMigration({ transaction: true }); + } + if (needsDocker()) { + const docker = initDockerOnce(); + const postgresContainer = docker.getContainer(DOCKER_CONTAINER_NAME); + await postgresContainer.kill(); + await postgresContainer.remove(); + } +} + +let savedConnection: Connection; + +/** + * The first time this is run, it creates and returns a new TypeORM connection. + * Each subsequent time, it returns the existing connection. This is helpful + * because only one TypeORM connection can be active at a time. + */ +export async function createDbConnectionOnceAsync(): Promise<Connection> { + if (savedConnection !== undefined) { + return savedConnection; + } + + if (needsDocker()) { + await initContainerAsync(); + } + const testDbUrl = + process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL || + `postgresql://postgres@localhost:${POSTGRES_HOST_PORT}/postgres`; + const testOrmConfig = R.merge(ormConfig, { url: testDbUrl }) as ConnectionOptions; + + savedConnection = await createConnection(testOrmConfig); + return savedConnection; +} + +async function sleepAsync(ms: number): Promise<{}> { + return new Promise<{}>(resolve => setTimeout(resolve, ms)); +} + +let savedDocker: Docker; + +function initDockerOnce(): Docker { + if (savedDocker !== undefined) { + return savedDocker; + } + + // Note(albrow): Code for determining the right socket path is partially + // based on https://github.com/apocas/dockerode/blob/8f3aa85311fab64d58eca08fef49aa1da5b5f60b/test/spec_helper.js + const isWin = require('os').type() === 'Windows_NT'; + const socketPath = process.env.DOCKER_SOCKET || (isWin ? '//./pipe/docker_engine' : '/var/run/docker.sock'); + const isSocket = fs.existsSync(socketPath) ? fs.statSync(socketPath).isSocket() : false; + if (!isSocket) { + throw new Error(`Failed to connect to Docker using socket path: "${socketPath}". + +The database integration tests need to be able to connect to a Postgres database. Make sure that Docker is running and accessible at the expected socket path. 
If Docker isn't working you have two options: + + 1) Set the DOCKER_SOCKET environment variable to a socket path that can be used to connect to Docker or + 2) Set the ZEROEX_DATA_PIPELINE_TEST_DB_URL environment variable to connect directly to an existing Postgres database instead of trying to start Postgres via Docker +`); + } + savedDocker = new Docker({ + socketPath, + }); + return savedDocker; +} + +// Creates the container, waits for it to initialize, and returns it. +async function initContainerAsync(): Promise<Docker.Container> { + const docker = initDockerOnce(); + + // Tear down any existing containers with the same name. + await tearDownExistingContainerIfAnyAsync(); + + // Pull the image we need. + await pullImageAsync(docker, DOCKER_IMAGE_NAME); + + // Create the container. + const postgresContainer = await docker.createContainer({ + name: DOCKER_CONTAINER_NAME, + Image: DOCKER_IMAGE_NAME, + ExposedPorts: { + '5432': {}, + }, + HostConfig: { + PortBindings: { + '5432': [ + { + HostPort: POSTGRES_HOST_PORT, + }, + ], + }, + }, + }); + await postgresContainer.start(); + await sleepAsync(POSTGRES_SETUP_DELAY_MS); + return postgresContainer; +} + +async function tearDownExistingContainerIfAnyAsync(): Promise<void> { + const docker = initDockerOnce(); + + // Check if a container with the desired name already exists. If so, this + // probably means we didn't clean up properly on the last test run. + const existingContainer = docker.getContainer(DOCKER_CONTAINER_NAME); + if (existingContainer != null) { + try { + await existingContainer.kill(); + } catch { + // If this fails, it's fine. The container was probably already + // killed. + } + try { + await existingContainer.remove(); + } catch { + // If this fails, it's fine. The container was probably already + // removed. 
+ } + } +} + +function needsDocker(): boolean { + return process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL === undefined; +} + +// Note(albrow): This is partially based on +// https://stackoverflow.com/questions/38258263/how-do-i-wait-for-a-pull +async function pullImageAsync(docker: Docker, imageName: string): Promise<void> { + return new Promise<void>((resolve, reject) => { + docker.pull(imageName, {}, (err, stream) => { + if (err != null) { + reject(err); + return; + } + docker.modem.followProgress(stream, () => { + resolve(); + }); + }); + }); +} diff --git a/packages/pipeline/test/entities/block_test.ts b/packages/pipeline/test/entities/block_test.ts new file mode 100644 index 000000000..503f284f0 --- /dev/null +++ b/packages/pipeline/test/entities/block_test.ts @@ -0,0 +1,23 @@ +import 'mocha'; +import 'reflect-metadata'; + +import { Block } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +// tslint:disable:custom-no-magic-numbers +describe('Block entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const block = new Block(); + block.hash = '0x12345'; + block.number = 1234567; + block.timestamp = 5432154321; + const blocksRepository = connection.getRepository(Block); + await testSaveAndFindEntityAsync(blocksRepository, block); + }); +}); diff --git a/packages/pipeline/test/entities/dex_trades_test.ts b/packages/pipeline/test/entities/dex_trades_test.ts new file mode 100644 index 000000000..83aaeec8f --- /dev/null +++ b/packages/pipeline/test/entities/dex_trades_test.ts @@ -0,0 +1,60 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import * as R from 'ramda'; +import 'reflect-metadata'; + +import { DexTrade } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const baseTrade = { + sourceUrl: 'https://bloxy.info/api/dex/trades', + txTimestamp: 1543447585938, + txDate: '2018-11-21', + txSender: '0x00923b9a074762b93650716333b3e1473a15048e', + smartContractId: 7091917, + smartContractAddress: '0x818e6fecd516ecc3849daf6845e3ec868087b755', + contractType: 'DEX/Kyber Network Proxy', + maker: '0xbf2179859fc6d5bee9bf9158632dc51678a4100c', + taker: '0xbf2179859fc6d5bee9bf9158632dc51678a4100d', + amountBuy: new BigNumber('1.011943163078103'), + makerFeeAmount: new BigNumber(0), + buyCurrencyId: 1, + buySymbol: 'ETH', + amountSell: new BigNumber('941.4997928436911'), + takerFeeAmount: new BigNumber(0), + sellCurrencyId: 16610, + sellSymbol: 'ELF', + makerAnnotation: '', + takerAnnotation: '', + protocol: 'Kyber Network Proxy', + sellAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100e', +}; + +const tradeWithNullAddresses: DexTrade = R.merge(baseTrade, { + txHash: '0xb93a7faf92efbbb5405c9a73cd4efd99702fe27c03ff22baee1f1b1e37b3a0bf', + buyAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100e', + sellAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100f', +}); + +const tradeWithNonNullAddresses: DexTrade = R.merge(baseTrade, { + txHash: '0xb93a7faf92efbbb5405c9a73cd4efd99702fe27c03ff22baee1f1b1e37b3a0be', + buyAddress: null, + sellAddress: null, +}); + +// tslint:disable:custom-no-magic-numbers +describe('DexTrade entity', () => { + it('save/find', async () => { + const connection = await 
createDbConnectionOnceAsync(); + const trades = [tradeWithNullAddresses, tradeWithNonNullAddresses]; + const tradesRepository = connection.getRepository(DexTrade); + for (const trade of trades) { + await testSaveAndFindEntityAsync(tradesRepository, trade); + } + }); +}); diff --git a/packages/pipeline/test/entities/exchange_cancel_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_event_test.ts new file mode 100644 index 000000000..f3b306d69 --- /dev/null +++ b/packages/pipeline/test/entities/exchange_cancel_event_test.ts @@ -0,0 +1,57 @@ +import 'mocha'; +import * as R from 'ramda'; +import 'reflect-metadata'; + +import { ExchangeCancelEvent } from '../../src/entities'; +import { AssetType } from '../../src/types'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const baseCancelEvent = { + contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b', + logIndex: 1234, + blockNumber: 6276262, + rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428', + transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe', + makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd', + senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a', + rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + makerAssetProxyId: '0xf47261b0', + makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498', + takerAssetProxyId: '0xf47261b0', + takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498', +}; + +const erc20CancelEvent = R.merge(baseCancelEvent, { + makerAssetType: 'erc20' as AssetType, + makerTokenId: null, + takerAssetType: 'erc20' as AssetType, + takerTokenId: null, +}); + +const erc721CancelEvent = R.merge(baseCancelEvent, { + makerAssetType: 'erc721' as AssetType, + makerTokenId: '19378573', + takerAssetType: 'erc721' as AssetType, + takerTokenId: '63885673888', +}); + +// tslint:disable:custom-no-magic-numbers +describe('ExchangeCancelEvent entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const events = [erc20CancelEvent, erc721CancelEvent]; + const cancelEventRepository = connection.getRepository(ExchangeCancelEvent); + for (const event of events) { + await testSaveAndFindEntityAsync(cancelEventRepository, event); + } + }); +}); diff --git a/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts new file mode 100644 index 000000000..aa34f8c1c --- /dev/null +++ b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts @@ -0,0 +1,29 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import 'reflect-metadata'; + +import { ExchangeCancelUpToEvent } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +// tslint:disable:custom-no-magic-numbers +describe('ExchangeCancelUpToEvent entity', () => { + 
it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const cancelUpToEventRepository = connection.getRepository(ExchangeCancelUpToEvent); + const cancelUpToEvent = new ExchangeCancelUpToEvent(); + cancelUpToEvent.blockNumber = 6276262; + cancelUpToEvent.contractAddress = '0x4f833a24e1f95d70f028921e27040ca56e09ab0b'; + cancelUpToEvent.logIndex = 42; + cancelUpToEvent.makerAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428'; + cancelUpToEvent.orderEpoch = new BigNumber('123456789123456789'); + cancelUpToEvent.rawData = '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428'; + cancelUpToEvent.senderAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428'; + cancelUpToEvent.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe'; + await testSaveAndFindEntityAsync(cancelUpToEventRepository, cancelUpToEvent); + }); +}); diff --git a/packages/pipeline/test/entities/exchange_fill_event_test.ts b/packages/pipeline/test/entities/exchange_fill_event_test.ts new file mode 100644 index 000000000..b2cb8c5e0 --- /dev/null +++ b/packages/pipeline/test/entities/exchange_fill_event_test.ts @@ -0,0 +1,62 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import * as R from 'ramda'; +import 'reflect-metadata'; + +import { ExchangeFillEvent } from '../../src/entities'; +import { AssetType } from '../../src/types'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const baseFillEvent = { + contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b', + blockNumber: 6276262, + logIndex: 102, + rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428', + transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe', + makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd', + senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + makerAssetFilledAmount: new BigNumber('10000000000000000'), + takerAssetFilledAmount: new BigNumber('100000000000000000'), + makerFeePaid: new BigNumber('0'), + takerFeePaid: new BigNumber('12345'), + orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a', + rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + makerAssetProxyId: '0xf47261b0', + makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498', + takerAssetProxyId: '0xf47261b0', + takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498', +}; + +const erc20FillEvent = R.merge(baseFillEvent, { + makerAssetType: 'erc20' as AssetType, + makerTokenId: null, + takerAssetType: 'erc20' as AssetType, + takerTokenId: null, +}); + +const erc721FillEvent = R.merge(baseFillEvent, { + makerAssetType: 'erc721' as AssetType, + makerTokenId: '19378573', + takerAssetType: 'erc721' as AssetType, + takerTokenId: '63885673888', +}); + +// tslint:disable:custom-no-magic-numbers +describe('ExchangeFillEvent entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const events = [erc20FillEvent, erc721FillEvent]; + const fillEventsRepository = connection.getRepository(ExchangeFillEvent); + for (const 
event of events) { + await testSaveAndFindEntityAsync(fillEventsRepository, event); + } + }); +}); diff --git a/packages/pipeline/test/entities/ohlcv_external_test.ts b/packages/pipeline/test/entities/ohlcv_external_test.ts new file mode 100644 index 000000000..8b995db50 --- /dev/null +++ b/packages/pipeline/test/entities/ohlcv_external_test.ts @@ -0,0 +1,35 @@ +import 'mocha'; +import 'reflect-metadata'; + +import { OHLCVExternal } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const ohlcvExternal: OHLCVExternal = { + exchange: 'CCCAGG', + fromSymbol: 'ETH', + toSymbol: 'ZRX', + startTime: 1543352400000, + endTime: 1543356000000, + open: 307.41, + close: 310.08, + low: 304.6, + high: 310.27, + volumeFrom: 904.6, + volumeTo: 278238.5, + source: 'Crypto Compare', + observedTimestamp: 1543442338074, +}; + +// tslint:disable:custom-no-magic-numbers +describe('OHLCVExternal entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const repository = connection.getRepository(OHLCVExternal); + await testSaveAndFindEntityAsync(repository, ohlcvExternal); + }); +}); diff --git a/packages/pipeline/test/entities/relayer_test.ts b/packages/pipeline/test/entities/relayer_test.ts new file mode 100644 index 000000000..760ffb6f9 --- /dev/null +++ b/packages/pipeline/test/entities/relayer_test.ts @@ -0,0 +1,55 @@ +import 'mocha'; +import * as R from 'ramda'; +import 'reflect-metadata'; + +import { Relayer } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const baseRelayer = { + uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc716', + name: 'Radar Relay', + homepageUrl: 'https://radarrelay.com', + appUrl: null, + sraHttpEndpoint: null, + sraWsEndpoint: null, + feeRecipientAddresses: [], + takerAddresses: [], +}; + +const relayerWithUrls = R.merge(baseRelayer, { + uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc717', + appUrl: 'https://app.radarrelay.com', + sraHttpEndpoint: 'https://api.radarrelay.com/0x/v2/', + sraWsEndpoint: 'wss://ws.radarrelay.com/0x/v2', +}); + +const relayerWithAddresses = R.merge(baseRelayer, { + uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc718', + feeRecipientAddresses: [ + '0xa258b39954cef5cb142fd567a46cddb31a670124', + '0xa258b39954cef5cb142fd567a46cddb31a670125', + '0xa258b39954cef5cb142fd567a46cddb31a670126', + ], + takerAddresses: [ + '0xa258b39954cef5cb142fd567a46cddb31a670127', + '0xa258b39954cef5cb142fd567a46cddb31a670128', + '0xa258b39954cef5cb142fd567a46cddb31a670129', + ], +}); + +// tslint:disable:custom-no-magic-numbers +describe('Relayer entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const relayers = [baseRelayer, relayerWithUrls, relayerWithAddresses]; + const relayerRepository = connection.getRepository(Relayer); + for (const relayer of relayers) { + await testSaveAndFindEntityAsync(relayerRepository, relayer); + } + }); +}); diff --git a/packages/pipeline/test/entities/sra_order_test.ts b/packages/pipeline/test/entities/sra_order_test.ts new file mode 100644 index 000000000..c43de8ce8 --- /dev/null +++ b/packages/pipeline/test/entities/sra_order_test.ts @@ -0,0 +1,84 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import 
* as R from 'ramda'; +import 'reflect-metadata'; +import { Repository } from 'typeorm'; + +import { SraOrder, SraOrdersObservedTimeStamp } from '../../src/entities'; +import { AssetType } from '../../src/types'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const baseOrder = { + sourceUrl: 'https://api.radarrelay.com/0x/v2', + exchangeAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b', + makerAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81', + takerAddress: '0x0000000000000000000000000000000000000000', + feeRecipientAddress: '0xa258b39954cef5cb142fd567a46cddb31a670124', + senderAddress: '0x0000000000000000000000000000000000000000', + makerAssetAmount: new BigNumber('1619310371000000000'), + takerAssetAmount: new BigNumber('8178335207070707070707'), + makerFee: new BigNumber('100'), + takerFee: new BigNumber('200'), + expirationTimeSeconds: new BigNumber('1538529488'), + salt: new BigNumber('1537924688891'), + signature: '0x1b5a5d672b0d647b5797387ccbb89d8', + rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + makerAssetProxyId: '0xf47261b0', + makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + rawTakerAssetData: '0xf47261b000000000000000000000000042d6622dece394b54999fbd73d108123806f6a18', + takerAssetProxyId: '0xf47261b0', + takerTokenAddress: '0x42d6622dece394b54999fbd73d108123806f6a18', + metadataJson: '{"isThisArbitraryData":true,"powerLevel":9001}', +}; + +const erc20Order = R.merge(baseOrder, { + orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1c9', + makerAssetType: 'erc20' as AssetType, + makerTokenId: null, + takerAssetType: 'erc20' as AssetType, + takerTokenId: null, +}); + +const erc721Order = R.merge(baseOrder, { + orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1d0', + makerAssetType: 'erc721' as AssetType, + makerTokenId: '19378573', + takerAssetType: 'erc721' as AssetType, + takerTokenId: '63885673888', +}); + +// tslint:disable:custom-no-magic-numbers +describe('SraOrder and SraOrdersObservedTimeStamp entities', () => { + // Note(albrow): SraOrder and SraOrdersObservedTimeStamp are tightly coupled + // and timestamps have a foreign key constraint such that they have to point + // to an existing SraOrder. For these reasons, we are testing them together + // in the same test. 
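// Because of that foreign-key constraint, the helper below (testOrderWithTimestampAsync) saves the SraOrder first and only afterwards the SraOrdersObservedTimeStamp row that points to it.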
+ it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const orderRepository = connection.getRepository(SraOrder); + const timestampRepository = connection.getRepository(SraOrdersObservedTimeStamp); + const orders = [erc20Order, erc721Order]; + for (const order of orders) { + await testOrderWithTimestampAsync(orderRepository, timestampRepository, order); + } + }); +}); + +async function testOrderWithTimestampAsync( + orderRepository: Repository<SraOrder>, + timestampRepository: Repository<SraOrdersObservedTimeStamp>, + order: SraOrder, +): Promise<void> { + await testSaveAndFindEntityAsync(orderRepository, order); + const timestamp = new SraOrdersObservedTimeStamp(); + timestamp.exchangeAddress = order.exchangeAddress; + timestamp.orderHashHex = order.orderHashHex; + timestamp.sourceUrl = order.sourceUrl; + timestamp.observedTimestamp = 1543377376153; + await testSaveAndFindEntityAsync(timestampRepository, timestamp); +} diff --git a/packages/pipeline/test/entities/token_metadata_test.ts b/packages/pipeline/test/entities/token_metadata_test.ts new file mode 100644 index 000000000..48e656644 --- /dev/null +++ b/packages/pipeline/test/entities/token_metadata_test.ts @@ -0,0 +1,39 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import 'reflect-metadata'; + +import { TokenMetadata } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const metadataWithoutNullFields: TokenMetadata = { + address: '0xe41d2489571d322189246dafa5ebde1f4699f498', + authority: 'https://website-api.0xproject.com/tokens', + decimals: new BigNumber(18), + symbol: 'ZRX', + name: '0x', +}; + +const metadataWithNullFields: TokenMetadata = { + address: '0xe41d2489571d322189246dafa5ebde1f4699f499', + authority: 'https://website-api.0xproject.com/tokens', + decimals: null, + symbol: null, + name: null, +}; + +// tslint:disable:custom-no-magic-numbers +describe('TokenMetadata entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const tokenMetadata = [metadataWithoutNullFields, metadataWithNullFields]; + const tokenMetadataRepository = connection.getRepository(TokenMetadata); + for (const tokenMetadatum of tokenMetadata) { + await testSaveAndFindEntityAsync(tokenMetadataRepository, tokenMetadatum); + } + }); +}); diff --git a/packages/pipeline/test/entities/token_order_test.ts b/packages/pipeline/test/entities/token_order_test.ts new file mode 100644 index 000000000..c6057f5aa --- /dev/null +++ b/packages/pipeline/test/entities/token_order_test.ts @@ -0,0 +1,31 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; + +import { TokenOrderbookSnapshot } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +const tokenOrderbookSnapshot: TokenOrderbookSnapshot = { + source: 'ddextest', + observedTimestamp: Date.now(), + orderType: 'bid', + price: new BigNumber(10.1), + baseAssetSymbol: 'ETH', + baseAssetAddress: '0x818e6fecd516ecc3849daf6845e3ec868087b755', + baseVolume: new BigNumber(143), + quoteAssetSymbol: 'ABC', + quoteAssetAddress: '0x00923b9a074762b93650716333b3e1473a15048e', + quoteVolume: new BigNumber(12.3234234), +}; + +describe('TokenOrderbookSnapshot entity', () => { + 
it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const tokenOrderbookSnapshotRepository = connection.getRepository(TokenOrderbookSnapshot); + await testSaveAndFindEntityAsync(tokenOrderbookSnapshotRepository, tokenOrderbookSnapshot); + }); +}); diff --git a/packages/pipeline/test/entities/transaction_test.ts b/packages/pipeline/test/entities/transaction_test.ts new file mode 100644 index 000000000..634844544 --- /dev/null +++ b/packages/pipeline/test/entities/transaction_test.ts @@ -0,0 +1,26 @@ +import { BigNumber } from '@0x/utils'; +import 'mocha'; +import 'reflect-metadata'; + +import { Transaction } from '../../src/entities'; +import { createDbConnectionOnceAsync } from '../db_setup'; +import { chaiSetup } from '../utils/chai_setup'; + +import { testSaveAndFindEntityAsync } from './util'; + +chaiSetup.configure(); + +// tslint:disable:custom-no-magic-numbers +describe('Transaction entity', () => { + it('save/find', async () => { + const connection = await createDbConnectionOnceAsync(); + const transactionRepository = connection.getRepository(Transaction); + const transaction = new Transaction(); + transaction.blockHash = '0x6ff106d00b6c3746072fc06bae140fb2549036ba7bcf9184ae19a42fd33657fd'; + transaction.blockNumber = 6276262; + transaction.gasPrice = new BigNumber(3000000); + transaction.gasUsed = new BigNumber(125000); + transaction.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe'; + await testSaveAndFindEntityAsync(transactionRepository, transaction); + }); +}); diff --git a/packages/pipeline/test/entities/util.ts b/packages/pipeline/test/entities/util.ts new file mode 100644 index 000000000..043a3b15d --- /dev/null +++ b/packages/pipeline/test/entities/util.ts @@ -0,0 +1,25 @@ +import * as chai from 'chai'; +import 'mocha'; + +import { Repository } from 'typeorm'; + +const expect = chai.expect; + +/** + * First saves the given entity to the database, then finds it and makes sure + * that the found entity is exactly equal to the original one. This is a bare + * minimum basic test to make sure that the entity type definition and our + * database schema are aligned and that it is possible to save and find the + * entity. + * @param repository A TypeORM repository corresponding with the type of the entity. + * @param entity An instance of a TypeORM entity which will be saved/retrieved from the database. + */ +export async function testSaveAndFindEntityAsync<T>(repository: Repository<T>, entity: T): Promise<void> { + // Note(albrow): We are forced to use an 'as any' hack here because + // TypeScript complains about stack depth when checking the types. 
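// (Passing the entity itself as `where` makes TypeORM treat every populated field as an equality condition, so findOneOrFail only succeeds if the whole row round-trips unchanged.)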
+ await repository.save(entity as any); + const gotEntity = await repository.findOneOrFail({ + where: entity, + }); + expect(gotEntity).deep.equal(entity); +} diff --git a/packages/pipeline/test/parsers/bloxy/index_test.ts b/packages/pipeline/test/parsers/bloxy/index_test.ts new file mode 100644 index 000000000..2b8d68f98 --- /dev/null +++ b/packages/pipeline/test/parsers/bloxy/index_test.ts @@ -0,0 +1,99 @@ +// tslint:disable:custom-no-magic-numbers +import { BigNumber } from '@0x/utils'; +import * as chai from 'chai'; +import 'mocha'; +import * as R from 'ramda'; + +import { BLOXY_DEX_TRADES_URL, BloxyTrade } from '../../../src/data_sources/bloxy'; +import { DexTrade } from '../../../src/entities'; +import { _parseBloxyTrade } from '../../../src/parsers/bloxy'; +import { _convertToExchangeFillEvent } from '../../../src/parsers/events'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +const baseInput: BloxyTrade = { + tx_hash: '0xb93a7faf92efbbb5405c9a73cd4efd99702fe27c03ff22baee1f1b1e37b3a0bf', + tx_time: '2018-11-21T09:06:28.000+00:00', + tx_date: '2018-11-21', + tx_sender: '0x00923b9a074762b93650716333b3e1473a15048e', + smart_contract_id: 7091917, + smart_contract_address: '0x818e6fecd516ecc3849daf6845e3ec868087b755', + contract_type: 'DEX/Kyber Network Proxy', + maker: '0x0000000000000000000000000000000000000001', + taker: '0x0000000000000000000000000000000000000002', + amountBuy: 1.011943163078103, + makerFee: 38.912083, + buyCurrencyId: 1, + buySymbol: 'ETH', + amountSell: 941.4997928436911, + takerFee: 100.39, + sellCurrencyId: 16610, + sellSymbol: 'ELF', + maker_annotation: 'random annotation', + taker_annotation: 'random other annotation', + protocol: 'Kyber Network Proxy', + buyAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100d', + sellAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100e', +}; + +const baseExpected: DexTrade = { + sourceUrl: BLOXY_DEX_TRADES_URL, + txHash: '0xb93a7faf92efbbb5405c9a73cd4efd99702fe27c03ff22baee1f1b1e37b3a0bf', + txTimestamp: 1542791188000, + txDate: '2018-11-21', + txSender: '0x00923b9a074762b93650716333b3e1473a15048e', + smartContractId: 7091917, + smartContractAddress: '0x818e6fecd516ecc3849daf6845e3ec868087b755', + contractType: 'DEX/Kyber Network Proxy', + maker: '0x0000000000000000000000000000000000000001', + taker: '0x0000000000000000000000000000000000000002', + amountBuy: new BigNumber('1.011943163078103'), + makerFeeAmount: new BigNumber('38.912083'), + buyCurrencyId: 1, + buySymbol: 'ETH', + amountSell: new BigNumber('941.4997928436911'), + takerFeeAmount: new BigNumber('100.39'), + sellCurrencyId: 16610, + sellSymbol: 'ELF', + makerAnnotation: 'random annotation', + takerAnnotation: 'random other annotation', + protocol: 'Kyber Network Proxy', + buyAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100d', + sellAddress: '0xbf2179859fc6d5bee9bf9158632dc51678a4100e', +}; + +interface TestCase { + input: BloxyTrade; + expected: DexTrade; +} + +const testCases: TestCase[] = [ + { + input: baseInput, + expected: baseExpected, + }, + { + input: R.merge(baseInput, { buyAddress: null, sellAddress: null }), + expected: R.merge(baseExpected, { buyAddress: null, sellAddress: null }), + }, + { + input: R.merge(baseInput, { + buySymbol: + 'RING\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000', + }), + expected: R.merge(baseExpected, { buySymbol: 'RING' }), + }, +]; + 
+describe('bloxy', () => { + describe('_parseBloxyTrade', () => { + for (const [i, testCase] of testCases.entries()) { + it(`converts BloxyTrade to DexTrade entity (${i + 1}/${testCases.length})`, () => { + const actual = _parseBloxyTrade(testCase.input); + expect(actual).deep.equal(testCase.expected); + }); + } + }); +}); diff --git a/packages/pipeline/test/parsers/ddex_orders/index_test.ts b/packages/pipeline/test/parsers/ddex_orders/index_test.ts new file mode 100644 index 000000000..213100f44 --- /dev/null +++ b/packages/pipeline/test/parsers/ddex_orders/index_test.ts @@ -0,0 +1,66 @@ +import { BigNumber } from '@0x/utils'; +import * as chai from 'chai'; +import 'mocha'; + +import { DdexMarket } from '../../../src/data_sources/ddex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../../src/entities'; +import { aggregateOrders, parseDdexOrder } from '../../../src/parsers/ddex_orders'; +import { OrderType } from '../../../src/types'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('ddex_orders', () => { + describe('aggregateOrders', () => { + it('aggregates orders by price point', () => { + const input = [ + { price: '1', amount: '20', orderId: 'testtest' }, + { price: '1', amount: '30', orderId: 'testone' }, + { price: '2', amount: '100', orderId: 'testtwo' }, + ]; + const expected = [['1', new BigNumber(50)], ['2', new BigNumber(100)]]; + const actual = aggregateOrders(input); + expect(actual).deep.equal(expected); + }); + }); + + describe('parseDdexOrder', () => { + it('converts ddexOrder to TokenOrder entity', () => { + const ddexOrder: [string, BigNumber] = ['0.5', new BigNumber(10)]; + const ddexMarket: DdexMarket = { + id: 'ABC-DEF', + quoteToken: 'ABC', + quoteTokenDecimals: 5, + quoteTokenAddress: '0x0000000000000000000000000000000000000000', + baseToken: 'DEF', + baseTokenDecimals: 2, + baseTokenAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81', + minOrderSize: '0.1', + maxOrderSize: '1000', + pricePrecision: 1, + priceDecimals: 1, + amountDecimals: 0, + }; + const observedTimestamp: number = Date.now(); + const orderType: OrderType = 'bid'; + const source: string = 'ddex'; + + const expected = new TokenOrder(); + expected.source = 'ddex'; + expected.observedTimestamp = observedTimestamp; + expected.orderType = 'bid'; + expected.price = new BigNumber(0.5); + expected.baseAssetSymbol = 'DEF'; + expected.baseAssetAddress = '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81'; + expected.baseVolume = new BigNumber(5); + expected.quoteAssetSymbol = 'ABC'; + expected.quoteAssetAddress = '0x0000000000000000000000000000000000000000'; + expected.quoteVolume = new BigNumber(10); + + const actual = parseDdexOrder(ddexMarket, observedTimestamp, orderType, source, ddexOrder); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/parsers/events/index_test.ts b/packages/pipeline/test/parsers/events/index_test.ts new file mode 100644 index 000000000..7e439ce39 --- /dev/null +++ b/packages/pipeline/test/parsers/events/index_test.ts @@ -0,0 +1,78 @@ +import { ExchangeFillEventArgs } from '@0x/contract-wrappers'; +import { BigNumber } from '@0x/utils'; +import * as chai from 'chai'; +import { LogWithDecodedArgs } from 'ethereum-types'; +import 'mocha'; + +import { ExchangeFillEvent } from '../../../src/entities'; +import { _convertToExchangeFillEvent } from '../../../src/parsers/events'; +import { chaiSetup } from 
'../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('exchange_events', () => { + describe('_convertToExchangeFillEvent', () => { + it('converts LogWithDecodedArgs to ExchangeFillEvent entity', () => { + const input: LogWithDecodedArgs<ExchangeFillEventArgs> = { + logIndex: 102, + transactionIndex: 38, + transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe', + blockHash: '', + blockNumber: 6276262, + address: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b', + data: + '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000016345785d8a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f49800000000000000000000000000000000000000000000000000000000', + topics: [ + '0x0bcc4c97732e47d9946f229edb95f5b6323f601300e4690de719993f3c371129', + '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428', + '0x000000000000000000000000c370d2a5920344aa6b7d8d11250e3e861434cbdd', + '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a', + ], + event: 'Fill', + args: { + makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd', + takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428', + makerAssetFilledAmount: new BigNumber('10000000000000000'), + takerAssetFilledAmount: new BigNumber('100000000000000000'), + makerFeePaid: new BigNumber('0'), + takerFeePaid: new BigNumber('12345'), + orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a', + makerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + takerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498', + }, + }; + const expected = new ExchangeFillEvent(); + expected.contractAddress = '0x4f833a24e1f95d70f028921e27040ca56e09ab0b'; + expected.blockNumber = 6276262; + expected.logIndex = 102; + expected.rawData = + 
'0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000016345785d8a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024f47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f49800000000000000000000000000000000000000000000000000000000'; + expected.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe'; + expected.makerAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428'; + expected.takerAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428'; + expected.feeRecipientAddress = '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd'; + expected.senderAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428'; + expected.makerAssetFilledAmount = new BigNumber('10000000000000000'); + expected.takerAssetFilledAmount = new BigNumber('100000000000000000'); + expected.makerFeePaid = new BigNumber('0'); + expected.takerFeePaid = new BigNumber('12345'); + expected.orderHash = '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a'; + expected.rawMakerAssetData = '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'; + expected.makerAssetType = 'erc20'; + expected.makerAssetProxyId = '0xf47261b0'; + expected.makerTokenAddress = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'; + expected.makerTokenId = null; + expected.rawTakerAssetData = '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498'; + expected.takerAssetType = 'erc20'; + expected.takerAssetProxyId = '0xf47261b0'; + expected.takerTokenAddress = '0xe41d2489571d322189246dafa5ebde1f4699f498'; + expected.takerTokenId = null; + const actual = _convertToExchangeFillEvent(input); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/parsers/ohlcv_external/crypto_compare_test.ts b/packages/pipeline/test/parsers/ohlcv_external/crypto_compare_test.ts new file mode 100644 index 000000000..118cafc5e --- /dev/null +++ b/packages/pipeline/test/parsers/ohlcv_external/crypto_compare_test.ts @@ -0,0 +1,62 @@ +import * as chai from 'chai'; +import 'mocha'; +import * as R from 'ramda'; + +import { CryptoCompareOHLCVRecord } from '../../../src/data_sources/ohlcv_external/crypto_compare'; +import { OHLCVExternal } from '../../../src/entities'; +import { OHLCVMetadata, parseRecords } from '../../../src/parsers/ohlcv_external/crypto_compare'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('ohlcv_external parser (Crypto Compare)', () => { + describe('parseRecords', () => { + const record: CryptoCompareOHLCVRecord = { + time: 200, + close: 100, + high: 101, + low: 99, + open: 98, + volumefrom: 1234, + volumeto: 4321, + }; + + const metadata: OHLCVMetadata = { + fromSymbol: 'ETH', + toSymbol: 'ZRX', + exchange: 'CCCAGG', + source: 'CryptoCompare', + observedTimestamp: new Date().getTime(), + 
interval: 100000, + }; + + const entity = new OHLCVExternal(); + entity.exchange = metadata.exchange; + entity.fromSymbol = metadata.fromSymbol; + entity.toSymbol = metadata.toSymbol; + entity.startTime = 100000; + entity.endTime = 200000; + entity.open = record.open; + entity.close = record.close; + entity.low = record.low; + entity.high = record.high; + entity.volumeFrom = record.volumefrom; + entity.volumeTo = record.volumeto; + entity.source = metadata.source; + entity.observedTimestamp = metadata.observedTimestamp; + + it('converts Crypto Compare OHLCV records to OHLCVExternal entity', () => { + const input = [record, R.merge(record, { time: 300 }), R.merge(record, { time: 400 })]; + const expected = [ + entity, + R.merge(entity, { startTime: 200000, endTime: 300000 }), + R.merge(entity, { startTime: 300000, endTime: 400000 }), + ]; + + const actual = parseRecords(input, metadata); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/parsers/paradex_orders/index_test.ts b/packages/pipeline/test/parsers/paradex_orders/index_test.ts new file mode 100644 index 000000000..1522806bf --- /dev/null +++ b/packages/pipeline/test/parsers/paradex_orders/index_test.ts @@ -0,0 +1,54 @@ +import { BigNumber } from '@0x/utils'; +import * as chai from 'chai'; +import 'mocha'; + +import { ParadexMarket, ParadexOrder } from '../../../src/data_sources/paradex'; +import { TokenOrderbookSnapshot as TokenOrder } from '../../../src/entities'; +import { parseParadexOrder } from '../../../src/parsers/paradex_orders'; +import { OrderType } from '../../../src/types'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('paradex_orders', () => { + describe('parseParadexOrder', () => { + it('converts ParadexOrder to TokenOrder entity', () => { + const paradexOrder: ParadexOrder = { + amount: '412', + price: '0.1245', + }; + const paradexMarket: ParadexMarket = { + id: '2', + symbol: 'ABC/DEF', + baseToken: 'DEF', + quoteToken: 'ABC', + minOrderSize: '0.1', + maxOrderSize: '1000', + priceMaxDecimals: 5, + amountMaxDecimals: 5, + baseTokenAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81', + quoteTokenAddress: '0x0000000000000000000000000000000000000000', + }; + const observedTimestamp: number = Date.now(); + const orderType: OrderType = 'bid'; + const source: string = 'paradex'; + + const expected = new TokenOrder(); + expected.source = 'paradex'; + expected.observedTimestamp = observedTimestamp; + expected.orderType = 'bid'; + expected.price = new BigNumber(0.1245); + expected.baseAssetSymbol = 'DEF'; + expected.baseAssetAddress = '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81'; + expected.baseVolume = new BigNumber(412 * 0.1245); + expected.quoteAssetSymbol = 'ABC'; + expected.quoteAssetAddress = '0x0000000000000000000000000000000000000000'; + expected.quoteVolume = new BigNumber(412); + + const actual = parseParadexOrder(paradexMarket, observedTimestamp, orderType, source, paradexOrder); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/parsers/sra_orders/index_test.ts b/packages/pipeline/test/parsers/sra_orders/index_test.ts new file mode 100644 index 000000000..ee2842ef3 --- /dev/null +++ b/packages/pipeline/test/parsers/sra_orders/index_test.ts @@ -0,0 +1,68 @@ +import { APIOrder } from '@0x/types'; +import { BigNumber } from '@0x/utils'; +import * as chai from 'chai'; +import 'mocha'; + +import { SraOrder } from 
'../../../src/entities'; +import { _convertToEntity } from '../../../src/parsers/sra_orders'; +import { chaiSetup } from '../../utils/chai_setup'; + +chaiSetup.configure(); +const expect = chai.expect; + +// tslint:disable:custom-no-magic-numbers +describe('sra_orders', () => { + describe('_convertToEntity', () => { + it('converts ApiOrder to SraOrder entity', () => { + const input: APIOrder = { + order: { + makerAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81', + takerAddress: '0x0000000000000000000000000000000000000000', + feeRecipientAddress: '0xa258b39954cef5cb142fd567a46cddb31a670124', + senderAddress: '0x0000000000000000000000000000000000000000', + makerAssetAmount: new BigNumber('1619310371000000000'), + takerAssetAmount: new BigNumber('8178335207070707070707'), + makerFee: new BigNumber('0'), + takerFee: new BigNumber('0'), + exchangeAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b', + expirationTimeSeconds: new BigNumber('1538529488'), + signature: + '0x1b5a5d672b0d647b5797387ccbb89d822d5d2e873346b014f4ff816ff0783f2a7a0d2824d2d7042ec8ea375bc7f870963e1cb8248f1db03ddf125e27b5963aa11f03', + salt: new BigNumber('1537924688891'), + makerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', + takerAssetData: '0xf47261b000000000000000000000000042d6622dece394b54999fbd73d108123806f6a18', + }, + metaData: { isThisArbitraryData: true, powerLevel: 9001 }, + }; + const expected = new SraOrder(); + expected.exchangeAddress = '0x4f833a24e1f95d70f028921e27040ca56e09ab0b'; + expected.orderHashHex = '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1c9'; + expected.makerAddress = '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81'; + expected.takerAddress = '0x0000000000000000000000000000000000000000'; + expected.feeRecipientAddress = '0xa258b39954cef5cb142fd567a46cddb31a670124'; + expected.senderAddress = '0x0000000000000000000000000000000000000000'; + expected.makerAssetAmount = new BigNumber('1619310371000000000'); + expected.takerAssetAmount = new BigNumber('8178335207070707070707'); + expected.makerFee = new BigNumber('0'); + expected.takerFee = new BigNumber('0'); + expected.expirationTimeSeconds = new BigNumber('1538529488'); + expected.salt = new BigNumber('1537924688891'); + expected.signature = + '0x1b5a5d672b0d647b5797387ccbb89d822d5d2e873346b014f4ff816ff0783f2a7a0d2824d2d7042ec8ea375bc7f870963e1cb8248f1db03ddf125e27b5963aa11f03'; + expected.rawMakerAssetData = '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'; + expected.makerAssetType = 'erc20'; + expected.makerAssetProxyId = '0xf47261b0'; + expected.makerTokenAddress = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'; + expected.makerTokenId = null; + expected.rawTakerAssetData = '0xf47261b000000000000000000000000042d6622dece394b54999fbd73d108123806f6a18'; + expected.takerAssetType = 'erc20'; + expected.takerAssetProxyId = '0xf47261b0'; + expected.takerTokenAddress = '0x42d6622dece394b54999fbd73d108123806f6a18'; + expected.takerTokenId = null; + expected.metadataJson = '{"isThisArbitraryData":true,"powerLevel":9001}'; + + const actual = _convertToEntity(input); + expect(actual).deep.equal(expected); + }); + }); +}); diff --git a/packages/pipeline/test/utils/chai_setup.ts b/packages/pipeline/test/utils/chai_setup.ts new file mode 100644 index 000000000..1a8733093 --- /dev/null +++ b/packages/pipeline/test/utils/chai_setup.ts @@ -0,0 +1,13 @@ +import * as chai from 'chai'; +import chaiAsPromised = require('chai-as-promised'); +import ChaiBigNumber = 
require('chai-bignumber'); +import * as dirtyChai from 'dirty-chai'; + +export const chaiSetup = { + configure(): void { + chai.config.includeStack = true; + chai.use(ChaiBigNumber()); + chai.use(dirtyChai); + chai.use(chaiAsPromised); + }, +}; diff --git a/packages/pipeline/tsconfig.json b/packages/pipeline/tsconfig.json new file mode 100644 index 000000000..6f138f260 --- /dev/null +++ b/packages/pipeline/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig", + "compilerOptions": { + "outDir": "lib", + "rootDir": ".", + "emitDecoratorMetadata": true, + "experimentalDecorators": true + }, + "include": ["./src/**/*", "./test/**/*", "./migrations/**/*"] +} diff --git a/packages/pipeline/tslint.json b/packages/pipeline/tslint.json new file mode 100644 index 000000000..dd9053357 --- /dev/null +++ b/packages/pipeline/tslint.json @@ -0,0 +1,3 @@ +{ + "extends": ["@0x/tslint-config"] +} diff --git a/packages/pipeline/typedoc-tsconfig.json b/packages/pipeline/typedoc-tsconfig.json new file mode 100644 index 000000000..8b0ff51c1 --- /dev/null +++ b/packages/pipeline/typedoc-tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../typedoc-tsconfig", + "compilerOptions": { + "outDir": "lib", + "rootDir": ".", + "emitDecoratorMetadata": true, + "experimentalDecorators": true + }, + "include": ["./src/**/*", "./test/**/*", "./migrations/**/*"] +} diff --git a/packages/react-docs/CHANGELOG.json b/packages/react-docs/CHANGELOG.json index d456a3b53..a93330899 100644 --- a/packages/react-docs/CHANGELOG.json +++ b/packages/react-docs/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.21", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "1.0.20", "changes": [ diff --git a/packages/react-docs/CHANGELOG.md b/packages/react-docs/CHANGELOG.md index e48f43fb8..40f222b73 100644 --- a/packages/react-docs/CHANGELOG.md +++ b/packages/react-docs/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.0.21 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.20 - _November 28, 2018_ * Dependencies updated diff --git a/packages/react-shared/CHANGELOG.json b/packages/react-shared/CHANGELOG.json index a376bae29..d79c3434f 100644 --- a/packages/react-shared/CHANGELOG.json +++ b/packages/react-shared/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.24", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "1.0.23", "changes": [ diff --git a/packages/react-shared/CHANGELOG.md b/packages/react-shared/CHANGELOG.md index a983e0af2..bdacefdab 100644 --- a/packages/react-shared/CHANGELOG.md +++ b/packages/react-shared/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v1.0.24 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.23 - _November 28, 2018_ * Dependencies updated diff --git a/packages/sol-compiler/CHANGELOG.json b/packages/sol-compiler/CHANGELOG.json index fe077b6cc..1dbc3692d 100644 --- a/packages/sol-compiler/CHANGELOG.json +++ b/packages/sol-compiler/CHANGELOG.json @@ -6,7 +6,8 @@ "note": "Fix bug where we were appending base path to absolute imports (e.g NPM imports)", "pr": 1311 } - ] + ], + "timestamp": 1544482891 }, { "timestamp": 1543401373, diff --git a/packages/sol-compiler/CHANGELOG.md b/packages/sol-compiler/CHANGELOG.md index a1782bb3b..63dfaf29f 100644 --- a/packages/sol-compiler/CHANGELOG.md +++ b/packages/sol-compiler/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.1.15 - _December 10, 2018_ + + * Fix bug where we were appending base path to absolute imports (e.g NPM imports) (#1311) + ## v1.1.14 - _November 28, 2018_ * Dependencies updated diff --git a/packages/sol-compiler/src/compiler.ts b/packages/sol-compiler/src/compiler.ts index cba67f292..85df8209e 100644 --- a/packages/sol-compiler/src/compiler.ts +++ b/packages/sol-compiler/src/compiler.ts @@ -400,7 +400,7 @@ export class Compiler { * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol") * And we need to append the base path for relative imports. */ - importPath = path.resolve('/' + contractFolder, importPath).replace('/', ''); + importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', ''); } if (_.isUndefined(sourcesToAppendTo[importPath])) { diff --git a/packages/sol-cov/CHANGELOG.json b/packages/sol-cov/CHANGELOG.json index bc8aa71e1..1afa85298 100644 --- a/packages/sol-cov/CHANGELOG.json +++ b/packages/sol-cov/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.1.15", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "2.1.14", "changes": [ diff --git a/packages/sol-cov/CHANGELOG.md b/packages/sol-cov/CHANGELOG.md index 25ba93026..b2f7facad 100644 --- a/packages/sol-cov/CHANGELOG.md +++ b/packages/sol-cov/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.1.15 - _December 10, 2018_ + + * Dependencies updated + ## v2.1.14 - _November 28, 2018_ * Dependencies updated diff --git a/packages/sol-doc/CHANGELOG.json b/packages/sol-doc/CHANGELOG.json index 332aeb025..01acbe436 100644 --- a/packages/sol-doc/CHANGELOG.json +++ b/packages/sol-doc/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.10", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "1.0.9", "changes": [ diff --git a/packages/sol-doc/CHANGELOG.md b/packages/sol-doc/CHANGELOG.md index 5a1df59c7..3e2b60bbb 100644 --- a/packages/sol-doc/CHANGELOG.md +++ b/packages/sol-doc/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v1.0.10 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.9 - _November 28, 2018_ * Dependencies updated diff --git a/packages/sol-resolver/CHANGELOG.json b/packages/sol-resolver/CHANGELOG.json index 4c9e612d7..18ec4cd1f 100644 --- a/packages/sol-resolver/CHANGELOG.json +++ b/packages/sol-resolver/CHANGELOG.json @@ -6,7 +6,8 @@ "note": "NPMResolver now supports scoped packages", "pr": 1311 } - ] + ], + "timestamp": 1544482891 }, { "timestamp": 1542821676, diff --git a/packages/sol-resolver/CHANGELOG.md b/packages/sol-resolver/CHANGELOG.md index b83275e5a..2edb58f6c 100644 --- a/packages/sol-resolver/CHANGELOG.md +++ b/packages/sol-resolver/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.1.0 - _December 10, 2018_ + + * NPMResolver now supports scoped packages (#1311) + ## v1.0.17 - _November 21, 2018_ * Dependencies updated diff --git a/packages/sra-spec/CHANGELOG.json b/packages/sra-spec/CHANGELOG.json index 49d1f1c83..cb5c837f1 100644 --- a/packages/sra-spec/CHANGELOG.json +++ b/packages/sra-spec/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "1.0.12", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1542821676, "version": "1.0.11", "changes": [ diff --git a/packages/sra-spec/CHANGELOG.md b/packages/sra-spec/CHANGELOG.md index 77b22515f..aefe07585 100644 --- a/packages/sra-spec/CHANGELOG.md +++ b/packages/sra-spec/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v1.0.12 - _December 10, 2018_ + + * Dependencies updated + ## v1.0.11 - _November 21, 2018_ * Dependencies updated diff --git a/packages/subproviders/CHANGELOG.json b/packages/subproviders/CHANGELOG.json index 6da170be3..05f1b96c3 100644 --- a/packages/subproviders/CHANGELOG.json +++ b/packages/subproviders/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "2.1.7", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "timestamp": 1543401373, "version": "2.1.6", "changes": [ diff --git a/packages/subproviders/CHANGELOG.md b/packages/subproviders/CHANGELOG.md index 01dd8d652..57cf0d21d 100644 --- a/packages/subproviders/CHANGELOG.md +++ b/packages/subproviders/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.1.7 - _December 10, 2018_ + + * Dependencies updated + ## v2.1.6 - _November 28, 2018_ * Dependencies updated diff --git a/packages/tslint-config/tslint.json b/packages/tslint-config/tslint.json index fd1849dd0..e8de6221e 100644 --- a/packages/tslint-config/tslint.json +++ b/packages/tslint-config/tslint.json @@ -92,6 +92,7 @@ "prefer-function-over-method": true, "prefer-object-spread": true, "prefer-readonly": true, + "prefer-template": true, "promise-function-async": true, "quotemark": [true, "single", "avoid-escape", "jsx-double"], "restrict-plus-operands": true, diff --git a/packages/types/CHANGELOG.json b/packages/types/CHANGELOG.json index b09859101..ea692c79c 100644 --- a/packages/types/CHANGELOG.json +++ b/packages/types/CHANGELOG.json @@ -10,7 +10,8 @@ "note": "Add RevertReasons for DutchAuction contract", "pr": 1225 } - ] + ], + "timestamp": 1544482891 }, { "version": "1.3.0", diff --git a/packages/types/CHANGELOG.md b/packages/types/CHANGELOG.md index f133c05f1..45544938e 100644 --- a/packages/types/CHANGELOG.md +++ b/packages/types/CHANGELOG.md @@ -5,6 +5,11 @@ Edit the package's CHANGELOG.json file only. 
CHANGELOG +## v1.4.0 - _December 10, 2018_ + + * Add `LengthMismatch` and `LengthGreaterThan3Required` revert reasons (#1224) + * Add RevertReasons for DutchAuction contract (#1225) + ## v1.3.0 - _November 21, 2018_ * Add the `SimpleContractArtifact` type, which describes the artifact format published in the `@0x/contract-artifacts` package (#1298) diff --git a/packages/typescript-typings/CHANGELOG.json b/packages/typescript-typings/CHANGELOG.json index 5653b397d..85da449b6 100644 --- a/packages/typescript-typings/CHANGELOG.json +++ b/packages/typescript-typings/CHANGELOG.json @@ -1,5 +1,14 @@ [ { + "timestamp": 1544482891, + "version": "3.0.5", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { "version": "3.0.4", "changes": [ { diff --git a/packages/typescript-typings/CHANGELOG.md b/packages/typescript-typings/CHANGELOG.md index 62e6665be..5ff1448fd 100644 --- a/packages/typescript-typings/CHANGELOG.md +++ b/packages/typescript-typings/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v3.0.5 - _December 10, 2018_ + + * Dependencies updated + ## v3.0.4 - _November 9, 2018_ * Dependencies updated diff --git a/packages/utils/CHANGELOG.md b/packages/utils/CHANGELOG.md index c5c42161a..e712ebea8 100644 --- a/packages/utils/CHANGELOG.md +++ b/packages/utils/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v2.0.7 - _November 28, 2018_ + + * Optimized ABI Encoder/Decoder. Generates compressed calldata to save gas. Generates human-readable calldata to aid development. + ## v2.0.6 - _November 21, 2018_ * Dependencies updated diff --git a/packages/utils/src/abi_utils.ts b/packages/utils/src/abi_utils.ts index 598ea5fcc..3e6fc9665 100644 --- a/packages/utils/src/abi_utils.ts +++ b/packages/utils/src/abi_utils.ts @@ -26,7 +26,7 @@ function parseEthersParams(params: DataItem[]): { names: ParamName[]; types: str const result = parseEthersParams(param.components); names.push({ name: param.name || null, names: result.names }); - types.push('tuple(' + result.types.join(',') + ')' + suffix); + types.push(`tuple(${result.types.join(',')})${suffix}`); } else { names.push(param.name || null); types.push(param.type); @@ -120,7 +120,7 @@ function splitTupleTypes(type: string): string[] { if (_.endsWith(type, '[]')) { throw new Error('Internal error: array types are not supported'); } else if (!_.startsWith(type, 'tuple(')) { - throw new Error('Internal error: expected tuple type but got non-tuple type: ' + type); + throw new Error(`Internal error: expected tuple type but got non-tuple type: ${type}`); } // Trim the outtermost tuple(). const trimmedType = type.substring('tuple('.length, type.length - 1); diff --git a/packages/utils/test/abi_encoder/evm_data_types_test.ts b/packages/utils/test/abi_encoder/evm_data_types_test.ts index 9ef80a560..7185851a8 100644 --- a/packages/utils/test/abi_encoder/evm_data_types_test.ts +++ b/packages/utils/test/abi_encoder/evm_data_types_test.ts @@ -901,7 +901,7 @@ describe('ABI Encoder: EVM Data Type Encoding/Decoding', () => { // Construct args to be encoded // Note: There will be padding because this is a bytes32 but we are only passing in 4 bytes. 
const bytesLength = 40; - const args = '0x' + '61'.repeat(bytesLength); + const args = `0x${'61'.repeat(bytesLength)}`; // Encode Args and validate result const encodedArgs = dataType.encode(args, encodingRules); const expectedEncodedArgs = @@ -993,7 +993,7 @@ describe('ABI Encoder: EVM Data Type Encoding/Decoding', () => { // Construct args to be encoded // Note: There will be padding because this is a bytes32 but we are only passing in 4 bytes. const strLength = 40; - const args = '0x' + 'a'.repeat(strLength); + const args = `0x${'a'.repeat(strLength)}`; // Encode Args and validate result const encodedArgs = dataType.encode(args, encodingRules); const expectedEncodedArgs = diff --git a/packages/web3-wrapper/CHANGELOG.json b/packages/web3-wrapper/CHANGELOG.json index 9f5194e0d..aa96ea765 100644 --- a/packages/web3-wrapper/CHANGELOG.json +++ b/packages/web3-wrapper/CHANGELOG.json @@ -1,5 +1,15 @@ [ { + "version": "3.2.0", + "changes": [ + { + "note": "Return `value` and `gasPrice` as BigNumbers to avoid loss of precision errors", + "pr": 1402 + } + ], + "timestamp": 1544482891 + }, + { "version": "3.1.6", "changes": [ { diff --git a/packages/web3-wrapper/CHANGELOG.md b/packages/web3-wrapper/CHANGELOG.md index fffaf1d0a..6788dbaed 100644 --- a/packages/web3-wrapper/CHANGELOG.md +++ b/packages/web3-wrapper/CHANGELOG.md @@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only. CHANGELOG +## v3.2.0 - _December 10, 2018_ + + * Return `value` and `gasPrice` as BigNumbers to avoid loss of precision errors (#1402) + ## v3.1.6 - _November 28, 2018_ * Unmarshall mined transaction receipts (#1308) diff --git a/packages/web3-wrapper/src/marshaller.ts b/packages/web3-wrapper/src/marshaller.ts index 7bd274c85..4230f8eab 100644 --- a/packages/web3-wrapper/src/marshaller.ts +++ b/packages/web3-wrapper/src/marshaller.ts @@ -120,9 +120,11 @@ export const marshaller = { } const txData = { ...txDataRpc, - value: !_.isUndefined(txDataRpc.value) ? utils.convertHexToNumber(txDataRpc.value) : undefined, + value: !_.isUndefined(txDataRpc.value) ? utils.convertAmountToBigNumber(txDataRpc.value) : undefined, gas: !_.isUndefined(txDataRpc.gas) ? utils.convertHexToNumber(txDataRpc.gas) : undefined, - gasPrice: !_.isUndefined(txDataRpc.gasPrice) ? utils.convertHexToNumber(txDataRpc.gasPrice) : undefined, + gasPrice: !_.isUndefined(txDataRpc.gasPrice) + ? utils.convertAmountToBigNumber(txDataRpc.gasPrice) + : undefined, nonce: !_.isUndefined(txDataRpc.nonce) ? utils.convertHexToNumber(txDataRpc.nonce) : undefined, }; return txData; diff --git a/packages/website/README.md b/packages/website/README.md index d82d045a6..f735b0df5 100644 --- a/packages/website/README.md +++ b/packages/website/README.md @@ -3,7 +3,6 @@ This repository contains our website and [0x Portal DApp][portal-url] (over-the-counter exchange), facilitating trustless over-the-counter trading of Ethereum-based tokens using 0x protocol. 
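The web3-wrapper change above ("Return `value` and `gasPrice` as BigNumbers to avoid loss of precision errors", #1402) guards against wei-sized quantities exceeding JavaScript's safe integer range of 2^53 - 1. A standalone sketch of the failure mode; the hex amount is hypothetical, and `BigNumber` is the bignumber.js class re-exported by `@0x/utils`:

```typescript
import { BigNumber } from '@0x/utils';

// Hypothetical RPC-style hex amount: 10^18 + 1 wei (one ether plus one wei).
const gasPriceHex = '0xde0b6b3a7640001';

// Converting to a plain number rounds to the nearest double and silently drops the extra wei.
console.log(parseInt(gasPriceHex, 16)); // 1000000000000000000

// BigNumber preserves the full integer value.
console.log(new BigNumber(gasPriceHex).toFixed()); // '1000000000000000001'
```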
[website-url]: https://0xproject.com/ -[whitepaper-url]: https://0xproject.com/pdfs/0x_white_paper.pdf [portal-url]: https://0xproject.com/portal ## Contributing diff --git a/packages/website/package.json b/packages/website/package.json index dc10c7b1c..5d2e563e9 100644 --- a/packages/website/package.json +++ b/packages/website/package.json @@ -20,6 +20,8 @@ "author": "Fabio Berger", "license": "Apache-2.0", "dependencies": { + "@0x/asset-buyer": "^3.0.2", + "@0x/contract-addresses": "^2.0.0", "@0x/contract-wrappers": "^4.1.1", "@0x/json-schemas": "^2.1.2", "@0x/order-utils": "^3.0.4", @@ -46,6 +48,7 @@ "numeral": "^2.0.6", "polished": "^1.9.2", "query-string": "^6.0.0", + "rc-slider": "^8.6.3", "react": "^16.4.2", "react-copy-to-clipboard": "^5.0.0", "react-document-title": "^2.0.3", @@ -54,6 +57,7 @@ "react-popper": "^1.0.0-beta.6", "react-redux": "^5.0.3", "react-scroll": "0xproject/react-scroll#pr-330-and-replace-state", + "react-syntax-highlighter": "^10.1.1", "react-tooltip": "^3.2.7", "react-typist": "^2.0.4", "redux": "^3.6.0", @@ -77,12 +81,14 @@ "@types/node": "*", "@types/numeral": "^0.0.22", "@types/query-string": "^5.1.0", + "@types/rc-slider": "^8.6.0", "@types/react": "^16.4.2", "@types/react-copy-to-clipboard": "^4.2.0", "@types/react-dom": "^16.0.7", "@types/react-helmet": "^5.0.6", "@types/react-redux": "^4.4.37", "@types/react-scroll": "1.5.3", + "@types/react-syntax-highlighter": "^0.0.8", "@types/react-tap-event-plugin": "0.0.30", "@types/redux": "^3.6.0", "@types/web3-provider-engine": "^14.0.0", diff --git a/packages/website/public/images/social/discord.png b/packages/website/public/images/social/discord.png Binary files differnew file mode 100644 index 000000000..1bdb07394 --- /dev/null +++ b/packages/website/public/images/social/discord.png diff --git a/packages/website/public/images/social/rocketchat.png b/packages/website/public/images/social/rocketchat.png Binary files differdeleted file mode 100644 index 58ff8d293..000000000 --- a/packages/website/public/images/social/rocketchat.png +++ /dev/null diff --git a/packages/website/public/index.html b/packages/website/public/index.html index a8a61f8ad..538eca6d9 100644 --- a/packages/website/public/index.html +++ b/packages/website/public/index.html @@ -1,95 +1,132 @@ <!DOCTYPE html> <html> + <head> + <meta charset="utf-8" /> + <meta name="viewport" content="width=device-width, initial-scale=1" /> + <meta name="description" content="An Open Protocol For Decentralized Exchange On The Ethereum Blockchain" /> + <meta property="og:type" content="website" /> + <meta property="og:title" content="0x" /> + <meta + property="og:description" + content="An Open Protocol For Decentralized Exchange On The Ethereum Blockchain" + /> + <meta property="og:image" content="/images/og_image.png" /> + <title>0x: The Protocol for Trading Tokens</title> + <link rel="icon" type="image/png" href="/images/favicon/favicon-2-32x32.png" sizes="32x32" /> + <link rel="icon" type="image/png" href="/images/favicon/favicon-2-16x16.png" sizes="16x16" /> + <link rel="stylesheet" href="/css/material-design-iconic-font.min.css" /> + <link rel="stylesheet" href="/css/roboto.css" /> + <link rel="stylesheet" href="/css/roboto_mono.css" /> + <link rel="stylesheet" href="/css/basscss_responsive_custom.css" /> + <link rel="stylesheet" href="/css/basscss_responsive_padding.css" /> + <link rel="stylesheet" href="/css/basscss_responsive_margin.css" /> + <link rel="stylesheet" href="/css/basscss_responsive_type_scale.css" /> + </head> -<head> - <meta 
charset="utf-8"> - <meta name="viewport" content="width=device-width, initial-scale=1"> - <meta name="description" content="An Open Protocol For Decentralized Exchange On The Ethereum Blockchain" /> - <meta property="og:type" content="website" /> - <meta property="og:title" content="0x" /> - <meta property="og:description" content="An Open Protocol For Decentralized Exchange On The Ethereum Blockchain" /> - <meta property="og:image" content="/images/og_image.png" /> - <title>0x: The Protocol for Trading Tokens</title> - <link rel="icon" type="image/png" href="/images/favicon/favicon-2-32x32.png" sizes="32x32" /> - <link rel="icon" type="image/png" href="/images/favicon/favicon-2-16x16.png" sizes="16x16" /> - <link rel="stylesheet" href="/css/github-gist.css"> - <link rel="stylesheet" href="/css/material-design-iconic-font.min.css"> - <link rel="stylesheet" href="/css/roboto.css"> - <link rel="stylesheet" href="/css/roboto_mono.css"> - <link rel="stylesheet" href="/css/basscss_responsive_custom.css"> - <link rel="stylesheet" href="/css/basscss_responsive_padding.css"> - <link rel="stylesheet" href="/css/basscss_responsive_margin.css"> - <link rel="stylesheet" href="/css/basscss_responsive_type_scale.css"> -</head> + <body style="margin: 0px; min-width: 355px;"> + <!-- Heap SDK --> + <script type="text/javascript"> + (window.heap = window.heap || []), + (heap.load = function(e, t) { + (window.heap.appid = e), (window.heap.config = t = t || {}); + var r = t.forceSSL || 'https:' === document.location.protocol, + a = document.createElement('script'); + (a.type = 'text/javascript'), + (a.async = !0), + (a.src = (r ? 'https:' : 'http:') + '//cdn.heapanalytics.com/js/heap-' + e + '.js'); + var n = document.getElementsByTagName('script')[0]; + n.parentNode.insertBefore(a, n); + for ( + var o = function(e) { + return function() { + heap.push([e].concat(Array.prototype.slice.call(arguments, 0))); + }; + }, + p = [ + 'addEventProperties', + 'addUserProperties', + 'clearEventProperties', + 'identify', + 'resetIdentity', + 'removeEventProperty', + 'setEventProperties', + 'track', + 'unsetEventProperty', + ], + c = 0; + c < p.length; + c++ + ) + heap[p[c]] = o(p[c]); + }); + heap.load('410099666'); + </script> + <!-- End Heap SDK --> + <!-- Global site tag (gtag.js) - Google Analytics --> + <script async src="https://www.googletagmanager.com/gtag/js?id=UA-98720122-1"></script> + <script> + window.dataLayer = window.dataLayer || []; + function gtag() { + dataLayer.push(arguments); + } + gtag('js', new Date()); -<body style="margin: 0px; min-width: 355px;"> - <!-- Heap SDK --> - <script type="text/javascript"> - window.heap = window.heap || [], heap.load = function (e, t) { window.heap.appid = e, window.heap.config = t = t || {}; var r = t.forceSSL || "https:" === document.location.protocol, a = document.createElement("script"); a.type = "text/javascript", a.async = !0, a.src = (r ? 
"https:" : "http:") + "//cdn.heapanalytics.com/js/heap-" + e + ".js"; var n = document.getElementsByTagName("script")[0]; n.parentNode.insertBefore(a, n); for (var o = function (e) { return function () { heap.push([e].concat(Array.prototype.slice.call(arguments, 0))) } }, p = ["addEventProperties", "addUserProperties", "clearEventProperties", "identify", "resetIdentity", "removeEventProperty", "setEventProperties", "track", "unsetEventProperty"], c = 0; c < p.length; c++)heap[p[c]] = o(p[c]) }; - heap.load("410099666"); - </script> - <!-- End Heap SDK --> - <!-- Global site tag (gtag.js) - Google Analytics --> - <script async src="https://www.googletagmanager.com/gtag/js?id=UA-98720122-1"></script> - <script> - window.dataLayer = window.dataLayer || []; - function gtag() { - dataLayer.push(arguments); - } - gtag('js', new Date()); + gtag('config', 'UA-98720122-1'); + </script> + <!-- End Google Analytics --> + <!-- Facebook SDK --> + <div id="fb-root"></div> + <script> + (function(d, s, id) { + var js, + fjs = d.getElementsByTagName(s)[0]; + if (d.getElementById(id)) return; + js = d.createElement(s); + js.id = id; + js.src = '//connect.facebook.net/en_US/sdk.js#xfbml=1&version=v2.8&appId=1687545238205192'; + fjs.parentNode.insertBefore(js, fjs); + })(document, 'script', 'facebook-jssdk'); + </script> + <div id="app"></div> + <!-- End Facebook SDK --> + <!-- Twitter SDK --> + <script> + window.twttr = (function(d, s, id) { + var js, + fjs = d.getElementsByTagName(s)[0], + t = window.twttr || {}; + if (d.getElementById(id)) return t; + js = d.createElement(s); + js.id = id; + js.src = 'https://platform.twitter.com/widgets.js'; + fjs.parentNode.insertBefore(js, fjs); - gtag('config', 'UA-98720122-1'); - </script> - <!-- End Google Analytics --> - <!-- Facebook SDK --> - <div id="fb-root"></div> - <script> - (function (d, s, id) { - var js, - fjs = d.getElementsByTagName(s)[0]; - if (d.getElementById(id)) return; - js = d.createElement(s); - js.id = id; - js.src = '//connect.facebook.net/en_US/sdk.js#xfbml=1&version=v2.8&appId=1687545238205192'; - fjs.parentNode.insertBefore(js, fjs); - })(document, 'script', 'facebook-jssdk'); - </script> - <div id="app"></div> - <!-- End Facebook SDK --> - <!-- Twitter SDK --> - <script> - window.twttr = (function (d, s, id) { - var js, - fjs = d.getElementsByTagName(s)[0], - t = window.twttr || {}; - if (d.getElementById(id)) return t; - js = d.createElement(s); - js.id = id; - js.src = 'https://platform.twitter.com/widgets.js'; - fjs.parentNode.insertBefore(js, fjs); - - t._e = []; - t.ready = function (f) { - t._e.push(f); - }; - return t; - })(document, 'script', 'twitter-wjs'); - </script> - <!-- End Twitter SDK --> - <!-- Hotjar Tracking Code for https://0xproject.com/ --> - <script> - (function (h, o, t, j, a, r) { - h.hj = h.hj || function () { (h.hj.q = h.hj.q || []).push(arguments) }; - h._hjSettings = { hjid: 935597, hjsv: 6 }; - a = o.getElementsByTagName('head')[0]; - r = o.createElement('script'); r.async = 1; - r.src = t + h._hjSettings.hjid + j + h._hjSettings.hjsv; - a.appendChild(r); - })(window, document, 'https://static.hotjar.com/c/hotjar-', '.js?sv='); - </script> - <!-- End Hotjar Tracking Code --> - <!-- Main --> - <script type="text/javascript" crossorigin="anonymous" src="/bundle.js" charset="utf-8"></script> -</body> - -</html>
\ No newline at end of file + t._e = []; + t.ready = function(f) { + t._e.push(f); + }; + return t; + })(document, 'script', 'twitter-wjs'); + </script> + <!-- End Twitter SDK --> + <!-- Hotjar Tracking Code for https://0xproject.com/ --> + <script> + (function(h, o, t, j, a, r) { + h.hj = + h.hj || + function() { + (h.hj.q = h.hj.q || []).push(arguments); + }; + h._hjSettings = { hjid: 935597, hjsv: 6 }; + a = o.getElementsByTagName('head')[0]; + r = o.createElement('script'); + r.async = 1; + r.src = t + h._hjSettings.hjid + j + h._hjSettings.hjsv; + a.appendChild(r); + })(window, document, 'https://static.hotjar.com/c/hotjar-', '.js?sv='); + </script> + <!-- End Hotjar Tracking Code --> + <!-- Main --> + <script type="text/javascript" crossorigin="anonymous" src="/bundle.js" charset="utf-8"></script> + </body> +</html> diff --git a/packages/website/translations/chinese.json b/packages/website/translations/chinese.json index eb88b43d0..b99a3cdcb 100644 --- a/packages/website/translations/chinese.json +++ b/packages/website/translations/chinese.json @@ -67,6 +67,7 @@ "BLOG": "博客", "FORUM": "论坛", "CONNECT": "0x 连接", + "PROTOCOL_SPECIFICATION": "protocol specification", "WHITEPAPER": "白皮书", "WIKI": "维基", "WEB3_WRAPPER": "Web3Wrapper", @@ -76,9 +77,9 @@ "STANDARD_RELAYER_API": "中继方标准API", "PORTAL_DAPP": "去中心化应用门户", "WEBSITE": "网站", - "DEVELOPERS": "首页", - "HOME": "Rocket.chat", - "ROCKETCHAT": "开发人员", + "DEVELOPERS": "开发人员", + "HOME": "首页", + "DISCORD": "discord chat", "BUILD_A_RELAYER": "build a relayer", "BUILD_A_RELAYER_DESCRIPTION": "Learn how to build your own 0x relayer from scratch", "DEVELOP_ON_ETHEREUM": "develop on Ethereum", diff --git a/packages/website/translations/english.json b/packages/website/translations/english.json index 5fba7a0ff..78f29d0f6 100644 --- a/packages/website/translations/english.json +++ b/packages/website/translations/english.json @@ -69,6 +69,7 @@ "BLOG": "blog", "FORUM": "forum", "CONNECT": "0x Connect", + "PROTOCOL_SPECIFICATION": "protocol specification", "WHITEPAPER": "whitepaper", "WIKI": "wiki", "WEB3_WRAPPER": "Web3Wrapper", @@ -81,7 +82,7 @@ "WEBSITE": "website", "DEVELOPERS": "developers", "HOME": "home", - "ROCKETCHAT": "rocket.chat", + "DISCORD": "discord chat", "TRADE_CALL_TO_ACTION": "trade on 0x", "BUILD_A_RELAYER": "build a relayer", "BUILD_A_RELAYER_DESCRIPTION": "Learn how to build your own 0x relayer from scratch", diff --git a/packages/website/translations/korean.json b/packages/website/translations/korean.json index e3ce74676..a421ffb94 100644 --- a/packages/website/translations/korean.json +++ b/packages/website/translations/korean.json @@ -67,6 +67,7 @@ "BLOG": "블로그", "FORUM": "포럼", "CONNECT": "0x Connect", + "PROTOCOL_SPECIFICATION": "protocol specification", "WHITEPAPER": "백서", "WIKI": "위키", "WEB3_WRAPPER": "Web3Wrapper", @@ -77,7 +78,7 @@ "PORTAL_DAPP": "포털 dApp", "WEBSITE": "Website", "HOME": "홈", - "ROCKETCHAT": "Rocket.chat", + "DISCORD": "discord chat", "DEVELOPERS": "개발자", "BUILD_A_RELAYER": "build a relayer", "BUILD_A_RELAYER_DESCRIPTION": "Learn how to build your own 0x relayer from scratch", diff --git a/packages/website/translations/russian.json b/packages/website/translations/russian.json index c74fb5e32..b3ea29cf3 100644 --- a/packages/website/translations/russian.json +++ b/packages/website/translations/russian.json @@ -67,6 +67,7 @@ "BLOG": "Блог", "FORUM": "Форум", "CONNECT": "0x Connect", + "PROTOCOL_SPECIFICATION": "protocol specification", "WHITEPAPER": "Whitepaper", "WIKI": "Вики", "WEB3_WRAPPER": "Web3Wrapper", @@ 
-76,9 +77,9 @@ "STANDARD_RELAYER_API": "standard relayer API", "PORTAL_DAPP": "DApp-портал", "WEBSITE": "Веб-сайт", - "DEVELOPERS": "Домашняя страница", - "HOME": "Rocket.chat", - "ROCKETCHAT": "Для разработчиков", + "DEVELOPERS": "Для разработчиков", + "HOME": "Домашняя страница", + "DISCORD": "discord chat", "BUILD_A_RELAYER": "build a relayer", "BUILD_A_RELAYER_DESCRIPTION": "Learn how to build your own 0x relayer from scratch", "DEVELOP_ON_ETHEREUM": "develop on Ethereum", diff --git a/packages/website/translations/spanish.json b/packages/website/translations/spanish.json index e29db711b..db75312c5 100644 --- a/packages/website/translations/spanish.json +++ b/packages/website/translations/spanish.json @@ -68,6 +68,7 @@ "BLOG": "blog", "FORUM": "foro", "CONNECT": "0x Connect", + "PROTOCOL_SPECIFICATION": "protocol specification", "WHITEPAPER": "documento técnico", "WIKI": "wiki", "WEB3_WRAPPER": "Web3Wrapper", @@ -77,9 +78,9 @@ "STANDARD_RELAYER_API": "API de transmisión estándar", "PORTAL_DAPP": "portal dApp", "WEBSITE": "website", - "DEVELOPERS": "inicio", - "HOME": "rocket.chat", - "ROCKETCHAT": "desarrolladores", + "DEVELOPERS": "desarrolladores", + "HOME": "inicio", + "DISCORD": "discord chat", "BUILD_A_RELAYER": "build a relayer", "BUILD_A_RELAYER_DESCRIPTION": "Learn how to build your own 0x relayer from scratch", "DEVELOP_ON_ETHEREUM": "develop on Ethereum", diff --git a/packages/website/ts/components/dropdowns/developers_drop_down.tsx b/packages/website/ts/components/dropdowns/developers_drop_down.tsx index b8a35fab0..079132f2b 100644 --- a/packages/website/ts/components/dropdowns/developers_drop_down.tsx +++ b/packages/website/ts/components/dropdowns/developers_drop_down.tsx @@ -53,8 +53,8 @@ const usefulLinksToLinkInfo: ALink[] = [ shouldOpenInNewTab: true, }, { - title: Key.Whitepaper, - to: WebsitePaths.Whitepaper, + title: Key.ProtocolSpecification, + to: constants.URL_PROTOCOL_SPECIFICATION, shouldOpenInNewTab: true, }, ]; diff --git a/packages/website/ts/components/footer.tsx b/packages/website/ts/components/footer.tsx index e10005a0a..1098d6d0b 100644 --- a/packages/website/ts/components/footer.tsx +++ b/packages/website/ts/components/footer.tsx @@ -71,7 +71,7 @@ export class Footer extends React.Component<FooterProps, FooterState> { ], [Key.Community]: [ { - title: this.props.translate.get(Key.RocketChat, Deco.Cap), + title: this.props.translate.get(Key.Discord, Deco.Cap), to: constants.URL_ZEROEX_CHAT, shouldOpenInNewTab: true, }, @@ -177,7 +177,7 @@ export class Footer extends React.Component<FooterProps, FooterState> { } private _renderMenuItem(link: ALink): React.ReactNode { const titleToIcon: { [title: string]: string } = { - [this.props.translate.get(Key.RocketChat, Deco.Cap)]: 'rocketchat.png', + [this.props.translate.get(Key.Discord, Deco.Cap)]: 'discord.png', [this.props.translate.get(Key.Blog, Deco.Cap)]: 'medium.png', Twitter: 'twitter.png', Reddit: 'reddit.png', diff --git a/packages/website/ts/components/ui/check_mark.tsx b/packages/website/ts/components/ui/check_mark.tsx new file mode 100644 index 000000000..86e427c8b --- /dev/null +++ b/packages/website/ts/components/ui/check_mark.tsx @@ -0,0 +1,31 @@ +import * as React from 'react'; + +import { colors } from '@0x/react-shared'; + +export interface CheckMarkProps { + color?: string; + isChecked?: boolean; +} + +export const CheckMark: React.StatelessComponent<CheckMarkProps> = ({ color, isChecked }) => ( + <svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg"> + 
<circle + cx="8.5" + cy="8.5" + r="8.5" + fill={isChecked ? color : 'white'} + stroke={isChecked ? undefined : '#CCCCCC'} + /> + <path + d="M2.5 4.5L1.79289 5.20711L2.5 5.91421L3.20711 5.20711L2.5 4.5ZM-0.707107 2.70711L1.79289 5.20711L3.20711 3.79289L0.707107 1.29289L-0.707107 2.70711ZM3.20711 5.20711L7.70711 0.707107L6.29289 -0.707107L1.79289 3.79289L3.20711 5.20711Z" + transform="translate(5 6.5)" + fill="white" + /> + </svg> +); + +CheckMark.displayName = 'Check'; + +CheckMark.defaultProps = { + color: colors.mediumBlue, +}; diff --git a/packages/website/ts/components/ui/container.tsx b/packages/website/ts/components/ui/container.tsx index 7eab2a50f..ae00851e5 100644 --- a/packages/website/ts/components/ui/container.tsx +++ b/packages/website/ts/components/ui/container.tsx @@ -1,11 +1,15 @@ import { TextAlignProperty } from 'csstype'; +import { darken } from 'polished'; import * as React from 'react'; +import { styled } from 'ts/style/theme'; + type StringOrNum = string | number; export type ContainerTag = 'div' | 'span'; export interface ContainerProps { + margin?: string; marginTop?: StringOrNum; marginBottom?: StringOrNum; marginRight?: StringOrNum; @@ -17,10 +21,13 @@ export interface ContainerProps { paddingLeft?: StringOrNum; backgroundColor?: string; background?: string; + border?: string; + borderTop?: string; borderRadius?: StringOrNum; borderBottomLeftRadius?: StringOrNum; borderBottomRightRadius?: StringOrNum; borderBottom?: StringOrNum; + borderColor?: string; maxWidth?: StringOrNum; maxHeight?: StringOrNum; width?: StringOrNum; @@ -37,15 +44,32 @@ export interface ContainerProps { right?: string; bottom?: string; zIndex?: number; + float?: 'right' | 'left'; Tag?: ContainerTag; cursor?: string; id?: string; onClick?: (event: React.MouseEvent<HTMLElement>) => void; overflowX?: 'scroll' | 'hidden' | 'auto' | 'visible'; + overflowY?: 'scroll' | 'hidden' | 'auto' | 'visible'; + shouldDarkenOnHover?: boolean; + hasBoxShadow?: boolean; + shouldAddBoxShadowOnHover?: boolean; } -export const Container: React.StatelessComponent<ContainerProps> = props => { - const { children, className, Tag, isHidden, id, onClick, ...style } = props; +export const PlainContainer: React.StatelessComponent<ContainerProps> = props => { + const { + children, + className, + Tag, + isHidden, + id, + onClick, + shouldDarkenOnHover, + shouldAddBoxShadowOnHover, + hasBoxShadow, + // tslint:disable-next-line:trailing-comma + ...style + } = props; const visibility = isHidden ? 'hidden' : undefined; return ( <Tag id={id} style={{ ...style, visibility }} className={className} onClick={onClick}> @@ -54,6 +78,20 @@ export const Container: React.StatelessComponent<ContainerProps> = props => { ); }; +const BOX_SHADOW = '0px 3px 10px rgba(0, 0, 0, 0.3)'; + +export const Container = styled(PlainContainer)` + box-sizing: border-box; + ${props => (props.hasBoxShadow ? `box-shadow: ${BOX_SHADOW}` : '')}; + &:hover { + ${props => + props.shouldDarkenOnHover + ? `background-color: ${props.backgroundColor ? darken(0.05, props.backgroundColor) : 'none'} !important` + : ''}; + ${props => (props.shouldAddBoxShadowOnHover ? 
`box-shadow: ${BOX_SHADOW}` : '')}; + } +`; + Container.defaultProps = { Tag: 'div', }; diff --git a/packages/website/ts/components/ui/input.tsx b/packages/website/ts/components/ui/input.tsx index e5f4f6c70..d21b9fd0e 100644 --- a/packages/website/ts/components/ui/input.tsx +++ b/packages/website/ts/components/ui/input.tsx @@ -8,6 +8,8 @@ export interface InputProps { width?: string; fontSize?: string; fontColor?: string; + border?: string; + padding?: string; placeholderColor?: string; placeholder?: string; backgroundColor?: string; @@ -21,11 +23,13 @@ const PlainInput: React.StatelessComponent<InputProps> = ({ value, className, pl export const Input = styled(PlainInput)` font-size: ${props => props.fontSize}; width: ${props => props.width}; - padding: 0.8em 1.2em; + padding: ${props => props.padding}; border-radius: 3px; + box-sizing: border-box; font-family: 'Roboto Mono'; color: ${props => props.fontColor}; - border: none; + border: ${props => props.border}; + outline: none; background-color: ${props => props.backgroundColor}; &::placeholder { color: ${props => props.placeholderColor}; @@ -38,6 +42,8 @@ Input.defaultProps = { fontColor: colors.darkestGrey, placeholderColor: colors.darkGrey, fontSize: '12px', + border: 'none', + padding: '0.8em 1.2em', }; Input.displayName = 'Input'; diff --git a/packages/website/ts/components/ui/multi_select.tsx b/packages/website/ts/components/ui/multi_select.tsx new file mode 100644 index 000000000..2cf44cae1 --- /dev/null +++ b/packages/website/ts/components/ui/multi_select.tsx @@ -0,0 +1,66 @@ +import { colors } from '@0x/react-shared'; +import * as _ from 'lodash'; +import * as React from 'react'; + +import { Container } from './container'; + +export interface MultiSelectItemConfig { + value: string; + renderItemContent: (isSelected: boolean) => React.ReactNode; + onClick?: () => void; +} + +export interface MultiSelectProps { + selectedValues?: string[]; + items: MultiSelectItemConfig[]; + backgroundColor?: string; + height?: string; +} + +export class MultiSelect extends React.Component<MultiSelectProps> { + public static defaultProps = { + backgroundColor: colors.white, + }; + public render(): React.ReactNode { + const { items, backgroundColor, selectedValues, height } = this.props; + return ( + <Container + backgroundColor={backgroundColor} + borderRadius="4px" + width="100%" + height={height} + overflowY="scroll" + > + {_.map(items, item => ( + <MultiSelectItem + key={item.value} + renderItemContent={item.renderItemContent} + backgroundColor={backgroundColor} + onClick={item.onClick} + isSelected={_.isUndefined(selectedValues) || _.includes(selectedValues, item.value)} + /> + ))} + </Container> + ); + } +} + +export interface MultiSelectItemProps { + renderItemContent: (isSelected: boolean) => React.ReactNode; + isSelected?: boolean; + onClick?: () => void; + backgroundColor?: string; +} + +export const MultiSelectItem: React.StatelessComponent<MultiSelectItemProps> = ({ + renderItemContent, + isSelected, + onClick, + backgroundColor, +}) => ( + <Container cursor="pointer" shouldDarkenOnHover={true} onClick={onClick} backgroundColor={backgroundColor}> + <Container borderBottom={`1px solid ${colors.lightestGrey}`} margin="0px 15px" padding="10px 0px"> + {renderItemContent(isSelected)} + </Container> + </Container> +); diff --git a/packages/website/ts/components/ui/select.tsx b/packages/website/ts/components/ui/select.tsx new file mode 100644 index 000000000..e4fb50f59 --- /dev/null +++ b/packages/website/ts/components/ui/select.tsx @@ -0,0 
+1,170 @@ +import { colors } from '@0x/react-shared'; +import * as _ from 'lodash'; +import * as React from 'react'; + +import { zIndex } from 'ts/style/z_index'; + +import { Container } from './container'; +import { Overlay } from './overlay'; +import { Text } from './text'; + +export interface SelectItemConfig { + text: string; + onClick?: () => void; +} + +export interface SelectProps { + value: string; + label?: string; + items: SelectItemConfig[]; + onOpen?: () => void; + border?: string; + fontSize?: string; + iconSize?: number; + textColor?: string; + labelColor?: string; + backgroundColor?: string; +} + +export interface SelectState { + isOpen: boolean; +} + +export class Select extends React.Component<SelectProps, SelectState> { + public static defaultProps = { + items: [] as SelectItemConfig[], + textColor: colors.black, + backgroundColor: colors.white, + fontSize: '16px', + iconSize: 25, + }; + public state: SelectState = { + isOpen: false, + }; + public render(): React.ReactNode { + const { value, label, items, border, textColor, labelColor, backgroundColor, fontSize, iconSize } = this.props; + const { isOpen } = this.state; + const hasItems = !_.isEmpty(items); + const borderRadius = isOpen ? '4px 4px 0px 0px' : '4px'; + return ( + <React.Fragment> + {isOpen && ( + <Overlay + style={{ + zIndex: zIndex.overlay, + backgroundColor: 'rgba(255, 255, 255, 0)', + }} + onClick={this._closeDropdown} + /> + )} + <Container position="relative"> + <Container + cursor={hasItems ? 'pointer' : undefined} + onClick={this._handleDropdownClick} + borderRadius={borderRadius} + hasBoxShadow={isOpen} + border={border} + backgroundColor={backgroundColor} + padding="0.5em 0.8em" + width="100%" + > + <Container className="flex justify-between"> + <Text fontSize={fontSize} fontColor={textColor}> + {value} + </Text> + <Container> + {label && ( + <Text fontSize={fontSize} fontColor={labelColor}> + {label} + </Text> + )} + {hasItems && ( + <Container marginLeft="5px" display="inline-block"> + <i + className="zmdi zmdi-chevron-down" + style={{ fontSize: iconSize, color: colors.darkGrey }} + /> + </Container> + )} + </Container> + </Container> + </Container> + {isOpen && ( + <Container + width="100%" + position="absolute" + onClick={this._closeDropdown} + zIndex={zIndex.aboveOverlay} + hasBoxShadow={true} + > + {_.map(items, (item, index) => ( + <SelectItem + key={item.text} + {...item} + isLast={index === items.length - 1} + backgroundColor={backgroundColor} + textColor={textColor} + border={border} + /> + ))} + </Container> + )} + </Container> + </React.Fragment> + ); + } + private readonly _handleDropdownClick = (): void => { + if (_.isEmpty(this.props.items)) { + return; + } + const isOpen = !this.state.isOpen; + this.setState({ + isOpen, + }); + + if (isOpen && this.props.onOpen) { + this.props.onOpen(); + } + }; + private readonly _closeDropdown = (): void => { + this.setState({ + isOpen: false, + }); + }; +} + +export interface SelectItemProps extends SelectItemConfig { + text: string; + onClick?: () => void; + isLast: boolean; + backgroundColor?: string; + border?: string; + textColor?: string; + fontSize?: string; +} + +export const SelectItem: React.StatelessComponent<SelectItemProps> = ({ + text, + onClick, + isLast, + border, + backgroundColor, + textColor, + fontSize, +}) => ( + <Container + onClick={onClick} + cursor="pointer" + backgroundColor={backgroundColor} + padding="0.8em" + borderTop="0" + border={border} + shouldDarkenOnHover={true} + borderRadius={isLast ? 
'0px 0px 4px 4px' : undefined} + width="100%" + > + <Text fontSize={fontSize} fontColor={textColor}> + {text} + </Text> + </Container> +); diff --git a/packages/website/ts/pages/documentation/developers_page.tsx b/packages/website/ts/pages/documentation/developers_page.tsx index a84be7bfe..fcca2b6ad 100644 --- a/packages/website/ts/pages/documentation/developers_page.tsx +++ b/packages/website/ts/pages/documentation/developers_page.tsx @@ -2,6 +2,7 @@ import { colors, constants as sharedConstants, utils as sharedUtils } from '@0x/ import * as _ from 'lodash'; import * as React from 'react'; import DocumentTitle from 'react-document-title'; +import { Helmet } from 'react-helmet'; import { DocsLogo } from 'ts/components/documentation/docs_logo'; import { DocsTopBar } from 'ts/components/documentation/docs_top_bar'; import { Container } from 'ts/components/ui/container'; @@ -146,6 +147,9 @@ export class DevelopersPage extends React.Component<DevelopersPageProps, Develop } 50%, ${colors.white} 100%)`} > <DocumentTitle title="0x Docs" /> + <Helmet> + <link rel="stylesheet" href="/css/github-gist.css" /> + </Helmet> <Container className="flex mx-auto" height="100vh"> <Container className="sm-hide xs-hide relative" diff --git a/packages/website/ts/pages/faq/faq.tsx b/packages/website/ts/pages/faq/faq.tsx index 10d91bae8..c4965e61c 100644 --- a/packages/website/ts/pages/faq/faq.tsx +++ b/packages/website/ts/pages/faq/faq.tsx @@ -379,7 +379,7 @@ const sections: FAQSection[] = [ <div> Join our{' '} <a href={constants.URL_ZEROEX_CHAT} target="_blank"> - Rocket.chat + Discord </a>! As an open source project, 0x will rely on a worldwide community of passionate developers to contribute proposals, ideas and code. </div> diff --git a/packages/website/ts/pages/instant/action_link.tsx b/packages/website/ts/pages/instant/action_link.tsx new file mode 100644 index 000000000..c196f03ef --- /dev/null +++ b/packages/website/ts/pages/instant/action_link.tsx @@ -0,0 +1,46 @@ +import * as _ from 'lodash'; +import * as React from 'react'; + +import { Container } from 'ts/components/ui/container'; +import { Text } from 'ts/components/ui/text'; +import { colors } from 'ts/style/colors'; +import { utils } from 'ts/utils/utils'; + +export interface ActionLinkProps { + displayText: string; + linkSrc?: string; + onClick?: () => void; + fontSize?: number; + color?: string; + className?: string; +} + +export class ActionLink extends React.Component<ActionLinkProps> { + public static defaultProps = { + fontSize: 16, + color: colors.white, + }; + public render(): React.ReactNode { + const { displayText, fontSize, color, className } = this.props; + return ( + <Container className={`flex items-center ${className}`} onClick={this._handleClick} cursor="pointer"> + <Container> + <Text fontSize="16px" fontColor={color}> + {displayText} + </Text> + </Container> + <Container paddingTop="1px" paddingLeft="6px"> + <i className="zmdi zmdi-chevron-right bold" style={{ fontSize, color }} /> + </Container> + </Container> + ); + } + + private readonly _handleClick = (event: React.MouseEvent<HTMLElement>) => { + if (!_.isUndefined(this.props.onClick)) { + this.props.onClick(); + } else if (!_.isUndefined(this.props.linkSrc)) { + utils.openUrl(this.props.linkSrc); + } + }; +} diff --git a/packages/website/ts/pages/instant/code_demo.tsx b/packages/website/ts/pages/instant/code_demo.tsx new file mode 100644 index 000000000..a3b5fe847 --- /dev/null +++ b/packages/website/ts/pages/instant/code_demo.tsx @@ -0,0 +1,177 @@ +import * as React from 
'react'; +import * as CopyToClipboard from 'react-copy-to-clipboard'; +import SyntaxHighlighter from 'react-syntax-highlighter'; + +import { Button } from 'ts/components/ui/button'; +import { Container } from 'ts/components/ui/container'; +import { colors } from 'ts/style/colors'; +import { styled } from 'ts/style/theme'; +import { zIndex } from 'ts/style/z_index'; + +const CustomPre = styled.pre` + margin: 0px; + line-height: 24px; + overflow: scroll; + width: 600px; + height: 100%; + max-height: 800px; + border-radius: 4px; + code { + background-color: inherit !important; + border-radius: 0px; + font-family: 'Roboto Mono', sans-serif; + border: none; + } + code:first-of-type { + background-color: #2a2a2a !important; + color: #999; + min-height: 98%; + text-align: center; + padding-right: 5px !important; + padding-left: 5px; + margin-right: 15px; + line-height: 25px; + padding-top: 10px; + } + code:last-of-type { + position: relative; + top: 10px; + } +`; + +const customStyle = { + 'hljs-comment': { + color: '#7e7887', + }, + 'hljs-quote': { + color: '#7e7887', + }, + 'hljs-variable': { + color: '#be4678', + }, + 'hljs-template-variable': { + color: '#be4678', + }, + 'hljs-attribute': { + color: '#be4678', + }, + 'hljs-regexp': { + color: '#be4678', + }, + 'hljs-link': { + color: '#be4678', + }, + 'hljs-tag': { + color: '#61f5ff', + }, + 'hljs-name': { + color: '#61f5ff', + }, + 'hljs-selector-id': { + color: '#be4678', + }, + 'hljs-selector-class': { + color: '#be4678', + }, + 'hljs-number': { + color: '#c994ff', + }, + 'hljs-meta': { + color: '#61f5ff', + }, + 'hljs-built_in': { + color: '#aa573c', + }, + 'hljs-builtin-name': { + color: '#aa573c', + }, + 'hljs-literal': { + color: '#aa573c', + }, + 'hljs-type': { + color: '#aa573c', + }, + 'hljs-params': { + color: '#aa573c', + }, + 'hljs-string': { + color: '#bcff88', + }, + 'hljs-symbol': { + color: '#2a9292', + }, + 'hljs-bullet': { + color: '#2a9292', + }, + 'hljs-title': { + color: '#576ddb', + }, + 'hljs-section': { + color: '#576ddb', + }, + 'hljs-keyword': { + color: '#955ae7', + }, + 'hljs-selector-tag': { + color: '#955ae7', + }, + 'hljs-deletion': { + color: '#19171c', + display: 'inline-block', + width: '100%', + backgroundColor: '#be4678', + }, + 'hljs-addition': { + color: '#19171c', + display: 'inline-block', + width: '100%', + backgroundColor: '#2a9292', + }, + hljs: { + display: 'block', + overflowX: 'hidden', + background: colors.instantSecondaryBackground, + color: 'white', + fontSize: '12px', + }, + 'hljs-emphasis': { + fontStyle: 'italic', + }, + 'hljs-strong': { + fontWeight: 'bold', + }, +}; + +export interface CodeDemoProps { + children: string; +} + +export interface CodeDemoState { + didCopyCode: boolean; +} + +export class CodeDemo extends React.Component<CodeDemoProps, CodeDemoState> { + public state: CodeDemoState = { + didCopyCode: false, + }; + public render(): React.ReactNode { + const copyButtonText = this.state.didCopyCode ? 'Copied!' 
: 'Copy'; + return ( + <Container position="relative" height="100%"> + <Container position="absolute" top="10px" right="10px" zIndex={zIndex.overlay - 1}> + <CopyToClipboard text={this.props.children} onCopy={this._handleCopyClick}> + <Button fontSize="14px"> + <b>{copyButtonText}</b> + </Button> + </CopyToClipboard> + </Container> + <SyntaxHighlighter language="html" style={customStyle} showLineNumbers={true} PreTag={CustomPre}> + {this.props.children} + </SyntaxHighlighter> + </Container> + ); + } + private readonly _handleCopyClick = () => { + this.setState({ didCopyCode: true }); + }; +} diff --git a/packages/website/ts/pages/instant/config_generator.tsx b/packages/website/ts/pages/instant/config_generator.tsx new file mode 100644 index 000000000..fbeeeaeaf --- /dev/null +++ b/packages/website/ts/pages/instant/config_generator.tsx @@ -0,0 +1,311 @@ +import { StandardRelayerAPIOrderProvider } from '@0x/asset-buyer'; +import { getContractAddressesForNetworkOrThrow } from '@0x/contract-addresses'; +import { assetDataUtils } from '@0x/order-utils'; +import { ObjectMap } from '@0x/types'; +import * as _ from 'lodash'; +import * as React from 'react'; + +import { CheckMark } from 'ts/components/ui/check_mark'; +import { Container } from 'ts/components/ui/container'; +import { MultiSelect } from 'ts/components/ui/multi_select'; +import { Select, SelectItemConfig } from 'ts/components/ui/select'; +import { Spinner } from 'ts/components/ui/spinner'; +import { Text } from 'ts/components/ui/text'; +import { ConfigGeneratorAddressInput } from 'ts/pages/instant/config_generator_address_input'; +import { FeePercentageSlider } from 'ts/pages/instant/fee_percentage_slider'; +import { colors } from 'ts/style/colors'; +import { WebsitePaths } from 'ts/types'; +import { constants } from 'ts/utils/constants'; + +import { assetMetaDataMap } from '../../../../instant/src/data/asset_meta_data_map'; +import { ERC20AssetMetaData, ZeroExInstantBaseConfig } from '../../../../instant/src/types'; + +export interface ConfigGeneratorProps { + value: ZeroExInstantBaseConfig; + onConfigChange: (config: ZeroExInstantBaseConfig) => void; +} + +export interface ConfigGeneratorState { + isLoadingAvailableTokens: boolean; + // Address to token info + availableTokens?: ObjectMap<ERC20AssetMetaData>; +} + +const SRA_ENDPOINTS = ['https://api.radarrelay.com/0x/v2/', 'https://sra.bamboorelay.com/0x/v2/']; + +export class ConfigGenerator extends React.Component<ConfigGeneratorProps, ConfigGeneratorState> { + public state: ConfigGeneratorState = { + isLoadingAvailableTokens: true, + }; + public componentDidMount(): void { + // tslint:disable-next-line:no-floating-promises + this._setAvailableAssetsFromOrderProvider(); + } + public componentDidUpdate(prevProps: ConfigGeneratorProps): void { + if (prevProps.value.orderSource !== this.props.value.orderSource) { + // tslint:disable-next-line:no-floating-promises + this._setAvailableAssetsFromOrderProvider(); + const newConfig: ZeroExInstantBaseConfig = { + ...this.props.value, + availableAssetDatas: undefined, + }; + this.props.onConfigChange(newConfig); + } + } + public render(): React.ReactNode { + const { value } = this.props; + if (!_.isString(value.orderSource)) { + throw new Error('ConfigGenerator component only supports string values as an orderSource.'); + } + return ( + <Container minWidth="350px"> + <ConfigGeneratorSection title="Standard relayer API endpoint"> + <Select value={value.orderSource} items={this._generateItems()} /> + </ConfigGeneratorSection> + 
<ConfigGeneratorSection {...this._getTokenSelectorProps()}> + {this._renderTokenMultiSelectOrSpinner()} + </ConfigGeneratorSection> + <ConfigGeneratorSection title="Transaction fee ETH address" marginBottom="10px" isOptional={true}> + <ConfigGeneratorAddressInput + value={value.affiliateInfo ? value.affiliateInfo.feeRecipient : ''} + onChange={this._handleAffiliateAddressChange} + /> + </ConfigGeneratorSection> + <ConfigGeneratorSection + title="Fee percentage" + actionText="Learn more" + onActionTextClick={this._handleAffiliatePercentageLearnMoreClick} + > + <FeePercentageSlider + value={value.affiliateInfo.feePercentage} + onChange={this._handleAffiliatePercentageChange} + isDisabled={ + _.isUndefined(value.affiliateInfo) || + _.isUndefined(value.affiliateInfo.feeRecipient) || + _.isEmpty(value.affiliateInfo.feeRecipient) + } + /> + </ConfigGeneratorSection> + </Container> + ); + } + private readonly _getTokenSelectorProps = (): ConfigGeneratorSectionProps => { + if (_.isEmpty(this.state.availableTokens)) { + return { + title: 'What tokens can users buy?', + }; + } + if (_.isUndefined(this.props.value.availableAssetDatas)) { + return { + title: 'What tokens can users buy?', + actionText: 'Unselect All', + onActionTextClick: this._handleUnselectAllClick, + }; + } + return { + title: 'What tokens can users buy?', + actionText: 'Select All', + onActionTextClick: this._handleSelectAllClick, + }; + }; + private readonly _generateItems = (): SelectItemConfig[] => { + return _.map(SRA_ENDPOINTS, endpoint => ({ + text: endpoint, + onClick: this._handleSRASelection.bind(this, endpoint), + })); + }; + private readonly _handleAffiliatePercentageLearnMoreClick = (): void => { + window.open(`${WebsitePaths.Wiki}#Learn-About-Affiliate-Fees`, '_blank'); + }; + private readonly _handleSRASelection = (sraEndpoint: string) => { + const newConfig: ZeroExInstantBaseConfig = { + ...this.props.value, + orderSource: sraEndpoint, + }; + this.props.onConfigChange(newConfig); + }; + private readonly _handleAffiliateAddressChange = (address: string, isValid: boolean) => { + const oldConfig: ZeroExInstantBaseConfig = this.props.value; + const newConfig: ZeroExInstantBaseConfig = { + ...oldConfig, + affiliateInfo: { + feeRecipient: address, + feePercentage: oldConfig.affiliateInfo.feePercentage, + }, + }; + this.props.onConfigChange(newConfig); + }; + private readonly _handleAffiliatePercentageChange = (value: number) => { + const oldConfig: ZeroExInstantBaseConfig = this.props.value; + const newConfig: ZeroExInstantBaseConfig = { + ...oldConfig, + affiliateInfo: { + feeRecipient: oldConfig.affiliateInfo.feeRecipient, + feePercentage: value, + }, + }; + this.props.onConfigChange(newConfig); + }; + private readonly _handleSelectAllClick = () => { + const newConfig: ZeroExInstantBaseConfig = { + ...this.props.value, + availableAssetDatas: undefined, + }; + this.props.onConfigChange(newConfig); + }; + private readonly _handleUnselectAllClick = () => { + const newConfig: ZeroExInstantBaseConfig = { + ...this.props.value, + availableAssetDatas: [], + }; + this.props.onConfigChange(newConfig); + }; + private readonly _handleTokenClick = (assetData: string) => { + const { value } = this.props; + let newAvailableAssetDatas: string[] = []; + const allKnownAssetDatas = _.keys(this.state.availableTokens); + const availableAssetDatas = value.availableAssetDatas; + if (_.isUndefined(availableAssetDatas)) { + // It being undefined means it's all tokens. 
+ newAvailableAssetDatas = _.pull(allKnownAssetDatas, assetData); + } else if (!_.includes(availableAssetDatas, assetData)) { + // Add it + newAvailableAssetDatas = [...availableAssetDatas, assetData]; + if (newAvailableAssetDatas.length === allKnownAssetDatas.length) { + // If all tokens are manually selected, just show none. + newAvailableAssetDatas = undefined; + } + } else { + // Remove it + newAvailableAssetDatas = _.pull(availableAssetDatas, assetData); + } + const newConfig: ZeroExInstantBaseConfig = { + ...this.props.value, + availableAssetDatas: newAvailableAssetDatas, + }; + this.props.onConfigChange(newConfig); + }; + private readonly _setAvailableAssetsFromOrderProvider = async (): Promise<void> => { + const { value } = this.props; + if (!_.isUndefined(value.orderSource) && _.isString(value.orderSource)) { + this.setState({ isLoadingAvailableTokens: true }); + const networkId = constants.NETWORK_ID_MAINNET; + const sraOrderProvider = new StandardRelayerAPIOrderProvider(value.orderSource, networkId); + const etherTokenAddress = getContractAddressesForNetworkOrThrow(networkId).etherToken; + const etherTokenAssetData = assetDataUtils.encodeERC20AssetData(etherTokenAddress); + const assetDatas = await sraOrderProvider.getAvailableMakerAssetDatasAsync(etherTokenAssetData); + const availableTokens = _.reduce( + assetDatas, + (acc, assetData) => { + const metaDataIfExists = assetMetaDataMap[assetData] as ERC20AssetMetaData; + if (metaDataIfExists) { + acc[assetData] = metaDataIfExists; + } + return acc; + }, + {} as ObjectMap<ERC20AssetMetaData>, + ); + this.setState({ availableTokens, isLoadingAvailableTokens: false }); + } + }; + private readonly _renderTokenMultiSelectOrSpinner = (): React.ReactNode => { + const { value } = this.props; + const { availableTokens, isLoadingAvailableTokens } = this.state; + const multiSelectHeight = '200px'; + if (isLoadingAvailableTokens) { + return ( + <Container + className="flex flex-column items-center justify-center" + height={multiSelectHeight} + backgroundColor={colors.white} + borderRadius="4px" + width="100%" + > + <Container position="relative" left="12px" marginBottom="20px"> + <Spinner /> + </Container> + <Text fontSize="16px">Loading...</Text> + </Container> + ); + } + const availableAssetDatas = _.keys(availableTokens); + if (availableAssetDatas.length === 0) { + return ( + <Container + className="flex flex-column items-center justify-center" + height={multiSelectHeight} + backgroundColor={colors.white} + borderRadius="4px" + width="100%" + > + <Text fontSize="16px">No tokens available. Try another endpoint?</Text> + </Container> + ); + } + const items = _.map(_.keys(availableTokens), assetData => { + const metaData = availableTokens[assetData]; + return { + value: assetData, + renderItemContent: (isSelected: boolean) => ( + <Container className="flex items-center"> + <Container marginRight="10px"> + <CheckMark isChecked={isSelected} /> + </Container> + <Text + fontSize="16px" + fontColor={isSelected ? 
colors.mediumBlue : colors.darkerGrey} + fontWeight={300} + > + <b>{metaData.symbol.toUpperCase()}</b> — {metaData.name} + </Text> + </Container> + ), + onClick: this._handleTokenClick.bind(this, assetData), + }; + }); + return <MultiSelect items={items} selectedValues={value.availableAssetDatas} height={multiSelectHeight} />; + }; +} + +export interface ConfigGeneratorSectionProps { + title: string; + actionText?: string; + onActionTextClick?: () => void; + isOptional?: boolean; + marginBottom?: string; +} + +export const ConfigGeneratorSection: React.StatelessComponent<ConfigGeneratorSectionProps> = ({ + title, + actionText, + onActionTextClick, + isOptional, + marginBottom, + children, +}) => ( + <Container marginBottom={marginBottom}> + <Container marginBottom="10px" className="flex justify-between items-center"> + <Container> + <Text fontColor={colors.white} fontSize="16px" lineHeight="18px" display="inline"> + {title} + </Text> + {isOptional && ( + <Text fontColor={colors.grey} fontSize="16px" lineHeight="18px" display="inline"> + {' '} + (optional) + </Text> + )} + </Container> + {actionText && ( + <Text fontSize="12px" fontColor={colors.grey} onClick={onActionTextClick}> + {actionText} + </Text> + )} + </Container> + {children} + </Container> +); + +ConfigGeneratorSection.defaultProps = { + marginBottom: '30px', +}; diff --git a/packages/website/ts/pages/instant/config_generator_address_input.tsx b/packages/website/ts/pages/instant/config_generator_address_input.tsx new file mode 100644 index 000000000..ccbaf4482 --- /dev/null +++ b/packages/website/ts/pages/instant/config_generator_address_input.tsx @@ -0,0 +1,59 @@ +import { colors } from '@0x/react-shared'; +import { addressUtils } from '@0x/utils'; +import * as _ from 'lodash'; +import * as React from 'react'; + +import { Container } from 'ts/components/ui/container'; +import { Input } from 'ts/components/ui/input'; +import { Text } from 'ts/components/ui/text'; + +export interface ConfigGeneratorAddressInputProps { + value?: string; + onChange?: (address: string, isValid: boolean) => void; +} + +export interface ConfigGeneratorAddressInputState { + errMsg: string; +} + +export class ConfigGeneratorAddressInput extends React.Component< + ConfigGeneratorAddressInputProps, + ConfigGeneratorAddressInputState +> { + public state = { + errMsg: '', + }; + public render(): React.ReactNode { + const { errMsg } = this.state; + const hasError = !_.isEmpty(errMsg); + const border = hasError ? '1px solid red' : undefined; + return ( + <Container height="80px"> + <Input + width="100%" + fontSize="16px" + padding="0.7em 1em" + value={this.props.value} + onChange={this._handleChange} + placeholder="0xe99...aa8da4" + border={border} + /> + <Container marginTop="5px" isHidden={!hasError} height="25px"> + <Text fontSize="14px" fontColor={colors.grey} fontStyle="italic"> + {errMsg} + </Text> + </Container> + </Container> + ); + } + + private readonly _handleChange = (event: React.ChangeEvent<HTMLInputElement>): void => { + const address = event.target.value; + const isValidAddress = addressUtils.isAddress(address.toLowerCase()) || address === ''; + const errMsg = isValidAddress ? 
'' : 'Please enter a valid Ethereum address';
+        this.setState({
+            errMsg,
+        });
+        this.props.onChange(address, isValidAddress);
+    };
+}
diff --git a/packages/website/ts/pages/instant/configurator.tsx b/packages/website/ts/pages/instant/configurator.tsx
index c836739bb..2cb1a1c1c 100644
--- a/packages/website/ts/pages/instant/configurator.tsx
+++ b/packages/website/ts/pages/instant/configurator.tsx
@@ -1,12 +1,110 @@
+import * as _ from 'lodash';
 import * as React from 'react';

 import { Container } from 'ts/components/ui/container';
+import { Text } from 'ts/components/ui/text';
+import { ActionLink } from 'ts/pages/instant/action_link';
+import { CodeDemo } from 'ts/pages/instant/code_demo';
+import { ConfigGenerator } from 'ts/pages/instant/config_generator';
 import { colors } from 'ts/style/colors';
+import { WebsitePaths } from 'ts/types';
+
+import { ZeroExInstantBaseConfig } from '../../../../instant/src/types';

 export interface ConfiguratorProps {
     hash: string;
 }

-export const Configurator = (props: ConfiguratorProps) => (
-    <Container id={props.hash} height="400px" backgroundColor={colors.instantTertiaryBackground} />
-);
+export interface ConfiguratorState {
+    instantConfig: ZeroExInstantBaseConfig;
+}
+
+export class Configurator extends React.Component<ConfiguratorProps> {
+    public state: ConfiguratorState = {
+        instantConfig: {
+            orderSource: 'https://api.radarrelay.com/0x/v2/',
+            availableAssetDatas: undefined,
+            affiliateInfo: {
+                feeRecipient: '',
+                feePercentage: 0,
+            },
+        },
+    };
+    public render(): React.ReactNode {
+        const { hash } = this.props;
+        const codeToDisplay = this._generateCodeDemoCode();
+        return (
+            <Container
+                className="flex justify-center py4 px3"
+                id={hash}
+                backgroundColor={colors.instantTertiaryBackground}
+            >
+                <Container className="mx3">
+                    <Container className="mb3">
+                        <Text fontSize="20px" lineHeight="28px" fontColor={colors.white} fontWeight={500}>
+                            0x Instant Configurator
+                        </Text>
+                    </Container>
+                    <ConfigGenerator value={this.state.instantConfig} onConfigChange={this._handleConfigChange} />
+                </Container>
+                <Container className="mx3" height="550px">
+                    <Container className="mb3 flex justify-between">
+                        <Text fontSize="20px" lineHeight="28px" fontColor={colors.white} fontWeight={500}>
+                            Code Snippet
+                        </Text>
+                        <ActionLink
+                            displayText="Explore the Docs"
+                            linkSrc={`${WebsitePaths.Wiki}#Get-Started-With-Instant`}
+                            color={colors.grey}
+                        />
+                    </Container>
+                    <CodeDemo key={codeToDisplay}>{codeToDisplay}</CodeDemo>
+                </Container>
+            </Container>
+        );
+    }
+    private readonly _handleConfigChange = (config: ZeroExInstantBaseConfig) => {
+        this.setState({
+            instantConfig: config,
+        });
+    };
+    private readonly _generateCodeDemoCode = (): string => {
+        const { instantConfig } = this.state;
+        return `<!DOCTYPE html>
+<html>
+    <head>
+        <meta charset="utf-8" />
+        <script src="https://instant.0xproject.com/instant.js"></script>
+    </head>
+    <body>
+        <script>
+            zeroExInstant.render({
+                orderSource: '${instantConfig.orderSource}',${
+            !_.isUndefined(instantConfig.affiliateInfo) && instantConfig.affiliateInfo.feeRecipient
+                ? `\n                affiliateInfo: {
+                    feeRecipient: '${instantConfig.affiliateInfo.feeRecipient.toLowerCase()}',
+                    feePercentage: ${instantConfig.affiliateInfo.feePercentage}
+                },`
+                : ''
+        }${
+            !_.isUndefined(instantConfig.availableAssetDatas)
+                ? `\n                availableAssetDatas: ${this._renderAvailableAssetDatasString(
+                      instantConfig.availableAssetDatas,
+                  )}`
+                : ''
+        }
+            }, 'body');
+        </script>
+    </body>
+</html>`;
+    };
+    private readonly _renderAvailableAssetDatasString = (availableAssetDatas: string[]): string => {
+        const stringAvailableAssetDatas = availableAssetDatas.map(assetData => `'${assetData}'`);
+        if (availableAssetDatas.length < 2) {
+            return `[${stringAvailableAssetDatas.join(', ')}]`;
+        }
+        return `[\n                    ${stringAvailableAssetDatas.join(
+            ', \n                    ',
+        )}\n                ]`;
+    };
+}
diff --git a/packages/website/ts/pages/instant/features.tsx b/packages/website/ts/pages/instant/features.tsx
index 9c1668c1c..ed98ceb53 100644
--- a/packages/website/ts/pages/instant/features.tsx
+++ b/packages/website/ts/pages/instant/features.tsx
@@ -4,9 +4,9 @@ import * as React from 'react';
 import { Container } from 'ts/components/ui/container';
 import { Image } from 'ts/components/ui/image';
 import { Text } from 'ts/components/ui/text';
+import { ActionLink, ActionLinkProps } from 'ts/pages/instant/action_link';
 import { colors } from 'ts/style/colors';
-import { ScreenWidths } from 'ts/types';
-import { utils } from 'ts/utils/utils';
+import { ScreenWidths, WebsitePaths } from 'ts/types';

 export interface FeatureProps {
     screenWidth: ScreenWidths;
@@ -21,7 +21,7 @@ export const Features = (props: FeatureProps) => {
     };
     const exploreTheDocsLinkInfo = {
         displayText: 'Explore the docs',
-        linkSrc: `${utils.getCurrentBaseUrl()}/wiki#Get-Started`,
+        linkSrc: `${WebsitePaths.Wiki}#Get-Started-With-Instant`,
     };
     const tokenLinkInfos = isSmallScreen ? [getStartedLinkInfo] : [getStartedLinkInfo, exploreTheDocsLinkInfo];
     return (
@@ -40,7 +40,7 @@ export const Features = (props: FeatureProps) => {
                 linkInfos={[
                     {
                         displayText: 'Learn about affiliate fees',
-                        linkSrc: `${utils.getCurrentBaseUrl()}/wiki#Learn-About-Affiliate-Fees`,
+                        linkSrc: `${WebsitePaths.Wiki}#Learn-About-Affiliate-Fees`,
                     },
                 ]}
                 screenWidth={props.screenWidth}
@@ -52,7 +52,7 @@ export const Features = (props: FeatureProps) => {
                 linkInfos={[
                     {
                         displayText: 'Explore AssetBuyer',
-                        linkSrc: `${utils.getCurrentBaseUrl()}/docs/asset-buyer`,
+                        linkSrc: `${WebsitePaths.Docs}/asset-buyer`,
                     },
                 ]}
                 screenWidth={props.screenWidth}
@@ -61,17 +61,11 @@ export const Features = (props: FeatureProps) => {
     );
 };

-interface LinkInfo {
-    displayText: string;
-    linkSrc?: string;
-    onClick?: () => void;
-}
-
 interface FeatureItemProps {
     imgSrc: string;
     title: string;
     description: string;
-    linkInfos: LinkInfo[];
+    linkInfos: ActionLinkProps[];
     screenWidth: ScreenWidths;
 }

@@ -95,36 +89,11 @@ const FeatureItem = (props: FeatureItemProps) => {
                 </Text>
             </Container>
             <Container className="flex" marginTop="28px">
-                {_.map(linkInfos, linkInfo => {
-                    const onClick = (event: React.MouseEvent<HTMLElement>) => {
-                        if (!_.isUndefined(linkInfo.onClick)) {
-                            linkInfo.onClick();
-                        } else if (!_.isUndefined(linkInfo.linkSrc)) {
-                            utils.openUrl(linkInfo.linkSrc);
-                        }
-                    };
-                    return (
-                        <Container
-                            key={linkInfo.linkSrc}
-                            className="flex items-center"
-                            marginRight="32px"
-                            onClick={onClick}
-                            cursor="pointer"
-                        >
-                            <Container>
-                                <Text fontSize="16px" fontColor={colors.white}>
-                                    {linkInfo.displayText}
-                                </Text>
-                            </Container>
-                            <Container paddingTop="1px" paddingLeft="6px">
-                                <i
-                                    className="zmdi zmdi-chevron-right bold"
-                                    style={{ fontSize: 16, color: colors.white }}
-                                />
-                            </Container>
-                        </Container>
-                    );
-                })}
+                {_.map(linkInfos, linkInfo => (
+                    <Container key={linkInfo.displayText} marginRight="32px">
+                        <ActionLink {...linkInfo} />
+                    </Container>
+                ))}
             </Container>
         </Container>
     );
diff --git a/packages/website/ts/pages/instant/fee_percentage_slider.tsx b/packages/website/ts/pages/instant/fee_percentage_slider.tsx
new file mode 100644
index 000000000..d76cee58f
--- /dev/null
+++ b/packages/website/ts/pages/instant/fee_percentage_slider.tsx
@@ -0,0 +1,77 @@
+import Slider from 'rc-slider';
+import 'rc-slider/assets/index.css';
+import * as React from 'react';
+
+import { Text } from 'ts/components/ui/text';
+import { colors } from 'ts/style/colors';
+import { injectGlobal } from 'ts/style/theme';
+
+const SliderWithTooltip = (Slider as any).createSliderWithTooltip(Slider);
+// tslint:disable-next-line:no-unused-expression
+injectGlobal`
+    .rc-slider-tooltip-inner {
+        box-shadow: none !important;
+        background-color: ${colors.white} !important;
+        border-radius: 4px !important;
+        padding: 3px 12px !important;
+        height: auto !important;
+        position: relative;
+        top: 7px;
+        &: after {
+            border: solid transparent;
+            content: " ";
+            height: 0;
+            width: 0;
+            position: absolute;
+            pointer-events: none;
+            border-width: 6px;
+            bottom: 100%;
+            left: 100%;
+            border-bottom-color: ${colors.white};
+            margin-left: -60%;
+        }
+    }
+    .rc-slider-disabled {
+        background-color: inherit !important;
+    }
+`;
+
+export interface FeePercentageSliderProps {
+    value: number;
+    isDisabled: boolean;
+    onChange: (value: number) => void;
+}
+
+export class FeePercentageSlider extends React.Component<FeePercentageSliderProps> {
+    public render(): React.ReactNode {
+        return (
+            <SliderWithTooltip
+                disabled={this.props.isDisabled}
+                min={0}
+                max={0.05}
+                step={0.0025}
+                value={this.props.value}
+                onChange={this.props.onChange}
+                tipFormatter={this._feePercentageSliderFormatter}
+                tipProps={{ placement: 'bottom' }}
+                trackStyle={{
+                    backgroundColor: '#b4b4b4',
+                }}
+                railStyle={{
+                    backgroundColor: '#696969',
+                }}
+                handleStyle={{
+                    border: 'none',
+                    boxShadow: 'none',
+                }}
+                activeDotStyle={{
+                    boxShadow: 'none',
+                    border: 'none',
+                }}
+            />
+        );
+    }
+    private readonly _feePercentageSliderFormatter = (value: number): React.ReactNode => {
+        return <Text fontColor={colors.black} fontSize="14px" fontWeight={700}>{`${(value * 100).toFixed(2)}%`}</Text>;
+    };
+}
diff --git a/packages/website/ts/pages/instant/instant.tsx b/packages/website/ts/pages/instant/instant.tsx
index fa6bd1c33..d72585bfa 100644
--- a/packages/website/ts/pages/instant/instant.tsx
+++ b/packages/website/ts/pages/instant/instant.tsx
@@ -14,7 +14,7 @@ import { NeedMore } from 'ts/pages/instant/need_more';
 import { Screenshots } from 'ts/pages/instant/screenshots';
 import { Dispatcher } from 'ts/redux/dispatcher';
 import { colors } from 'ts/style/colors';
-import { ScreenWidths } from 'ts/types';
+import { ScreenWidths, WebsitePaths } from 'ts/types';
 import { Translate } from 'ts/utils/translate';
 import { utils } from 'ts/utils/utils';

@@ -67,7 +67,7 @@ export class Instant extends React.Component<InstantProps, InstantState> {
     }
     private readonly _onGetStartedClick = () => {
         if (this._isSmallScreen()) {
-            utils.openUrl(`${utils.getCurrentBaseUrl()}/wiki#Get-Started`);
+            utils.openUrl(`${WebsitePaths.Wiki}#Get-Started-With-Instant`);
         } else {
             this._scrollToConfigurator();
         }
diff --git a/packages/website/ts/pages/instant/need_more.tsx b/packages/website/ts/pages/instant/need_more.tsx
index e6d5c3694..70aea7363 100644
--- a/packages/website/ts/pages/instant/need_more.tsx
+++ b/packages/website/ts/pages/instant/need_more.tsx
@@ -4,7 +4,7 @@ import { Button } from 'ts/components/ui/button';
 import { Container } from 'ts/components/ui/container';
 import { Text } from 'ts/components/ui/text';
 import { colors } from 'ts/style/colors';
-import { ScreenWidths } from 'ts/types';
+import { ScreenWidths, WebsitePaths } from 'ts/types';
 import { constants } from 'ts/utils/constants';
 import { utils } from 'ts/utils/utils';

@@ -58,5 +58,5 @@ const onGetInTouchClick = () => {
     utils.openUrl(constants.URL_ZEROEX_CHAT);
 };
 const onDocsClick = () => {
-    utils.openUrl(`${utils.getCurrentBaseUrl()}/wiki#Get-Started`);
+    utils.openUrl(`${WebsitePaths.Wiki}#Get-Started-With-Instant`);
 };
diff --git a/packages/website/ts/pages/landing/landing.tsx b/packages/website/ts/pages/landing/landing.tsx
index bb76efe21..b75b55edb 100644
--- a/packages/website/ts/pages/landing/landing.tsx
+++ b/packages/website/ts/pages/landing/landing.tsx
@@ -36,8 +36,8 @@ interface Project {
 }

 const THROTTLE_TIMEOUT = 100;
-const WHATS_NEW_TITLE = 'Introducing the 0x Launch Kit';
-const WHATS_NEW_URL = 'https://blog.0xproject.com/introducing-the-0x-launch-kit-4acdc3453585';
+const WHATS_NEW_TITLE = 'Introducing 0x Instant';
+const WHATS_NEW_URL = WebsitePaths.Instant;
 const TITLE_STYLE: React.CSSProperties = {
     fontFamily: 'Roboto Mono',
     color: colors.grey,
@@ -237,7 +237,7 @@ export class Landing extends React.Component<LandingProps, LandingState> {
     private _renderWhatsNew(): React.ReactNode {
         return (
             <div className="sm-center sm-px1">
-                <a href={WHATS_NEW_URL} target="_blank" className="inline-block text-decoration-none">
+                <a href={WHATS_NEW_URL} className="inline-block text-decoration-none">
                     <div className="flex items-center sm-pl0 md-pl2 lg-pl0">
                         <Container
                             paddingTop="3px"
diff --git a/packages/website/ts/types.ts b/packages/website/ts/types.ts
index 9c4b8a018..b20dd7095 100644
--- a/packages/website/ts/types.ts
+++ b/packages/website/ts/types.ts
@@ -463,7 +463,7 @@ export enum Key {
     Website = 'WEBSITE',
     Developers = 'DEVELOPERS',
     Home = 'HOME',
-    RocketChat = 'ROCKETCHAT',
+    Discord = 'DISCORD',
     TradeCallToAction = 'TRADE_CALL_TO_ACTION',
     OurMissionAndValues = 'OUR_MISSION_AND_VALUES',
     BuildARelayer = 'BUILD_A_RELAYER',
@@ -496,6 +496,7 @@ export enum Key {
     GetInTouch = 'GET_IN_TOUCH',
     LearnMore = 'LEARN_MORE',
     GetStarted = 'GET_STARTED',
+    ProtocolSpecification = 'PROTOCOL_SPECIFICATION',
 }

 export enum SmartContractDocSections {
diff --git a/packages/website/ts/utils/constants.ts b/packages/website/ts/utils/constants.ts
index e9afc8763..715199515 100644
--- a/packages/website/ts/utils/constants.ts
+++ b/packages/website/ts/utils/constants.ts
@@ -3,7 +3,7 @@ import { BigNumber } from '@0x/utils';
 import { Key, WebsitePaths } from 'ts/types';

 const URL_FORUM = 'https://forum.0xproject.com';
-const URL_ZEROEX_CHAT = 'https://chat.0xproject.com';
+const URL_ZEROEX_CHAT = 'https://discord.gg/d3FTX3M';

 export const constants = {
     DECIMAL_PLACES_ETH: 18,
@@ -81,6 +81,8 @@ export const constants = {
     URL_GITHUB_ORG: 'https://github.com/0xProject',
     URL_GITHUB_WIKI: 'https://github.com/0xProject/wiki',
     URL_FORUM,
+    URL_PROTOCOL_SPECIFICATION:
+        'https://github.com/0xProject/0x-protocol-specification/blob/master/v2/v2-specification.md',
     URL_METAMASK_CHROME_STORE: 'https://chrome.google.com/webstore/detail/metamask/nkbihfbeogaeaoehlefnkodbefgpgknn',
     URL_METAMASK_FIREFOX_STORE: 'https://addons.mozilla.org/en-US/firefox/addon/ether-metamask/',
     URL_COINBASE_WALLET_IOS_APP_STORE: 'https://itunes.apple.com/us/app/coinbase-wallet/id1278383455?mt=8',
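// Aside (not part of the commit diff): a minimal TypeScript sketch of how the new Configurator's
// state maps to the snippet it displays. The ZeroExInstantBaseConfig import, the default orderSource,
// and the zeroExInstant.render(...) call come from the configurator.tsx diff above; the fee recipient
// address and 1% fee below are hypothetical example values.
import { ZeroExInstantBaseConfig } from '../../../../instant/src/types';

const exampleConfig: ZeroExInstantBaseConfig = {
    orderSource: 'https://api.radarrelay.com/0x/v2/',
    availableAssetDatas: undefined, // omitted from the generated snippet when undefined
    affiliateInfo: {
        feeRecipient: '0x0000000000000000000000000000000000000000', // hypothetical address
        feePercentage: 0.01, // within the 0-0.05 range exposed by FeePercentageSlider
    },
};

// For a config like this, _generateCodeDemoCode embeds roughly the following call in the HTML page:
//
//     zeroExInstant.render({
//         orderSource: 'https://api.radarrelay.com/0x/v2/',
//         affiliateInfo: {
//             feeRecipient: '0x0000000000000000000000000000000000000000',
//             feePercentage: 0.01
//         },
//     }, 'body');
//
// When availableAssetDatas is set, _renderAvailableAssetDatasString renders it as a quoted,
// comma-separated array literal under the availableAssetDatas key.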