author    Alex Browne <stephenalexbrowne@gmail.com>  2018-11-29 05:21:04 +0800
committer Alex Browne <stephenalexbrowne@gmail.com>  2018-12-05 06:25:38 +0800
commit    3d211c415b58a67f84332ff512bf9372cac5a3ac (patch)
tree      087909fde4c6f2c8f99d8487a1f0ce45bf91591f /packages/pipeline
parent    4061731245a8513e8d990f3af87e182fb674838b (diff)
Introduce framework for running basic tests for entities (#1344)
* Introduce framework for running basic tests for entities
* Add pipeline tests to CircleCI config
* Make pipeline tests more configurable and fix CircleCI config
* Add coverage dir to pipeline package
* Add basic tests for all exchange event entities
* Add tests for remaining entities
* Create separate test scripts in package.json and add new info to README
* Update db_setup.ts to revert migrations even if you are using docker
* Automatically pull the postgres image if needed
* Add comment about why NumberToBigIntTransformer is needed
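At its core, the new framework is a single helper, `testSaveAndFindEntityAsync` (added in `test/entities/util.ts` below), plus one small test file per entity that builds an instance and round-trips it through the database. A condensed sketch of the pattern, taken from the `Block` test added in this change (chai setup and lint directives omitted):

```typescript
import 'mocha';
import 'reflect-metadata';

import { Block } from '../../src/entities';
import { createDbConnectionOnceAsync } from '../db_setup';
import { testSaveAndFindEntityAsync } from './util';

describe('Block entity', () => {
    it('save/find', async () => {
        // db_setup lazily starts Postgres (via Docker or an existing database)
        // and returns a shared TypeORM connection.
        const connection = await createDbConnectionOnceAsync();
        const block = new Block();
        block.hash = '0x12345';
        block.number = 1234567;
        block.timestamp = 5432154321;
        // Saves the entity, reads it back, and asserts deep equality.
        await testSaveAndFindEntityAsync(connection.getRepository(Block), block);
    });
});
```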
Diffstat (limited to 'packages/pipeline')
-rw-r--r--  packages/pipeline/README.md | 59
-rw-r--r--  packages/pipeline/coverage/.gitkeep | 0
-rw-r--r--  packages/pipeline/package.json | 11
-rw-r--r--  packages/pipeline/src/entities/block.ts | 7
-rw-r--r--  packages/pipeline/src/entities/exchange_cancel_event.ts | 3
-rw-r--r--  packages/pipeline/src/entities/exchange_cancel_up_to_event.ts | 4
-rw-r--r--  packages/pipeline/src/entities/exchange_fill_event.ts | 4
-rw-r--r--  packages/pipeline/src/entities/sra_order_observed_timestamp.ts | 4
-rw-r--r--  packages/pipeline/src/entities/token_metadata.ts | 8
-rw-r--r--  packages/pipeline/src/entities/transaction.ts | 8
-rw-r--r--  packages/pipeline/src/utils/index.ts | 16
-rw-r--r--  packages/pipeline/src/utils/transformers/big_number.ts | 16
-rw-r--r--  packages/pipeline/src/utils/transformers/index.ts | 2
-rw-r--r--  packages/pipeline/src/utils/transformers/number_to_bigint.ts | 27
-rw-r--r--  packages/pipeline/test/db_global_hooks.ts | 9
-rw-r--r--  packages/pipeline/test/db_setup.ts | 174
-rw-r--r--  packages/pipeline/test/entities/block_test.ts | 23
-rw-r--r--  packages/pipeline/test/entities/exchange_cancel_event_test.ts | 57
-rw-r--r--  packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts | 29
-rw-r--r--  packages/pipeline/test/entities/exchange_fill_event_test.ts | 62
-rw-r--r--  packages/pipeline/test/entities/relayer_test.ts | 55
-rw-r--r--  packages/pipeline/test/entities/sra_order_test.ts | 84
-rw-r--r--  packages/pipeline/test/entities/token_metadata_test.ts | 38
-rw-r--r--  packages/pipeline/test/entities/transaction_test.ts | 25
-rw-r--r--  packages/pipeline/test/entities/util.ts | 25
25 files changed, 698 insertions, 52 deletions
diff --git a/packages/pipeline/README.md b/packages/pipeline/README.md
index c647950a2..fb563b14c 100644
--- a/packages/pipeline/README.md
+++ b/packages/pipeline/README.md
@@ -38,17 +38,34 @@ Create a new migration: `yarn migrate:create --name MigrationNameInCamelCase`
Run migrations: `yarn migrate:run`
Revert the most recent migration (CAUTION: may result in data loss!): `yarn migrate:revert`
-## Connecting to PostgreSQL
+## Testing
-Across the pipeline package, any code which accesses the database uses the
-environment variable `ZEROEX_DATA_PIPELINE_DB_URL` which should be a properly
-formatted
-[PostgreSQL connection url](https://stackoverflow.com/questions/3582552/postgresql-connection-url).
+There are several test scripts in **package.json**. You can run all the tests
+with `yarn test:all` or run certain tests separately by following the
+instructions below. Some tests may not work out of the box on certain platforms.
-## Test environment
+### Unit tests
-The easiest way to start Postgres is via Docker. Depending on your
-platform, you may need to prepend `sudo` to the following command:
+The unit tests can be run with `yarn test`. These tests don't depend on any
+services or databases and will run in any environment that can run Node.
+
+### Database tests
+
+Database integration tests can be run with `yarn test:db`. These tests will
+attempt to automatically spin up a Postgres database via Docker. If this doesn't
+work you have two other options:
+
+1. Set the `DOCKER_SOCKET` environment variable to a valid socket path to use
+ for communicating with Docker.
+2. Start Postgres manually and set the `ZEROEX_DATA_PIPELINE_TEST_DB_URL`
+ environment variable. If this is set, the tests will use your existing
+ Postgres database instead of trying to create one with Docker.
+
+## Running locally
+
+`pipeline` requires access to a PostgreSQL database. The easiest way to start
+Postgres is via Docker. Depending on your platform, you may need to prepend
+`sudo` to the following command:
```
docker run --rm -d -p 5432:5432 --name pipeline_postgres postgres:11-alpine
@@ -83,9 +100,9 @@ This will remove all data from the database.
If you prefer, you can also install Postgres with e.g.,
[Homebrew](https://wiki.postgresql.org/wiki/Homebrew) or
-[Postgress.app](https://postgresapp.com/). As long as you set the
-`ZEROEX_DATA_PIPELINE_DB_URL` environment variable appropriately, any Postgres
-server will work.
+[Postgres.app](https://postgresapp.com/). Keep in mind that you will need to
+set the `ZEROEX_DATA_PIPELINE_DB_URL` environment variable to a valid
+[PostgreSQL connection url](https://stackoverflow.com/questions/3582552/postgresql-connection-url).
## Directory structure
@@ -111,21 +128,23 @@ server will work.
2. Create a migration using the `yarn migrate:create` command. Create/update
tables as needed. Remember to fill in both the `up` and `down` methods. Try
to avoid data loss as much as possible in your migrations.
-3. Create a class or function in the _data_sources_ directory for getting raw
- data. This code should abstract away pagination and rate-limiting as much as
- possible.
-4. Create a class or function in the _parsers_ directory for converting the raw
- data into an entity. Also add tests in the _tests_ directory to test the
- parser.
-5. Create an executable script in the _scripts_ directory for putting
+3. Add basic tests for your entity and migrations to the **test/entities/**
+ directory.
+4. Create a class or function in the **data_sources/** directory for getting
+ raw data. This code should abstract away pagination and rate-limiting as
+ much as possible.
+5. Create a class or function in the **parsers/** directory for converting the
+ raw data into an entity. Also add tests in the **tests/** directory to test
+ the parser.
+6. Create an executable script in the **scripts/** directory for putting
everything together. Your script can accept environment variables for things
like API keys. It should pull the data, parse it, and save it to the
database. Scripts should be idempotent and atomic (when possible). What this
- means is that your script may be responsible for determining **which** data
+ means is that your script may be responsible for determining _which_ data
needs to be updated. For example, you may need to query the database to find
the most recent block number that we have already pulled, then pull new data
starting from that block number.
-6. Run the migrations and then run your new script locally and verify it works
+7. Run the migrations and then run your new script locally and verify it works
as expected.
#### Additional guidelines and tips:
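To make the idempotency guidance in step 6 concrete, here is a minimal sketch (not part of this diff) of how a script might determine which data still needs to be pulled. The `Block` entity and the TypeORM query-builder calls are real; the function itself and its assumed placement in `src/scripts/` are hypothetical:

```typescript
import { Connection } from 'typeorm';

import { Block } from '../entities';

// Ask the database for the most recent block we have already saved so the
// script can resume pulling data from that point. Re-running the script then
// re-fetches at most a small overlap, and saving rows with identical primary
// keys simply updates them, keeping the script idempotent.
async function getLatestSavedBlockNumberAsync(connection: Connection): Promise<number> {
    const rawResult = await connection
        .getRepository(Block)
        .createQueryBuilder('block')
        .select('MAX(block.number)', 'max')
        .getRawOne();
    // MAX() returns null when the table is empty; fall back to block 0 here
    // (a hypothetical starting point).
    return rawResult.max == null ? 0 : Number(rawResult.max);
}
```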
diff --git a/packages/pipeline/coverage/.gitkeep b/packages/pipeline/coverage/.gitkeep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/packages/pipeline/coverage/.gitkeep
diff --git a/packages/pipeline/package.json b/packages/pipeline/package.json
index a57fbf5bc..9831517fb 100644
--- a/packages/pipeline/package.json
+++ b/packages/pipeline/package.json
@@ -7,10 +7,13 @@
"build": "yarn tsc -b",
"build:ci": "yarn build",
"test": "yarn run_mocha",
- "rebuild_and_test": "run-s build test",
+ "rebuild_and_test": "run-s build test:all",
+ "test:db": "yarn run_mocha:db",
+ "test:all": "run-s test test:db",
"test:circleci": "yarn test:coverage",
- "run_mocha": "mocha --require source-map-support/register --require make-promises-safe lib/test/**/*_test.js --bail --exit",
- "test:coverage": "nyc npm run test --all && yarn coverage:report:lcov",
+ "run_mocha": "mocha --require source-map-support/register --require make-promises-safe 'lib/test/!(entities)/**/*_test.js' --bail --exit",
+ "run_mocha:db": "mocha --require source-map-support/register --require make-promises-safe lib/test/db_global_hooks.js 'lib/test/entities/*_test.js' --bail --exit --timeout 60000",
+ "test:coverage": "nyc npm run test:all --all && yarn coverage:report:lcov",
"coverage:report:lcov": "nyc report --reporter=text-lcov > coverage/lcov.info",
"clean": "shx rm -rf lib",
"lint": "tslint --project . --format stylish --exclude ./migrations/**/*",
@@ -50,8 +53,10 @@
"@0x/types": "^1.2.0",
"@0x/utils": "^2.0.3",
"@0x/web3-wrapper": "^3.1.0",
+ "@types/dockerode": "^2.5.9",
"async-parallel": "^1.2.3",
"axios": "^0.18.0",
+ "dockerode": "^2.5.7",
"ethereum-types": "^1.0.6",
"pg": "^7.5.0",
"ramda": "^0.25.0",
diff --git a/packages/pipeline/src/entities/block.ts b/packages/pipeline/src/entities/block.ts
index f2efc6390..398946622 100644
--- a/packages/pipeline/src/entities/block.ts
+++ b/packages/pipeline/src/entities/block.ts
@@ -1,10 +1,13 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';
+import { numberToBigIntTransformer } from '../utils';
+
@Entity({ name: 'blocks', schema: 'raw' })
export class Block {
@PrimaryColumn() public hash!: string;
- @PrimaryColumn() public number!: number;
+ @PrimaryColumn({ transformer: numberToBigIntTransformer })
+ public number!: number;
- @Column({ name: 'timestamp' })
+ @Column({ name: 'timestamp', transformer: numberToBigIntTransformer })
public timestamp!: number;
}
diff --git a/packages/pipeline/src/entities/exchange_cancel_event.ts b/packages/pipeline/src/entities/exchange_cancel_event.ts
index 5a40ba799..2fcc17df6 100644
--- a/packages/pipeline/src/entities/exchange_cancel_event.ts
+++ b/packages/pipeline/src/entities/exchange_cancel_event.ts
@@ -1,6 +1,7 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AssetType } from '../types';
+import { numberToBigIntTransformer } from '../utils';
@Entity({ name: 'exchange_cancel_events', schema: 'raw' })
export class ExchangeCancelEvent {
@@ -8,7 +9,7 @@ export class ExchangeCancelEvent {
public contractAddress!: string;
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
- @PrimaryColumn({ name: 'block_number' })
+ @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
@Column({ name: 'raw_data' })
diff --git a/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts b/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts
index 9b1c6174a..60ead324f 100644
--- a/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts
+++ b/packages/pipeline/src/entities/exchange_cancel_up_to_event.ts
@@ -1,7 +1,7 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';
-import { bigNumberTransformer } from '../utils';
+import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
@Entity({ name: 'exchange_cancel_up_to_events', schema: 'raw' })
export class ExchangeCancelUpToEvent {
@@ -9,7 +9,7 @@ export class ExchangeCancelUpToEvent {
public contractAddress!: string;
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
- @PrimaryColumn({ name: 'block_number' })
+ @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
// TODO(albrow): Include transaction hash
diff --git a/packages/pipeline/src/entities/exchange_fill_event.ts b/packages/pipeline/src/entities/exchange_fill_event.ts
index aa082436b..bbf0abf58 100644
--- a/packages/pipeline/src/entities/exchange_fill_event.ts
+++ b/packages/pipeline/src/entities/exchange_fill_event.ts
@@ -2,7 +2,7 @@ import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AssetType } from '../types';
-import { bigNumberTransformer } from '../utils';
+import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
@Entity({ name: 'exchange_fill_events', schema: 'raw' })
export class ExchangeFillEvent {
@@ -10,7 +10,7 @@ export class ExchangeFillEvent {
public contractAddress!: string;
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
- @PrimaryColumn({ name: 'block_number' })
+ @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
@Column({ name: 'raw_data' })
diff --git a/packages/pipeline/src/entities/sra_order_observed_timestamp.ts b/packages/pipeline/src/entities/sra_order_observed_timestamp.ts
index bdb6cd36b..cd2d41397 100644
--- a/packages/pipeline/src/entities/sra_order_observed_timestamp.ts
+++ b/packages/pipeline/src/entities/sra_order_observed_timestamp.ts
@@ -1,5 +1,7 @@
import { Entity, PrimaryColumn } from 'typeorm';
+import { numberToBigIntTransformer } from '../utils';
+
import { SraOrder } from './sra_order';
@Entity({ name: 'sra_orders_observed_timestamps', schema: 'raw' })
@@ -11,7 +13,7 @@ export class SraOrdersObservedTimeStamp {
@PrimaryColumn({ name: 'source_url' })
public sourceUrl!: string;
- @PrimaryColumn({ name: 'observed_timestamp' })
+ @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer })
public observedTimestamp!: number;
}
diff --git a/packages/pipeline/src/entities/token_metadata.ts b/packages/pipeline/src/entities/token_metadata.ts
index 4a0bceefe..ca1e57937 100644
--- a/packages/pipeline/src/entities/token_metadata.ts
+++ b/packages/pipeline/src/entities/token_metadata.ts
@@ -8,12 +8,14 @@ export class TokenMetadata {
@PrimaryColumn({ type: 'varchar', nullable: false })
public authority!: string;
+ // TODO(albrow): Convert decimals field to type BigNumber/numeric because it
+ // comes from a 256-bit integer in a smart contract.
@Column({ type: 'integer', nullable: true })
- public decimals!: number;
+ public decimals!: number | null;
@Column({ type: 'varchar', nullable: true })
- public symbol!: string;
+ public symbol!: string | null;
@Column({ type: 'varchar', nullable: true })
- public name!: string;
+ public name!: string | null;
}
diff --git a/packages/pipeline/src/entities/transaction.ts b/packages/pipeline/src/entities/transaction.ts
index dd2143df5..91e4ecb5d 100644
--- a/packages/pipeline/src/entities/transaction.ts
+++ b/packages/pipeline/src/entities/transaction.ts
@@ -1,16 +1,18 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';
+import { numberToBigIntTransformer } from '../utils';
+
@Entity({ name: 'transactions', schema: 'raw' })
export class Transaction {
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
@PrimaryColumn({ name: 'block_hash' })
public blockHash!: string;
- @PrimaryColumn({ name: 'block_number' })
+ @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
- @Column({ type: 'bigint', name: 'gas_used' })
+ @Column({ type: 'bigint', name: 'gas_used', transformer: numberToBigIntTransformer })
public gasUsed!: number;
- @Column({ type: 'bigint', name: 'gas_price' })
+ @Column({ type: 'bigint', name: 'gas_price', transformer: numberToBigIntTransformer })
public gasPrice!: number;
}
diff --git a/packages/pipeline/src/utils/index.ts b/packages/pipeline/src/utils/index.ts
index b6d84a565..918cfc695 100644
--- a/packages/pipeline/src/utils/index.ts
+++ b/packages/pipeline/src/utils/index.ts
@@ -1,5 +1,5 @@
import { BigNumber } from '@0x/utils';
-import { ValueTransformer } from 'typeorm/decorator/options/ValueTransformer';
+export * from './transformers';
/**
* If the given BigNumber is not null, returns the string representation of that
@@ -36,20 +36,6 @@ export function handleError(e: any): void {
process.exit(1);
}
-class BigNumberTransformer implements ValueTransformer {
- // tslint:disable-next-line:prefer-function-over-method
- public to(value: BigNumber): string {
- return value.toString();
- }
-
- // tslint:disable-next-line:prefer-function-over-method
- public from(value: string): BigNumber {
- return new BigNumber(value);
- }
-}
-
-export const bigNumberTransformer = new BigNumberTransformer();
-
/**
* Returns the unix timestamp of the current hour
*/
diff --git a/packages/pipeline/src/utils/transformers/big_number.ts b/packages/pipeline/src/utils/transformers/big_number.ts
new file mode 100644
index 000000000..a0471a5e3
--- /dev/null
+++ b/packages/pipeline/src/utils/transformers/big_number.ts
@@ -0,0 +1,16 @@
+import { BigNumber } from '@0x/utils';
+import { ValueTransformer } from 'typeorm/decorator/options/ValueTransformer';
+
+export class BigNumberTransformer implements ValueTransformer {
+ // tslint:disable-next-line:prefer-function-over-method
+ public to(value: BigNumber): string {
+ return value.toString();
+ }
+
+ // tslint:disable-next-line:prefer-function-over-method
+ public from(value: string): BigNumber {
+ return new BigNumber(value);
+ }
+}
+
+export const bigNumberTransformer = new BigNumberTransformer();
diff --git a/packages/pipeline/src/utils/transformers/index.ts b/packages/pipeline/src/utils/transformers/index.ts
new file mode 100644
index 000000000..232c1c5de
--- /dev/null
+++ b/packages/pipeline/src/utils/transformers/index.ts
@@ -0,0 +1,2 @@
+export * from './big_number';
+export * from './number_to_bigint';
diff --git a/packages/pipeline/src/utils/transformers/number_to_bigint.ts b/packages/pipeline/src/utils/transformers/number_to_bigint.ts
new file mode 100644
index 000000000..85560c1f0
--- /dev/null
+++ b/packages/pipeline/src/utils/transformers/number_to_bigint.ts
@@ -0,0 +1,27 @@
+import { BigNumber } from '@0x/utils';
+import { ValueTransformer } from 'typeorm/decorator/options/ValueTransformer';
+
+const decimalRadix = 10;
+
+// Can be used to convert a JavaScript number type to a Postgres bigint type and
+// vice versa. By default TypeORM will silently convert number types to string
+// if the corresponding Postgres type is bigint. See
+// https://github.com/typeorm/typeorm/issues/2400 for more information.
+export class NumberToBigIntTransformer implements ValueTransformer {
+ // tslint:disable-next-line:prefer-function-over-method
+ public to(value: number): string {
+ return value.toString();
+ }
+
+ // tslint:disable-next-line:prefer-function-over-method
+ public from(value: string): number {
+ if (new BigNumber(value).greaterThan(Number.MAX_SAFE_INTEGER)) {
+ throw new Error(
+ `Attempted to convert PostgreSQL bigint value (${value}) to JavaScript number type but it is too big to safely convert`,
+ );
+ }
+ return Number.parseInt(value, decimalRadix);
+ }
+}
+
+export const numberToBigIntTransformer = new NumberToBigIntTransformer();
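For reference, a minimal sketch of the round trip this transformer provides for `bigint` columns (the import path assumes a file under `src/entities`, like the entity files above; the numeric value is illustrative):

```typescript
import { numberToBigIntTransformer } from '../utils';

// Writing: TypeORM calls to(), which serializes the JavaScript number into a
// decimal string for the Postgres bigint column.
const stored = numberToBigIntTransformer.to(6276262); // '6276262'

// Reading: TypeORM calls from(), which parses the string back into a number,
// throwing if the value exceeds Number.MAX_SAFE_INTEGER and cannot be
// represented safely as a JavaScript number.
const restored = numberToBigIntTransformer.from(stored); // 6276262
```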
diff --git a/packages/pipeline/test/db_global_hooks.ts b/packages/pipeline/test/db_global_hooks.ts
new file mode 100644
index 000000000..dfee02c45
--- /dev/null
+++ b/packages/pipeline/test/db_global_hooks.ts
@@ -0,0 +1,9 @@
+import { setUpDbAsync, tearDownDbAsync } from './db_setup';
+
+before('set up database', async () => {
+ await setUpDbAsync();
+});
+
+after('tear down database', async () => {
+ await tearDownDbAsync();
+});
diff --git a/packages/pipeline/test/db_setup.ts b/packages/pipeline/test/db_setup.ts
new file mode 100644
index 000000000..bf31d15b6
--- /dev/null
+++ b/packages/pipeline/test/db_setup.ts
@@ -0,0 +1,174 @@
+import * as Docker from 'dockerode';
+import * as fs from 'fs';
+import * as R from 'ramda';
+import { Connection, ConnectionOptions, createConnection } from 'typeorm';
+
+import * as ormConfig from '../src/ormconfig';
+
+// The name of the image to pull and use for the container. This also affects
+// which version of Postgres we use.
+const DOCKER_IMAGE_NAME = 'postgres:11-alpine';
+// The name to use for the Docker container which will run Postgres.
+const DOCKER_CONTAINER_NAME = '0x_pipeline_postgres_test';
+// The port which will be exposed on the Docker container.
+const POSTGRES_HOST_PORT = '15432';
+// Number of milliseconds to wait for postgres to finish initializing after
+// starting the docker container.
+const POSTGRES_SETUP_DELAY_MS = 5000;
+
+/**
+ * Sets up the database for testing purposes. If the
+ * ZEROEX_DATA_PIPELINE_TEST_DB_URL env var is specified, it will create a
+ * connection using that url. Otherwise it will spin up a new Docker container
+ * with a Postgres database and then create a connection to that database.
+ */
+export async function setUpDbAsync(): Promise<void> {
+ const connection = await createDbConnectionOnceAsync();
+ await connection.runMigrations({ transaction: true });
+}
+
+/**
+ * Tears down the database used for testing. This completely destroys any data.
+ * If a docker container was created, it destroys that container too.
+ */
+export async function tearDownDbAsync(): Promise<void> {
+ const connection = await createDbConnectionOnceAsync();
+ for (const _ of connection.migrations) {
+ await connection.undoLastMigration({ transaction: true });
+ }
+ if (needsDocker()) {
+ const docker = initDockerOnce();
+ const postgresContainer = docker.getContainer(DOCKER_CONTAINER_NAME);
+ await postgresContainer.kill();
+ await postgresContainer.remove();
+ }
+}
+
+let savedConnection: Connection;
+
+/**
+ * The first time this is run, it creates and returns a new TypeORM connection.
+ * Each subsequent time, it returns the existing connection. This is helpful
+ * because only one TypeORM connection can be active at a time.
+ */
+export async function createDbConnectionOnceAsync(): Promise<Connection> {
+ if (savedConnection !== undefined) {
+ return savedConnection;
+ }
+
+ if (needsDocker()) {
+ await initContainerAsync();
+ }
+ const testDbUrl =
+ process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL ||
+ `postgresql://postgres@localhost:${POSTGRES_HOST_PORT}/postgres`;
+ const testOrmConfig = R.merge(ormConfig, { url: testDbUrl }) as ConnectionOptions;
+
+ savedConnection = await createConnection(testOrmConfig);
+ return savedConnection;
+}
+
+async function sleepAsync(ms: number): Promise<{}> {
+ return new Promise<{}>(resolve => setTimeout(resolve, ms));
+}
+
+let savedDocker: Docker;
+
+function initDockerOnce(): Docker {
+ if (savedDocker !== undefined) {
+ return savedDocker;
+ }
+
+ // Note(albrow): Code for determining the right socket path is partially
+ // based on https://github.com/apocas/dockerode/blob/8f3aa85311fab64d58eca08fef49aa1da5b5f60b/test/spec_helper.js
+ const isWin = require('os').type() === 'Windows_NT';
+ const socketPath = process.env.DOCKER_SOCKET || (isWin ? '//./pipe/docker_engine' : '/var/run/docker.sock');
+ const isSocket = fs.existsSync(socketPath) ? fs.statSync(socketPath).isSocket() : false;
+ if (!isSocket) {
+ throw new Error(`Failed to connect to Docker using socket path: "${socketPath}".
+
+The database integration tests need to be able to connect to a Postgres database. Make sure that Docker is running and accessible at the expected socket path. If Docker isn't working you have two options:
+
+ 1) Set the DOCKER_SOCKET environment variable to a socket path that can be used to connect to Docker or
+ 2) Set the ZEROEX_DATA_PIPELINE_TEST_DB_URL environment variable to connect directly to an existing Postgres database instead of trying to start Postgres via Docker
+`);
+ }
+ savedDocker = new Docker({
+ socketPath,
+ });
+ return savedDocker;
+}
+
+// Creates the container, waits for it to initialize, and returns it.
+async function initContainerAsync(): Promise<Docker.Container> {
+ const docker = initDockerOnce();
+
+ // Tear down any existing containers with the same name.
+ await tearDownExistingContainerIfAnyAsync();
+
+ // Pull the image we need.
+ await pullImageAsync(docker, DOCKER_IMAGE_NAME);
+
+ // Create the container.
+ const postgresContainer = await docker.createContainer({
+ name: DOCKER_CONTAINER_NAME,
+ Image: DOCKER_IMAGE_NAME,
+ ExposedPorts: {
+ '5432': {},
+ },
+ HostConfig: {
+ PortBindings: {
+ '5432': [
+ {
+ HostPort: POSTGRES_HOST_PORT,
+ },
+ ],
+ },
+ },
+ });
+ await postgresContainer.start();
+ await sleepAsync(POSTGRES_SETUP_DELAY_MS);
+ return postgresContainer;
+}
+
+async function tearDownExistingContainerIfAnyAsync(): Promise<void> {
+ const docker = initDockerOnce();
+
+ // Check if a container with the desired name already exists. If so, this
+ // probably means we didn't clean up properly on the last test run.
+ const existingContainer = docker.getContainer(DOCKER_CONTAINER_NAME);
+ if (existingContainer != null) {
+ try {
+ await existingContainer.kill();
+ } catch {
+ // If this fails, it's fine. The container was probably already
+ // killed.
+ }
+ try {
+ await existingContainer.remove();
+ } catch {
+ // If this fails, it's fine. The container was probably already
+ // removed.
+ }
+ }
+}
+
+function needsDocker(): boolean {
+ return process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL === undefined;
+}
+
+// Note(albrow): This is partially based on
+// https://stackoverflow.com/questions/38258263/how-do-i-wait-for-a-pull
+async function pullImageAsync(docker: Docker, imageName: string): Promise<void> {
+ return new Promise<void>((resolve, reject) => {
+ docker.pull(imageName, {}, (err, stream) => {
+ if (err != null) {
+ reject(err);
+ return;
+ }
+ docker.modem.followProgress(stream, () => {
+ resolve();
+ });
+ });
+ });
+}
diff --git a/packages/pipeline/test/entities/block_test.ts b/packages/pipeline/test/entities/block_test.ts
new file mode 100644
index 000000000..503f284f0
--- /dev/null
+++ b/packages/pipeline/test/entities/block_test.ts
@@ -0,0 +1,23 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { Block } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('Block entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const block = new Block();
+ block.hash = '0x12345';
+ block.number = 1234567;
+ block.timestamp = 5432154321;
+ const blocksRepository = connection.getRepository(Block);
+ await testSaveAndFindEntityAsync(blocksRepository, block);
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_cancel_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_event_test.ts
new file mode 100644
index 000000000..f3b306d69
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_cancel_event_test.ts
@@ -0,0 +1,57 @@
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { ExchangeCancelEvent } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseCancelEvent = {
+ contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ logIndex: 1234,
+ blockNumber: 6276262,
+ rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428',
+ transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe',
+ makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd',
+ senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+};
+
+const erc20CancelEvent = R.merge(baseCancelEvent, {
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721CancelEvent = R.merge(baseCancelEvent, {
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeCancelEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const events = [erc20CancelEvent, erc721CancelEvent];
+ const cancelEventRepository = connection.getRepository(ExchangeCancelEvent);
+ for (const event of events) {
+ await testSaveAndFindEntityAsync(cancelEventRepository, event);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts
new file mode 100644
index 000000000..aa34f8c1c
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts
@@ -0,0 +1,29 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import 'reflect-metadata';
+
+import { ExchangeCancelUpToEvent } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeCancelUpToEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const cancelUpToEventRepository = connection.getRepository(ExchangeCancelUpToEvent);
+ const cancelUpToEvent = new ExchangeCancelUpToEvent();
+ cancelUpToEvent.blockNumber = 6276262;
+ cancelUpToEvent.contractAddress = '0x4f833a24e1f95d70f028921e27040ca56e09ab0b';
+ cancelUpToEvent.logIndex = 42;
+ cancelUpToEvent.makerAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.orderEpoch = new BigNumber('123456789123456789');
+ cancelUpToEvent.rawData = '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.senderAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe';
+ await testSaveAndFindEntityAsync(cancelUpToEventRepository, cancelUpToEvent);
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_fill_event_test.ts b/packages/pipeline/test/entities/exchange_fill_event_test.ts
new file mode 100644
index 000000000..b2cb8c5e0
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_fill_event_test.ts
@@ -0,0 +1,62 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { ExchangeFillEvent } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseFillEvent = {
+ contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ blockNumber: 6276262,
+ logIndex: 102,
+ rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428',
+ transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe',
+ makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd',
+ senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ makerAssetFilledAmount: new BigNumber('10000000000000000'),
+ takerAssetFilledAmount: new BigNumber('100000000000000000'),
+ makerFeePaid: new BigNumber('0'),
+ takerFeePaid: new BigNumber('12345'),
+ orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+};
+
+const erc20FillEvent = R.merge(baseFillEvent, {
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721FillEvent = R.merge(baseFillEvent, {
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeFillEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const events = [erc20FillEvent, erc721FillEvent];
+ const fillEventsRepository = connection.getRepository(ExchangeFillEvent);
+ for (const event of events) {
+ await testSaveAndFindEntityAsync(fillEventsRepository, event);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/relayer_test.ts b/packages/pipeline/test/entities/relayer_test.ts
new file mode 100644
index 000000000..760ffb6f9
--- /dev/null
+++ b/packages/pipeline/test/entities/relayer_test.ts
@@ -0,0 +1,55 @@
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { Relayer } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseRelayer = {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc716',
+ name: 'Radar Relay',
+ homepageUrl: 'https://radarrelay.com',
+ appUrl: null,
+ sraHttpEndpoint: null,
+ sraWsEndpoint: null,
+ feeRecipientAddresses: [],
+ takerAddresses: [],
+};
+
+const relayerWithUrls = R.merge(baseRelayer, {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc717',
+ appUrl: 'https://app.radarrelay.com',
+ sraHttpEndpoint: 'https://api.radarrelay.com/0x/v2/',
+ sraWsEndpoint: 'wss://ws.radarrelay.com/0x/v2',
+});
+
+const relayerWithAddresses = R.merge(baseRelayer, {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc718',
+ feeRecipientAddresses: [
+ '0xa258b39954cef5cb142fd567a46cddb31a670124',
+ '0xa258b39954cef5cb142fd567a46cddb31a670125',
+ '0xa258b39954cef5cb142fd567a46cddb31a670126',
+ ],
+ takerAddresses: [
+ '0xa258b39954cef5cb142fd567a46cddb31a670127',
+ '0xa258b39954cef5cb142fd567a46cddb31a670128',
+ '0xa258b39954cef5cb142fd567a46cddb31a670129',
+ ],
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('Relayer entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const relayers = [baseRelayer, relayerWithUrls, relayerWithAddresses];
+ const relayerRepository = connection.getRepository(Relayer);
+ for (const relayer of relayers) {
+ await testSaveAndFindEntityAsync(relayerRepository, relayer);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/sra_order_test.ts b/packages/pipeline/test/entities/sra_order_test.ts
new file mode 100644
index 000000000..c43de8ce8
--- /dev/null
+++ b/packages/pipeline/test/entities/sra_order_test.ts
@@ -0,0 +1,84 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+import { Repository } from 'typeorm';
+
+import { SraOrder, SraOrdersObservedTimeStamp } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseOrder = {
+ sourceUrl: 'https://api.radarrelay.com/0x/v2',
+ exchangeAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ makerAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81',
+ takerAddress: '0x0000000000000000000000000000000000000000',
+ feeRecipientAddress: '0xa258b39954cef5cb142fd567a46cddb31a670124',
+ senderAddress: '0x0000000000000000000000000000000000000000',
+ makerAssetAmount: new BigNumber('1619310371000000000'),
+ takerAssetAmount: new BigNumber('8178335207070707070707'),
+ makerFee: new BigNumber('100'),
+ takerFee: new BigNumber('200'),
+ expirationTimeSeconds: new BigNumber('1538529488'),
+ salt: new BigNumber('1537924688891'),
+ signature: '0x1b5a5d672b0d647b5797387ccbb89d8',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b000000000000000000000000042d6622dece394b54999fbd73d108123806f6a18',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0x42d6622dece394b54999fbd73d108123806f6a18',
+ metadataJson: '{"isThisArbitraryData":true,"powerLevel":9001}',
+};
+
+const erc20Order = R.merge(baseOrder, {
+ orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1c9',
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721Order = R.merge(baseOrder, {
+ orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1d0',
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('SraOrder and SraOrdersObservedTimeStamp entities', () => {
+ // Note(albrow): SraOrder and SraOrdersObservedTimeStamp are tightly coupled
+ // and timestamps have a foreign key constraint such that they have to point
+ // to an existing SraOrder. For these reasons, we are testing them together
+ // in the same test.
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const orderRepository = connection.getRepository(SraOrder);
+ const timestampRepository = connection.getRepository(SraOrdersObservedTimeStamp);
+ const orders = [erc20Order, erc721Order];
+ for (const order of orders) {
+ await testOrderWithTimestampAsync(orderRepository, timestampRepository, order);
+ }
+ });
+});
+
+async function testOrderWithTimestampAsync(
+ orderRepository: Repository<SraOrder>,
+ timestampRepository: Repository<SraOrdersObservedTimeStamp>,
+ order: SraOrder,
+): Promise<void> {
+ await testSaveAndFindEntityAsync(orderRepository, order);
+ const timestamp = new SraOrdersObservedTimeStamp();
+ timestamp.exchangeAddress = order.exchangeAddress;
+ timestamp.orderHashHex = order.orderHashHex;
+ timestamp.sourceUrl = order.sourceUrl;
+ timestamp.observedTimestamp = 1543377376153;
+ await testSaveAndFindEntityAsync(timestampRepository, timestamp);
+}
diff --git a/packages/pipeline/test/entities/token_metadata_test.ts b/packages/pipeline/test/entities/token_metadata_test.ts
new file mode 100644
index 000000000..805b4b0f7
--- /dev/null
+++ b/packages/pipeline/test/entities/token_metadata_test.ts
@@ -0,0 +1,38 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { TokenMetadata } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const metadataWithoutNullFields = {
+ address: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+ authority: 'https://website-api.0xproject.com/tokens',
+ decimals: 18,
+ symbol: 'ZRX',
+ name: '0x',
+};
+
+const metadataWithNullFields = {
+ address: '0xe41d2489571d322189246dafa5ebde1f4699f499',
+ authority: 'https://website-api.0xproject.com/tokens',
+ decimals: null,
+ symbol: null,
+ name: null,
+};
+
+// tslint:disable:custom-no-magic-numbers
+describe('TokenMetadata entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const tokenMetadata = [metadataWithoutNullFields, metadataWithNullFields];
+ const tokenMetadataRepository = connection.getRepository(TokenMetadata);
+ for (const tokenMetadatum of tokenMetadata) {
+ await testSaveAndFindEntityAsync(tokenMetadataRepository, tokenMetadatum);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/transaction_test.ts b/packages/pipeline/test/entities/transaction_test.ts
new file mode 100644
index 000000000..027de7d32
--- /dev/null
+++ b/packages/pipeline/test/entities/transaction_test.ts
@@ -0,0 +1,25 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { Transaction } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('Transaction entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const transactionRepository = connection.getRepository(Transaction);
+ const transaction = new Transaction();
+ transaction.blockHash = '0x6ff106d00b6c3746072fc06bae140fb2549036ba7bcf9184ae19a42fd33657fd';
+ transaction.blockNumber = 6276262;
+ transaction.gasPrice = 3000000;
+ transaction.gasUsed = 125000;
+ transaction.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe';
+ await testSaveAndFindEntityAsync(transactionRepository, transaction);
+ });
+});
diff --git a/packages/pipeline/test/entities/util.ts b/packages/pipeline/test/entities/util.ts
new file mode 100644
index 000000000..043a3b15d
--- /dev/null
+++ b/packages/pipeline/test/entities/util.ts
@@ -0,0 +1,25 @@
+import * as chai from 'chai';
+import 'mocha';
+
+import { Repository } from 'typeorm';
+
+const expect = chai.expect;
+
+/**
+ * First saves the given entity to the database, then finds it and makes sure
+ * that the found entity is exactly equal to the original one. This is a bare
+ * minimum basic test to make sure that the entity type definition and our
+ * database schema are aligned and that it is possible to save and find the
+ * entity.
+ * @param repository A TypeORM repository corresponding with the type of the entity.
+ * @param entity An instance of a TypeORM entity which will be saved/retrieved from the database.
+ */
+export async function testSaveAndFindEntityAsync<T>(repository: Repository<T>, entity: T): Promise<void> {
+ // Note(albrow): We are forced to use an 'as any' hack here because
+ // TypeScript complains about stack depth when checking the types.
+ await repository.save(entity as any);
+ const gotEntity = await repository.findOneOrFail({
+ where: entity,
+ });
+ expect(gotEntity).deep.equal(entity);
+}