path: root/packages/pipeline/test
author     Alex Browne <stephenalexbrowne@gmail.com>  2018-11-29 05:21:04 +0800
committer  Fred Carlsen <fred@sjelfull.no>            2018-12-06 19:05:38 +0800
commit     d14d38dabd0d40d85f4844cb9bbdd5916a26a2d4 (patch)
tree       e6d09ea275ec35f112869ee221d1147718a82ad5 /packages/pipeline/test
parent     3ca876c5744d9030f4d954f73038ddb05d014d42 (diff)
Introduce framework for running basic tests for entities (#1344)
* Introduce framework for running basic tests for entities
* Add pipeline tests to CircleCI config
* Make pipeline tests more configurable and fix CircleCI config
* Add coverage dir to pipeline package
* Add basic tests for all exchange event entities
* Add tests for remaining entities
* Create separate test scripts in package.json and add new info to README
* Update db_setup.ts to revert migrations even if you are using docker
* Automatically pull the postgres image if needed
* Add comment about why NumberToBigIntTransformer is needed
Diffstat (limited to 'packages/pipeline/test')
-rw-r--r--  packages/pipeline/test/db_global_hooks.ts                             9
-rw-r--r--  packages/pipeline/test/db_setup.ts                                  174
-rw-r--r--  packages/pipeline/test/entities/block_test.ts                        23
-rw-r--r--  packages/pipeline/test/entities/exchange_cancel_event_test.ts        57
-rw-r--r--  packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts  29
-rw-r--r--  packages/pipeline/test/entities/exchange_fill_event_test.ts          62
-rw-r--r--  packages/pipeline/test/entities/relayer_test.ts                      55
-rw-r--r--  packages/pipeline/test/entities/sra_order_test.ts                    84
-rw-r--r--  packages/pipeline/test/entities/token_metadata_test.ts               38
-rw-r--r--  packages/pipeline/test/entities/transaction_test.ts                  25
-rw-r--r--  packages/pipeline/test/entities/util.ts                              25
11 files changed, 581 insertions, 0 deletions
diff --git a/packages/pipeline/test/db_global_hooks.ts b/packages/pipeline/test/db_global_hooks.ts
new file mode 100644
index 000000000..dfee02c45
--- /dev/null
+++ b/packages/pipeline/test/db_global_hooks.ts
@@ -0,0 +1,9 @@
+import { setUpDbAsync, tearDownDbAsync } from './db_setup';
+
+before('set up database', async () => {
+ await setUpDbAsync();
+});
+
+after('tear down database', async () => {
+ await tearDownDbAsync();
+});
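[Editor's note] The two hooks above are mocha root hooks: because this file calls before/after at the top level, loading it ahead of the spec files makes setUpDbAsync run once before the entire suite and tearDownDbAsync once after it. A minimal sketch of that wiring with mocha's programmatic API (illustrative only; the real test scripts live in package.json, outside this diff, and the compiled-output paths here are assumptions):

import Mocha = require('mocha');

// Registering db_global_hooks first turns its top-level before/after calls
// into root hooks that bracket every spec added afterwards. Paths assume the
// TypeScript has been compiled to lib/ (or that ts-node is registered).
const mocha = new Mocha();
mocha.addFile('lib/test/db_global_hooks.js');
mocha.addFile('lib/test/entities/block_test.js');
mocha.run(failures => process.exit(failures > 0 ? 1 : 0));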
diff --git a/packages/pipeline/test/db_setup.ts b/packages/pipeline/test/db_setup.ts
new file mode 100644
index 000000000..bf31d15b6
--- /dev/null
+++ b/packages/pipeline/test/db_setup.ts
@@ -0,0 +1,174 @@
+import * as Docker from 'dockerode';
+import * as fs from 'fs';
+import * as R from 'ramda';
+import { Connection, ConnectionOptions, createConnection } from 'typeorm';
+
+import * as ormConfig from '../src/ormconfig';
+
+// The name of the image to pull and use for the container. This also affects
+// which version of Postgres we use.
+const DOCKER_IMAGE_NAME = 'postgres:11-alpine';
+// The name to use for the Docker container which will run Postgres.
+const DOCKER_CONTAINER_NAME = '0x_pipeline_postgres_test';
+// The port which will be exposed on the Docker container.
+const POSTGRES_HOST_PORT = '15432';
+// Number of milliseconds to wait for Postgres to finish initializing after
+// starting the Docker container.
+const POSTGRES_SETUP_DELAY_MS = 5000;
+
+/**
+ * Sets up the database for testing purposes. If the
+ * ZEROEX_DATA_PIPELINE_TEST_DB_URL env var is specified, it will create a
+ * connection using that url. Otherwise it will spin up a new Docker container
+ * with a Postgres database and then create a connection to that database.
+ */
+export async function setUpDbAsync(): Promise<void> {
+ const connection = await createDbConnectionOnceAsync();
+ await connection.runMigrations({ transaction: true });
+}
+
+/**
+ * Tears down the database used for testing. This completely destroys any data.
+ * If a docker container was created, it destroys that container too.
+ */
+export async function tearDownDbAsync(): Promise<void> {
+ const connection = await createDbConnectionOnceAsync();
+ for (const _ of connection.migrations) {
+ await connection.undoLastMigration({ transaction: true });
+ }
+ if (needsDocker()) {
+ const docker = initDockerOnce();
+ const postgresContainer = docker.getContainer(DOCKER_CONTAINER_NAME);
+ await postgresContainer.kill();
+ await postgresContainer.remove();
+ }
+}
+
+let savedConnection: Connection;
+
+/**
+ * The first time this is run, it creates and returns a new TypeORM connection.
+ * Each subsequent time, it returns the existing connection. This is helpful
+ * because only one TypeORM connection can be active at a time.
+ */
+export async function createDbConnectionOnceAsync(): Promise<Connection> {
+ if (savedConnection !== undefined) {
+ return savedConnection;
+ }
+
+ if (needsDocker()) {
+ await initContainerAsync();
+ }
+ const testDbUrl =
+ process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL ||
+ `postgresql://postgres@localhost:${POSTGRES_HOST_PORT}/postgres`;
+ const testOrmConfig = R.merge(ormConfig, { url: testDbUrl }) as ConnectionOptions;
+
+ savedConnection = await createConnection(testOrmConfig);
+ return savedConnection;
+}
+
+async function sleepAsync(ms: number): Promise<{}> {
+ return new Promise<{}>(resolve => setTimeout(resolve, ms));
+}
+
+let savedDocker: Docker;
+
+function initDockerOnce(): Docker {
+ if (savedDocker !== undefined) {
+ return savedDocker;
+ }
+
+ // Note(albrow): Code for determining the right socket path is partially
+ // based on https://github.com/apocas/dockerode/blob/8f3aa85311fab64d58eca08fef49aa1da5b5f60b/test/spec_helper.js
+ const isWin = require('os').type() === 'Windows_NT';
+ const socketPath = process.env.DOCKER_SOCKET || (isWin ? '//./pipe/docker_engine' : '/var/run/docker.sock');
+ const isSocket = fs.existsSync(socketPath) ? fs.statSync(socketPath).isSocket() : false;
+ if (!isSocket) {
+ throw new Error(`Failed to connect to Docker using socket path: "${socketPath}".
+
+The database integration tests need to be able to connect to a Postgres database. Make sure that Docker is running and accessible at the expected socket path. If Docker isn't working, you have two options:
+
+ 1) Set the DOCKER_SOCKET environment variable to a socket path that can be used to connect to Docker or
+ 2) Set the ZEROEX_DATA_PIPELINE_TEST_DB_URL environment variable to connect directly to an existing Postgres database instead of trying to start Postgres via Docker
+`);
+ }
+ savedDocker = new Docker({
+ socketPath,
+ });
+ return savedDocker;
+}
+
+// Creates the container, waits for it to initialize, and returns it.
+async function initContainerAsync(): Promise<Docker.Container> {
+ const docker = initDockerOnce();
+
+ // Tear down any existing containers with the same name.
+ await tearDownExistingContainerIfAnyAsync();
+
+ // Pull the image we need.
+ await pullImageAsync(docker, DOCKER_IMAGE_NAME);
+
+ // Create the container.
+ const postgresContainer = await docker.createContainer({
+ name: DOCKER_CONTAINER_NAME,
+ Image: DOCKER_IMAGE_NAME,
+ ExposedPorts: {
+ '5432': {},
+ },
+ HostConfig: {
+ PortBindings: {
+ '5432': [
+ {
+ HostPort: POSTGRES_HOST_PORT,
+ },
+ ],
+ },
+ },
+ });
+ await postgresContainer.start();
+ await sleepAsync(POSTGRES_SETUP_DELAY_MS);
+ return postgresContainer;
+}
+
+async function tearDownExistingContainerIfAnyAsync(): Promise<void> {
+ const docker = initDockerOnce();
+
+ // Check if a container with the desired name already exists. If so, this
+ // probably means we didn't clean up properly on the last test run.
+ const existingContainer = docker.getContainer(DOCKER_CONTAINER_NAME);
+ if (existingContainer != null) {
+ try {
+ await existingContainer.kill();
+ } catch {
+ // If this fails, it's fine. The container was probably already
+ // killed.
+ }
+ try {
+ await existingContainer.remove();
+ } catch {
+ // If this fails, it's fine. The container was probably already
+ // removed.
+ }
+ }
+}
+
+function needsDocker(): boolean {
+ return process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL === undefined;
+}
+
+// Note(albrow): This is partially based on
+// https://stackoverflow.com/questions/38258263/how-do-i-wait-for-a-pull
+async function pullImageAsync(docker: Docker, imageName: string): Promise<void> {
+ return new Promise<void>((resolve, reject) => {
+ docker.pull(imageName, {}, (err, stream) => {
+ if (err != null) {
+ reject(err);
+ return;
+ }
+ docker.modem.followProgress(stream, () => {
+ resolve();
+ });
+ });
+ });
+}
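[Editor's note] A usage sketch for these helpers outside the mocha hooks (a standalone script, not part of this diff; the connection URL is illustrative). It exercises both the ZEROEX_DATA_PIPELINE_TEST_DB_URL escape hatch described in the error message above and the one-connection-per-process contract documented on createDbConnectionOnceAsync:

import { createDbConnectionOnceAsync, setUpDbAsync, tearDownDbAsync } from './db_setup';

(async () => {
    // With the env var set, needsDocker() returns false and no container is started.
    process.env.ZEROEX_DATA_PIPELINE_TEST_DB_URL = 'postgresql://postgres@localhost:5432/postgres';
    await setUpDbAsync(); // runs all pending migrations
    const a = await createDbConnectionOnceAsync();
    const b = await createDbConnectionOnceAsync();
    console.log(a === b); // true: the connection is created once, then reused
    await tearDownDbAsync(); // reverts every migration; no container to remove in this mode
})().catch(err => {
    console.error(err);
    process.exit(1);
});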
diff --git a/packages/pipeline/test/entities/block_test.ts b/packages/pipeline/test/entities/block_test.ts
new file mode 100644
index 000000000..503f284f0
--- /dev/null
+++ b/packages/pipeline/test/entities/block_test.ts
@@ -0,0 +1,23 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { Block } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('Block entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const block = new Block();
+ block.hash = '0x12345';
+ block.number = 1234567;
+ block.timestamp = 5432154321;
+ const blocksRepository = connection.getRepository(Block);
+ await testSaveAndFindEntityAsync(blocksRepository, block);
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_cancel_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_event_test.ts
new file mode 100644
index 000000000..f3b306d69
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_cancel_event_test.ts
@@ -0,0 +1,57 @@
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { ExchangeCancelEvent } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseCancelEvent = {
+ contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ logIndex: 1234,
+ blockNumber: 6276262,
+ rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428',
+ transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe',
+ makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd',
+ senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+};
+
+const erc20CancelEvent = R.merge(baseCancelEvent, {
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721CancelEvent = R.merge(baseCancelEvent, {
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeCancelEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const events = [erc20CancelEvent, erc721CancelEvent];
+ const cancelEventRepository = connection.getRepository(ExchangeCancelEvent);
+ for (const event of events) {
+ await testSaveAndFindEntityAsync(cancelEventRepository, event);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts
new file mode 100644
index 000000000..aa34f8c1c
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_cancel_up_to_event_test.ts
@@ -0,0 +1,29 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import 'reflect-metadata';
+
+import { ExchangeCancelUpToEvent } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeCancelUpToEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const cancelUpToEventRepository = connection.getRepository(ExchangeCancelUpToEvent);
+ const cancelUpToEvent = new ExchangeCancelUpToEvent();
+ cancelUpToEvent.blockNumber = 6276262;
+ cancelUpToEvent.contractAddress = '0x4f833a24e1f95d70f028921e27040ca56e09ab0b';
+ cancelUpToEvent.logIndex = 42;
+ cancelUpToEvent.makerAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.orderEpoch = new BigNumber('123456789123456789');
+ cancelUpToEvent.rawData = '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.senderAddress = '0xf6da68519f78b0d0bc93c701e86affcb75c92428';
+ cancelUpToEvent.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe';
+ await testSaveAndFindEntityAsync(cancelUpToEventRepository, cancelUpToEvent);
+ });
+});
diff --git a/packages/pipeline/test/entities/exchange_fill_event_test.ts b/packages/pipeline/test/entities/exchange_fill_event_test.ts
new file mode 100644
index 000000000..b2cb8c5e0
--- /dev/null
+++ b/packages/pipeline/test/entities/exchange_fill_event_test.ts
@@ -0,0 +1,62 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { ExchangeFillEvent } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseFillEvent = {
+ contractAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ blockNumber: 6276262,
+ logIndex: 102,
+ rawData: '0x000000000000000000000000f6da68519f78b0d0bc93c701e86affcb75c92428',
+ transactionHash: '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe',
+ makerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ takerAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ feeRecipientAddress: '0xc370d2a5920344aa6b7d8d11250e3e861434cbdd',
+ senderAddress: '0xf6da68519f78b0d0bc93c701e86affcb75c92428',
+ makerAssetFilledAmount: new BigNumber('10000000000000000'),
+ takerAssetFilledAmount: new BigNumber('100000000000000000'),
+ makerFeePaid: new BigNumber('0'),
+ takerFeePaid: new BigNumber('12345'),
+ orderHash: '0xab12ed2cbaa5615ab690b9da75a46e53ddfcf3f1a68655b5fe0d94c75a1aac4a',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+};
+
+const erc20FillEvent = R.merge(baseFillEvent, {
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721FillEvent = R.merge(baseFillEvent, {
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('ExchangeFillEvent entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const events = [erc20FillEvent, erc721FillEvent];
+ const fillEventsRepository = connection.getRepository(ExchangeFillEvent);
+ for (const event of events) {
+ await testSaveAndFindEntityAsync(fillEventsRepository, event);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/relayer_test.ts b/packages/pipeline/test/entities/relayer_test.ts
new file mode 100644
index 000000000..760ffb6f9
--- /dev/null
+++ b/packages/pipeline/test/entities/relayer_test.ts
@@ -0,0 +1,55 @@
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+
+import { Relayer } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseRelayer = {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc716',
+ name: 'Radar Relay',
+ homepageUrl: 'https://radarrelay.com',
+ appUrl: null,
+ sraHttpEndpoint: null,
+ sraWsEndpoint: null,
+ feeRecipientAddresses: [],
+ takerAddresses: [],
+};
+
+const relayerWithUrls = R.merge(baseRelayer, {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc717',
+ appUrl: 'https://app.radarrelay.com',
+ sraHttpEndpoint: 'https://api.radarrelay.com/0x/v2/',
+ sraWsEndpoint: 'wss://ws.radarrelay.com/0x/v2',
+});
+
+const relayerWithAddresses = R.merge(baseRelayer, {
+ uuid: 'e8d27d8d-ddf6-48b1-9663-60b0a3ddc718',
+ feeRecipientAddresses: [
+ '0xa258b39954cef5cb142fd567a46cddb31a670124',
+ '0xa258b39954cef5cb142fd567a46cddb31a670125',
+ '0xa258b39954cef5cb142fd567a46cddb31a670126',
+ ],
+ takerAddresses: [
+ '0xa258b39954cef5cb142fd567a46cddb31a670127',
+ '0xa258b39954cef5cb142fd567a46cddb31a670128',
+ '0xa258b39954cef5cb142fd567a46cddb31a670129',
+ ],
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('Relayer entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const relayers = [baseRelayer, relayerWithUrls, relayerWithAddresses];
+ const relayerRepository = connection.getRepository(Relayer);
+ for (const relayer of relayers) {
+ await testSaveAndFindEntityAsync(relayerRepository, relayer);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/sra_order_test.ts b/packages/pipeline/test/entities/sra_order_test.ts
new file mode 100644
index 000000000..c43de8ce8
--- /dev/null
+++ b/packages/pipeline/test/entities/sra_order_test.ts
@@ -0,0 +1,84 @@
+import { BigNumber } from '@0x/utils';
+import 'mocha';
+import * as R from 'ramda';
+import 'reflect-metadata';
+import { Repository } from 'typeorm';
+
+import { SraOrder, SraOrdersObservedTimeStamp } from '../../src/entities';
+import { AssetType } from '../../src/types';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const baseOrder = {
+ sourceUrl: 'https://api.radarrelay.com/0x/v2',
+ exchangeAddress: '0x4f833a24e1f95d70f028921e27040ca56e09ab0b',
+ makerAddress: '0xb45df06e38540a675fdb5b598abf2c0dbe9d6b81',
+ takerAddress: '0x0000000000000000000000000000000000000000',
+ feeRecipientAddress: '0xa258b39954cef5cb142fd567a46cddb31a670124',
+ senderAddress: '0x0000000000000000000000000000000000000000',
+ makerAssetAmount: new BigNumber('1619310371000000000'),
+ takerAssetAmount: new BigNumber('8178335207070707070707'),
+ makerFee: new BigNumber('100'),
+ takerFee: new BigNumber('200'),
+ expirationTimeSeconds: new BigNumber('1538529488'),
+ salt: new BigNumber('1537924688891'),
+ signature: '0x1b5a5d672b0d647b5797387ccbb89d8',
+ rawMakerAssetData: '0xf47261b0000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ makerAssetProxyId: '0xf47261b0',
+ makerTokenAddress: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',
+ rawTakerAssetData: '0xf47261b000000000000000000000000042d6622dece394b54999fbd73d108123806f6a18',
+ takerAssetProxyId: '0xf47261b0',
+ takerTokenAddress: '0x42d6622dece394b54999fbd73d108123806f6a18',
+ metadataJson: '{"isThisArbitraryData":true,"powerLevel":9001}',
+};
+
+const erc20Order = R.merge(baseOrder, {
+ orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1c9',
+ makerAssetType: 'erc20' as AssetType,
+ makerTokenId: null,
+ takerAssetType: 'erc20' as AssetType,
+ takerTokenId: null,
+});
+
+const erc721Order = R.merge(baseOrder, {
+ orderHashHex: '0x1bdbeb0d088a33da28b9ee6d94e8771452f90f4a69107da2fa75195d61b9a1d0',
+ makerAssetType: 'erc721' as AssetType,
+ makerTokenId: '19378573',
+ takerAssetType: 'erc721' as AssetType,
+ takerTokenId: '63885673888',
+});
+
+// tslint:disable:custom-no-magic-numbers
+describe('SraOrder and SraOrdersObservedTimeStamp entities', () => {
+ // Note(albrow): SraOrder and SraOrdersObservedTimeStamp are tightly coupled
+ // and timestamps have a foreign key constraint such that they have to point
+ // to an existing SraOrder. For these reasons, we are testing them together
+ // in the same test.
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const orderRepository = connection.getRepository(SraOrder);
+ const timestampRepository = connection.getRepository(SraOrdersObservedTimeStamp);
+ const orders = [erc20Order, erc721Order];
+ for (const order of orders) {
+ await testOrderWithTimestampAsync(orderRepository, timestampRepository, order);
+ }
+ });
+});
+
+async function testOrderWithTimestampAsync(
+ orderRepository: Repository<SraOrder>,
+ timestampRepository: Repository<SraOrdersObservedTimeStamp>,
+ order: SraOrder,
+): Promise<void> {
+ await testSaveAndFindEntityAsync(orderRepository, order);
+ const timestamp = new SraOrdersObservedTimeStamp();
+ timestamp.exchangeAddress = order.exchangeAddress;
+ timestamp.orderHashHex = order.orderHashHex;
+ timestamp.sourceUrl = order.sourceUrl;
+ timestamp.observedTimestamp = 1543377376153;
+ await testSaveAndFindEntityAsync(timestampRepository, timestamp);
+}
diff --git a/packages/pipeline/test/entities/token_metadata_test.ts b/packages/pipeline/test/entities/token_metadata_test.ts
new file mode 100644
index 000000000..805b4b0f7
--- /dev/null
+++ b/packages/pipeline/test/entities/token_metadata_test.ts
@@ -0,0 +1,38 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { TokenMetadata } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+const metadataWithoutNullFields = {
+ address: '0xe41d2489571d322189246dafa5ebde1f4699f498',
+ authority: 'https://website-api.0xproject.com/tokens',
+ decimals: 18,
+ symbol: 'ZRX',
+ name: '0x',
+};
+
+const metadataWithNullFields = {
+ address: '0xe41d2489571d322189246dafa5ebde1f4699f499',
+ authority: 'https://website-api.0xproject.com/tokens',
+ decimals: null,
+ symbol: null,
+ name: null,
+};
+
+// tslint:disable:custom-no-magic-numbers
+describe('TokenMetadata entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const tokenMetadata = [metadataWithoutNullFields, metadataWithNullFields];
+ const tokenMetadataRepository = connection.getRepository(TokenMetadata);
+ for (const tokenMetadatum of tokenMetadata) {
+ await testSaveAndFindEntityAsync(tokenMetadataRepository, tokenMetadatum);
+ }
+ });
+});
diff --git a/packages/pipeline/test/entities/transaction_test.ts b/packages/pipeline/test/entities/transaction_test.ts
new file mode 100644
index 000000000..027de7d32
--- /dev/null
+++ b/packages/pipeline/test/entities/transaction_test.ts
@@ -0,0 +1,25 @@
+import 'mocha';
+import 'reflect-metadata';
+
+import { Transaction } from '../../src/entities';
+import { createDbConnectionOnceAsync } from '../db_setup';
+import { chaiSetup } from '../utils/chai_setup';
+
+import { testSaveAndFindEntityAsync } from './util';
+
+chaiSetup.configure();
+
+// tslint:disable:custom-no-magic-numbers
+describe('Transaction entity', () => {
+ it('save/find', async () => {
+ const connection = await createDbConnectionOnceAsync();
+ const transactionRepository = connection.getRepository(Transaction);
+ const transaction = new Transaction();
+ transaction.blockHash = '0x6ff106d00b6c3746072fc06bae140fb2549036ba7bcf9184ae19a42fd33657fd';
+ transaction.blockNumber = 6276262;
+ transaction.gasPrice = 3000000;
+ transaction.gasUsed = 125000;
+ transaction.transactionHash = '0x6dd106d002873746072fc5e496dd0fb2541b68c77bcf9184ae19a42fd33657fe';
+ await testSaveAndFindEntityAsync(transactionRepository, transaction);
+ });
+});
diff --git a/packages/pipeline/test/entities/util.ts b/packages/pipeline/test/entities/util.ts
new file mode 100644
index 000000000..043a3b15d
--- /dev/null
+++ b/packages/pipeline/test/entities/util.ts
@@ -0,0 +1,25 @@
+import * as chai from 'chai';
+import 'mocha';
+
+import { Repository } from 'typeorm';
+
+const expect = chai.expect;
+
+/**
+ * First saves the given entity to the database, then finds it and makes sure
+ * that the found entity is deeply equal to the original one. This is a
+ * bare-minimum test to make sure that the entity type definition and our
+ * database schema are aligned and that it is possible to save and find the
+ * entity.
+ * @param repository A TypeORM repository corresponding with the type of the entity.
+ * @param entity An instance of a TypeORM entity which will be saved/retrieved from the database.
+ */
+export async function testSaveAndFindEntityAsync<T>(repository: Repository<T>, entity: T): Promise<void> {
+ // Note(albrow): We are forced to use an 'as any' hack here because
+ // TypeScript complains about stack depth when checking the types.
+ await repository.save(entity as any);
+ const gotEntity = await repository.findOneOrFail({
+ where: entity,
+ });
+ expect(gotEntity).deep.equal(entity);
+}
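[Editor's note] For completeness, a sketch of how a future entity test would plug into this helper, mirroring the specs above (MyEntity is hypothetical and not part of this diff; any TypeORM entity exported from ../../src/entities follows the same shape):

import 'mocha';
import 'reflect-metadata';

import { createDbConnectionOnceAsync } from '../db_setup';
import { testSaveAndFindEntityAsync } from './util';

// Hypothetical entity, used for illustration only.
import { MyEntity } from '../../src/entities';

describe('MyEntity entity', () => {
    it('save/find', async () => {
        const connection = await createDbConnectionOnceAsync();
        const entity = new MyEntity();
        // ...assign every non-nullable column here...
        const repository = connection.getRepository(MyEntity);
        // Fails if the entity definition and the database schema disagree.
        await testSaveAndFindEntityAsync(repository, entity);
    });
});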