author    Steve Klebanoff <steve.klebanoff@gmail.com>  2018-12-20 00:19:36 +0800
committer Steve Klebanoff <steve.klebanoff@gmail.com>  2018-12-20 00:19:36 +0800
commit    125a940560a01305781bfb6754f52fa64669a6f3 (patch)
tree      70fc5d2aab95676245460eed72ba059c1661339f /packages
parent    a7f847bf3e693a275b0aa71bcdb12d947b445175 (diff)
parent    b3978b641c12c7bfe9bab1d561384eeaace25321 (diff)
Merge branch 'development' into feature/website/0x-org
Diffstat (limited to 'packages')
-rw-r--r--  packages/json-schemas/schemas/order_watcher_web_socket_request_schema.json  |  52
-rw-r--r--  packages/json-schemas/schemas/order_watcher_web_socket_utf8_message_schema.json  |  10
-rw-r--r--  packages/json-schemas/src/schemas.ts  |   4
-rw-r--r--  packages/json-schemas/tsconfig.json  |   2
-rw-r--r--  packages/order-watcher/CHANGELOG.json  |  10
-rw-r--r--  packages/order-watcher/README.md  |  88
-rw-r--r--  packages/order-watcher/package.json  |   3
-rw-r--r--  packages/order-watcher/src/index.ts  |   1
-rw-r--r--  packages/order-watcher/src/order_watcher/order_watcher_web_socket_server.ts  | 200
-rw-r--r--  packages/order-watcher/src/types.ts  |  66
-rw-r--r--  packages/order-watcher/test/order_watcher_web_socket_server_test.ts  | 308
-rw-r--r--  packages/sol-compiler/CHANGELOG.json  |  13
-rw-r--r--  packages/sol-compiler/package.json  |   4
-rw-r--r--  packages/sol-compiler/src/cli.ts  |  10
-rw-r--r--  packages/sol-compiler/src/compiler.ts  | 258
-rw-r--r--  packages/sol-compiler/src/utils/compiler.ts  | 217
-rw-r--r--  packages/sol-compiler/src/utils/constants.ts  |   3
-rw-r--r--  packages/sol-compiler/src/utils/types.ts  |   9
-rw-r--r--  packages/sol-compiler/test/compiler_utils_test.ts  |   6
-rw-r--r--  packages/sol-resolver/CHANGELOG.json  |  13
-rw-r--r--  packages/sol-resolver/src/index.ts  |   1
-rw-r--r--  packages/sol-resolver/src/resolvers/fs_resolver.ts  |   5
-rw-r--r--  packages/sol-resolver/src/resolvers/name_resolver.ts  |  10
-rw-r--r--  packages/sol-resolver/src/resolvers/npm_resolver.ts  |   5
-rw-r--r--  packages/sol-resolver/src/resolvers/relative_fs_resolver.ts  |   7
-rw-r--r--  packages/sol-resolver/src/resolvers/spy_resolver.ts  |  25
-rw-r--r--  packages/sol-resolver/src/resolvers/url_resolver.ts  |   5
-rw-r--r--  packages/sol-resolver/src/types.ts  |   1
-rw-r--r--  packages/types/src/index.ts  |   4
-rw-r--r--  packages/typescript-typings/tsconfig.json  |   3
-rw-r--r--  packages/utils/CHANGELOG.json  |   9
-rw-r--r--  packages/utils/package.json  |   1
-rw-r--r--  packages/utils/src/log_utils.ts  |   5
-rw-r--r--  packages/website/ts/pages/documentation/docs_home.tsx  |   8
34 files changed, 1146 insertions, 220 deletions
diff --git a/packages/json-schemas/schemas/order_watcher_web_socket_request_schema.json b/packages/json-schemas/schemas/order_watcher_web_socket_request_schema.json
new file mode 100644
index 000000000..b0c419f94
--- /dev/null
+++ b/packages/json-schemas/schemas/order_watcher_web_socket_request_schema.json
@@ -0,0 +1,52 @@
+{
+ "id": "/orderWatcherWebSocketRequestSchema",
+ "type": "object",
+ "definitions": {
+ "signedOrderParam": {
+ "type": "object",
+ "properties": {
+ "signedOrder": { "$ref": "/signedOrderSchema" }
+ },
+ "required": ["signedOrder"]
+ },
+ "orderHashParam": {
+ "type": "object",
+ "properties": {
+ "orderHash": { "$ref": "/hexSchema" }
+ },
+ "required": ["orderHash"]
+ }
+ },
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ "id": { "type": "number" },
+ "jsonrpc": { "type": "string" },
+ "method": { "enum": ["ADD_ORDER"] },
+ "params": { "$ref": "#/definitions/signedOrderParam" }
+ },
+ "required": ["id", "jsonrpc", "method", "params"]
+ },
+ {
+ "type": "object",
+ "properties": {
+ "id": { "type": "number" },
+ "jsonrpc": { "type": "string" },
+ "method": { "enum": ["REMOVE_ORDER"] },
+ "params": { "$ref": "#/definitions/orderHashParam" }
+ },
+ "required": ["id", "jsonrpc", "method", "params"]
+ },
+ {
+ "type": "object",
+ "properties": {
+ "id": { "type": "number" },
+ "jsonrpc": { "type": "string" },
+ "method": { "enum": ["GET_STATS"] },
+ "params": {}
+ },
+ "required": ["id", "jsonrpc", "method"]
+ }
+ ]
+} \ No newline at end of file
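
As an aside (not part of the commit), a payload can be checked against this schema with the validator that `@0x/json-schemas` ships; a minimal sketch, assuming the package's `SchemaValidator` API:

```
import { SchemaValidator, schemas } from '@0x/json-schemas';

// A hypothetical GET_STATS request matching the third branch of the oneOf above.
const payload = { id: 1, jsonrpc: '2.0', method: 'GET_STATS' };

const validator = new SchemaValidator();
// isValid returns true only when the payload conforms to one of the three request shapes.
const isRequestValid = validator.isValid(payload, schemas.orderWatcherWebSocketRequestSchema);
```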
diff --git a/packages/json-schemas/schemas/order_watcher_web_socket_utf8_message_schema.json b/packages/json-schemas/schemas/order_watcher_web_socket_utf8_message_schema.json
new file mode 100644
index 000000000..154d6d754
--- /dev/null
+++ b/packages/json-schemas/schemas/order_watcher_web_socket_utf8_message_schema.json
@@ -0,0 +1,10 @@
+{
+ "id": "/orderWatcherWebSocketUtf8MessageSchema",
+ "properties": {
+ "utf8Data": { "type": "string" }
+ },
+ "required": [
+ "utf8Data"
+ ],
+ "type": "object"
+}
diff --git a/packages/json-schemas/src/schemas.ts b/packages/json-schemas/src/schemas.ts
index 21a6f424c..050f4e625 100644
--- a/packages/json-schemas/src/schemas.ts
+++ b/packages/json-schemas/src/schemas.ts
@@ -16,6 +16,8 @@ import * as orderFillOrKillRequestsSchema from '../schemas/order_fill_or_kill_re
import * as orderFillRequestsSchema from '../schemas/order_fill_requests_schema.json';
import * as orderHashSchema from '../schemas/order_hash_schema.json';
import * as orderSchema from '../schemas/order_schema.json';
+import * as orderWatcherWebSocketRequestSchema from '../schemas/order_watcher_web_socket_request_schema.json';
+import * as orderWatcherWebSocketUtf8MessageSchema from '../schemas/order_watcher_web_socket_utf8_message_schema.json';
import * as orderBookRequestSchema from '../schemas/orderbook_request_schema.json';
import * as ordersRequestOptsSchema from '../schemas/orders_request_opts_schema.json';
import * as ordersSchema from '../schemas/orders_schema.json';
@@ -66,6 +68,8 @@ export const schemas = {
jsNumber,
requestOptsSchema,
pagedRequestOptsSchema,
+ orderWatcherWebSocketRequestSchema,
+ orderWatcherWebSocketUtf8MessageSchema,
ordersRequestOptsSchema,
orderBookRequestSchema,
orderConfigRequestSchema,
diff --git a/packages/json-schemas/tsconfig.json b/packages/json-schemas/tsconfig.json
index a79d54385..ec573290c 100644
--- a/packages/json-schemas/tsconfig.json
+++ b/packages/json-schemas/tsconfig.json
@@ -23,6 +23,8 @@
"./schemas/order_schema.json",
"./schemas/signed_order_schema.json",
"./schemas/orders_schema.json",
+ "./schemas/order_watcher_web_socket_request_schema.json",
+ "./schemas/order_watcher_web_socket_utf8_message_schema.json",
"./schemas/paginated_collection_schema.json",
"./schemas/relayer_api_asset_data_pairs_response_schema.json",
"./schemas/relayer_api_asset_data_pairs_schema.json",
diff --git a/packages/order-watcher/CHANGELOG.json b/packages/order-watcher/CHANGELOG.json
index c1fd8d4a9..304dc45fd 100644
--- a/packages/order-watcher/CHANGELOG.json
+++ b/packages/order-watcher/CHANGELOG.json
@@ -1,5 +1,15 @@
[
{
+ "version": "2.3.0",
+ "changes": [
+ {
+ "note":
+ "Added a WebSocket interface to OrderWatcher so that it can be used by a client written in any language",
+ "pr": 1427
+ }
+ ]
+ },
+ {
"version": "2.2.8",
"changes": [
{
diff --git a/packages/order-watcher/README.md b/packages/order-watcher/README.md
index c0b99b272..385fe4715 100644
--- a/packages/order-watcher/README.md
+++ b/packages/order-watcher/README.md
@@ -4,6 +4,9 @@ An order watcher daemon that watches for order validity.
#### Read the wiki [article](https://0xproject.com/wiki#0x-OrderWatcher).
+OrderWatcher also comes with a WebSocket server to provide language-agnostic access
+to order watching functionality. It uses the [WebSocket Client and Server Implementation for Node](https://www.npmjs.com/package/websocket). The server sends and receives messages that conform to the [JSON RPC specification](https://www.jsonrpc.org/specification).
+
## Installation
**Install**
@@ -26,6 +29,91 @@ If your project is in [TypeScript](https://www.typescriptlang.org/), add the fol
}
```
+## Using the WebSocket Server
+
+**Setup**
+
+**Environment Variables**
+Several environment variables can be set to configure the server:
+
+* `ORDER_WATCHER_HTTP_PORT` specifies the port that the HTTP server will listen on
+ and accept connections from. When this is not set, we default to 8080.
+
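
Before looking at requests, here is a rough sketch of how the server might be started from Node. Only `OrderWatcherWebSocketServer`, its constructor arguments, and `ORDER_WATCHER_HTTP_PORT` come from this diff; the provider wiring via `@0x/subproviders` is an illustrative assumption.

```
import { OrderWatcherWebSocketServer } from '@0x/order-watcher';
import { RPCSubprovider, Web3ProviderEngine } from '@0x/subproviders'; // assumed provider setup

const provider = new Web3ProviderEngine();
provider.addProvider(new RPCSubprovider('http://localhost:8545')); // assumed local Ethereum node
provider.start();

process.env.ORDER_WATCHER_HTTP_PORT = '8080'; // optional; 8080 is the default
const networkId = 50; // e.g. a local test network
const wsServer = new OrderWatcherWebSocketServer(provider, networkId, undefined, { isVerbose: true });
wsServer.start();
```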
+**Requests**
+The server accepts three types of requests: `ADD_ORDER`, `REMOVE_ORDER` and `GET_STATS`. These mirror what the underlying OrderWatcher does. You can read more in the [wiki](https://0xproject.com/wiki#0x-OrderWatcher). Unlike the OrderWatcher, it does not expose any `subscribe` or `unsubscribe` functionality because the WebSocket server keeps a single subscription open for all clients.
+
+The first step for making a request is establishing a connection with the server. In Javascript:
+
+```
+var W3CWebSocket = require('websocket').w3cwebsocket;
+wsClient = new W3CWebSocket('ws://127.0.0.1:8080');
+```
+
+In Python, you could use the [websocket-client library](http://pypi.python.org/pypi/websocket-client/) and run:
+
+```
+from websocket import create_connection
+wsClient = create_connection("ws://127.0.0.1:8080")
+```
+
+With the connection established, you prepare the payload for your request. The payload is a json object with a format established by the [JSON RPC specification](https://www.jsonrpc.org/specification):
+
+* `id`: All requests require you to specify a numerical `id`. When the server responds to the request, the response will have the same `id` as the one supplied with your request.
+* `jsonrpc`: This is always the string `'2.0'`.
+* `method`: This specifies the OrderWatcher method you want to call: `'ADD_ORDER'`, `'REMOVE_ORDER'` or `'GET_STATS'`.
+* `params`: These contain the parameters needed by OrderWatcher to execute the method you called. For `ADD_ORDER`, provide `{ signedOrder: <your signedOrder> }`. For `REMOVE_ORDER`, provide `{ orderHash: <your orderHash> }`. For `GET_STATS`, no parameters are needed, so you may leave this empty.
+
+Next, convert the payload to a string and send it through the connection.
+In Javascript:
+
+```
+const addOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'ADD_ORDER',
+ params: { signedOrder: <your signedOrder> },
+};
+wsClient.send(JSON.stringify(addOrderPayload));
+```
+
+In Python:
+
+```
+import json
+remove_order_payload = {
+ 'id': 1,
+ 'jsonrpc': '2.0',
+ 'method': 'REMOVE_ORDER',
+ 'params': {'orderHash': '0x6edc16bf37fde79f5012088c33784c730e2f103d9ab1caf73060c386ad107b7e'},
+}
+wsClient.send(json.dumps(remove_order_payload));
+```
+
+**Response**
+The server responds to all requests in a similar format. In the data field, you'll find another object containing the following fields:
+
+* `id`: The id corresponding to the request that the server is responding to. `UPDATE` responses are not based on any request, so the `id` field is omitted.
+* `jsonrpc`: Always `'2.0'`.
+* `method`: The method the server is responding to, e.g. `ADD_ORDER`. When order states change, the server may also initiate a response; in that case, the method is listed as `UPDATE`.
+* `result`: This field varies based on the method. `UPDATE` responses contain the new order state. `GET_STATS` responses contain the current order count. When there are errors, this field is omitted.
+* `error`: When there is an error executing a request, the [JSON RPC](https://www.jsonrpc.org/specification) error object is listed here. When the server responds successfully, this field is omitted.
+
+In Javascript, the responses can be parsed using the `onmessage` callback:
+
+```
+wsClient.onmessage = (msg) => {
+ const responseData = JSON.parse(msg.data);
+ const method = responseData.method;
+};
+```
+
+In Python, `recv` is a lightweight way to receive a response:
+
+```
+import json
+result = json.loads(wsClient.recv())
+method = result['method']
+```
+
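
Putting the response fields above together, a client handler might branch on `error` and `method` roughly as follows (a sketch based on the fields described above, not an excerpt from the package):

```
wsClient.onmessage = (msg) => {
    const response = JSON.parse(msg.data);
    if (response.error !== undefined) {
        // A JSON RPC error object; the request with this id failed.
        console.error('Request', response.id, 'failed:', response.error);
    } else if (response.method === 'UPDATE') {
        // Spontaneous broadcast: result holds the new order state.
        console.log('Order state changed:', response.result);
    } else if (response.method === 'GET_STATS') {
        console.log('Watched order count:', response.result.orderCount);
    } else {
        // ADD_ORDER / REMOVE_ORDER acknowledgements carry no result.
        console.log('Request', response.id, 'acknowledged:', response.method);
    }
};
```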
## Contributing
We strongly recommend that the community help us make improvements and determine the future direction of the protocol. To report bugs within this package, please create an issue in this repository.
diff --git a/packages/order-watcher/package.json b/packages/order-watcher/package.json
index 499d4cead..16a46294e 100644
--- a/packages/order-watcher/package.json
+++ b/packages/order-watcher/package.json
@@ -74,7 +74,8 @@
"ethereum-types": "^1.1.4",
"ethereumjs-blockstream": "6.0.0",
"ethers": "~4.0.4",
- "lodash": "^4.17.5"
+ "lodash": "^4.17.5",
+ "websocket": "^1.0.25"
},
"publishConfig": {
"access": "public"
diff --git a/packages/order-watcher/src/index.ts b/packages/order-watcher/src/index.ts
index 5eeba3e87..e275a0c6a 100644
--- a/packages/order-watcher/src/index.ts
+++ b/packages/order-watcher/src/index.ts
@@ -1,4 +1,5 @@
export { OrderWatcher } from './order_watcher/order_watcher';
+export { OrderWatcherWebSocketServer } from './order_watcher/order_watcher_web_socket_server';
export { ExpirationWatcher } from './order_watcher/expiration_watcher';
export {
diff --git a/packages/order-watcher/src/order_watcher/order_watcher_web_socket_server.ts b/packages/order-watcher/src/order_watcher/order_watcher_web_socket_server.ts
new file mode 100644
index 000000000..b75b07603
--- /dev/null
+++ b/packages/order-watcher/src/order_watcher/order_watcher_web_socket_server.ts
@@ -0,0 +1,200 @@
+import { ContractAddresses } from '@0x/contract-addresses';
+import { schemas } from '@0x/json-schemas';
+import { OrderStateInvalid, OrderStateValid, SignedOrder } from '@0x/types';
+import { BigNumber, logUtils } from '@0x/utils';
+import { Provider } from 'ethereum-types';
+import * as http from 'http';
+import * as WebSocket from 'websocket';
+
+import { GetStatsResult, OrderWatcherConfig, OrderWatcherMethod, WebSocketRequest, WebSocketResponse } from '../types';
+import { assert } from '../utils/assert';
+
+import { OrderWatcher } from './order_watcher';
+
+const DEFAULT_HTTP_PORT = 8080;
+const JSON_RPC_VERSION = '2.0';
+
+// Wraps the OrderWatcher functionality in a WebSocket server. Motivations:
+// 1) Users can watch orders via non-typescript programs.
+// 2) Better encapsulation so that users can work
+export class OrderWatcherWebSocketServer {
+ private readonly _orderWatcher: OrderWatcher;
+ private readonly _httpServer: http.Server;
+ private readonly _connectionStore: Set<WebSocket.connection>;
+ private readonly _wsServer: WebSocket.server;
+ private readonly _isVerbose: boolean;
+ /**
+ * Recover types lost when the payload is stringified.
+ */
+ private static _parseSignedOrder(rawRequest: any): SignedOrder {
+ const bigNumberFields = [
+ 'salt',
+ 'makerFee',
+ 'takerFee',
+ 'makerAssetAmount',
+ 'takerAssetAmount',
+ 'expirationTimeSeconds',
+ ];
+ for (const field of bigNumberFields) {
+ rawRequest[field] = new BigNumber(rawRequest[field]);
+ }
+ return rawRequest;
+ }
+
+ /**
+ * Instantiate a new WebSocket server which provides OrderWatcher functionality
+ * @param provider Web3 provider to use for JSON RPC calls.
+ * @param networkId NetworkId to watch orders on.
+ * @param contractAddresses Optional contract addresses. Defaults to known
+ * addresses based on networkId.
+ * @param orderWatcherConfig OrderWatcher configurations. isVerbose sets the verbosity for the WebSocket server as well (defaults to true).
+ */
+ constructor(
+ provider: Provider,
+ networkId: number,
+ contractAddresses?: ContractAddresses,
+ orderWatcherConfig?: Partial<OrderWatcherConfig>,
+ ) {
+ this._isVerbose =
+ orderWatcherConfig !== undefined && orderWatcherConfig.isVerbose !== undefined
+ ? orderWatcherConfig.isVerbose
+ : true;
+ this._orderWatcher = new OrderWatcher(provider, networkId, contractAddresses, orderWatcherConfig);
+ this._connectionStore = new Set();
+ this._httpServer = http.createServer();
+ this._wsServer = new WebSocket.server({
+ httpServer: this._httpServer,
+ // Avoid setting autoAcceptConnections to true as it defeats all
+ // standard cross-origin protection facilities built into the protocol
+ // and the browser.
+ // Source: https://www.npmjs.com/package/websocket#server-example
+ // Also ensures that a request event is emitted by
+ // the server whenever a new WebSocket request is made.
+ autoAcceptConnections: false,
+ });
+
+ this._wsServer.on('request', async (request: any) => {
+ // Designed for usage pattern where client and server are run on the same
+ // machine by the same user. As such, no security checks are in place.
+ const connection: WebSocket.connection = request.accept(null, request.origin);
+ this._log(`${new Date()} [Server] Accepted connection from origin ${request.origin}.`);
+ connection.on('message', this._onMessageCallbackAsync.bind(this, connection));
+ connection.on('close', this._onCloseCallback.bind(this, connection));
+ this._connectionStore.add(connection);
+ });
+ }
+
+ /**
+ * Activates the WebSocket server by subscribing to the OrderWatcher and
+ * starting the WebSocket's HTTP server
+ */
+ public start(): void {
+ // Have the WebSocket server subscribe to the OrderWatcher to receive updates.
+ // These updates are then broadcast to clients in the _connectionStore.
+ this._orderWatcher.subscribe(this._broadcastCallback.bind(this));
+
+ const port = process.env.ORDER_WATCHER_HTTP_PORT || DEFAULT_HTTP_PORT;
+ this._httpServer.listen(port, () => {
+ this._log(`${new Date()} [Server] Listening on port ${port}`);
+ });
+ }
+
+ /**
+ * Deactivates the WebSocket server by stopping the HTTP server from accepting
+ * new connections and unsubscribing from the OrderWatcher
+ */
+ public stop(): void {
+ this._httpServer.close();
+ this._orderWatcher.unsubscribe();
+ }
+
+ private _log(...args: any[]): void {
+ if (this._isVerbose) {
+ logUtils.log(...args);
+ }
+ }
+
+ private async _onMessageCallbackAsync(connection: WebSocket.connection, message: any): Promise<void> {
+ let response: WebSocketResponse;
+ let id: number | null = null;
+ try {
+ assert.doesConformToSchema('message', message, schemas.orderWatcherWebSocketUtf8MessageSchema);
+ const request: WebSocketRequest = JSON.parse(message.utf8Data);
+ id = request.id;
+ assert.doesConformToSchema('request', request, schemas.orderWatcherWebSocketRequestSchema);
+ assert.isString(request.jsonrpc, JSON_RPC_VERSION);
+ response = {
+ id,
+ jsonrpc: JSON_RPC_VERSION,
+ method: request.method,
+ result: await this._routeRequestAsync(request),
+ };
+ } catch (err) {
+ response = {
+ id,
+ jsonrpc: JSON_RPC_VERSION,
+ method: null,
+ error: err.toString(),
+ };
+ }
+ this._log(`${new Date()} [Server] OrderWatcher output: ${JSON.stringify(response)}`);
+ connection.sendUTF(JSON.stringify(response));
+ }
+
+ private _onCloseCallback(connection: WebSocket.connection): void {
+ this._connectionStore.delete(connection);
+ this._log(`${new Date()} [Server] Client ${connection.remoteAddress} disconnected.`);
+ }
+
+ private async _routeRequestAsync(request: WebSocketRequest): Promise<GetStatsResult | undefined> {
+ this._log(`${new Date()} [Server] Request received: ${request.method}`);
+ switch (request.method) {
+ case OrderWatcherMethod.AddOrder: {
+ const signedOrder: SignedOrder = OrderWatcherWebSocketServer._parseSignedOrder(
+ request.params.signedOrder,
+ );
+ await this._orderWatcher.addOrderAsync(signedOrder);
+ break;
+ }
+ case OrderWatcherMethod.RemoveOrder: {
+ this._orderWatcher.removeOrder(request.params.orderHash || 'undefined');
+ break;
+ }
+ case OrderWatcherMethod.GetStats: {
+ return this._orderWatcher.getStats();
+ }
+ default:
+ // Should never reach here. Should be caught by JSON schema check.
+ throw new Error(`Unexpected default case hit for request.method`);
+ }
+ return undefined;
+ }
+
+ /**
+ * Broadcasts OrderState changes to ALL connected clients. At the moment,
+ * we do not support clients subscribing to only a subset of orders. As such,
+ * Client B will be notified of changes to an order that Client A added.
+ */
+ private _broadcastCallback(err: Error | null, orderState?: OrderStateValid | OrderStateInvalid | undefined): void {
+ const method = OrderWatcherMethod.Update;
+ const response =
+ err === null
+ ? {
+ jsonrpc: JSON_RPC_VERSION,
+ method,
+ result: orderState,
+ }
+ : {
+ jsonrpc: JSON_RPC_VERSION,
+ method,
+ error: {
+ code: -32000,
+ message: err.message,
+ },
+ };
+ this._connectionStore.forEach((connection: WebSocket.connection) => {
+ connection.sendUTF(JSON.stringify(response));
+ });
+ }
+}
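
A note on `_parseSignedOrder` above: `JSON.stringify`/`JSON.parse` reduce `BigNumber` fields to plain strings in transit, which is why the server re-wraps them before handing the order to `OrderWatcher`. A minimal illustration, assuming the bignumber.js-based `BigNumber` re-exported by `@0x/utils` (which serializes to its decimal string):

```
import { BigNumber } from '@0x/utils';

// A client serializes the order before sending it over the socket...
const wirePayload = JSON.stringify({ makerAssetAmount: new BigNumber('1000000000000000000') });
// ...so after JSON.parse on the server, the field is a plain string, not a BigNumber instance:
const received = JSON.parse(wirePayload);
// _parseSignedOrder re-instantiates such fields, along the lines of:
received.makerAssetAmount = new BigNumber(received.makerAssetAmount);
```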
diff --git a/packages/order-watcher/src/types.ts b/packages/order-watcher/src/types.ts
index 8078dd971..2b529a939 100644
--- a/packages/order-watcher/src/types.ts
+++ b/packages/order-watcher/src/types.ts
@@ -1,4 +1,4 @@
-import { OrderState } from '@0x/types';
+import { OrderState, SignedOrder } from '@0x/types';
import { LogEntryEvent } from 'ethereum-types';
export enum OrderWatcherError {
@@ -31,3 +31,67 @@ export enum InternalOrderWatcherError {
ZrxNotInTokenRegistry = 'ZRX_NOT_IN_TOKEN_REGISTRY',
WethNotInTokenRegistry = 'WETH_NOT_IN_TOKEN_REGISTRY',
}
+
+export enum OrderWatcherMethod {
+ // Methods initiated by the user.
+ GetStats = 'GET_STATS',
+ AddOrder = 'ADD_ORDER',
+ RemoveOrder = 'REMOVE_ORDER',
+ // These are spontaneous; they are primarily order state changes.
+ Update = 'UPDATE',
+ // `subscribe` and `unsubscribe` are methods of OrderWatcher, but we don't
+ // need to expose them to the WebSocket server user because the user implicitly
+ // subscribes and unsubscribes by connecting and disconnecting from the server.
+}
+
+// Users have to create a json object of this format and attach it to
+// the data field of their WebSocket message to interact with the server.
+export type WebSocketRequest = AddOrderRequest | RemoveOrderRequest | GetStatsRequest;
+
+export interface AddOrderRequest {
+ id: number;
+ jsonrpc: string;
+ method: OrderWatcherMethod.AddOrder;
+ params: { signedOrder: SignedOrder };
+}
+
+export interface RemoveOrderRequest {
+ id: number;
+ jsonrpc: string;
+ method: OrderWatcherMethod.RemoveOrder;
+ params: { orderHash: string };
+}
+
+export interface GetStatsRequest {
+ id: number;
+ jsonrpc: string;
+ method: OrderWatcherMethod.GetStats;
+}
+
+// Users should expect a json object of this format in the data field
+// of the WebSocket messages that the server sends out.
+export type WebSocketResponse = SuccessfulWebSocketResponse | ErrorWebSocketResponse;
+
+export interface SuccessfulWebSocketResponse {
+ id: number;
+ jsonrpc: string;
+ method: OrderWatcherMethod;
+ result: OrderState | GetStatsResult | undefined; // result is undefined for ADD_ORDER and REMOVE_ORDER
+}
+
+export interface ErrorWebSocketResponse {
+ id: number | null;
+ jsonrpc: string;
+ method: null;
+ error: JSONRPCError;
+}
+
+export interface JSONRPCError {
+ code: number;
+ message: string;
+ data?: string | object;
+}
+
+export interface GetStatsResult {
+ orderCount: number;
+}
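
For TypeScript clients, the `WebSocketResponse` union above can be narrowed with a small type guard; a sketch, assuming these types end up being exported from the package entry point:

```
import { ErrorWebSocketResponse, WebSocketResponse } from '@0x/order-watcher';

// Error responses are the only variant carrying an `error` field (and a null `method`).
function isErrorResponse(response: WebSocketResponse): response is ErrorWebSocketResponse {
    return (response as ErrorWebSocketResponse).error !== undefined;
}

function handleResponse(response: WebSocketResponse): void {
    if (isErrorResponse(response)) {
        console.error(`Request ${response.id} failed: ${response.error.message}`);
    } else {
        console.log(`${response.method} succeeded`, response.result);
    }
}
```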
diff --git a/packages/order-watcher/test/order_watcher_web_socket_server_test.ts b/packages/order-watcher/test/order_watcher_web_socket_server_test.ts
new file mode 100644
index 000000000..578e0de61
--- /dev/null
+++ b/packages/order-watcher/test/order_watcher_web_socket_server_test.ts
@@ -0,0 +1,308 @@
+import { ContractWrappers } from '@0x/contract-wrappers';
+import { tokenUtils } from '@0x/contract-wrappers/lib/test/utils/token_utils';
+import { BlockchainLifecycle } from '@0x/dev-utils';
+import { FillScenarios } from '@0x/fill-scenarios';
+import { assetDataUtils, orderHashUtils } from '@0x/order-utils';
+import { ExchangeContractErrs, OrderStateInvalid, OrderStateValid, SignedOrder } from '@0x/types';
+import { BigNumber, logUtils } from '@0x/utils';
+import { Web3Wrapper } from '@0x/web3-wrapper';
+import * as chai from 'chai';
+import 'mocha';
+import * as WebSocket from 'websocket';
+
+import { OrderWatcherWebSocketServer } from '../src/order_watcher/order_watcher_web_socket_server';
+import { AddOrderRequest, OrderWatcherMethod, RemoveOrderRequest } from '../src/types';
+
+import { chaiSetup } from './utils/chai_setup';
+import { constants } from './utils/constants';
+import { migrateOnceAsync } from './utils/migrate';
+import { provider, web3Wrapper } from './utils/web3_wrapper';
+
+chaiSetup.configure();
+const expect = chai.expect;
+const blockchainLifecycle = new BlockchainLifecycle(web3Wrapper);
+
+interface WsMessage {
+ data: string;
+}
+
+describe.only('OrderWatcherWebSocketServer', async () => {
+ let contractWrappers: ContractWrappers;
+ let wsServer: OrderWatcherWebSocketServer;
+ let wsClient: WebSocket.w3cwebsocket;
+ let wsClientTwo: WebSocket.w3cwebsocket;
+ let fillScenarios: FillScenarios;
+ let userAddresses: string[];
+ let makerAssetData: string;
+ let takerAssetData: string;
+ let makerTokenAddress: string;
+ let takerTokenAddress: string;
+ let makerAddress: string;
+ let takerAddress: string;
+ let zrxTokenAddress: string;
+ let signedOrder: SignedOrder;
+ let orderHash: string;
+ let addOrderPayload: AddOrderRequest;
+ let removeOrderPayload: RemoveOrderRequest;
+ const decimals = constants.ZRX_DECIMALS;
+ const fillableAmount = Web3Wrapper.toBaseUnitAmount(new BigNumber(5), decimals);
+
+ before(async () => {
+ // Set up constants
+ const contractAddresses = await migrateOnceAsync();
+ await blockchainLifecycle.startAsync();
+ const networkId = constants.TESTRPC_NETWORK_ID;
+ const config = {
+ networkId,
+ contractAddresses,
+ };
+ contractWrappers = new ContractWrappers(provider, config);
+ userAddresses = await web3Wrapper.getAvailableAddressesAsync();
+ zrxTokenAddress = contractAddresses.zrxToken;
+ [makerAddress, takerAddress] = userAddresses;
+ [makerTokenAddress, takerTokenAddress] = tokenUtils.getDummyERC20TokenAddresses();
+ [makerAssetData, takerAssetData] = [
+ assetDataUtils.encodeERC20AssetData(makerTokenAddress),
+ assetDataUtils.encodeERC20AssetData(takerTokenAddress),
+ ];
+ fillScenarios = new FillScenarios(
+ provider,
+ userAddresses,
+ zrxTokenAddress,
+ contractAddresses.exchange,
+ contractAddresses.erc20Proxy,
+ contractAddresses.erc721Proxy,
+ );
+ signedOrder = await fillScenarios.createFillableSignedOrderAsync(
+ makerAssetData,
+ takerAssetData,
+ makerAddress,
+ takerAddress,
+ fillableAmount,
+ );
+ orderHash = orderHashUtils.getOrderHashHex(signedOrder);
+ addOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: OrderWatcherMethod.AddOrder,
+ params: { signedOrder },
+ };
+ removeOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: OrderWatcherMethod.RemoveOrder,
+ params: { orderHash },
+ };
+
+ // Prepare OrderWatcher WebSocket server
+ const orderWatcherConfig = {
+ isVerbose: true,
+ };
+ wsServer = new OrderWatcherWebSocketServer(provider, networkId, contractAddresses, orderWatcherConfig);
+ });
+ after(async () => {
+ await blockchainLifecycle.revertAsync();
+ });
+ beforeEach(async () => {
+ wsServer.start();
+ await blockchainLifecycle.startAsync();
+ wsClient = new WebSocket.w3cwebsocket('ws://127.0.0.1:8080/');
+ logUtils.log(`${new Date()} [Client] Connected.`);
+ });
+ afterEach(async () => {
+ wsClient.close();
+ await blockchainLifecycle.revertAsync();
+ wsServer.stop();
+ logUtils.log(`${new Date()} [Client] Closed.`);
+ });
+
+ it('responds to getStats requests correctly', (done: any) => {
+ const payload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'GET_STATS',
+ };
+ wsClient.onopen = () => wsClient.send(JSON.stringify(payload));
+ wsClient.onmessage = (msg: any) => {
+ const responseData = JSON.parse(msg.data);
+ expect(responseData.id).to.be.eq(1);
+ expect(responseData.jsonrpc).to.be.eq('2.0');
+ expect(responseData.method).to.be.eq('GET_STATS');
+ expect(responseData.result.orderCount).to.be.eq(0);
+ done();
+ };
+ });
+
+ it('throws an error when an invalid method is attempted', async () => {
+ const invalidMethodPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'BAD_METHOD',
+ };
+ wsClient.onopen = () => wsClient.send(JSON.stringify(invalidMethodPayload));
+ const errorMsg = await onMessageAsync(wsClient, null);
+ const errorData = JSON.parse(errorMsg.data);
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.id).to.be.null;
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.method).to.be.null;
+ expect(errorData.jsonrpc).to.be.eq('2.0');
+ expect(errorData.error).to.match(/^Error: Expected request to conform to schema/);
+ });
+
+ it('throws an error when jsonrpc field missing from request', async () => {
+ const noJsonRpcPayload = {
+ id: 1,
+ method: 'GET_STATS',
+ };
+ wsClient.onopen = () => wsClient.send(JSON.stringify(noJsonRpcPayload));
+ const errorMsg = await onMessageAsync(wsClient, null);
+ const errorData = JSON.parse(errorMsg.data);
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.method).to.be.null;
+ expect(errorData.jsonrpc).to.be.eq('2.0');
+ expect(errorData.error).to.match(/^Error: Expected request to conform to schema/);
+ });
+
+ it('throws an error when we try to add an order without a signedOrder', async () => {
+ const noSignedOrderAddOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'ADD_ORDER',
+ orderHash: '0x7337e2f2a9aa2ed6afe26edc2df7ad79c3ffa9cf9b81a964f707ea63f5272355',
+ };
+ wsClient.onopen = () => wsClient.send(JSON.stringify(noSignedOrderAddOrderPayload));
+ const errorMsg = await onMessageAsync(wsClient, null);
+ const errorData = JSON.parse(errorMsg.data);
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.id).to.be.null;
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.method).to.be.null;
+ expect(errorData.jsonrpc).to.be.eq('2.0');
+ expect(errorData.error).to.match(/^Error: Expected request to conform to schema/);
+ });
+
+ it('throws an error when we try to add a bad signedOrder', async () => {
+ const invalidAddOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'ADD_ORDER',
+ signedOrder: {
+ makerAddress: '0x0',
+ },
+ };
+ wsClient.onopen = () => wsClient.send(JSON.stringify(invalidAddOrderPayload));
+ const errorMsg = await onMessageAsync(wsClient, null);
+ const errorData = JSON.parse(errorMsg.data);
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.id).to.be.null;
+ // tslint:disable-next-line:no-unused-expression
+ expect(errorData.method).to.be.null;
+ expect(errorData.error).to.match(/^Error: Expected request to conform to schema/);
+ });
+
+ it('executes addOrder and removeOrder requests correctly', async () => {
+ wsClient.onopen = () => wsClient.send(JSON.stringify(addOrderPayload));
+ const addOrderMsg = await onMessageAsync(wsClient, OrderWatcherMethod.AddOrder);
+ const addOrderData = JSON.parse(addOrderMsg.data);
+ expect(addOrderData.method).to.be.eq('ADD_ORDER');
+ expect((wsServer as any)._orderWatcher._orderByOrderHash).to.deep.include({
+ [orderHash]: signedOrder,
+ });
+
+ const clientOnMessagePromise = onMessageAsync(wsClient, OrderWatcherMethod.RemoveOrder);
+ wsClient.send(JSON.stringify(removeOrderPayload));
+ const removeOrderMsg = await clientOnMessagePromise;
+ const removeOrderData = JSON.parse(removeOrderMsg.data);
+ expect(removeOrderData.method).to.be.eq('REMOVE_ORDER');
+ expect((wsServer as any)._orderWatcher._orderByOrderHash).to.not.deep.include({
+ [orderHash]: signedOrder,
+ });
+ });
+
+ it('broadcasts orderStateInvalid message when makerAddress allowance set to 0 for watched order', async () => {
+ // Add the regular order
+ wsClient.onopen = () => wsClient.send(JSON.stringify(addOrderPayload));
+
+ // We register the onMessage callback before calling `setProxyAllowanceAsync` which we
+ // expect will cause a message to be emitted. We do not "await" here, since we want to
+ // check for messages _after_ calling `setProxyAllowanceAsync`
+ const clientOnMessagePromise = onMessageAsync(wsClient, OrderWatcherMethod.Update);
+
+ // Set the allowance to 0
+ await contractWrappers.erc20Token.setProxyAllowanceAsync(makerTokenAddress, makerAddress, new BigNumber(0));
+
+ // We now await the `onMessage` promise to check for the message
+ const orderWatcherUpdateMsg = await clientOnMessagePromise;
+ const orderWatcherUpdateData = JSON.parse(orderWatcherUpdateMsg.data);
+ expect(orderWatcherUpdateData.method).to.be.eq('UPDATE');
+ const invalidOrderState = orderWatcherUpdateData.result as OrderStateInvalid;
+ expect(invalidOrderState.isValid).to.be.false();
+ expect(invalidOrderState.orderHash).to.be.eq(orderHash);
+ expect(invalidOrderState.error).to.be.eq(ExchangeContractErrs.InsufficientMakerAllowance);
+ });
+
+ it('broadcasts to multiple clients when an order backing ZRX allowance changes', async () => {
+ // Prepare order
+ const makerFee = Web3Wrapper.toBaseUnitAmount(new BigNumber(2), decimals);
+ const takerFee = Web3Wrapper.toBaseUnitAmount(new BigNumber(0), decimals);
+ const nonZeroMakerFeeSignedOrder = await fillScenarios.createFillableSignedOrderWithFeesAsync(
+ makerAssetData,
+ takerAssetData,
+ makerFee,
+ takerFee,
+ makerAddress,
+ takerAddress,
+ fillableAmount,
+ takerAddress,
+ );
+ const nonZeroMakerFeeOrderPayload = {
+ id: 1,
+ jsonrpc: '2.0',
+ method: 'ADD_ORDER',
+ signedOrder: nonZeroMakerFeeSignedOrder,
+ };
+
+ // Set up a second client and have it add the order
+ wsClientTwo = new WebSocket.w3cwebsocket('ws://127.0.0.1:8080/');
+ logUtils.log(`${new Date()} [Client] Connected.`);
+ wsClientTwo.onopen = () => wsClientTwo.send(JSON.stringify(nonZeroMakerFeeOrderPayload));
+
+ // Setup the onMessage callbacks, but don't await them yet
+ const clientOneOnMessagePromise = onMessageAsync(wsClient, OrderWatcherMethod.Update);
+ const clientTwoOnMessagePromise = onMessageAsync(wsClientTwo, OrderWatcherMethod.Update);
+
+ // Change the allowance
+ await contractWrappers.erc20Token.setProxyAllowanceAsync(zrxTokenAddress, makerAddress, new BigNumber(0));
+
+ // Check that both clients receive the emitted event by awaiting the onMessageAsync promises
+ let updateMsg = await clientOneOnMessagePromise;
+ let updateData = JSON.parse(updateMsg.data);
+ let orderState = updateData.result as OrderStateValid;
+ expect(orderState.isValid).to.be.true();
+ expect(orderState.orderRelevantState.makerFeeProxyAllowance).to.be.eq('0');
+
+ updateMsg = await clientTwoOnMessagePromise;
+ updateData = JSON.parse(updateMsg.data);
+ orderState = updateData.result as OrderStateValid;
+ expect(orderState.isValid).to.be.true();
+ expect(orderState.orderRelevantState.makerFeeProxyAllowance).to.be.eq('0');
+
+ wsClientTwo.close();
+ logUtils.log(`${new Date()} [Client] Closed.`);
+ });
+});
+
+// HACK: createFillableSignedOrderAsync is Promise-based, which forces us
+// to use Promises instead of the done() callbacks for tests.
+// The onmessage callback must thus be wrapped as a Promise.
+async function onMessageAsync(client: WebSocket.w3cwebsocket, method: string | null): Promise<WsMessage> {
+ return new Promise<WsMessage>(resolve => {
+ client.onmessage = (msg: WsMessage) => {
+ const data = JSON.parse(msg.data);
+ if (data.method === method) {
+ resolve(msg);
+ }
+ };
+ });
+}
diff --git a/packages/sol-compiler/CHANGELOG.json b/packages/sol-compiler/CHANGELOG.json
index 0a757f519..8548fd73f 100644
--- a/packages/sol-compiler/CHANGELOG.json
+++ b/packages/sol-compiler/CHANGELOG.json
@@ -1,5 +1,18 @@
[
{
+ "version": "2.0.0",
+ "changes": [
+ {
+ "note": "Add sol-compiler watch mode with -w flag",
+ "pr": 1461
+ },
+ {
+ "note": "Make error and warning colouring more visually pleasant and consistent with other compilers",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "1.1.16",
"changes": [
{
diff --git a/packages/sol-compiler/package.json b/packages/sol-compiler/package.json
index 0ad620b1f..86167a603 100644
--- a/packages/sol-compiler/package.json
+++ b/packages/sol-compiler/package.json
@@ -44,7 +44,9 @@
"devDependencies": {
"@0x/dev-utils": "^1.0.21",
"@0x/tslint-config": "^2.0.0",
+ "@types/chokidar": "^1.7.5",
"@types/mkdirp": "^0.5.2",
+ "@types/pluralize": "^0.0.29",
"@types/require-from-string": "^1.2.0",
"@types/semver": "^5.5.0",
"chai": "^4.0.1",
@@ -74,10 +76,12 @@
"@0x/web3-wrapper": "^3.2.1",
"@types/yargs": "^11.0.0",
"chalk": "^2.3.0",
+ "chokidar": "^2.0.4",
"ethereum-types": "^1.1.4",
"ethereumjs-util": "^5.1.1",
"lodash": "^4.17.5",
"mkdirp": "^0.5.1",
+ "pluralize": "^7.0.0",
"require-from-string": "^2.0.1",
"semver": "5.5.0",
"solc": "^0.4.23",
diff --git a/packages/sol-compiler/src/cli.ts b/packages/sol-compiler/src/cli.ts
index 0a9db6e05..18cc68aaf 100644
--- a/packages/sol-compiler/src/cli.ts
+++ b/packages/sol-compiler/src/cli.ts
@@ -25,6 +25,10 @@ const SEPARATOR = ',';
type: 'string',
description: 'comma separated list of contracts to compile',
})
+ .option('watch', {
+ alias: 'w',
+ default: false,
+ })
.help().argv;
const contracts = _.isUndefined(argv.contracts)
? undefined
@@ -37,7 +41,11 @@ const SEPARATOR = ',';
contracts,
};
const compiler = new Compiler(opts);
- await compiler.compileAsync();
+ if (argv.watch) {
+ await compiler.watchAsync();
+ } else {
+ await compiler.compileAsync();
+ }
})().catch(err => {
logUtils.log(err);
process.exit(1);
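
The new `--watch`/`-w` flag simply routes to `Compiler.watchAsync()`, so the same behaviour is available programmatically; a sketch with illustrative `CompilerOptions`:

```
import { Compiler } from '@0x/sol-compiler';

(async () => {
    const compiler = new Compiler({
        contractsDir: 'contracts', // illustrative; defaults to ./contracts
        artifactsDir: 'artifacts', // illustrative; defaults to ./artifacts
        contracts: '*',
    });
    // Equivalent to `sol-compiler --watch`: recompiles whenever a watched source changes.
    await compiler.watchAsync();
})().catch(err => {
    console.error(err);
    process.exit(1);
});
```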
diff --git a/packages/sol-compiler/src/compiler.ts b/packages/sol-compiler/src/compiler.ts
index 85df8209e..d38ccbf39 100644
--- a/packages/sol-compiler/src/compiler.ts
+++ b/packages/sol-compiler/src/compiler.ts
@@ -6,26 +6,29 @@ import {
NPMResolver,
RelativeFSResolver,
Resolver,
+ SpyResolver,
URLResolver,
} from '@0x/sol-resolver';
-import { fetchAsync, logUtils } from '@0x/utils';
-import chalk from 'chalk';
+import { logUtils } from '@0x/utils';
+import * as chokidar from 'chokidar';
import { CompilerOptions, ContractArtifact, ContractVersionData, StandardOutput } from 'ethereum-types';
-import * as ethUtil from 'ethereumjs-util';
import * as fs from 'fs';
import * as _ from 'lodash';
import * as path from 'path';
-import * as requireFromString from 'require-from-string';
+import * as pluralize from 'pluralize';
import * as semver from 'semver';
import solc = require('solc');
import { compilerOptionsSchema } from './schemas/compiler_options_schema';
import { binPaths } from './solc/bin_paths';
import {
+ addHexPrefixToContractBytecode,
+ compile,
createDirIfDoesNotExistAsync,
getContractArtifactIfExistsAsync,
- getNormalizedErrMsg,
- parseDependencies,
+ getSolcAsync,
+ getSourcesWithDependencies,
+ getSourceTreeHash,
parseSolidityVersionRange,
} from './utils/compiler';
import { constants } from './utils/constants';
@@ -35,7 +38,6 @@ import { utils } from './utils/utils';
type TYPE_ALL_FILES_IDENTIFIER = '*';
const ALL_CONTRACTS_IDENTIFIER = '*';
const ALL_FILES_IDENTIFIER = '*';
-const SOLC_BIN_DIR = path.join(__dirname, '..', '..', 'solc_bin');
const DEFAULT_CONTRACTS_DIR = path.resolve('contracts');
const DEFAULT_ARTIFACTS_DIR = path.resolve('artifacts');
// Solc compiler settings cannot be configured from the commandline.
@@ -82,49 +84,6 @@ export class Compiler {
private readonly _artifactsDir: string;
private readonly _solcVersionIfExists: string | undefined;
private readonly _specifiedContracts: string[] | TYPE_ALL_FILES_IDENTIFIER;
- private static async _getSolcAsync(
- solcVersion: string,
- ): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
- const fullSolcVersion = binPaths[solcVersion];
- if (_.isUndefined(fullSolcVersion)) {
- throw new Error(`${solcVersion} is not a known compiler version`);
- }
- const compilerBinFilename = path.join(SOLC_BIN_DIR, fullSolcVersion);
- let solcjs: string;
- if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
- solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
- } else {
- logUtils.warn(`Downloading ${fullSolcVersion}...`);
- const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
- const response = await fetchAsync(url);
- const SUCCESS_STATUS = 200;
- if (response.status !== SUCCESS_STATUS) {
- throw new Error(`Failed to load ${fullSolcVersion}`);
- }
- solcjs = await response.text();
- await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
- }
- if (solcjs.length === 0) {
- throw new Error('No compiler available');
- }
- const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
- return { solcInstance, fullSolcVersion };
- }
- private static _addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
- if (!_.isUndefined(compiledContract.evm)) {
- if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
- compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
- }
- if (
- !_.isUndefined(compiledContract.evm.deployedBytecode) &&
- !_.isUndefined(compiledContract.evm.deployedBytecode.object)
- ) {
- compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
- compiledContract.evm.deployedBytecode.object,
- );
- }
- }
- }
/**
* Instantiates a new instance of the Compiler class.
* @param opts Optional compiler options
@@ -158,7 +117,7 @@ export class Compiler {
*/
public async compileAsync(): Promise<void> {
await createDirIfDoesNotExistAsync(this._artifactsDir);
- await createDirIfDoesNotExistAsync(SOLC_BIN_DIR);
+ await createDirIfDoesNotExistAsync(constants.SOLC_BIN_DIR);
await this._compileContractsAsync(this._getContractNamesToCompile(), true);
}
/**
@@ -173,6 +132,54 @@ export class Compiler {
const promisedOutputs = this._compileContractsAsync(this._getContractNamesToCompile(), false);
return promisedOutputs;
}
+ public async watchAsync(): Promise<void> {
+ console.clear(); // tslint:disable-line:no-console
+ logUtils.logWithTime('Starting compilation in watch mode...');
+ const MATCH_NOTHING_REGEX = '^$';
+ const IGNORE_DOT_FILES_REGEX = /(^|[\/\\])\../;
+ // Initially we watch nothing. We'll add the paths later.
+ const watcher = chokidar.watch(MATCH_NOTHING_REGEX, { ignored: IGNORE_DOT_FILES_REGEX });
+ const onFileChangedAsync = async () => {
+ watcher.unwatch('*'); // Stop watching
+ try {
+ await this.compileAsync();
+ logUtils.logWithTime('Found 0 errors. Watching for file changes.');
+ } catch (err) {
+ if (err.typeName === 'CompilationError') {
+ logUtils.logWithTime(
+ `Found ${err.errorsCount} ${pluralize('error', err.errorsCount)}. Watching for file changes.`,
+ );
+ } else {
+ logUtils.logWithTime('Found errors. Watching for file changes.');
+ }
+ }
+
+ const pathsToWatch = this._getPathsToWatch();
+ watcher.add(pathsToWatch);
+ };
+ await onFileChangedAsync();
+ watcher.on('change', (changedFilePath: string) => {
+ console.clear(); // tslint:disable-line:no-console
+ logUtils.logWithTime('File change detected. Starting incremental compilation...');
+ // NOTE: We can't await it here because that's a callback.
+ // Instead we stop watching inside of it and start it again when we're finished.
+ onFileChangedAsync(); // tslint:disable-line no-floating-promises
+ });
+ }
+ private _getPathsToWatch(): string[] {
+ const contractNames = this._getContractNamesToCompile();
+ const spyResolver = new SpyResolver(this._resolver);
+ for (const contractName of contractNames) {
+ const contractSource = spyResolver.resolve(contractName);
+ // NOTE: We ignore the return value here. We don't want to compute the source tree hash.
+ // We just want to call a SpyResolver on each contract and its dependencies and
+ // this is a convenient way to reuse the existing code that does that.
+ // We can then get all the relevant paths from the `spyResolver` below.
+ getSourceTreeHash(spyResolver, contractSource.path);
+ }
+ const pathsToWatch = _.uniq(spyResolver.resolvedContractSources.map(cs => cs.absolutePath));
+ return pathsToWatch;
+ }
private _getContractNamesToCompile(): string[] {
let contractNamesToCompile;
if (this._specifiedContracts === ALL_CONTRACTS_IDENTIFIER) {
@@ -201,12 +208,14 @@ export class Compiler {
for (const contractName of contractNames) {
const contractSource = this._resolver.resolve(contractName);
+ const sourceTreeHashHex = getSourceTreeHash(
+ this._resolver,
+ path.join(this._contractsDir, contractSource.path),
+ ).toString('hex');
const contractData = {
contractName,
currentArtifactIfExists: await getContractArtifactIfExistsAsync(this._artifactsDir, contractName),
- sourceTreeHashHex: `0x${this._getSourceTreeHash(
- path.join(this._contractsDir, contractSource.path),
- ).toString('hex')}`,
+ sourceTreeHashHex: `0x${sourceTreeHashHex}`,
};
if (!this._shouldCompile(contractData)) {
continue;
@@ -244,9 +253,8 @@ export class Compiler {
}) with Solidity v${solcVersion}...`,
);
- const { solcInstance, fullSolcVersion } = await Compiler._getSolcAsync(solcVersion);
-
- const compilerOutput = this._compile(solcInstance, input.standardInput);
+ const { solcInstance, fullSolcVersion } = await getSolcAsync(solcVersion);
+ const compilerOutput = compile(this._resolver, solcInstance, input.standardInput);
compilerOutputs.push(compilerOutput);
for (const contractPath of input.contractsToCompile) {
@@ -259,7 +267,7 @@ export class Compiler {
);
}
- Compiler._addHexPrefixToContractBytecode(compiledContract);
+ addHexPrefixToContractBytecode(compiledContract);
if (shouldPersist) {
await this._persistCompiledContractAsync(
@@ -298,10 +306,14 @@ export class Compiler {
const compiledContract = compilerOutput.contracts[contractPath][contractName];
// need to gather sourceCodes for this artifact, but compilerOutput.sources (the list of contract modules)
- // contains listings for for every contract compiled during the compiler invocation that compiled the contract
+ // contains listings for every contract compiled during the compiler invocation that compiled the contract
// to be persisted, which could include many that are irrelevant to the contract at hand. So, gather up only
// the relevant sources:
- const { sourceCodes, sources } = this._getSourcesWithDependencies(contractPath, compilerOutput.sources);
+ const { sourceCodes, sources } = getSourcesWithDependencies(
+ this._resolver,
+ contractPath,
+ compilerOutput.sources,
+ );
const contractVersion: ContractVersionData = {
compilerOutput: compiledContract,
@@ -336,130 +348,4 @@ export class Compiler {
await fsWrapper.writeFileAsync(currentArtifactPath, artifactString);
logUtils.warn(`${contractName} artifact saved!`);
}
- /**
- * For the given @param contractPath, populates JSON objects to be used in the ContractVersionData interface's
- * properties `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
- * contracts) for that contract. The source code pointed to by contractPath is read and parsed directly (via
- * `this._resolver.resolve().source`), as are its imports, recursively. The ID numbers for @return `sources` are
- * taken from the corresponding ID's in @param fullSources, and the content for @return sourceCodes is read from
- * disk (via the aforementioned `resolver.source`).
- */
- private _getSourcesWithDependencies(
- contractPath: string,
- fullSources: { [sourceName: string]: { id: number } },
- ): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
- const sources = { [contractPath]: { id: fullSources[contractPath].id } };
- const sourceCodes = { [contractPath]: this._resolver.resolve(contractPath).source };
- this._recursivelyGatherDependencySources(
- contractPath,
- sourceCodes[contractPath],
- fullSources,
- sources,
- sourceCodes,
- );
- return { sourceCodes, sources };
- }
- private _recursivelyGatherDependencySources(
- contractPath: string,
- contractSource: string,
- fullSources: { [sourceName: string]: { id: number } },
- sourcesToAppendTo: { [sourceName: string]: { id: number } },
- sourceCodesToAppendTo: { [sourceName: string]: string },
- ): void {
- const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
- if (importStatementMatches === null) {
- return;
- }
- for (const importStatementMatch of importStatementMatches) {
- const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
- if (importPathMatches === null || importPathMatches.length === 0) {
- continue;
- }
-
- let importPath = importPathMatches[1];
- // HACK(ablrow): We have, e.g.:
- //
- // importPath = "../../utils/LibBytes/LibBytes.sol"
- // contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
- //
- // Resolver doesn't understand "../" so we want to pass
- // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
- //
- // This hack involves using path.resolve. But path.resolve returns
- // absolute directories by default. We trick it into thinking that
- // contractPath is a root directory by prepending a '/' and then
- // removing the '/' the end.
- //
- // path.resolve("/a/b/c", ""../../d/e") === "/a/d/e"
- //
- const lastPathSeparatorPos = contractPath.lastIndexOf('/');
- const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
- if (importPath.startsWith('.')) {
- /**
- * Some imports path are relative ("../Token.sol", "./Wallet.sol")
- * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol")
- * And we need to append the base path for relative imports.
- */
- importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
- }
-
- if (_.isUndefined(sourcesToAppendTo[importPath])) {
- sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
- sourceCodesToAppendTo[importPath] = this._resolver.resolve(importPath).source;
-
- this._recursivelyGatherDependencySources(
- importPath,
- this._resolver.resolve(importPath).source,
- fullSources,
- sourcesToAppendTo,
- sourceCodesToAppendTo,
- );
- }
- }
- }
- private _compile(solcInstance: solc.SolcInstance, standardInput: solc.StandardInput): solc.StandardOutput {
- const compiled: solc.StandardOutput = JSON.parse(
- solcInstance.compileStandardWrapper(JSON.stringify(standardInput), importPath => {
- const sourceCodeIfExists = this._resolver.resolve(importPath);
- return { contents: sourceCodeIfExists.source };
- }),
- );
- if (!_.isUndefined(compiled.errors)) {
- const SOLIDITY_WARNING = 'warning';
- const errors = _.filter(compiled.errors, entry => entry.severity !== SOLIDITY_WARNING);
- const warnings = _.filter(compiled.errors, entry => entry.severity === SOLIDITY_WARNING);
- if (!_.isEmpty(errors)) {
- errors.forEach(error => {
- const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
- logUtils.warn(chalk.red(normalizedErrMsg));
- });
- throw new Error('Compilation errors encountered');
- } else {
- warnings.forEach(warning => {
- const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
- logUtils.warn(chalk.yellow(normalizedWarningMsg));
- });
- }
- }
- return compiled;
- }
- /**
- * Gets the source tree hash for a file and its dependencies.
- * @param fileName Name of contract file.
- */
- private _getSourceTreeHash(importPath: string): Buffer {
- const contractSource = this._resolver.resolve(importPath);
- const dependencies = parseDependencies(contractSource);
- const sourceHash = ethUtil.sha3(contractSource.source);
- if (dependencies.length === 0) {
- return sourceHash;
- } else {
- const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
- this._getSourceTreeHash(dependency),
- );
- const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
- const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
- return sourceTreeHash;
- }
- }
}
diff --git a/packages/sol-compiler/src/utils/compiler.ts b/packages/sol-compiler/src/utils/compiler.ts
index cda67a414..db308f2b5 100644
--- a/packages/sol-compiler/src/utils/compiler.ts
+++ b/packages/sol-compiler/src/utils/compiler.ts
@@ -1,10 +1,18 @@
-import { ContractSource } from '@0x/sol-resolver';
-import { logUtils } from '@0x/utils';
+import { ContractSource, Resolver } from '@0x/sol-resolver';
+import { fetchAsync, logUtils } from '@0x/utils';
+import chalk from 'chalk';
import { ContractArtifact } from 'ethereum-types';
+import * as ethUtil from 'ethereumjs-util';
import * as _ from 'lodash';
import * as path from 'path';
+import * as requireFromString from 'require-from-string';
+import * as solc from 'solc';
+import { binPaths } from '../solc/bin_paths';
+
+import { constants } from './constants';
import { fsWrapper } from './fs_wrapper';
+import { CompilationError } from './types';
/**
* Gets contract data on network or returns if an artifact does not exist.
@@ -106,3 +114,208 @@ export function parseDependencies(contractSource: ContractSource): string[] {
});
return dependencies;
}
+
+/**
+ * Compiles the contracts and prints errors/warnings
+ * @param resolver Resolver
+ * @param solcInstance Instance of a solc compiler
+ * @param standardInput Solidity standard JSON input
+ */
+export function compile(
+ resolver: Resolver,
+ solcInstance: solc.SolcInstance,
+ standardInput: solc.StandardInput,
+): solc.StandardOutput {
+ const standardInputStr = JSON.stringify(standardInput);
+ const standardOutputStr = solcInstance.compileStandardWrapper(standardInputStr, importPath => {
+ const sourceCodeIfExists = resolver.resolve(importPath);
+ return { contents: sourceCodeIfExists.source };
+ });
+ const compiled: solc.StandardOutput = JSON.parse(standardOutputStr);
+ if (!_.isUndefined(compiled.errors)) {
+ printCompilationErrorsAndWarnings(compiled.errors);
+ }
+ return compiled;
+}
+/**
+ * Separates errors from warnings, formats the messages and prints them. Throws if there is any compilation error (not warning).
+ * @param solcErrors The errors field of standard JSON output that contains errors and warnings.
+ */
+function printCompilationErrorsAndWarnings(solcErrors: solc.SolcError[]): void {
+ const SOLIDITY_WARNING = 'warning';
+ const errors = _.filter(solcErrors, entry => entry.severity !== SOLIDITY_WARNING);
+ const warnings = _.filter(solcErrors, entry => entry.severity === SOLIDITY_WARNING);
+ if (!_.isEmpty(errors)) {
+ errors.forEach(error => {
+ const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
+ logUtils.log(chalk.red('error'), normalizedErrMsg);
+ });
+ throw new CompilationError(errors.length);
+ } else {
+ warnings.forEach(warning => {
+ const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
+ logUtils.log(chalk.yellow('warning'), normalizedWarningMsg);
+ });
+ }
+}
+
+/**
+ * Gets the source tree hash for a file and its dependencies.
+ * @param fileName Name of contract file.
+ */
+export function getSourceTreeHash(resolver: Resolver, importPath: string): Buffer {
+ const contractSource = resolver.resolve(importPath);
+ const dependencies = parseDependencies(contractSource);
+ const sourceHash = ethUtil.sha3(contractSource.source);
+ if (dependencies.length === 0) {
+ return sourceHash;
+ } else {
+ const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
+ getSourceTreeHash(resolver, dependency),
+ );
+ const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
+ const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
+ return sourceTreeHash;
+ }
+}
+
+/**
+ * For the given @param contractPath, populates JSON objects to be used in the ContractVersionData interface's
+ * properties `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
+ * contracts) for that contract. The source code pointed to by contractPath is read and parsed directly (via
+ * `resolver.resolve().source`), as are its imports, recursively. The ID numbers for @return `sources` are
+ * taken from the corresponding ID's in @param fullSources, and the content for @return sourceCodes is read from
+ * disk (via the aforementioned `resolver.source`).
+ */
+export function getSourcesWithDependencies(
+ resolver: Resolver,
+ contractPath: string,
+ fullSources: { [sourceName: string]: { id: number } },
+): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
+ const sources = { [contractPath]: { id: fullSources[contractPath].id } };
+ const sourceCodes = { [contractPath]: resolver.resolve(contractPath).source };
+ recursivelyGatherDependencySources(
+ resolver,
+ contractPath,
+ sourceCodes[contractPath],
+ fullSources,
+ sources,
+ sourceCodes,
+ );
+ return { sourceCodes, sources };
+}
+
+function recursivelyGatherDependencySources(
+ resolver: Resolver,
+ contractPath: string,
+ contractSource: string,
+ fullSources: { [sourceName: string]: { id: number } },
+ sourcesToAppendTo: { [sourceName: string]: { id: number } },
+ sourceCodesToAppendTo: { [sourceName: string]: string },
+): void {
+ const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
+ if (importStatementMatches === null) {
+ return;
+ }
+ for (const importStatementMatch of importStatementMatches) {
+ const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
+ if (importPathMatches === null || importPathMatches.length === 0) {
+ continue;
+ }
+
+ let importPath = importPathMatches[1];
+ // HACK(albrow): We have, e.g.:
+ //
+ // importPath = "../../utils/LibBytes/LibBytes.sol"
+ // contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
+ //
+ // Resolver doesn't understand "../" so we want to pass
+ // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
+ //
+ // This hack involves using path.resolve. But path.resolve returns
+ // absolute paths by default. We trick it into treating
+ // contractPath as a root directory by prepending a '/' and then
+ // stripping the leading '/' from the result.
+ //
+ // path.resolve("/a/b/c", "../../d/e") === "/a/d/e"
+ //
+ const lastPathSeparatorPos = contractPath.lastIndexOf('/');
+ const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
+ if (importPath.startsWith('.')) {
+ /**
+ * Some import paths are relative ("../Token.sol", "./Wallet.sol")
+ * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol"),
+ * and we need to prepend the base path for relative imports.
+ */
+ importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
+ }
+
+ if (_.isUndefined(sourcesToAppendTo[importPath])) {
+ sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
+ sourceCodesToAppendTo[importPath] = resolver.resolve(importPath).source;
+
+ recursivelyGatherDependencySources(
+ resolver,
+ importPath,
+ resolver.resolve(importPath).source,
+ fullSources,
+ sourcesToAppendTo,
+ sourceCodesToAppendTo,
+ );
+ }
+ }
+}
+
+/**
+ * Gets the Solidity compiler instance and full version name. If the compiler is already cached, it is loaded from
+ * the filesystem; otherwise it is fetched and cached.
+ * @param solcVersion The compiler version, e.g. 0.5.0.
+ */
+export async function getSolcAsync(
+ solcVersion: string,
+): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
+ const fullSolcVersion = binPaths[solcVersion];
+ if (_.isUndefined(fullSolcVersion)) {
+ throw new Error(`${solcVersion} is not a known compiler version`);
+ }
+ const compilerBinFilename = path.join(constants.SOLC_BIN_DIR, fullSolcVersion);
+ let solcjs: string;
+ if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
+ solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
+ } else {
+ logUtils.warn(`Downloading ${fullSolcVersion}...`);
+ const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
+ const response = await fetchAsync(url);
+ const SUCCESS_STATUS = 200;
+ if (response.status !== SUCCESS_STATUS) {
+ throw new Error(`Failed to load ${fullSolcVersion}`);
+ }
+ solcjs = await response.text();
+ await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
+ }
+ if (solcjs.length === 0) {
+ throw new Error('No compiler available');
+ }
+ const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
+ return { solcInstance, fullSolcVersion };
+}
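+
+// A hedged usage sketch; how the returned instance is invoked depends on the solc-js version and is not shown here:
+//
+// const { solcInstance, fullSolcVersion } = await getSolcAsync('0.5.0');
+// logUtils.log(`Compiling with solc ${fullSolcVersion}`);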
+
+/**
+ * The Solidity compiler emits bytecode as a hex string without a 0x prefix. This function adds the prefix if bytecode is present.
+ * @param compiledContract The standard JSON output section for a contract. Gets modified in place.
+ */
+export function addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
+ if (!_.isUndefined(compiledContract.evm)) {
+ if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
+ compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
+ }
+ if (
+ !_.isUndefined(compiledContract.evm.deployedBytecode) &&
+ !_.isUndefined(compiledContract.evm.deployedBytecode.object)
+ ) {
+ compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
+ compiledContract.evm.deployedBytecode.object,
+ );
+ }
+ }
+}
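+
+// Illustration with a made-up standard-JSON output fragment:
+//
+// const compiledContract = { evm: { bytecode: { object: '6080604052...' } } } as any;
+// addHexPrefixToContractBytecode(compiledContract);
+// // compiledContract.evm.bytecode.object === '0x6080604052...'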
diff --git a/packages/sol-compiler/src/utils/constants.ts b/packages/sol-compiler/src/utils/constants.ts
index df2ddb3b2..433897f8a 100644
--- a/packages/sol-compiler/src/utils/constants.ts
+++ b/packages/sol-compiler/src/utils/constants.ts
@@ -1,5 +1,8 @@
+import * as path from 'path';
+
export const constants = {
SOLIDITY_FILE_EXTENSION: '.sol',
BASE_COMPILER_URL: 'https://ethereum.github.io/solc-bin/bin/',
LATEST_ARTIFACT_VERSION: '2.0.0',
+ SOLC_BIN_DIR: path.join(__dirname, '..', '..', 'solc_bin'),
};
diff --git a/packages/sol-compiler/src/utils/types.ts b/packages/sol-compiler/src/utils/types.ts
index b211cfcbc..64328899d 100644
--- a/packages/sol-compiler/src/utils/types.ts
+++ b/packages/sol-compiler/src/utils/types.ts
@@ -29,3 +29,12 @@ export interface Token {
}
export type DoneCallback = (err?: Error) => void;
+
+export class CompilationError extends Error {
+ public errorsCount: number;
+ public typeName = 'CompilationError';
+ constructor(errorsCount: number) {
+ super('Compilation errors encountered');
+ this.errorsCount = errorsCount;
+ }
+}
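+
+// Sketch of how a caller might inspect the error count (the compile entry point here is an assumption):
+//
+// try {
+//     await compiler.compileAsync();
+// } catch (err) {
+//     if (err.typeName === 'CompilationError') {
+//         console.warn(`${err.errorsCount} compilation error(s) encountered`);
+//     }
+// }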
diff --git a/packages/sol-compiler/test/compiler_utils_test.ts b/packages/sol-compiler/test/compiler_utils_test.ts
index 4fe7b994e..b8c18110c 100644
--- a/packages/sol-compiler/test/compiler_utils_test.ts
+++ b/packages/sol-compiler/test/compiler_utils_test.ts
@@ -52,7 +52,7 @@ describe('Compiler utils', () => {
const source = await fsWrapper.readFileAsync(path, {
encoding: 'utf8',
});
- const dependencies = parseDependencies({ source, path });
+ const dependencies = parseDependencies({ source, path, absolutePath: path });
const expectedDependencies = [
'zeppelin-solidity/contracts/token/ERC20/ERC20.sol',
'packages/sol-compiler/lib/test/fixtures/contracts/TokenTransferProxy.sol',
@@ -68,7 +68,7 @@ describe('Compiler utils', () => {
const source = await fsWrapper.readFileAsync(path, {
encoding: 'utf8',
});
- expect(parseDependencies({ source, path })).to.be.deep.equal([
+ expect(parseDependencies({ source, path, absolutePath: path })).to.be.deep.equal([
'zeppelin-solidity/contracts/ownership/Ownable.sol',
'zeppelin-solidity/contracts/token/ERC20/ERC20.sol',
]);
@@ -77,7 +77,7 @@ describe('Compiler utils', () => {
it.skip('correctly parses commented out dependencies', async () => {
const path = '';
const source = `// import "./TokenTransferProxy.sol";`;
- expect(parseDependencies({ path, source })).to.be.deep.equal([]);
+ expect(parseDependencies({ path, source, absolutePath: path })).to.be.deep.equal([]);
});
});
});
diff --git a/packages/sol-resolver/CHANGELOG.json b/packages/sol-resolver/CHANGELOG.json
index 85398e624..74c4d39c5 100644
--- a/packages/sol-resolver/CHANGELOG.json
+++ b/packages/sol-resolver/CHANGELOG.json
@@ -1,5 +1,18 @@
[
{
+ "version": "1.2.1",
+ "changes": [
+ {
+ "note": "Add `absolutePath` to `ContractSource` type",
+ "pr": 1461
+ },
+ {
+ "note": "Add `SpyResolver` that records all resolved contracts data",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "1.1.1",
"changes": [
{
diff --git a/packages/sol-resolver/src/index.ts b/packages/sol-resolver/src/index.ts
index a86053259..f55aca070 100644
--- a/packages/sol-resolver/src/index.ts
+++ b/packages/sol-resolver/src/index.ts
@@ -5,5 +5,6 @@ export { NPMResolver } from './resolvers/npm_resolver';
export { FSResolver } from './resolvers/fs_resolver';
export { RelativeFSResolver } from './resolvers/relative_fs_resolver';
export { NameResolver } from './resolvers/name_resolver';
+export { SpyResolver } from './resolvers/spy_resolver';
export { EnumerableResolver } from './resolvers/enumerable_resolver';
export { Resolver } from './resolvers/resolver';
diff --git a/packages/sol-resolver/src/resolvers/fs_resolver.ts b/packages/sol-resolver/src/resolvers/fs_resolver.ts
index 63fc3448e..86128023d 100644
--- a/packages/sol-resolver/src/resolvers/fs_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/fs_resolver.ts
@@ -9,10 +9,7 @@ export class FSResolver extends Resolver {
public resolveIfExists(importPath: string): ContractSource | undefined {
if (fs.existsSync(importPath) && fs.lstatSync(importPath).isFile()) {
const fileContent = fs.readFileSync(importPath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: importPath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/resolvers/name_resolver.ts b/packages/sol-resolver/src/resolvers/name_resolver.ts
index d6ac6a499..aee326fb7 100644
--- a/packages/sol-resolver/src/resolvers/name_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/name_resolver.ts
@@ -20,10 +20,7 @@ export class NameResolver extends EnumerableResolver {
if (contractName === lookupContractName) {
const absoluteContractPath = path.join(this._contractsDir, filePath);
const source = fs.readFileSync(absoluteContractPath).toString();
- contractSource = {
- source,
- path: filePath,
- };
+ contractSource = { source, path: filePath, absolutePath: absoluteContractPath };
return true;
}
return undefined;
@@ -36,10 +33,7 @@ export class NameResolver extends EnumerableResolver {
const onFile = (filePath: string) => {
const absoluteContractPath = path.join(this._contractsDir, filePath);
const source = fs.readFileSync(absoluteContractPath).toString();
- const contractSource = {
- source,
- path: filePath,
- };
+ const contractSource = { source, path: filePath, absolutePath: absoluteContractPath };
contractSources.push(contractSource);
};
this._traverseContractsDir(this._contractsDir, onFile);
diff --git a/packages/sol-resolver/src/resolvers/npm_resolver.ts b/packages/sol-resolver/src/resolvers/npm_resolver.ts
index eeb2b5493..3c1d09557 100644
--- a/packages/sol-resolver/src/resolvers/npm_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/npm_resolver.ts
@@ -32,10 +32,7 @@ export class NPMResolver extends Resolver {
const lookupPath = path.join(currentPath, 'node_modules', packagePath, pathWithinPackage);
if (fs.existsSync(lookupPath) && fs.lstatSync(lookupPath).isFile()) {
const fileContent = fs.readFileSync(lookupPath).toString();
- return {
- source: fileContent,
- path: lookupPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: lookupPath };
}
currentPath = path.dirname(currentPath);
}
diff --git a/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts b/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
index ed96040d3..cfff145f9 100644
--- a/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
@@ -13,13 +13,10 @@ export class RelativeFSResolver extends Resolver {
}
// tslint:disable-next-line:prefer-function-over-method
public resolveIfExists(importPath: string): ContractSource | undefined {
- const filePath = path.join(this._contractsDir, importPath);
+ const filePath = path.resolve(path.join(this._contractsDir, importPath));
if (fs.existsSync(filePath) && !fs.lstatSync(filePath).isDirectory()) {
const fileContent = fs.readFileSync(filePath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: filePath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/resolvers/spy_resolver.ts b/packages/sol-resolver/src/resolvers/spy_resolver.ts
new file mode 100644
index 000000000..5582d771a
--- /dev/null
+++ b/packages/sol-resolver/src/resolvers/spy_resolver.ts
@@ -0,0 +1,25 @@
+import * as _ from 'lodash';
+
+import { ContractSource } from '../types';
+
+import { Resolver } from './resolver';
+
+/**
+ * This resolver is a pass-through proxy to any other resolver; it records every contract source that gets resolved.
+ * You can access them later using the `resolvedContractSources` public field.
+ */
+export class SpyResolver extends Resolver {
+ public resolvedContractSources: ContractSource[] = [];
+ private readonly _resolver: Resolver;
+ constructor(resolver: Resolver) {
+ super();
+ this._resolver = resolver;
+ }
+ public resolveIfExists(importPath: string): ContractSource | undefined {
+ const contractSourceIfExists = this._resolver.resolveIfExists(importPath);
+ if (!_.isUndefined(contractSourceIfExists)) {
+ this.resolvedContractSources.push(contractSourceIfExists);
+ }
+ return contractSourceIfExists;
+ }
+}
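+
+// A minimal usage sketch (the wrapped resolver and import path are illustrative assumptions):
+//
+// const spyResolver = new SpyResolver(new FSResolver());
+// spyResolver.resolve('contracts/Exchange.sol');
+// // Every successfully resolved source is now recorded:
+// spyResolver.resolvedContractSources.forEach(s => console.log(s.absolutePath));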
diff --git a/packages/sol-resolver/src/resolvers/url_resolver.ts b/packages/sol-resolver/src/resolvers/url_resolver.ts
index 180b0c9f6..ef300e6db 100644
--- a/packages/sol-resolver/src/resolvers/url_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/url_resolver.ts
@@ -11,10 +11,7 @@ export class URLResolver extends Resolver {
if (importPath.startsWith(FILE_URL_PREXIF)) {
const filePath = importPath.substr(FILE_URL_PREXIF.length);
const fileContent = fs.readFileSync(filePath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: filePath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/types.ts b/packages/sol-resolver/src/types.ts
index 41492622d..b4ba164c8 100644
--- a/packages/sol-resolver/src/types.ts
+++ b/packages/sol-resolver/src/types.ts
@@ -1,6 +1,7 @@
export interface ContractSource {
source: string;
path: string;
+ absolutePath: string;
}
export interface ContractSources {
diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts
index 6b728af71..4470dd501 100644
--- a/packages/types/src/index.ts
+++ b/packages/types/src/index.ts
@@ -243,6 +243,10 @@ export enum RevertReason {
AuctionNotStarted = 'AUCTION_NOT_STARTED',
AuctionInvalidBeginTime = 'INVALID_BEGIN_TIME',
InvalidAssetData = 'INVALID_ASSET_DATA',
+ // Balance Threshold Filter
+ InvalidOrBlockedExchangeSelector = 'INVALID_OR_BLOCKED_EXCHANGE_SELECTOR',
+ BalanceQueryFailed = 'BALANCE_QUERY_FAILED',
+ AtLeastOneAddressDoesNotMeetBalanceThreshold = 'AT_LEAST_ONE_ADDRESS_DOES_NOT_MEET_BALANCE_THRESHOLD',
}
export enum StatusCodes {
diff --git a/packages/typescript-typings/tsconfig.json b/packages/typescript-typings/tsconfig.json
index 7f0fe2f7a..8ea3bfb0c 100644
--- a/packages/typescript-typings/tsconfig.json
+++ b/packages/typescript-typings/tsconfig.json
@@ -3,5 +3,6 @@
"compilerOptions": {
"outDir": "lib",
"rootDir": "."
- }
+ },
+ "include": ["types"]
}
diff --git a/packages/utils/CHANGELOG.json b/packages/utils/CHANGELOG.json
index fe66d3f31..605151fb6 100644
--- a/packages/utils/CHANGELOG.json
+++ b/packages/utils/CHANGELOG.json
@@ -1,5 +1,14 @@
[
{
+ "version": "2.1.0",
+ "changes": [
+ {
+ "note": "Add `logWithTime` to `logUtils`",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "2.0.8",
"changes": [
{
diff --git a/packages/utils/package.json b/packages/utils/package.json
index a25dc9cff..5ffec049a 100644
--- a/packages/utils/package.json
+++ b/packages/utils/package.json
@@ -49,6 +49,7 @@
"@types/node": "*",
"abortcontroller-polyfill": "^1.1.9",
"bignumber.js": "~4.1.0",
+ "chalk": "^2.4.1",
"detect-node": "2.0.3",
"ethereum-types": "^1.1.4",
"ethereumjs-util": "^5.1.1",
diff --git a/packages/utils/src/log_utils.ts b/packages/utils/src/log_utils.ts
index 87f8479b5..6d9996c67 100644
--- a/packages/utils/src/log_utils.ts
+++ b/packages/utils/src/log_utils.ts
@@ -1,3 +1,5 @@
+import chalk from 'chalk';
+
export const logUtils = {
log(...args: any[]): void {
console.log(...args); // tslint:disable-line:no-console
@@ -5,4 +7,7 @@ export const logUtils = {
warn(...args: any[]): void {
console.warn(...args); // tslint:disable-line:no-console
},
+ logWithTime(arg: string): void {
+ logUtils.log(`[${chalk.gray(new Date().toLocaleTimeString())}] ${arg}`);
+ },
};
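+
+// Example (timestamp format follows the locale; output is illustrative):
+//
+// logUtils.logWithTime('Compiling 3 contracts...');
+// // -> [10:42:13 AM] Compiling 3 contracts...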
diff --git a/packages/website/ts/pages/documentation/docs_home.tsx b/packages/website/ts/pages/documentation/docs_home.tsx
index c52d7bd8b..fd3932bfa 100644
--- a/packages/website/ts/pages/documentation/docs_home.tsx
+++ b/packages/website/ts/pages/documentation/docs_home.tsx
@@ -100,6 +100,14 @@ const CATEGORY_TO_PACKAGES: ObjectMap<Package[]> = {
},
},
{
+ description: 'A Python Standard Relayer API client',
+ link: {
+ title: '0x-sra-client.py',
+ to: 'https://pypi.org/project/0x-sra-client/',
+ shouldOpenInNewTab: true,
+ },
+ },
+ {
description:
'An http & websocket client for interacting with relayers that have implemented the [Standard Relayer API](https://github.com/0xProject/standard-relayer-api)',
link: {