author    Leonid Logvinov <logvinov.leon@gmail.com>    2018-12-19 23:52:49 +0800
committer GitHub <noreply@github.com>                  2018-12-19 23:52:49 +0800
commit    b3978b641c12c7bfe9bab1d561384eeaace25321 (patch)
tree      74f623d9ca80fe8b4a82ea9428655c3fe90711cb /packages
parent    89ea7b2a2d4e3af995ee08988b64a617e7b89aee (diff)
parent    d456710441f5d126c63ae31003ddfffb2654047a (diff)
Merge pull request #1461 from 0xProject/feature/sol-compiler-watch
Sol-compiler watch mode
Diffstat (limited to 'packages')
-rw-r--r--  packages/sol-compiler/CHANGELOG.json                       |  13
-rw-r--r--  packages/sol-compiler/package.json                         |   4
-rw-r--r--  packages/sol-compiler/src/cli.ts                           |  10
-rw-r--r--  packages/sol-compiler/src/compiler.ts                      | 258
-rw-r--r--  packages/sol-compiler/src/utils/compiler.ts                | 217
-rw-r--r--  packages/sol-compiler/src/utils/constants.ts               |   3
-rw-r--r--  packages/sol-compiler/src/utils/types.ts                   |   9
-rw-r--r--  packages/sol-compiler/test/compiler_utils_test.ts          |   6
-rw-r--r--  packages/sol-resolver/CHANGELOG.json                       |  13
-rw-r--r--  packages/sol-resolver/src/index.ts                         |   1
-rw-r--r--  packages/sol-resolver/src/resolvers/fs_resolver.ts         |   5
-rw-r--r--  packages/sol-resolver/src/resolvers/name_resolver.ts       |  10
-rw-r--r--  packages/sol-resolver/src/resolvers/npm_resolver.ts        |   5
-rw-r--r--  packages/sol-resolver/src/resolvers/relative_fs_resolver.ts |  7
-rw-r--r--  packages/sol-resolver/src/resolvers/spy_resolver.ts        |  25
-rw-r--r--  packages/sol-resolver/src/resolvers/url_resolver.ts        |   5
-rw-r--r--  packages/sol-resolver/src/types.ts                         |   1
-rw-r--r--  packages/typescript-typings/tsconfig.json                  |   3
-rw-r--r--  packages/utils/CHANGELOG.json                              |   9
-rw-r--r--  packages/utils/package.json                                |   1
-rw-r--r--  packages/utils/src/log_utils.ts                            |   5
21 files changed, 392 insertions, 218 deletions
diff --git a/packages/sol-compiler/CHANGELOG.json b/packages/sol-compiler/CHANGELOG.json
index 0a757f519..8548fd73f 100644
--- a/packages/sol-compiler/CHANGELOG.json
+++ b/packages/sol-compiler/CHANGELOG.json
@@ -1,5 +1,18 @@
[
{
+ "version": "2.0.0",
+ "changes": [
+ {
+ "note": "Add sol-compiler watch mode with -w flag",
+ "pr": 1461
+ },
+ {
+ "note": "Make error and warning colouring more visually pleasant and consistent with other compilers",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "1.1.16",
"changes": [
{
diff --git a/packages/sol-compiler/package.json b/packages/sol-compiler/package.json
index 0ad620b1f..86167a603 100644
--- a/packages/sol-compiler/package.json
+++ b/packages/sol-compiler/package.json
@@ -44,7 +44,9 @@
"devDependencies": {
"@0x/dev-utils": "^1.0.21",
"@0x/tslint-config": "^2.0.0",
+ "@types/chokidar": "^1.7.5",
"@types/mkdirp": "^0.5.2",
+ "@types/pluralize": "^0.0.29",
"@types/require-from-string": "^1.2.0",
"@types/semver": "^5.5.0",
"chai": "^4.0.1",
@@ -74,10 +76,12 @@
"@0x/web3-wrapper": "^3.2.1",
"@types/yargs": "^11.0.0",
"chalk": "^2.3.0",
+ "chokidar": "^2.0.4",
"ethereum-types": "^1.1.4",
"ethereumjs-util": "^5.1.1",
"lodash": "^4.17.5",
"mkdirp": "^0.5.1",
+ "pluralize": "^7.0.0",
"require-from-string": "^2.0.1",
"semver": "5.5.0",
"solc": "^0.4.23",
diff --git a/packages/sol-compiler/src/cli.ts b/packages/sol-compiler/src/cli.ts
index 0a9db6e05..18cc68aaf 100644
--- a/packages/sol-compiler/src/cli.ts
+++ b/packages/sol-compiler/src/cli.ts
@@ -25,6 +25,10 @@ const SEPARATOR = ',';
type: 'string',
description: 'comma separated list of contracts to compile',
})
+ .option('watch', {
+ alias: 'w',
+ default: false,
+ })
.help().argv;
const contracts = _.isUndefined(argv.contracts)
? undefined
@@ -37,7 +41,11 @@ const SEPARATOR = ',';
contracts,
};
const compiler = new Compiler(opts);
- await compiler.compileAsync();
+ if (argv.watch) {
+ await compiler.watchAsync();
+ } else {
+ await compiler.compileAsync();
+ }
})().catch(err => {
logUtils.log(err);
process.exit(1);
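
The new `--watch`/`-w` flag is a thin switch over the public API: `watchAsync()` sits on `Compiler` alongside `compileAsync()`, so the same behaviour is available programmatically. A minimal sketch under that assumption; the contract names and the argv check below are illustrative, not taken from the diff:

```typescript
import { Compiler } from '@0x/sol-compiler';

(async () => {
    // Mirror what the updated cli.ts does: build one Compiler, then pick an entry point.
    // Omitting `contracts` compiles every contract, just like the CLI default.
    const compiler = new Compiler({ contracts: ['Exchange', 'TokenTransferProxy'] }); // illustrative names
    const isWatchMode = process.argv.includes('--watch') || process.argv.includes('-w');
    if (isWatchMode) {
        // Recompiles on every change and keeps the chokidar watcher (and the process) alive.
        await compiler.watchAsync();
    } else {
        await compiler.compileAsync();
    }
})().catch(err => {
    console.error(err);
    process.exit(1);
});
```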
diff --git a/packages/sol-compiler/src/compiler.ts b/packages/sol-compiler/src/compiler.ts
index 85df8209e..d38ccbf39 100644
--- a/packages/sol-compiler/src/compiler.ts
+++ b/packages/sol-compiler/src/compiler.ts
@@ -6,26 +6,29 @@ import {
NPMResolver,
RelativeFSResolver,
Resolver,
+ SpyResolver,
URLResolver,
} from '@0x/sol-resolver';
-import { fetchAsync, logUtils } from '@0x/utils';
-import chalk from 'chalk';
+import { logUtils } from '@0x/utils';
+import * as chokidar from 'chokidar';
import { CompilerOptions, ContractArtifact, ContractVersionData, StandardOutput } from 'ethereum-types';
-import * as ethUtil from 'ethereumjs-util';
import * as fs from 'fs';
import * as _ from 'lodash';
import * as path from 'path';
-import * as requireFromString from 'require-from-string';
+import * as pluralize from 'pluralize';
import * as semver from 'semver';
import solc = require('solc');
import { compilerOptionsSchema } from './schemas/compiler_options_schema';
import { binPaths } from './solc/bin_paths';
import {
+ addHexPrefixToContractBytecode,
+ compile,
createDirIfDoesNotExistAsync,
getContractArtifactIfExistsAsync,
- getNormalizedErrMsg,
- parseDependencies,
+ getSolcAsync,
+ getSourcesWithDependencies,
+ getSourceTreeHash,
parseSolidityVersionRange,
} from './utils/compiler';
import { constants } from './utils/constants';
@@ -35,7 +38,6 @@ import { utils } from './utils/utils';
type TYPE_ALL_FILES_IDENTIFIER = '*';
const ALL_CONTRACTS_IDENTIFIER = '*';
const ALL_FILES_IDENTIFIER = '*';
-const SOLC_BIN_DIR = path.join(__dirname, '..', '..', 'solc_bin');
const DEFAULT_CONTRACTS_DIR = path.resolve('contracts');
const DEFAULT_ARTIFACTS_DIR = path.resolve('artifacts');
// Solc compiler settings cannot be configured from the commandline.
@@ -82,49 +84,6 @@ export class Compiler {
private readonly _artifactsDir: string;
private readonly _solcVersionIfExists: string | undefined;
private readonly _specifiedContracts: string[] | TYPE_ALL_FILES_IDENTIFIER;
- private static async _getSolcAsync(
- solcVersion: string,
- ): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
- const fullSolcVersion = binPaths[solcVersion];
- if (_.isUndefined(fullSolcVersion)) {
- throw new Error(`${solcVersion} is not a known compiler version`);
- }
- const compilerBinFilename = path.join(SOLC_BIN_DIR, fullSolcVersion);
- let solcjs: string;
- if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
- solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
- } else {
- logUtils.warn(`Downloading ${fullSolcVersion}...`);
- const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
- const response = await fetchAsync(url);
- const SUCCESS_STATUS = 200;
- if (response.status !== SUCCESS_STATUS) {
- throw new Error(`Failed to load ${fullSolcVersion}`);
- }
- solcjs = await response.text();
- await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
- }
- if (solcjs.length === 0) {
- throw new Error('No compiler available');
- }
- const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
- return { solcInstance, fullSolcVersion };
- }
- private static _addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
- if (!_.isUndefined(compiledContract.evm)) {
- if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
- compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
- }
- if (
- !_.isUndefined(compiledContract.evm.deployedBytecode) &&
- !_.isUndefined(compiledContract.evm.deployedBytecode.object)
- ) {
- compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
- compiledContract.evm.deployedBytecode.object,
- );
- }
- }
- }
/**
* Instantiates a new instance of the Compiler class.
* @param opts Optional compiler options
@@ -158,7 +117,7 @@ export class Compiler {
*/
public async compileAsync(): Promise<void> {
await createDirIfDoesNotExistAsync(this._artifactsDir);
- await createDirIfDoesNotExistAsync(SOLC_BIN_DIR);
+ await createDirIfDoesNotExistAsync(constants.SOLC_BIN_DIR);
await this._compileContractsAsync(this._getContractNamesToCompile(), true);
}
/**
@@ -173,6 +132,54 @@ export class Compiler {
const promisedOutputs = this._compileContractsAsync(this._getContractNamesToCompile(), false);
return promisedOutputs;
}
+ public async watchAsync(): Promise<void> {
+ console.clear(); // tslint:disable-line:no-console
+ logUtils.logWithTime('Starting compilation in watch mode...');
+ const MATCH_NOTHING_REGEX = '^$';
+ const IGNORE_DOT_FILES_REGEX = /(^|[\/\\])\../;
+ // Initially we watch nothing. We'll add the paths later.
+ const watcher = chokidar.watch(MATCH_NOTHING_REGEX, { ignored: IGNORE_DOT_FILES_REGEX });
+ const onFileChangedAsync = async () => {
+ watcher.unwatch('*'); // Stop watching
+ try {
+ await this.compileAsync();
+ logUtils.logWithTime('Found 0 errors. Watching for file changes.');
+ } catch (err) {
+ if (err.typeName === 'CompilationError') {
+ logUtils.logWithTime(
+ `Found ${err.errorsCount} ${pluralize('error', err.errorsCount)}. Watching for file changes.`,
+ );
+ } else {
+ logUtils.logWithTime('Found errors. Watching for file changes.');
+ }
+ }
+
+ const pathsToWatch = this._getPathsToWatch();
+ watcher.add(pathsToWatch);
+ };
+ await onFileChangedAsync();
+ watcher.on('change', (changedFilePath: string) => {
+ console.clear(); // tslint:disable-line:no-console
+ logUtils.logWithTime('File change detected. Starting incremental compilation...');
+ // NOTE: We can't await it here because that's a callback.
+ // Instead we stop watching inside of it and start it again when we're finished.
+ onFileChangedAsync(); // tslint:disable-line no-floating-promises
+ });
+ }
+ private _getPathsToWatch(): string[] {
+ const contractNames = this._getContractNamesToCompile();
+ const spyResolver = new SpyResolver(this._resolver);
+ for (const contractName of contractNames) {
+ const contractSource = spyResolver.resolve(contractName);
+ // NOTE: We ignore the return value here. We don't actually need the source tree hash.
+ // We just want to run the SpyResolver over each contract and its dependencies, and
+ // this is a convenient way to reuse the existing code that does that.
+ // We can then get all the relevant paths from the `spyResolver` below.
+ getSourceTreeHash(spyResolver, contractSource.path);
+ }
+ const pathsToWatch = _.uniq(spyResolver.resolvedContractSources.map(cs => cs.absolutePath));
+ return pathsToWatch;
+ }
private _getContractNamesToCompile(): string[] {
let contractNamesToCompile;
if (this._specifiedContracts === ALL_CONTRACTS_IDENTIFIER) {
@@ -201,12 +208,14 @@ export class Compiler {
for (const contractName of contractNames) {
const contractSource = this._resolver.resolve(contractName);
+ const sourceTreeHashHex = getSourceTreeHash(
+ this._resolver,
+ path.join(this._contractsDir, contractSource.path),
+ ).toString('hex');
const contractData = {
contractName,
currentArtifactIfExists: await getContractArtifactIfExistsAsync(this._artifactsDir, contractName),
- sourceTreeHashHex: `0x${this._getSourceTreeHash(
- path.join(this._contractsDir, contractSource.path),
- ).toString('hex')}`,
+ sourceTreeHashHex: `0x${sourceTreeHashHex}`,
};
if (!this._shouldCompile(contractData)) {
continue;
@@ -244,9 +253,8 @@ export class Compiler {
}) with Solidity v${solcVersion}...`,
);
- const { solcInstance, fullSolcVersion } = await Compiler._getSolcAsync(solcVersion);
-
- const compilerOutput = this._compile(solcInstance, input.standardInput);
+ const { solcInstance, fullSolcVersion } = await getSolcAsync(solcVersion);
+ const compilerOutput = compile(this._resolver, solcInstance, input.standardInput);
compilerOutputs.push(compilerOutput);
for (const contractPath of input.contractsToCompile) {
@@ -259,7 +267,7 @@ export class Compiler {
);
}
- Compiler._addHexPrefixToContractBytecode(compiledContract);
+ addHexPrefixToContractBytecode(compiledContract);
if (shouldPersist) {
await this._persistCompiledContractAsync(
@@ -298,10 +306,14 @@ export class Compiler {
const compiledContract = compilerOutput.contracts[contractPath][contractName];
// need to gather sourceCodes for this artifact, but compilerOutput.sources (the list of contract modules)
- // contains listings for for every contract compiled during the compiler invocation that compiled the contract
+ // contains listings for every contract compiled during the compiler invocation that compiled the contract
// to be persisted, which could include many that are irrelevant to the contract at hand. So, gather up only
// the relevant sources:
- const { sourceCodes, sources } = this._getSourcesWithDependencies(contractPath, compilerOutput.sources);
+ const { sourceCodes, sources } = getSourcesWithDependencies(
+ this._resolver,
+ contractPath,
+ compilerOutput.sources,
+ );
const contractVersion: ContractVersionData = {
compilerOutput: compiledContract,
@@ -336,130 +348,4 @@ export class Compiler {
await fsWrapper.writeFileAsync(currentArtifactPath, artifactString);
logUtils.warn(`${contractName} artifact saved!`);
}
- /**
- * For the given @param contractPath, populates JSON objects to be used in the ContractVersionData interface's
- * properties `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
- * contracts) for that contract. The source code pointed to by contractPath is read and parsed directly (via
- * `this._resolver.resolve().source`), as are its imports, recursively. The ID numbers for @return `sources` are
- * taken from the corresponding ID's in @param fullSources, and the content for @return sourceCodes is read from
- * disk (via the aforementioned `resolver.source`).
- */
- private _getSourcesWithDependencies(
- contractPath: string,
- fullSources: { [sourceName: string]: { id: number } },
- ): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
- const sources = { [contractPath]: { id: fullSources[contractPath].id } };
- const sourceCodes = { [contractPath]: this._resolver.resolve(contractPath).source };
- this._recursivelyGatherDependencySources(
- contractPath,
- sourceCodes[contractPath],
- fullSources,
- sources,
- sourceCodes,
- );
- return { sourceCodes, sources };
- }
- private _recursivelyGatherDependencySources(
- contractPath: string,
- contractSource: string,
- fullSources: { [sourceName: string]: { id: number } },
- sourcesToAppendTo: { [sourceName: string]: { id: number } },
- sourceCodesToAppendTo: { [sourceName: string]: string },
- ): void {
- const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
- if (importStatementMatches === null) {
- return;
- }
- for (const importStatementMatch of importStatementMatches) {
- const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
- if (importPathMatches === null || importPathMatches.length === 0) {
- continue;
- }
-
- let importPath = importPathMatches[1];
- // HACK(ablrow): We have, e.g.:
- //
- // importPath = "../../utils/LibBytes/LibBytes.sol"
- // contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
- //
- // Resolver doesn't understand "../" so we want to pass
- // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
- //
- // This hack involves using path.resolve. But path.resolve returns
- // absolute directories by default. We trick it into thinking that
- // contractPath is a root directory by prepending a '/' and then
- // removing the '/' the end.
- //
- // path.resolve("/a/b/c", ""../../d/e") === "/a/d/e"
- //
- const lastPathSeparatorPos = contractPath.lastIndexOf('/');
- const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
- if (importPath.startsWith('.')) {
- /**
- * Some imports path are relative ("../Token.sol", "./Wallet.sol")
- * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol")
- * And we need to append the base path for relative imports.
- */
- importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
- }
-
- if (_.isUndefined(sourcesToAppendTo[importPath])) {
- sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
- sourceCodesToAppendTo[importPath] = this._resolver.resolve(importPath).source;
-
- this._recursivelyGatherDependencySources(
- importPath,
- this._resolver.resolve(importPath).source,
- fullSources,
- sourcesToAppendTo,
- sourceCodesToAppendTo,
- );
- }
- }
- }
- private _compile(solcInstance: solc.SolcInstance, standardInput: solc.StandardInput): solc.StandardOutput {
- const compiled: solc.StandardOutput = JSON.parse(
- solcInstance.compileStandardWrapper(JSON.stringify(standardInput), importPath => {
- const sourceCodeIfExists = this._resolver.resolve(importPath);
- return { contents: sourceCodeIfExists.source };
- }),
- );
- if (!_.isUndefined(compiled.errors)) {
- const SOLIDITY_WARNING = 'warning';
- const errors = _.filter(compiled.errors, entry => entry.severity !== SOLIDITY_WARNING);
- const warnings = _.filter(compiled.errors, entry => entry.severity === SOLIDITY_WARNING);
- if (!_.isEmpty(errors)) {
- errors.forEach(error => {
- const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
- logUtils.warn(chalk.red(normalizedErrMsg));
- });
- throw new Error('Compilation errors encountered');
- } else {
- warnings.forEach(warning => {
- const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
- logUtils.warn(chalk.yellow(normalizedWarningMsg));
- });
- }
- }
- return compiled;
- }
- /**
- * Gets the source tree hash for a file and its dependencies.
- * @param fileName Name of contract file.
- */
- private _getSourceTreeHash(importPath: string): Buffer {
- const contractSource = this._resolver.resolve(importPath);
- const dependencies = parseDependencies(contractSource);
- const sourceHash = ethUtil.sha3(contractSource.source);
- if (dependencies.length === 0) {
- return sourceHash;
- } else {
- const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
- this._getSourceTreeHash(dependency),
- );
- const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
- const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
- return sourceTreeHash;
- }
- }
}
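
At its core, `watchAsync` is an unwatch-compile-rewatch loop: the chokidar watcher starts empty, everything is unwatched while a compilation runs, and the watch list is rebuilt afterwards so that newly added or removed imports are picked up on the next change. A stripped-down sketch of the same pattern, with `recompileAsync` and `collectPathsToWatch` standing in as placeholders for `compileAsync` and `_getPathsToWatch`:

```typescript
import * as chokidar from 'chokidar';

// Generic version of the loop in Compiler.watchAsync. Both callbacks are placeholders.
async function watchAndRecompileAsync(
    recompileAsync: () => Promise<void>,
    collectPathsToWatch: () => string[],
): Promise<void> {
    // Watch nothing to begin with ('^$' matches no path); ignore dotfiles as the real code does.
    const watcher = chokidar.watch('^$', { ignored: /(^|[\/\\])\../ });
    const onFileChangedAsync = async () => {
        watcher.unwatch('*'); // stop watching while a compilation is in flight
        try {
            await recompileAsync();
        } catch (err) {
            // Compilation errors are reported by the compile step itself; keep watching either way.
        }
        // The import graph may have changed, so the watch list is recomputed after every run.
        watcher.add(collectPathsToWatch());
    };
    await onFileChangedAsync();
    watcher.on('change', () => {
        // The callback can't be awaited; the unwatch/add cycle above acts as the re-entrancy guard.
        onFileChangedAsync(); // tslint:disable-line no-floating-promises
    });
}
```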
diff --git a/packages/sol-compiler/src/utils/compiler.ts b/packages/sol-compiler/src/utils/compiler.ts
index cda67a414..db308f2b5 100644
--- a/packages/sol-compiler/src/utils/compiler.ts
+++ b/packages/sol-compiler/src/utils/compiler.ts
@@ -1,10 +1,18 @@
-import { ContractSource } from '@0x/sol-resolver';
-import { logUtils } from '@0x/utils';
+import { ContractSource, Resolver } from '@0x/sol-resolver';
+import { fetchAsync, logUtils } from '@0x/utils';
+import chalk from 'chalk';
import { ContractArtifact } from 'ethereum-types';
+import * as ethUtil from 'ethereumjs-util';
import * as _ from 'lodash';
import * as path from 'path';
+import * as requireFromString from 'require-from-string';
+import * as solc from 'solc';
+import { binPaths } from '../solc/bin_paths';
+
+import { constants } from './constants';
import { fsWrapper } from './fs_wrapper';
+import { CompilationError } from './types';
/**
* Gets contract data on network or returns if an artifact does not exist.
@@ -106,3 +114,208 @@ export function parseDependencies(contractSource: ContractSource): string[] {
});
return dependencies;
}
+
+/**
+ * Compiles the contracts and prints errors/warnings
+ * @param resolver Resolver
+ * @param solcInstance Instance of a solc compiler
+ * @param standardInput Solidity standard JSON input
+ */
+export function compile(
+ resolver: Resolver,
+ solcInstance: solc.SolcInstance,
+ standardInput: solc.StandardInput,
+): solc.StandardOutput {
+ const standardInputStr = JSON.stringify(standardInput);
+ const standardOutputStr = solcInstance.compileStandardWrapper(standardInputStr, importPath => {
+ const sourceCodeIfExists = resolver.resolve(importPath);
+ return { contents: sourceCodeIfExists.source };
+ });
+ const compiled: solc.StandardOutput = JSON.parse(standardOutputStr);
+ if (!_.isUndefined(compiled.errors)) {
+ printCompilationErrorsAndWarnings(compiled.errors);
+ }
+ return compiled;
+}
+/**
+ * Separates errors from warnings, formats the messages and prints them. Throws if there is any compilation error (not warning).
+ * @param solcErrors The errors field of standard JSON output that contains errors and warnings.
+ */
+function printCompilationErrorsAndWarnings(solcErrors: solc.SolcError[]): void {
+ const SOLIDITY_WARNING = 'warning';
+ const errors = _.filter(solcErrors, entry => entry.severity !== SOLIDITY_WARNING);
+ const warnings = _.filter(solcErrors, entry => entry.severity === SOLIDITY_WARNING);
+ if (!_.isEmpty(errors)) {
+ errors.forEach(error => {
+ const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
+ logUtils.log(chalk.red('error'), normalizedErrMsg);
+ });
+ throw new CompilationError(errors.length);
+ } else {
+ warnings.forEach(warning => {
+ const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
+ logUtils.log(chalk.yellow('warning'), normalizedWarningMsg);
+ });
+ }
+}
+
+/**
+ * Gets the source tree hash for a file and its dependencies.
+ * @param fileName Name of contract file.
+ */
+export function getSourceTreeHash(resolver: Resolver, importPath: string): Buffer {
+ const contractSource = resolver.resolve(importPath);
+ const dependencies = parseDependencies(contractSource);
+ const sourceHash = ethUtil.sha3(contractSource.source);
+ if (dependencies.length === 0) {
+ return sourceHash;
+ } else {
+ const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
+ getSourceTreeHash(resolver, dependency),
+ );
+ const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
+ const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
+ return sourceTreeHash;
+ }
+}
+
+/**
+ * For the given @param contractPath, populates JSON objects to be used in the ContractVersionData interface's
+ * properties `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
+ * contracts) for that contract. The source code pointed to by contractPath is read and parsed directly (via
+ * `resolver.resolve().source`), as are its imports, recursively. The ID numbers for @return `sources` are
+ * taken from the corresponding ID's in @param fullSources, and the content for @return sourceCodes is read from
+ * disk (via the aforementioned `resolver.source`).
+ */
+export function getSourcesWithDependencies(
+ resolver: Resolver,
+ contractPath: string,
+ fullSources: { [sourceName: string]: { id: number } },
+): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
+ const sources = { [contractPath]: { id: fullSources[contractPath].id } };
+ const sourceCodes = { [contractPath]: resolver.resolve(contractPath).source };
+ recursivelyGatherDependencySources(
+ resolver,
+ contractPath,
+ sourceCodes[contractPath],
+ fullSources,
+ sources,
+ sourceCodes,
+ );
+ return { sourceCodes, sources };
+}
+
+function recursivelyGatherDependencySources(
+ resolver: Resolver,
+ contractPath: string,
+ contractSource: string,
+ fullSources: { [sourceName: string]: { id: number } },
+ sourcesToAppendTo: { [sourceName: string]: { id: number } },
+ sourceCodesToAppendTo: { [sourceName: string]: string },
+): void {
+ const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
+ if (importStatementMatches === null) {
+ return;
+ }
+ for (const importStatementMatch of importStatementMatches) {
+ const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
+ if (importPathMatches === null || importPathMatches.length === 0) {
+ continue;
+ }
+
+ let importPath = importPathMatches[1];
+ // HACK(albrow): We have, e.g.:
+ //
+ // importPath = "../../utils/LibBytes/LibBytes.sol"
+ // contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
+ //
+ // Resolver doesn't understand "../" so we want to pass
+ // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
+ //
+ // This hack involves using path.resolve. But path.resolve returns
+ // absolute directories by default. We trick it into thinking that
+ // contractPath is a root directory by prepending a '/' and then
+ // removing the '/' at the end.
+ //
+ // path.resolve("/a/b/c", "../../d/e") === "/a/d/e"
+ //
+ const lastPathSeparatorPos = contractPath.lastIndexOf('/');
+ const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
+ if (importPath.startsWith('.')) {
+ /**
+ * Some import paths are relative ("../Token.sol", "./Wallet.sol")
+ * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol"),
+ * and we need to prepend the base path to relative imports.
+ */
+ importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
+ }
+
+ if (_.isUndefined(sourcesToAppendTo[importPath])) {
+ sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
+ sourceCodesToAppendTo[importPath] = resolver.resolve(importPath).source;
+
+ recursivelyGatherDependencySources(
+ resolver,
+ importPath,
+ resolver.resolve(importPath).source,
+ fullSources,
+ sourcesToAppendTo,
+ sourceCodesToAppendTo,
+ );
+ }
+ }
+}
+
+/**
+ * Gets the Solidity compiler instance and full version name. If the compiler is already cached, it is loaded
+ * from the filesystem; otherwise it is fetched and cached.
+ * @param solcVersion The compiler version. e.g. 0.5.0
+ */
+export async function getSolcAsync(
+ solcVersion: string,
+): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
+ const fullSolcVersion = binPaths[solcVersion];
+ if (_.isUndefined(fullSolcVersion)) {
+ throw new Error(`${solcVersion} is not a known compiler version`);
+ }
+ const compilerBinFilename = path.join(constants.SOLC_BIN_DIR, fullSolcVersion);
+ let solcjs: string;
+ if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
+ solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
+ } else {
+ logUtils.warn(`Downloading ${fullSolcVersion}...`);
+ const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
+ const response = await fetchAsync(url);
+ const SUCCESS_STATUS = 200;
+ if (response.status !== SUCCESS_STATUS) {
+ throw new Error(`Failed to load ${fullSolcVersion}`);
+ }
+ solcjs = await response.text();
+ await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
+ }
+ if (solcjs.length === 0) {
+ throw new Error('No compiler available');
+ }
+ const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
+ return { solcInstance, fullSolcVersion };
+}
+
+/**
+ * The Solidity compiler emits bytecode as hex without a 0x prefix. This function adds the prefix if bytecode is present.
+ * @param compiledContract The standard JSON output section for a contract. Gets modified in place.
+ */
+export function addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
+ if (!_.isUndefined(compiledContract.evm)) {
+ if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
+ compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
+ }
+ if (
+ !_.isUndefined(compiledContract.evm.deployedBytecode) &&
+ !_.isUndefined(compiledContract.evm.deployedBytecode.object)
+ ) {
+ compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
+ compiledContract.evm.deployedBytecode.object,
+ );
+ }
+ }
+}
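
The logic moved into `utils/compiler.ts` can now be driven directly: `getSolcAsync` downloads (or loads the cached) solc-js binary, and `compile` runs it over a standard-JSON input, using the resolver as the import callback and throwing `CompilationError` when solc reports errors. A hedged sketch of wiring them together inside the sol-compiler package; the compiler version, contract path, resolver choice and settings are illustrative only:

```typescript
import { RelativeFSResolver } from '@0x/sol-resolver';

import { compile, getSolcAsync } from './utils/compiler';

(async () => {
    // '0.4.24' and 'Exchange.sol' are placeholders; any version listed in bin_paths works.
    const { solcInstance, fullSolcVersion } = await getSolcAsync('0.4.24');
    console.log(`Compiling with ${fullSolcVersion}`);
    const resolver = new RelativeFSResolver('contracts');
    const standardInput = {
        language: 'Solidity',
        sources: { 'Exchange.sol': { content: resolver.resolve('Exchange.sol').source } },
        settings: { outputSelection: { '*': { '*': ['abi', 'evm.bytecode.object'] } } },
    };
    // Imports not listed in `sources` are supplied through the resolver-backed import callback.
    // Throws CompilationError (carrying errorsCount) if solc reports any errors.
    const output = compile(resolver, solcInstance, standardInput as any);
    console.log(Object.keys(output.contracts));
})().catch(console.error);
```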
diff --git a/packages/sol-compiler/src/utils/constants.ts b/packages/sol-compiler/src/utils/constants.ts
index df2ddb3b2..433897f8a 100644
--- a/packages/sol-compiler/src/utils/constants.ts
+++ b/packages/sol-compiler/src/utils/constants.ts
@@ -1,5 +1,8 @@
+import * as path from 'path';
+
export const constants = {
SOLIDITY_FILE_EXTENSION: '.sol',
BASE_COMPILER_URL: 'https://ethereum.github.io/solc-bin/bin/',
LATEST_ARTIFACT_VERSION: '2.0.0',
+ SOLC_BIN_DIR: path.join(__dirname, '..', '..', 'solc_bin'),
};
diff --git a/packages/sol-compiler/src/utils/types.ts b/packages/sol-compiler/src/utils/types.ts
index b211cfcbc..64328899d 100644
--- a/packages/sol-compiler/src/utils/types.ts
+++ b/packages/sol-compiler/src/utils/types.ts
@@ -29,3 +29,12 @@ export interface Token {
}
export type DoneCallback = (err?: Error) => void;
+
+export class CompilationError extends Error {
+ public errorsCount: number;
+ public typeName = 'CompilationError';
+ constructor(errorsCount: number) {
+ super('Compilation errors encountered');
+ this.errorsCount = errorsCount;
+ }
+}
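
`watchAsync` recognises compilation failures by inspecting `err.typeName` rather than using `instanceof CompilationError`; presumably this sidesteps the known pitfall that `instanceof` on `Error` subclasses is unreliable once TypeScript down-levels the class or the error crosses package boundaries. A simplified sketch of the throw/catch pattern, with a placeholder compile step:

```typescript
import * as pluralize from 'pluralize';

import { CompilationError } from './utils/types';

// Placeholder compile step that fails with two errors, to exercise the error path.
async function runCompileAsync(): Promise<void> {
    throw new CompilationError(2);
}

async function reportCompileResultAsync(): Promise<void> {
    try {
        await runCompileAsync();
        console.log('Found 0 errors.');
    } catch (err) {
        // Checked via typeName, mirroring watchAsync, instead of `err instanceof CompilationError`.
        if (err.typeName === 'CompilationError') {
            console.log(`Found ${err.errorsCount} ${pluralize('error', err.errorsCount)}.`);
        } else {
            console.log('Found errors.'); // unexpected failure: still reported, as in watchAsync
        }
    }
}
```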
diff --git a/packages/sol-compiler/test/compiler_utils_test.ts b/packages/sol-compiler/test/compiler_utils_test.ts
index 4fe7b994e..b8c18110c 100644
--- a/packages/sol-compiler/test/compiler_utils_test.ts
+++ b/packages/sol-compiler/test/compiler_utils_test.ts
@@ -52,7 +52,7 @@ describe('Compiler utils', () => {
const source = await fsWrapper.readFileAsync(path, {
encoding: 'utf8',
});
- const dependencies = parseDependencies({ source, path });
+ const dependencies = parseDependencies({ source, path, absolutePath: path });
const expectedDependencies = [
'zeppelin-solidity/contracts/token/ERC20/ERC20.sol',
'packages/sol-compiler/lib/test/fixtures/contracts/TokenTransferProxy.sol',
@@ -68,7 +68,7 @@ describe('Compiler utils', () => {
const source = await fsWrapper.readFileAsync(path, {
encoding: 'utf8',
});
- expect(parseDependencies({ source, path })).to.be.deep.equal([
+ expect(parseDependencies({ source, path, absolutePath: path })).to.be.deep.equal([
'zeppelin-solidity/contracts/ownership/Ownable.sol',
'zeppelin-solidity/contracts/token/ERC20/ERC20.sol',
]);
@@ -77,7 +77,7 @@ describe('Compiler utils', () => {
it.skip('correctly parses commented out dependencies', async () => {
const path = '';
const source = `// import "./TokenTransferProxy.sol";`;
- expect(parseDependencies({ path, source })).to.be.deep.equal([]);
+ expect(parseDependencies({ path, source, absolutePath: path })).to.be.deep.equal([]);
});
});
});
diff --git a/packages/sol-resolver/CHANGELOG.json b/packages/sol-resolver/CHANGELOG.json
index 85398e624..74c4d39c5 100644
--- a/packages/sol-resolver/CHANGELOG.json
+++ b/packages/sol-resolver/CHANGELOG.json
@@ -1,5 +1,18 @@
[
{
+ "version": "1.2.1",
+ "changes": [
+ {
+ "note": "Add `absolutePath` to `ContractSource` type",
+ "pr": 1461
+ },
+ {
+ "note": "Add `SpyResolver` that records all resolved contracts data",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "1.1.1",
"changes": [
{
diff --git a/packages/sol-resolver/src/index.ts b/packages/sol-resolver/src/index.ts
index a86053259..f55aca070 100644
--- a/packages/sol-resolver/src/index.ts
+++ b/packages/sol-resolver/src/index.ts
@@ -5,5 +5,6 @@ export { NPMResolver } from './resolvers/npm_resolver';
export { FSResolver } from './resolvers/fs_resolver';
export { RelativeFSResolver } from './resolvers/relative_fs_resolver';
export { NameResolver } from './resolvers/name_resolver';
+export { SpyResolver } from './resolvers/spy_resolver';
export { EnumerableResolver } from './resolvers/enumerable_resolver';
export { Resolver } from './resolvers/resolver';
diff --git a/packages/sol-resolver/src/resolvers/fs_resolver.ts b/packages/sol-resolver/src/resolvers/fs_resolver.ts
index 63fc3448e..86128023d 100644
--- a/packages/sol-resolver/src/resolvers/fs_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/fs_resolver.ts
@@ -9,10 +9,7 @@ export class FSResolver extends Resolver {
public resolveIfExists(importPath: string): ContractSource | undefined {
if (fs.existsSync(importPath) && fs.lstatSync(importPath).isFile()) {
const fileContent = fs.readFileSync(importPath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: importPath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/resolvers/name_resolver.ts b/packages/sol-resolver/src/resolvers/name_resolver.ts
index d6ac6a499..aee326fb7 100644
--- a/packages/sol-resolver/src/resolvers/name_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/name_resolver.ts
@@ -20,10 +20,7 @@ export class NameResolver extends EnumerableResolver {
if (contractName === lookupContractName) {
const absoluteContractPath = path.join(this._contractsDir, filePath);
const source = fs.readFileSync(absoluteContractPath).toString();
- contractSource = {
- source,
- path: filePath,
- };
+ contractSource = { source, path: filePath, absolutePath: absoluteContractPath };
return true;
}
return undefined;
@@ -36,10 +33,7 @@ export class NameResolver extends EnumerableResolver {
const onFile = (filePath: string) => {
const absoluteContractPath = path.join(this._contractsDir, filePath);
const source = fs.readFileSync(absoluteContractPath).toString();
- const contractSource = {
- source,
- path: filePath,
- };
+ const contractSource = { source, path: filePath, absolutePath: absoluteContractPath };
contractSources.push(contractSource);
};
this._traverseContractsDir(this._contractsDir, onFile);
diff --git a/packages/sol-resolver/src/resolvers/npm_resolver.ts b/packages/sol-resolver/src/resolvers/npm_resolver.ts
index eeb2b5493..3c1d09557 100644
--- a/packages/sol-resolver/src/resolvers/npm_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/npm_resolver.ts
@@ -32,10 +32,7 @@ export class NPMResolver extends Resolver {
const lookupPath = path.join(currentPath, 'node_modules', packagePath, pathWithinPackage);
if (fs.existsSync(lookupPath) && fs.lstatSync(lookupPath).isFile()) {
const fileContent = fs.readFileSync(lookupPath).toString();
- return {
- source: fileContent,
- path: lookupPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: lookupPath };
}
currentPath = path.dirname(currentPath);
}
diff --git a/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts b/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
index ed96040d3..cfff145f9 100644
--- a/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/relative_fs_resolver.ts
@@ -13,13 +13,10 @@ export class RelativeFSResolver extends Resolver {
}
// tslint:disable-next-line:prefer-function-over-method
public resolveIfExists(importPath: string): ContractSource | undefined {
- const filePath = path.join(this._contractsDir, importPath);
+ const filePath = path.resolve(path.join(this._contractsDir, importPath));
if (fs.existsSync(filePath) && !fs.lstatSync(filePath).isDirectory()) {
const fileContent = fs.readFileSync(filePath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: filePath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/resolvers/spy_resolver.ts b/packages/sol-resolver/src/resolvers/spy_resolver.ts
new file mode 100644
index 000000000..5582d771a
--- /dev/null
+++ b/packages/sol-resolver/src/resolvers/spy_resolver.ts
@@ -0,0 +1,25 @@
+import * as _ from 'lodash';
+
+import { ContractSource } from '../types';
+
+import { Resolver } from './resolver';
+
+/**
+ * This resolver is a pass-through proxy for another resolver that records all the contract sources it resolves.
+ * You can access them later using the `resolvedContractSources` public field.
+ */
+export class SpyResolver extends Resolver {
+ public resolvedContractSources: ContractSource[] = [];
+ private readonly _resolver: Resolver;
+ constructor(resolver: Resolver) {
+ super();
+ this._resolver = resolver;
+ }
+ public resolveIfExists(importPath: string): ContractSource | undefined {
+ const contractSourceIfExists = this._resolver.resolveIfExists(importPath);
+ if (!_.isUndefined(contractSourceIfExists)) {
+ this.resolvedContractSources.push(contractSourceIfExists);
+ }
+ return contractSourceIfExists;
+ }
+}
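
`SpyResolver` is what makes `_getPathsToWatch` possible without duplicating resolution logic: wrap whatever resolver chain is already in use, resolve as usual, then read back every `ContractSource` that was touched. A short usage sketch (the contract name and directory are illustrative):

```typescript
import { NameResolver, SpyResolver } from '@0x/sol-resolver';

// Wrap an existing resolver, resolve something through the wrapper, then inspect what it recorded.
const spyResolver = new SpyResolver(new NameResolver('contracts')); // 'contracts' is a placeholder dir
spyResolver.resolve('Exchange'); // placeholder contract name
const touchedPaths = spyResolver.resolvedContractSources.map(cs => cs.absolutePath);
console.log(touchedPaths);
```

In the compiler itself the spy is additionally walked over each contract's dependency tree via `getSourceTreeHash`, so the recorded list covers imports as well as the entry contracts.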
diff --git a/packages/sol-resolver/src/resolvers/url_resolver.ts b/packages/sol-resolver/src/resolvers/url_resolver.ts
index 180b0c9f6..ef300e6db 100644
--- a/packages/sol-resolver/src/resolvers/url_resolver.ts
+++ b/packages/sol-resolver/src/resolvers/url_resolver.ts
@@ -11,10 +11,7 @@ export class URLResolver extends Resolver {
if (importPath.startsWith(FILE_URL_PREXIF)) {
const filePath = importPath.substr(FILE_URL_PREXIF.length);
const fileContent = fs.readFileSync(filePath).toString();
- return {
- source: fileContent,
- path: importPath,
- };
+ return { source: fileContent, path: importPath, absolutePath: filePath };
}
return undefined;
}
diff --git a/packages/sol-resolver/src/types.ts b/packages/sol-resolver/src/types.ts
index 41492622d..b4ba164c8 100644
--- a/packages/sol-resolver/src/types.ts
+++ b/packages/sol-resolver/src/types.ts
@@ -1,6 +1,7 @@
export interface ContractSource {
source: string;
path: string;
+ absolutePath: string;
}
export interface ContractSources {
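
With this change `path` stays the import path as the compiler refers to it, while `absolutePath` is the resolved on-disk location; the two differ most clearly for NPM imports (the NPMResolver hunk above now returns the original import path as `path` and the node_modules location as `absolutePath`), which is what lets the watcher register files that live outside the contracts directory. An illustrative value, with made-up paths:

```typescript
import { ContractSource } from '@0x/sol-resolver';

// Purely illustrative: what an NPM-resolved source might look like after this change.
const resolvedFromNpm: ContractSource = {
    source: 'pragma solidity ^0.4.24; /* ... */',
    path: '@0x/contracts-utils/contracts/utils/LibBytes.sol', // the path as written in the import statement
    absolutePath: '/home/dev/project/node_modules/@0x/contracts-utils/contracts/utils/LibBytes.sol',
};
```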
diff --git a/packages/typescript-typings/tsconfig.json b/packages/typescript-typings/tsconfig.json
index 7f0fe2f7a..8ea3bfb0c 100644
--- a/packages/typescript-typings/tsconfig.json
+++ b/packages/typescript-typings/tsconfig.json
@@ -3,5 +3,6 @@
"compilerOptions": {
"outDir": "lib",
"rootDir": "."
- }
+ },
+ "include": ["types"]
}
diff --git a/packages/utils/CHANGELOG.json b/packages/utils/CHANGELOG.json
index fe66d3f31..605151fb6 100644
--- a/packages/utils/CHANGELOG.json
+++ b/packages/utils/CHANGELOG.json
@@ -1,5 +1,14 @@
[
{
+ "version": "2.1.0",
+ "changes": [
+ {
+ "note": "Add `logWithTime` to `logUtils`",
+ "pr": 1461
+ }
+ ]
+ },
+ {
"version": "2.0.8",
"changes": [
{
diff --git a/packages/utils/package.json b/packages/utils/package.json
index a25dc9cff..5ffec049a 100644
--- a/packages/utils/package.json
+++ b/packages/utils/package.json
@@ -49,6 +49,7 @@
"@types/node": "*",
"abortcontroller-polyfill": "^1.1.9",
"bignumber.js": "~4.1.0",
+ "chalk": "^2.4.1",
"detect-node": "2.0.3",
"ethereum-types": "^1.1.4",
"ethereumjs-util": "^5.1.1",
diff --git a/packages/utils/src/log_utils.ts b/packages/utils/src/log_utils.ts
index 87f8479b5..6d9996c67 100644
--- a/packages/utils/src/log_utils.ts
+++ b/packages/utils/src/log_utils.ts
@@ -1,3 +1,5 @@
+import chalk from 'chalk';
+
export const logUtils = {
log(...args: any[]): void {
console.log(...args); // tslint:disable-line:no-console
@@ -5,4 +7,7 @@ export const logUtils = {
warn(...args: any[]): void {
console.warn(...args); // tslint:disable-line:no-console
},
+ logWithTime(arg: string): void {
+ logUtils.log(`[${chalk.gray(new Date().toLocaleTimeString())}] ${arg}`);
+ },
};