Diffstat (limited to 'packages/sol-compiler/src/utils/compiler.ts')
-rw-r--r--    packages/sol-compiler/src/utils/compiler.ts    217
1 file changed, 215 insertions, 2 deletions
diff --git a/packages/sol-compiler/src/utils/compiler.ts b/packages/sol-compiler/src/utils/compiler.ts
index cda67a414..db308f2b5 100644
--- a/packages/sol-compiler/src/utils/compiler.ts
+++ b/packages/sol-compiler/src/utils/compiler.ts
@@ -1,10 +1,18 @@
-import { ContractSource } from '@0x/sol-resolver';
-import { logUtils } from '@0x/utils';
+import { ContractSource, Resolver } from '@0x/sol-resolver';
+import { fetchAsync, logUtils } from '@0x/utils';
+import chalk from 'chalk';
import { ContractArtifact } from 'ethereum-types';
+import * as ethUtil from 'ethereumjs-util';
import * as _ from 'lodash';
import * as path from 'path';
+import * as requireFromString from 'require-from-string';
+import * as solc from 'solc';
+import { binPaths } from '../solc/bin_paths';
+
+import { constants } from './constants';
import { fsWrapper } from './fs_wrapper';
+import { CompilationError } from './types';
/**
* Gets contract data on network or returns if an artifact does not exist.
@@ -106,3 +114,208 @@ export function parseDependencies(contractSource: ContractSource): string[] {
});
return dependencies;
}
+
+/**
+ * Compiles the contracts and prints errors/warnings
+ * @param resolver Resolver
+ * @param solcInstance Instance of a solc compiler
+ * @param standardInput Solidity standard JSON input
+ */
+export function compile(
+ resolver: Resolver,
+ solcInstance: solc.SolcInstance,
+ standardInput: solc.StandardInput,
+): solc.StandardOutput {
+ const standardInputStr = JSON.stringify(standardInput);
+ const standardOutputStr = solcInstance.compileStandardWrapper(standardInputStr, importPath => {
+ const sourceCodeIfExists = resolver.resolve(importPath);
+ return { contents: sourceCodeIfExists.source };
+ });
+ const compiled: solc.StandardOutput = JSON.parse(standardOutputStr);
+ if (!_.isUndefined(compiled.errors)) {
+ printCompilationErrorsAndWarnings(compiled.errors);
+ }
+ return compiled;
+}
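For illustration, a minimal sketch of how `compile` might be invoked, assuming a `resolver` and `solcInstance` are already in hand (e.g. from `getSolcAsync` below); the input object follows solc's standard JSON input format, and the 'Exchange.sol' entry point is hypothetical:

    const standardInput = {
        language: 'Solidity',
        sources: {
            'Exchange.sol': { content: resolver.resolve('Exchange.sol').source },
        },
        settings: {
            outputSelection: { '*': { '*': ['abi', 'evm.bytecode.object'] } },
        },
    };
    const compilerOutput = compile(resolver, solcInstance, standardInput);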
+/**
+ * Separates errors from warnings, formats the messages and prints them. Throws if there is any compilation error (not warning).
+ * @param solcErrors The errors field of standard JSON output that contains errors and warnings.
+ */
+function printCompilationErrorsAndWarnings(solcErrors: solc.SolcError[]): void {
+ const SOLIDITY_WARNING = 'warning';
+ const errors = _.filter(solcErrors, entry => entry.severity !== SOLIDITY_WARNING);
+ const warnings = _.filter(solcErrors, entry => entry.severity === SOLIDITY_WARNING);
+ if (!_.isEmpty(errors)) {
+ errors.forEach(error => {
+ const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
+ logUtils.log(chalk.red('error'), normalizedErrMsg);
+ });
+ throw new CompilationError(errors.length);
+ } else {
+ warnings.forEach(warning => {
+ const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
+ logUtils.log(chalk.yellow('warning'), normalizedWarningMsg);
+ });
+ }
+}
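For reference, a sketch of the kind of standard JSON error entry being filtered here; only `severity` and `formattedMessage`/`message` are consumed, and the values below are invented:

    const exampleEntry = {
        type: 'Warning',
        component: 'general',
        severity: 'warning', // anything other than 'warning' counts as an error
        message: 'Unused local variable.',
        formattedMessage: 'Exchange.sol:42:5: Warning: Unused local variable.',
    };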
+
+/**
+ * Gets the source tree hash for a file and its dependencies.
+ * @param resolver Resolver instance used to resolve the file and its imports.
+ * @param importPath Path to the contract file.
+ */
+export function getSourceTreeHash(resolver: Resolver, importPath: string): Buffer {
+ const contractSource = resolver.resolve(importPath);
+ const dependencies = parseDependencies(contractSource);
+ const sourceHash = ethUtil.sha3(contractSource.source);
+ if (dependencies.length === 0) {
+ return sourceHash;
+ } else {
+ const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
+ getSourceTreeHash(resolver, dependency),
+ );
+ const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
+ const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
+ return sourceTreeHash;
+ }
+}
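As a usage sketch, the hash can be compared against the `sourceTreeHashHex` recorded in a previously generated artifact to decide whether a contract needs recompiling (assuming such an artifact has already been loaded, and a hypothetical 'Exchange.sol' path):

    const sourceTreeHashHex = `0x${getSourceTreeHash(resolver, 'Exchange.sol').toString('hex')}`;
    const isUnchanged = existingArtifact.sourceTreeHashHex === sourceTreeHashHex;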
+
+/**
+ * For the given contract path, populates the JSON objects used by the ContractVersionData interface's
+ * `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
+ * contracts) properties for that contract. The source code pointed to by contractPath is read and parsed
+ * directly (via `resolver.resolve().source`), as are its imports, recursively. The ID numbers in the
+ * returned `sources` are taken from the corresponding IDs in fullSources, and the content for the returned
+ * `sourceCodes` is read from disk (via the aforementioned `resolver.resolve().source`).
+ * @param resolver Resolver instance.
+ * @param contractPath Path of the contract whose sources and dependencies are gathered.
+ * @param fullSources Mapping of all compiled source names to their IDs.
+ */
+export function getSourcesWithDependencies(
+ resolver: Resolver,
+ contractPath: string,
+ fullSources: { [sourceName: string]: { id: number } },
+): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
+ const sources = { [contractPath]: { id: fullSources[contractPath].id } };
+ const sourceCodes = { [contractPath]: resolver.resolve(contractPath).source };
+ recursivelyGatherDependencySources(
+ resolver,
+ contractPath,
+ sourceCodes[contractPath],
+ fullSources,
+ sources,
+ sourceCodes,
+ );
+ return { sourceCodes, sources };
+}
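A usage sketch, assuming `compilerOutput` is the result of `compile` above; the standard JSON output's `sources` section maps each file name to an object with a numeric `id`, which is the shape expected for `fullSources`, and the contract path is hypothetical:

    const contractPath = '2.0.0/protocol/Exchange/Exchange.sol';
    const { sources, sourceCodes } = getSourcesWithDependencies(resolver, contractPath, compilerOutput.sources);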
+
+function recursivelyGatherDependencySources(
+ resolver: Resolver,
+ contractPath: string,
+ contractSource: string,
+ fullSources: { [sourceName: string]: { id: number } },
+ sourcesToAppendTo: { [sourceName: string]: { id: number } },
+ sourceCodesToAppendTo: { [sourceName: string]: string },
+): void {
+ const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
+ if (importStatementMatches === null) {
+ return;
+ }
+ for (const importStatementMatch of importStatementMatches) {
+ const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
+ if (importPathMatches === null || importPathMatches.length === 0) {
+ continue;
+ }
+
+ let importPath = importPathMatches[1];
+ // HACK(albrow): We have, e.g.:
+ //
+ // importPath = "../../utils/LibBytes/LibBytes.sol"
+ // contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
+ //
+ // Resolver doesn't understand "../" so we want to pass
+ // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
+ //
+ // This hack involves using path.resolve. But path.resolve returns
+ // absolute paths by default. We trick it into thinking that
+ // contractPath is a root directory by prepending a '/' and then
+ // removing the leading '/' from the result.
+ //
+ // path.resolve("/a/b/c", "../../d/e") === "/a/d/e"
+ //
+ const lastPathSeparatorPos = contractPath.lastIndexOf('/');
+ const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
+ if (importPath.startsWith('.')) {
+ /**
+ * Some import paths are relative ("../Token.sol", "./Wallet.sol")
+ * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol"),
+ * and we need to prepend the contract's base path to relative imports.
+ */
+ importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
+ }
+
+ if (_.isUndefined(sourcesToAppendTo[importPath])) {
+ sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
+ sourceCodesToAppendTo[importPath] = resolver.resolve(importPath).source;
+
+ recursivelyGatherDependencySources(
+ resolver,
+ importPath,
+ resolver.resolve(importPath).source,
+ fullSources,
+ sourcesToAppendTo,
+ sourceCodesToAppendTo,
+ );
+ }
+ }
+}
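A standalone illustration of the path-normalization trick described in the HACK comment above, assuming POSIX-style paths:

    const contractFolder = '2.0.0/protocol/AssetProxyOwner/';
    const relativeImport = '../../utils/LibBytes/LibBytes.sol';
    // Pretend the folder is rooted at '/', resolve the relative import, then drop the leading '/'.
    const normalized = path.resolve(`/${contractFolder}`, relativeImport).replace('/', '');
    // normalized === '2.0.0/utils/LibBytes/LibBytes.sol'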
+
+/**
+ * Gets the Solidity compiler instance and full version name. If the compiler is already cached, it is loaded
+ * from the filesystem; otherwise it is fetched and cached.
+ * @param solcVersion The compiler version, e.g. 0.5.0
+ */
+export async function getSolcAsync(
+ solcVersion: string,
+): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
+ const fullSolcVersion = binPaths[solcVersion];
+ if (_.isUndefined(fullSolcVersion)) {
+ throw new Error(`${solcVersion} is not a known compiler version`);
+ }
+ const compilerBinFilename = path.join(constants.SOLC_BIN_DIR, fullSolcVersion);
+ let solcjs: string;
+ if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
+ solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
+ } else {
+ logUtils.warn(`Downloading ${fullSolcVersion}...`);
+ const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
+ const response = await fetchAsync(url);
+ const SUCCESS_STATUS = 200;
+ if (response.status !== SUCCESS_STATUS) {
+ throw new Error(`Failed to load ${fullSolcVersion}`);
+ }
+ solcjs = await response.text();
+ await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
+ }
+ if (solcjs.length === 0) {
+ throw new Error('No compiler available');
+ }
+ const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
+ return { solcInstance, fullSolcVersion };
+}
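A usage sketch (inside an async function), reusing the resolver and standard input from the `compile` example above; the version string must be one of the keys in `binPaths`:

    const { solcInstance, fullSolcVersion } = await getSolcAsync('0.5.0');
    logUtils.log(`Compiling with solc ${fullSolcVersion}`);
    const compilerOutput = compile(resolver, solcInstance, standardInput);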
+
+/**
+ * The Solidity compiler emits bytecode as a hex string without a 0x prefix. This function adds the prefix if bytecode is present.
+ * @param compiledContract The standard JSON output section for a contract. Gets modified in place.
+ */
+export function addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
+ if (!_.isUndefined(compiledContract.evm)) {
+ if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
+ compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
+ }
+ if (
+ !_.isUndefined(compiledContract.evm.deployedBytecode) &&
+ !_.isUndefined(compiledContract.evm.deployedBytecode.object)
+ ) {
+ compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
+ compiledContract.evm.deployedBytecode.object,
+ );
+ }
+ }
+}
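A sketch of applying the prefix to every contract in a standard JSON output, whose `contracts` section is keyed by file name and then by contract name (`compilerOutput` as in the earlier examples):

    _.forEach(compilerOutput.contracts, contractsForFile => {
        _.forEach(contractsForFile, compiledContract => {
            addHexPrefixToContractBytecode(compiledContract);
        });
    });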