import { ContractSource, Resolver } from '@0x/sol-resolver';
import { fetchAsync, logUtils } from '@0x/utils';
import chalk from 'chalk';
import { ContractArtifact } from 'ethereum-types';
import * as ethUtil from 'ethereumjs-util';
import * as _ from 'lodash';
import * as path from 'path';
import * as requireFromString from 'require-from-string';
import * as solc from 'solc';

import { binPaths } from '../solc/bin_paths';

import { constants } from './constants';
import { fsWrapper } from './fs_wrapper';
import { CompilationError } from './types';

/**
 * Gets contract data from the artifacts directory, or returns undefined if an artifact does not exist.
 * @param artifactsDir Path to the artifacts directory.
 * @param contractName Name of contract.
 * @return Contract artifact, or undefined if it does not exist.
 */
export async function getContractArtifactIfExistsAsync(
    artifactsDir: string,
    contractName: string,
): Promise<ContractArtifact | void> {
    let contractArtifact;
    const currentArtifactPath = `${artifactsDir}/${contractName}.json`;
    try {
        const opts = {
            encoding: 'utf8',
        };
        const contractArtifactString = await fsWrapper.readFileAsync(currentArtifactPath, opts);
        contractArtifact = JSON.parse(contractArtifactString);
        return contractArtifact;
    } catch (err) {
        logUtils.warn(`Artifact for ${contractName} does not exist`);
        return undefined;
    }
}
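
// Minimal usage sketch (the directory and contract name below are hypothetical):
//
//     const artifact = await getContractArtifactIfExistsAsync('artifacts', 'Exchange');
//     if (_.isUndefined(artifact)) {
//         // No artifact yet; the contract will be compiled from scratch.
//     }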

/**
 * Creates a directory if it does not already exist.
 * @param artifactsDir Path to the directory.
 */
export async function createDirIfDoesNotExistAsync(dirPath: string): Promise<void> {
    if (!fsWrapper.doesPathExistSync(dirPath)) {
        logUtils.warn(`Creating directory at ${dirPath}...`);
        await fsWrapper.mkdirpAsync(dirPath);
    }
}

/**
 * Searches Solidity source code for compiler version range.
 * @param  source Source code of contract.
 * @return Solc compiler version range.
 */
export function parseSolidityVersionRange(source: string): string {
    const SOLIDITY_VERSION_RANGE_REGEX = /pragma\s+solidity\s+(.*);/;
    const solcVersionRangeMatch = source.match(SOLIDITY_VERSION_RANGE_REGEX);
    if (_.isNull(solcVersionRangeMatch)) {
        throw new Error('Could not find Solidity version range in source');
    }
    const solcVersionRange = solcVersionRangeMatch[1];
    return solcVersionRange;
}
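
// Illustrative example (the pragma below is hypothetical): the raw version range string is returned as-is:
//
//     parseSolidityVersionRange('pragma solidity ^0.4.24;\ncontract A {}'); // returns '^0.4.24'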

/**
 * Normalizes the path found in the error message. If it cannot be normalized
 * the original error message is returned.
 * Example: converts 'base/Token.sol:6:46: Warning: Unused local variable'
 *          to 'Token.sol:6:46: Warning: Unused local variable'
 * This is used to prevent logging the same error multiple times.
 * @param  errMsg An error message from the compiled output.
 * @return The error message with directories truncated from the contract path.
 */
export function getNormalizedErrMsg(errMsg: string): string {
    const SOLIDITY_FILE_EXTENSION_REGEX = /(.*\.sol)/;
    const errPathMatch = errMsg.match(SOLIDITY_FILE_EXTENSION_REGEX);
    if (_.isNull(errPathMatch)) {
        // This can occur if solidity outputs a general warning, e.g.
        // Warning: This is a pre-release compiler version, please do not use it in production.
        return errMsg;
    }
    const errPath = errPathMatch[0];
    const baseContract = path.basename(errPath);
    const normalizedErrMsg = errMsg.replace(errPath, baseContract);
    return normalizedErrMsg;
}
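
// Illustrative example (hypothetical message), matching the case described in the doc comment above:
//
//     getNormalizedErrMsg('base/Token.sol:6:46: Warning: Unused local variable');
//     // returns 'Token.sol:6:46: Warning: Unused local variable'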

/**
 * Parses the contract source code and extracts the dependencies.
 * @param contractSource Contract source code and path.
 * @return List of dependencies.
 */
export function parseDependencies(contractSource: ContractSource): string[] {
    // TODO: Use a proper parser
    const source = contractSource.source;
    const IMPORT_REGEX = /(import\s)/;
    const DEPENDENCY_PATH_REGEX = /"([^"]+)"/; // Source: https://github.com/BlockChainCompany/soljitsu/blob/master/lib/shared.js
    const dependencies: string[] = [];
    const lines = source.split('\n');
    _.forEach(lines, line => {
        if (!_.isNull(line.match(IMPORT_REGEX))) {
            const dependencyMatch = line.match(DEPENDENCY_PATH_REGEX);
            if (!_.isNull(dependencyMatch)) {
                let dependencyPath = dependencyMatch[1];
                if (dependencyPath.startsWith('.')) {
                    dependencyPath = path.join(path.dirname(contractSource.path), dependencyPath);
                }
                dependencies.push(dependencyPath);
            }
        }
    });
    return dependencies;
}
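
// Illustrative example (hypothetical contract located at 'contracts/Token.sol'): a relative import such as
// './utils/SafeMath.sol' is joined onto the contract's directory, while a non-relative import such as
// '@0x/contracts/Wallet.sol' is returned unchanged, so the result might be
// ['contracts/utils/SafeMath.sol', '@0x/contracts/Wallet.sol'].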

/**
 * Compiles the contracts and prints errors/warnings
 * @param resolver Resolver
 * @param solcInstance Instance of a solc compiler
 * @param standardInput Solidity standard JSON input
 */
export function compile(
    resolver: Resolver,
    solcInstance: solc.SolcInstance,
    standardInput: solc.StandardInput,
): solc.StandardOutput {
    const standardInputStr = JSON.stringify(standardInput);
    const standardOutputStr = solcInstance.compileStandardWrapper(standardInputStr, importPath => {
        const sourceCodeIfExists = resolver.resolve(importPath);
        return { contents: sourceCodeIfExists.source };
    });
    const compiled: solc.StandardOutput = JSON.parse(standardOutputStr);
    if (!_.isUndefined(compiled.errors)) {
        printCompilationErrorsAndWarnings(compiled.errors);
    }
    return compiled;
}
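
// Minimal sketch of calling `compile` with a hand-built standard JSON input. The field values below are
// assumptions for illustration only (sol-compiler constructs its own input); `resolver` and `solcInstance`
// (e.g. obtained from `getSolcAsync`) are assumed to be in scope.
//
//     const standardInput: solc.StandardInput = {
//         language: 'Solidity',
//         sources: { 'Token.sol': { content: 'pragma solidity ^0.4.24; contract Token {}' } },
//         settings: { outputSelection: { '*': { '*': ['abi', 'evm.bytecode.object'] } } },
//     };
//     const compiled = compile(resolver, solcInstance, standardInput);
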
/**
 * Separates errors from warnings, formats the messages and prints them. Throws if there is any compilation error (not warning).
 * @param solcErrors The errors field of standard JSON output that contains errors and warnings.
 */
function printCompilationErrorsAndWarnings(solcErrors: solc.SolcError[]): void {
    const SOLIDITY_WARNING = 'warning';
    const errors = _.filter(solcErrors, entry => entry.severity !== SOLIDITY_WARNING);
    const warnings = _.filter(solcErrors, entry => entry.severity === SOLIDITY_WARNING);
    if (!_.isEmpty(errors)) {
        errors.forEach(error => {
            const normalizedErrMsg = getNormalizedErrMsg(error.formattedMessage || error.message);
            logUtils.log(chalk.red('error'), normalizedErrMsg);
        });
        throw new CompilationError(errors.length);
    } else {
        warnings.forEach(warning => {
            const normalizedWarningMsg = getNormalizedErrMsg(warning.formattedMessage || warning.message);
            logUtils.log(chalk.yellow('warning'), normalizedWarningMsg);
        });
    }
}

/**
 * Gets the source tree hash for a file and its dependencies.
 * @param resolver Resolver used to fetch contract sources.
 * @param importPath Import path of the contract file.
 */
export function getSourceTreeHash(resolver: Resolver, importPath: string): Buffer {
    const contractSource = resolver.resolve(importPath);
    const dependencies = parseDependencies(contractSource);
    const sourceHash = ethUtil.sha3(contractSource.source);
    if (dependencies.length === 0) {
        return sourceHash;
    } else {
        const dependencySourceTreeHashes = _.map(dependencies, (dependency: string) =>
            getSourceTreeHash(resolver, dependency),
        );
        const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
        const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
        return sourceTreeHash;
    }
}
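
// Illustrative sketch of the hashing scheme (contract names hypothetical): for a contract A that imports
// B and C, the tree hash is sha3(sha3(A.source) || treeHash(B) || treeHash(C)), so a change anywhere in the
// dependency tree changes the hash of every contract that transitively imports the modified file.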

/**
 * For the given @param contractPath, populates JSON objects to be used in the ContractVersionData interface's
 * properties `sources` (source code file names mapped to ID numbers) and `sourceCodes` (source code content of
 * contracts) for that contract.  The source code pointed to by contractPath is read and parsed directly (via
 * `resolver.resolve().source`), as are its imports, recursively.  The ID numbers for @return `sources` are
 * taken from the corresponding IDs in @param fullSources, and the content for @return sourceCodes is read from
 * disk (via the aforementioned `resolver.resolve().source`).
 */
export function getSourcesWithDependencies(
    resolver: Resolver,
    contractPath: string,
    fullSources: { [sourceName: string]: { id: number } },
): { sourceCodes: { [sourceName: string]: string }; sources: { [sourceName: string]: { id: number } } } {
    const sources = { [contractPath]: { id: fullSources[contractPath].id } };
    const sourceCodes = { [contractPath]: resolver.resolve(contractPath).source };
    recursivelyGatherDependencySources(
        resolver,
        contractPath,
        sourceCodes[contractPath],
        fullSources,
        sources,
        sourceCodes,
    );
    return { sourceCodes, sources };
}
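
// Illustrative example (paths and IDs are hypothetical): for 'Exchange.sol' importing 'LibOrder.sol',
// the return value looks roughly like:
//
//     {
//         sources: { 'Exchange.sol': { id: 0 }, 'LibOrder.sol': { id: 3 } },
//         sourceCodes: { 'Exchange.sol': '<source of Exchange>', 'LibOrder.sol': '<source of LibOrder>' },
//     }
//
// where the IDs are copied from the compiler-produced `fullSources` mapping.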

function recursivelyGatherDependencySources(
    resolver: Resolver,
    contractPath: string,
    contractSource: string,
    fullSources: { [sourceName: string]: { id: number } },
    sourcesToAppendTo: { [sourceName: string]: { id: number } },
    sourceCodesToAppendTo: { [sourceName: string]: string },
): void {
    const importStatementMatches = contractSource.match(/\nimport[^;]*;/g);
    if (importStatementMatches === null) {
        return;
    }
    for (const importStatementMatch of importStatementMatches) {
        const importPathMatches = importStatementMatch.match(/\"([^\"]*)\"/);
        if (importPathMatches === null || importPathMatches.length === 0) {
            continue;
        }

        let importPath = importPathMatches[1];
        // HACK(albrow): We have, e.g.:
        //
        //      importPath   = "../../utils/LibBytes/LibBytes.sol"
        //      contractPath = "2.0.0/protocol/AssetProxyOwner/AssetProxyOwner.sol"
        //
        // Resolver doesn't understand "../" so we want to pass
        // "2.0.0/utils/LibBytes/LibBytes.sol" to resolver.
        //
        // This hack involves using path.resolve. But path.resolve returns
        // absolute directories by default. We trick it into thinking that
        // contractPath is a root directory by prepending a '/' and then
        // removing the leading '/' from the result afterwards.
        //
        //      path.resolve("/a/b/c", "../../d/e") === "/a/d/e"
        //
        const lastPathSeparatorPos = contractPath.lastIndexOf('/');
        const contractFolder = lastPathSeparatorPos === -1 ? '' : contractPath.slice(0, lastPathSeparatorPos + 1);
        if (importPath.startsWith('.')) {
            /**
             * Some import paths are relative ("../Token.sol", "./Wallet.sol")
             * while others are absolute ("Token.sol", "@0x/contracts/Wallet.sol"),
             * and we need to prepend the base path for relative imports.
             */
            importPath = path.resolve(`/${contractFolder}`, importPath).replace('/', '');
        }

        if (_.isUndefined(sourcesToAppendTo[importPath])) {
            sourcesToAppendTo[importPath] = { id: fullSources[importPath].id };
            sourceCodesToAppendTo[importPath] = resolver.resolve(importPath).source;

            recursivelyGatherDependencySources(
                resolver,
                importPath,
                resolver.resolve(importPath).source,
                fullSources,
                sourcesToAppendTo,
                sourceCodesToAppendTo,
            );
        }
    }
}

/**
 * Gets the solidity compiler instance and full version name. If the compiler is already cached, it is
 * loaded from the filesystem; otherwise it is fetched and then cached.
 * @param solcVersion The compiler version, e.g. 0.5.0
 */
export async function getSolcAsync(
    solcVersion: string,
): Promise<{ solcInstance: solc.SolcInstance; fullSolcVersion: string }> {
    const fullSolcVersion = binPaths[solcVersion];
    if (_.isUndefined(fullSolcVersion)) {
        throw new Error(`${solcVersion} is not a known compiler version`);
    }
    const compilerBinFilename = path.join(constants.SOLC_BIN_DIR, fullSolcVersion);
    let solcjs: string;
    if (await fsWrapper.doesFileExistAsync(compilerBinFilename)) {
        solcjs = (await fsWrapper.readFileAsync(compilerBinFilename)).toString();
    } else {
        logUtils.warn(`Downloading ${fullSolcVersion}...`);
        const url = `${constants.BASE_COMPILER_URL}${fullSolcVersion}`;
        const response = await fetchAsync(url);
        const SUCCESS_STATUS = 200;
        if (response.status !== SUCCESS_STATUS) {
            throw new Error(`Failed to load ${fullSolcVersion}`);
        }
        solcjs = await response.text();
        await fsWrapper.writeFileAsync(compilerBinFilename, solcjs);
    }
    if (solcjs.length === 0) {
        throw new Error('No compiler available');
    }
    const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));
    return { solcInstance, fullSolcVersion };
}
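
// Minimal usage sketch (the version string is an assumption and must be a key of `binPaths`):
//
//     const { solcInstance, fullSolcVersion } = await getSolcAsync('0.4.24');
//     logUtils.log(`Compiling with solc ${fullSolcVersion}`);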

/**
 * The Solidity compiler emits bytecode as a hex string without a 0x prefix. This function adds the prefix if bytecode is present.
 * @param compiledContract The standard JSON output section for a contract. Gets modified in place.
 */
export function addHexPrefixToContractBytecode(compiledContract: solc.StandardContractOutput): void {
    if (!_.isUndefined(compiledContract.evm)) {
        if (!_.isUndefined(compiledContract.evm.bytecode) && !_.isUndefined(compiledContract.evm.bytecode.object)) {
            compiledContract.evm.bytecode.object = ethUtil.addHexPrefix(compiledContract.evm.bytecode.object);
        }
        if (
            !_.isUndefined(compiledContract.evm.deployedBytecode) &&
            !_.isUndefined(compiledContract.evm.deployedBytecode.object)
        ) {
            compiledContract.evm.deployedBytecode.object = ethUtil.addHexPrefix(
                compiledContract.evm.deployedBytecode.object,
            );
        }
    }
}
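
// Illustrative example (hypothetical bytecode): after the call, '6080604052...' becomes '0x6080604052...':
//
//     addHexPrefixToContractBytecode(compiledContract);
//     // compiledContract.evm.bytecode.object === '0x6080604052...'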