author    Leonid Logvinov <logvinov.leon@gmail.com>    2019-01-10 18:21:05 +0800
committer Leonid Logvinov <logvinov.leon@gmail.com>    2019-01-10 18:21:05 +0800
commit    7ae9e79235ed3b7eb110b0a1e88338b3965f44da (patch)
tree      5339e9548142eb6ec3e7a7e4288cb5e5b1a582cc /packages/sol-tracing-utils/src
parent    15c9479ebeca57e7c275cd2e73ca3daad03a412f (diff)
Rename sol-trace-based-tools-common to sol-tracing-utils
Diffstat (limited to 'packages/sol-tracing-utils/src')
-rw-r--r--  packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts  5
-rw-r--r--  packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts  61
-rw-r--r--  packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts  88
-rw-r--r--  packages/sol-tracing-utils/src/ast_visitor.ts  168
-rw-r--r--  packages/sol-tracing-utils/src/collect_coverage_entries.ts  41
-rw-r--r--  packages/sol-tracing-utils/src/constants.ts  8
-rw-r--r--  packages/sol-tracing-utils/src/get_source_range_snippet.ts  185
-rw-r--r--  packages/sol-tracing-utils/src/globals.d.ts  7
-rw-r--r--  packages/sol-tracing-utils/src/index.ts  39
-rw-r--r--  packages/sol-tracing-utils/src/instructions.ts  23
-rw-r--r--  packages/sol-tracing-utils/src/revert_trace.ts  95
-rw-r--r--  packages/sol-tracing-utils/src/source_maps.ts  91
-rw-r--r--  packages/sol-tracing-utils/src/trace.ts  104
-rw-r--r--  packages/sol-tracing-utils/src/trace_collection_subprovider.ts  188
-rw-r--r--  packages/sol-tracing-utils/src/trace_collector.ts  93
-rw-r--r--  packages/sol-tracing-utils/src/trace_info_subprovider.ts  59
-rw-r--r--  packages/sol-tracing-utils/src/types.ts  126
-rw-r--r--  packages/sol-tracing-utils/src/utils.ts  87
18 files changed, 1468 insertions, 0 deletions
diff --git a/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts
new file mode 100644
index 000000000..fcc6562ad
--- /dev/null
+++ b/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts
@@ -0,0 +1,5 @@
+import { ContractData } from '../types';
+
+export abstract class AbstractArtifactAdapter {
+ public abstract async collectContractsDataAsync(): Promise<ContractData[]>;
+}
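
For illustration, a minimal sketch of a custom adapter built on this base class (the class name and the in-memory data source are hypothetical; the import path assumes the package is consumed as @0x/sol-tracing-utils):

import { AbstractArtifactAdapter, ContractData } from '@0x/sol-tracing-utils';

// Hypothetical adapter that serves a pre-built list of ContractData,
// showing the one method a custom artifact format has to implement.
class InMemoryArtifactAdapter extends AbstractArtifactAdapter {
    constructor(private readonly _contractsData: ContractData[]) {
        super();
    }
    public async collectContractsDataAsync(): Promise<ContractData[]> {
        return this._contractsData;
    }
}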
diff --git a/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts
new file mode 100644
index 000000000..57391abbe
--- /dev/null
+++ b/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts
@@ -0,0 +1,61 @@
+import { logUtils } from '@0x/utils';
+import { CompilerOptions, ContractArtifact } from 'ethereum-types';
+import * as fs from 'fs';
+import * as glob from 'glob';
+import * as _ from 'lodash';
+import * as path from 'path';
+
+import { ContractData } from '../types';
+
+import { AbstractArtifactAdapter } from './abstract_artifact_adapter';
+
+const CONFIG_FILE = 'compiler.json';
+
+export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
+ private readonly _artifactsPath: string;
+ private readonly _sourcesPath: string;
+ /**
+ * Instantiates a SolCompilerArtifactAdapter
+ * @param artifactsPath Path to your artifacts directory
+ * @param sourcesPath Path to your contract sources directory
+ */
+ constructor(artifactsPath?: string, sourcesPath?: string) {
+ super();
+ const config: CompilerOptions = fs.existsSync(CONFIG_FILE)
+ ? JSON.parse(fs.readFileSync(CONFIG_FILE).toString())
+ : {};
+ if (_.isUndefined(artifactsPath) && _.isUndefined(config.artifactsDir)) {
+ throw new Error(`artifactsDir not found in ${CONFIG_FILE}`);
+ }
+ this._artifactsPath = (artifactsPath || config.artifactsDir) as string;
+ if (_.isUndefined(sourcesPath) && _.isUndefined(config.contractsDir)) {
+ throw new Error(`contractsDir not found in ${CONFIG_FILE}`);
+ }
+ this._sourcesPath = (sourcesPath || config.contractsDir) as string;
+ }
+ public async collectContractsDataAsync(): Promise<ContractData[]> {
+ const artifactsGlob = `${this._artifactsPath}/**/*.json`;
+ const artifactFileNames = glob.sync(artifactsGlob, { absolute: true });
+ const contractsData: ContractData[] = [];
+ for (const artifactFileName of artifactFileNames) {
+ const artifact: ContractArtifact = JSON.parse(fs.readFileSync(artifactFileName).toString());
+ if (_.isUndefined(artifact.compilerOutput.evm)) {
+ logUtils.warn(`${artifactFileName} doesn't contain bytecode. Skipping...`);
+ continue;
+ }
+ let sources = _.keys(artifact.sources);
+ sources = _.map(sources, relativeFilePath => path.resolve(this._sourcesPath, relativeFilePath));
+ const sourceCodes = _.map(sources, (source: string) => fs.readFileSync(source).toString());
+ const contractData = {
+ sourceCodes,
+ sources,
+ bytecode: artifact.compilerOutput.evm.bytecode.object,
+ sourceMap: artifact.compilerOutput.evm.bytecode.sourceMap,
+ runtimeBytecode: artifact.compilerOutput.evm.deployedBytecode.object,
+ sourceMapRuntime: artifact.compilerOutput.evm.deployedBytecode.sourceMap,
+ };
+ contractsData.push(contractData);
+ }
+ return contractsData;
+ }
+}
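
A quick usage sketch (the artifact and source paths are hypothetical, and the import path assumes the package is consumed as @0x/sol-tracing-utils; when both arguments are omitted the adapter falls back to compiler.json as shown above):

import { SolCompilerArtifactAdapter } from '@0x/sol-tracing-utils';

(async () => {
    // Hypothetical paths; omit them to read artifactsDir/contractsDir from compiler.json.
    const adapter = new SolCompilerArtifactAdapter('./artifacts', './contracts');
    const contractsData = await adapter.collectContractsDataAsync();
    console.log(`Collected data for ${contractsData.length} contracts`);
})().catch(err => console.error(err));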
diff --git a/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts
new file mode 100644
index 000000000..bb2b15153
--- /dev/null
+++ b/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts
@@ -0,0 +1,88 @@
+import { Compiler, CompilerOptions } from '@0x/sol-compiler';
+import * as fs from 'fs';
+import * as glob from 'glob';
+import * as path from 'path';
+
+import { ContractData } from '../types';
+
+import { AbstractArtifactAdapter } from './abstract_artifact_adapter';
+import { SolCompilerArtifactAdapter } from './sol_compiler_artifact_adapter';
+
+const DEFAULT_TRUFFLE_ARTIFACTS_DIR = './build/contracts';
+
+interface TruffleConfig {
+ solc?: any;
+ contracts_build_directory?: string;
+}
+
+export class TruffleArtifactAdapter extends AbstractArtifactAdapter {
+ private readonly _solcVersion: string;
+ private readonly _projectRoot: string;
+ /**
+ * Instantiates a TruffleArtifactAdapter
+ * @param projectRoot Path to the truffle project's root directory
+ * @param solcVersion Solidity version with which to compile all the contracts
+ */
+ constructor(projectRoot: string, solcVersion: string) {
+ super();
+ this._solcVersion = solcVersion;
+ this._projectRoot = projectRoot;
+ }
+ public async collectContractsDataAsync(): Promise<ContractData[]> {
+ const artifactsDir = '.0x-artifacts';
+ const contractsDir = path.join(this._projectRoot, 'contracts');
+ const truffleConfig = this._getTruffleConfig();
+ const solcConfig = truffleConfig.solc || {};
+ const truffleArtifactsDirectory = truffleConfig.contracts_build_directory || DEFAULT_TRUFFLE_ARTIFACTS_DIR;
+ this._assertSolidityVersionIsCorrect(truffleArtifactsDirectory);
+ const compilerOptions: CompilerOptions = {
+ contractsDir,
+ artifactsDir,
+ compilerSettings: {
+ ...solcConfig,
+ outputSelection: {
+ ['*']: {
+ ['*']: ['abi', 'evm.bytecode.object', 'evm.deployedBytecode.object'],
+ },
+ },
+ },
+ contracts: '*',
+ solcVersion: this._solcVersion,
+ };
+ const compiler = new Compiler(compilerOptions);
+ await compiler.compileAsync();
+ const solCompilerArtifactAdapter = new SolCompilerArtifactAdapter(artifactsDir, contractsDir);
+ const contractsDataFrom0xArtifacts = await solCompilerArtifactAdapter.collectContractsDataAsync();
+ return contractsDataFrom0xArtifacts;
+ }
+ private _getTruffleConfig(): TruffleConfig {
+ const truffleConfigFileShort = path.resolve(path.join(this._projectRoot, 'truffle.js'));
+ const truffleConfigFileLong = path.resolve(path.join(this._projectRoot, 'truffle-config.js'));
+ if (fs.existsSync(truffleConfigFileShort)) {
+ const truffleConfig = require(truffleConfigFileShort);
+ return truffleConfig;
+ } else if (fs.existsSync(truffleConfigFileLong)) {
+ const truffleConfig = require(truffleConfigFileLong);
+ return truffleConfig;
+ } else {
+ throw new Error(
+ `Neither ${truffleConfigFileShort} nor ${truffleConfigFileLong} exists. Make sure the project root is correct`,
+ );
+ }
+ }
+ private _assertSolidityVersionIsCorrect(truffleArtifactsDirectory: string): void {
+ const artifactsGlob = `${truffleArtifactsDirectory}/**/*.json`;
+ const artifactFileNames = glob.sync(artifactsGlob, { absolute: true });
+ for (const artifactFileName of artifactFileNames) {
+ const artifact = JSON.parse(fs.readFileSync(artifactFileName).toString());
+ const compilerVersion = artifact.compiler.version;
+ if (!compilerVersion.startsWith(this._solcVersion)) {
+ throw new Error(
+ `${artifact.contractName} was compiled with solidity ${compilerVersion} but the specified version is ${
+ this._solcVersion
+ }, making it impossible to process traces`,
+ );
+ }
+ }
+ }
+}
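
For comparison, the Truffle adapter only needs the project root and a solc version (both values below are hypothetical; the import path is assumed as above):

import { TruffleArtifactAdapter } from '@0x/sol-tracing-utils';

// Recompiles the Truffle project with sol-compiler so that source maps are available.
const truffleAdapter = new TruffleArtifactAdapter('.', '0.4.24');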
diff --git a/packages/sol-tracing-utils/src/ast_visitor.ts b/packages/sol-tracing-utils/src/ast_visitor.ts
new file mode 100644
index 000000000..e55cdf6ec
--- /dev/null
+++ b/packages/sol-tracing-utils/src/ast_visitor.ts
@@ -0,0 +1,168 @@
+import * as _ from 'lodash';
+import * as Parser from 'solidity-parser-antlr';
+
+import { BranchMap, FnMap, LocationByOffset, SingleFileSourceRange, StatementMap } from './types';
+
+export interface CoverageEntriesDescription {
+ fnMap: FnMap;
+ branchMap: BranchMap;
+ statementMap: StatementMap;
+ modifiersStatementIds: number[];
+}
+
+enum BranchType {
+ If = 'if',
+ ConditionalExpression = 'cond-expr',
+ BinaryExpression = 'binary-expr',
+}
+
+export class ASTVisitor {
+ private _entryId = 0;
+ private readonly _fnMap: FnMap = {};
+ private readonly _branchMap: BranchMap = {};
+ private readonly _modifiersStatementIds: number[] = [];
+ private readonly _statementMap: StatementMap = {};
+ private readonly _locationByOffset: LocationByOffset;
+ private readonly _ignoreRangesBeginningAt: number[];
+ // keep track of contract/function ranges that are to be ignored
+ // so we can also ignore any child nodes within the contract/function
+ private readonly _ignoreRangesWithin: Array<[number, number]> = [];
+ constructor(locationByOffset: LocationByOffset, ignoreRangesBeginningAt: number[] = []) {
+ this._locationByOffset = locationByOffset;
+ this._ignoreRangesBeginningAt = ignoreRangesBeginningAt;
+ }
+ public getCollectedCoverageEntries(): CoverageEntriesDescription {
+ const coverageEntriesDescription = {
+ fnMap: this._fnMap,
+ branchMap: this._branchMap,
+ statementMap: this._statementMap,
+ modifiersStatementIds: this._modifiersStatementIds,
+ };
+ return coverageEntriesDescription;
+ }
+ public IfStatement(ast: Parser.IfStatement): void {
+ this._visitStatement(ast);
+ this._visitBinaryBranch(ast, ast.trueBody, ast.falseBody || ast, BranchType.If);
+ }
+ public FunctionDefinition(ast: Parser.FunctionDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ContractDefinition(ast: Parser.ContractDefinition): void {
+ if (this._shouldIgnoreExpression(ast)) {
+ this._ignoreRangesWithin.push(ast.range as [number, number]);
+ }
+ }
+ public ModifierDefinition(ast: Parser.ModifierDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ForStatement(ast: Parser.ForStatement): void {
+ this._visitStatement(ast);
+ }
+ public ReturnStatement(ast: Parser.ReturnStatement): void {
+ this._visitStatement(ast);
+ }
+ public BreakStatement(ast: Parser.BreakStatement): void {
+ this._visitStatement(ast);
+ }
+ public ContinueStatement(ast: Parser.ContinueStatement): void {
+ this._visitStatement(ast);
+ }
+ public EmitStatement(ast: any /* TODO: Parser.EmitStatement */): void {
+ this._visitStatement(ast);
+ }
+ public VariableDeclarationStatement(ast: Parser.VariableDeclarationStatement): void {
+ this._visitStatement(ast);
+ }
+ public Statement(ast: Parser.Statement): void {
+ this._visitStatement(ast);
+ }
+ public WhileStatement(ast: Parser.WhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public SimpleStatement(ast: Parser.SimpleStatement): void {
+ this._visitStatement(ast);
+ }
+ public ThrowStatement(ast: Parser.ThrowStatement): void {
+ this._visitStatement(ast);
+ }
+ public DoWhileStatement(ast: Parser.DoWhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public ExpressionStatement(ast: Parser.ExpressionStatement): void {
+ this._visitStatement(ast.expression);
+ }
+ public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void {
+ this._visitStatement(ast);
+ }
+ public BinaryOperation(ast: Parser.BinaryOperation): void {
+ const BRANCHING_BIN_OPS = ['&&', '||'];
+ if (_.includes(BRANCHING_BIN_OPS, ast.operator)) {
+ this._visitBinaryBranch(ast, ast.left, ast.right, BranchType.BinaryExpression);
+ }
+ }
+ public Conditional(ast: Parser.Conditional): void {
+ this._visitBinaryBranch(ast, ast.trueExpression, ast.falseExpression, BranchType.ConditionalExpression);
+ }
+ public ModifierInvocation(ast: Parser.ModifierInvocation): void {
+ const BUILTIN_MODIFIERS = ['public', 'view', 'payable', 'external', 'internal', 'pure', 'constant'];
+ if (!_.includes(BUILTIN_MODIFIERS, ast.name)) {
+ if (this._shouldIgnoreExpression(ast)) {
+ return;
+ }
+ this._modifiersStatementIds.push(this._entryId);
+ this._visitStatement(ast);
+ }
+ }
+ private _visitBinaryBranch(
+ ast: Parser.ASTNode,
+ left: Parser.ASTNode,
+ right: Parser.ASTNode,
+ type: BranchType,
+ ): void {
+ if (this._shouldIgnoreExpression(ast)) {
+ return;
+ }
+ this._branchMap[this._entryId++] = {
+ line: this._getExpressionRange(ast).start.line,
+ type,
+ locations: [this._getExpressionRange(left), this._getExpressionRange(right)],
+ };
+ }
+ private _visitStatement(ast: Parser.ASTNode): void {
+ if (this._shouldIgnoreExpression(ast)) {
+ return;
+ }
+ this._statementMap[this._entryId++] = this._getExpressionRange(ast);
+ }
+ private _getExpressionRange(ast: Parser.ASTNode): SingleFileSourceRange {
+ const astRange = ast.range as [number, number];
+ const start = this._locationByOffset[astRange[0]];
+ const end = this._locationByOffset[astRange[1] + 1];
+ const range = {
+ start,
+ end,
+ };
+ return range;
+ }
+ private _shouldIgnoreExpression(ast: Parser.ASTNode): boolean {
+ const [astStart, astEnd] = ast.range as [number, number];
+ const isRangeIgnored = _.some(
+ this._ignoreRangesWithin,
+ ([rangeStart, rangeEnd]: [number, number]) => astStart >= rangeStart && astEnd <= rangeEnd,
+ );
+ return this._ignoreRangesBeginningAt.includes(astStart) || isRangeIgnored;
+ }
+ private _visitFunctionLikeDefinition(ast: Parser.ModifierDefinition | Parser.FunctionDefinition): void {
+ if (this._shouldIgnoreExpression(ast)) {
+ this._ignoreRangesWithin.push(ast.range as [number, number]);
+ return;
+ }
+ const loc = this._getExpressionRange(ast);
+ this._fnMap[this._entryId++] = {
+ name: ast.name,
+ line: loc.start.line,
+ loc,
+ };
+ this._visitStatement(ast);
+ }
+}
diff --git a/packages/sol-tracing-utils/src/collect_coverage_entries.ts b/packages/sol-tracing-utils/src/collect_coverage_entries.ts
new file mode 100644
index 000000000..bdbcd613e
--- /dev/null
+++ b/packages/sol-tracing-utils/src/collect_coverage_entries.ts
@@ -0,0 +1,41 @@
+import * as ethUtil from 'ethereumjs-util';
+import * as _ from 'lodash';
+import * as parser from 'solidity-parser-antlr';
+
+import { ASTVisitor, CoverageEntriesDescription } from './ast_visitor';
+import { getLocationByOffset } from './source_maps';
+
+const IGNORE_RE = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;
+
+// Parsing source code for each transaction/code is slow and therefore we cache it
+const coverageEntriesBySourceHash: { [sourceHash: string]: CoverageEntriesDescription } = {};
+
+export const collectCoverageEntries = (contractSource: string) => {
+ const sourceHash = ethUtil.sha3(contractSource).toString('hex');
+ if (_.isUndefined(coverageEntriesBySourceHash[sourceHash]) && !_.isUndefined(contractSource)) {
+ const ast = parser.parse(contractSource, { range: true });
+ const locationByOffset = getLocationByOffset(contractSource);
+ const ignoreRangesBeginningAt = gatherRangesToIgnore(contractSource);
+ const visitor = new ASTVisitor(locationByOffset, ignoreRangesBeginningAt);
+ parser.visit(ast, visitor);
+ coverageEntriesBySourceHash[sourceHash] = visitor.getCollectedCoverageEntries();
+ }
+ const coverageEntriesDescription = coverageEntriesBySourceHash[sourceHash];
+ return coverageEntriesDescription;
+};
+
+// Gather the start index of all code blocks preceded by "/* solcov ignore next */"
+function gatherRangesToIgnore(contractSource: string): number[] {
+ const ignoreRangesStart = [];
+
+ let match;
+ do {
+ match = IGNORE_RE.exec(contractSource);
+ if (match) {
+ const matchLen = match[0].length;
+ ignoreRangesStart.push(match.index + matchLen);
+ }
+ } while (match);
+
+ return ignoreRangesStart;
+}
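
As a rough illustration of what the visitor collects, here is a sketch that runs collectCoverageEntries on a tiny hypothetical contract (the import path assumes @0x/sol-tracing-utils):

import { collectCoverageEntries } from '@0x/sol-tracing-utils';

const source = `
pragma solidity ^0.4.24;
contract Adder {
    function add(uint a, uint b) public pure returns (uint) {
        if (a == 0) { return b; }
        return a + b;
    }
}`;
const entries = collectCoverageEntries(source);
// fnMap, statementMap and branchMap share one incrementing entry id, as produced
// by ASTVisitor above; the single `if` contributes the only branch entry.
console.log(Object.keys(entries.fnMap).length, Object.keys(entries.branchMap).length);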
diff --git a/packages/sol-tracing-utils/src/constants.ts b/packages/sol-tracing-utils/src/constants.ts
new file mode 100644
index 000000000..34d62b537
--- /dev/null
+++ b/packages/sol-tracing-utils/src/constants.ts
@@ -0,0 +1,8 @@
+// tslint:disable:number-literal-format
+export const constants = {
+ NEW_CONTRACT: 'NEW_CONTRACT' as 'NEW_CONTRACT',
+ PUSH1: 0x60,
+ PUSH2: 0x61,
+ PUSH32: 0x7f,
+ TIMESTAMP: 0x42,
+};
diff --git a/packages/sol-tracing-utils/src/get_source_range_snippet.ts b/packages/sol-tracing-utils/src/get_source_range_snippet.ts
new file mode 100644
index 000000000..f578675d3
--- /dev/null
+++ b/packages/sol-tracing-utils/src/get_source_range_snippet.ts
@@ -0,0 +1,185 @@
+import * as ethUtil from 'ethereumjs-util';
+import * as _ from 'lodash';
+import * as Parser from 'solidity-parser-antlr';
+
+import { SingleFileSourceRange, SourceRange, SourceSnippet } from './types';
+import { utils } from './utils';
+
+interface ASTInfo {
+ type: string;
+ node: Parser.ASTNode;
+ name: string | null;
+ range?: SingleFileSourceRange;
+}
+
+// Parsing source code for each transaction/code is slow and therefore we cache it
+const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
+
+/**
+ * Gets the source range snippet by source range to be used by revert trace.
+ * @param sourceRange source range
+ * @param sourceCode source code
+ */
+export function getSourceRangeSnippet(sourceRange: SourceRange, sourceCode: string): SourceSnippet | null {
+ const sourceHash = ethUtil.sha3(sourceCode).toString('hex');
+ if (_.isUndefined(parsedSourceByHash[sourceHash])) {
+ parsedSourceByHash[sourceHash] = Parser.parse(sourceCode, { loc: true });
+ }
+ const astNode = parsedSourceByHash[sourceHash];
+ const visitor = new ASTInfoVisitor();
+ Parser.visit(astNode, visitor);
+ const astInfo = visitor.getASTInfoForRange(sourceRange);
+ if (astInfo === null) {
+ return null;
+ }
+ const sourceCodeInRange = utils.getRange(sourceCode, sourceRange.location);
+ return {
+ ...astInfo,
+ range: astInfo.range as SingleFileSourceRange,
+ source: sourceCodeInRange,
+ fileName: sourceRange.fileName,
+ };
+}
+
+// A visitor which collects ASTInfo for most nodes in the AST.
+class ASTInfoVisitor {
+ private readonly _astInfos: ASTInfo[] = [];
+ public getASTInfoForRange(sourceRange: SourceRange): ASTInfo | null {
+ // HACK(albrow): Sometimes the source range doesn't exactly match that
+ // of astInfo. To work around that we try with a +/-1 offset on
+ // end.column. If nothing matches even with the offset, we return null.
+ const offset = {
+ start: {
+ line: 0,
+ column: 0,
+ },
+ end: {
+ line: 0,
+ column: 0,
+ },
+ };
+ let astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ offset.end.column += 1;
+ astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ offset.end.column -= 2;
+ astInfo = this._getASTInfoForRange(sourceRange, offset);
+ if (astInfo !== null) {
+ return astInfo;
+ }
+ return null;
+ }
+ public ContractDefinition(ast: Parser.ContractDefinition): void {
+ this._visitContractDefinition(ast);
+ }
+ public IfStatement(ast: Parser.IfStatement): void {
+ this._visitStatement(ast);
+ }
+ public FunctionDefinition(ast: Parser.FunctionDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ModifierDefinition(ast: Parser.ModifierDefinition): void {
+ this._visitFunctionLikeDefinition(ast);
+ }
+ public ForStatement(ast: Parser.ForStatement): void {
+ this._visitStatement(ast);
+ }
+ public ReturnStatement(ast: Parser.ReturnStatement): void {
+ this._visitStatement(ast);
+ }
+ public BreakStatement(ast: Parser.BreakStatement): void {
+ this._visitStatement(ast);
+ }
+ public ContinueStatement(ast: Parser.ContinueStatement): void {
+ this._visitStatement(ast);
+ }
+ public EmitStatement(ast: any /* TODO: Parser.EmitStatement */): void {
+ this._visitStatement(ast);
+ }
+ public VariableDeclarationStatement(ast: Parser.VariableDeclarationStatement): void {
+ this._visitStatement(ast);
+ }
+ public Statement(ast: Parser.Statement): void {
+ this._visitStatement(ast);
+ }
+ public WhileStatement(ast: Parser.WhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public SimpleStatement(ast: Parser.SimpleStatement): void {
+ this._visitStatement(ast);
+ }
+ public ThrowStatement(ast: Parser.ThrowStatement): void {
+ this._visitStatement(ast);
+ }
+ public DoWhileStatement(ast: Parser.DoWhileStatement): void {
+ this._visitStatement(ast);
+ }
+ public ExpressionStatement(ast: Parser.ExpressionStatement): void {
+ this._visitStatement(ast.expression);
+ }
+ public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void {
+ this._visitStatement(ast);
+ }
+ public ModifierInvocation(ast: Parser.ModifierInvocation): void {
+ const BUILTIN_MODIFIERS = ['public', 'view', 'payable', 'external', 'internal', 'pure', 'constant'];
+ if (!_.includes(BUILTIN_MODIFIERS, ast.name)) {
+ this._visitStatement(ast);
+ }
+ }
+ private _visitStatement(ast: Parser.ASTNode): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: null,
+ range: ast.loc,
+ });
+ }
+ private _visitFunctionLikeDefinition(ast: Parser.ModifierDefinition | Parser.FunctionDefinition): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: ast.name,
+ range: ast.loc,
+ });
+ }
+ private _visitContractDefinition(ast: Parser.ContractDefinition): void {
+ this._astInfos.push({
+ type: ast.type,
+ node: ast,
+ name: ast.name,
+ range: ast.loc,
+ });
+ }
+ private _getASTInfoForRange(sourceRange: SourceRange, offset: SingleFileSourceRange): ASTInfo | null {
+ const offsetSourceRange = {
+ ...sourceRange,
+ location: {
+ start: {
+ line: sourceRange.location.start.line + offset.start.line,
+ column: sourceRange.location.start.column + offset.start.column,
+ },
+ end: {
+ line: sourceRange.location.end.line + offset.end.line,
+ column: sourceRange.location.end.column + offset.end.column,
+ },
+ },
+ };
+ for (const astInfo of this._astInfos) {
+ const astInfoRange = astInfo.range as SingleFileSourceRange;
+ if (
+ astInfoRange.start.column === offsetSourceRange.location.start.column &&
+ astInfoRange.start.line === offsetSourceRange.location.start.line &&
+ astInfoRange.end.column === offsetSourceRange.location.end.column &&
+ astInfoRange.end.line === offsetSourceRange.location.end.line
+ ) {
+ return astInfo;
+ }
+ }
+ return null;
+ }
+}
diff --git a/packages/sol-tracing-utils/src/globals.d.ts b/packages/sol-tracing-utils/src/globals.d.ts
new file mode 100644
index 000000000..e799b3529
--- /dev/null
+++ b/packages/sol-tracing-utils/src/globals.d.ts
@@ -0,0 +1,7 @@
+// tslint:disable:completed-docs
+declare module '*.json' {
+ const json: any;
+ /* tslint:disable */
+ export default json;
+ /* tslint:enable */
+}
diff --git a/packages/sol-tracing-utils/src/index.ts b/packages/sol-tracing-utils/src/index.ts
new file mode 100644
index 000000000..413e5305e
--- /dev/null
+++ b/packages/sol-tracing-utils/src/index.ts
@@ -0,0 +1,39 @@
+export { SolCompilerArtifactAdapter } from './artifact_adapters/sol_compiler_artifact_adapter';
+export { TruffleArtifactAdapter } from './artifact_adapters/truffle_artifact_adapter';
+export { AbstractArtifactAdapter } from './artifact_adapters/abstract_artifact_adapter';
+
+export {
+ ContractData,
+ EvmCallStack,
+ SourceRange,
+ SourceSnippet,
+ StatementCoverage,
+ StatementDescription,
+ BranchCoverage,
+ BranchDescription,
+ Subtrace,
+ TraceInfo,
+ Coverage,
+ LineColumn,
+ LineCoverage,
+ FunctionCoverage,
+ FunctionDescription,
+ SingleFileSourceRange,
+ BranchMap,
+ EvmCallStackEntry,
+ FnMap,
+ LocationByOffset,
+ StatementMap,
+ TraceInfoBase,
+ TraceInfoExistingContract,
+ TraceInfoNewContract,
+} from './types';
+export { collectCoverageEntries } from './collect_coverage_entries';
+export { TraceCollector, SingleFileSubtraceHandler } from './trace_collector';
+export { TraceInfoSubprovider } from './trace_info_subprovider';
+export { utils } from './utils';
+export { constants } from './constants';
+export { parseSourceMap } from './source_maps';
+export { getSourceRangeSnippet } from './get_source_range_snippet';
+export { getRevertTrace } from './revert_trace';
+export { TraceCollectionSubprovider } from './trace_collection_subprovider';
diff --git a/packages/sol-tracing-utils/src/instructions.ts b/packages/sol-tracing-utils/src/instructions.ts
new file mode 100644
index 000000000..40987dbe5
--- /dev/null
+++ b/packages/sol-tracing-utils/src/instructions.ts
@@ -0,0 +1,23 @@
+import { constants } from './constants';
+
+const isPush = (inst: number) => inst >= constants.PUSH1 && inst <= constants.PUSH32;
+
+const pushDataLength = (inst: number) => inst - constants.PUSH1 + 1;
+
+const instructionLength = (inst: number) => (isPush(inst) ? pushDataLength(inst) + 1 : 1);
+
+export const getPcToInstructionIndexMapping = (bytecode: Uint8Array) => {
+ const result: {
+ [programCounter: number]: number;
+ } = {};
+ let byteIndex = 0;
+ let instructionIndex = 0;
+ while (byteIndex < bytecode.length) {
+ const instruction = bytecode[byteIndex];
+ const length = instructionLength(instruction);
+ result[byteIndex] = instructionIndex;
+ byteIndex += length;
+ instructionIndex += 1;
+ }
+ return result;
+};
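
A small worked example of the mapping (getPcToInstructionIndexMapping is internal to the package, so the relative import assumes this snippet lives next to instructions.ts):

import { getPcToInstructionIndexMapping } from './instructions';

// 0x6001600201 disassembles to PUSH1 0x01, PUSH1 0x02, ADD. The PUSH data bytes
// at pc 1 and 3 are not instructions themselves, so they never appear as keys.
const bytecode = Uint8Array.from(Buffer.from('6001600201', 'hex'));
console.log(getPcToInstructionIndexMapping(bytecode));
// => { '0': 0, '2': 1, '4': 2 }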
diff --git a/packages/sol-tracing-utils/src/revert_trace.ts b/packages/sol-tracing-utils/src/revert_trace.ts
new file mode 100644
index 000000000..4d474120c
--- /dev/null
+++ b/packages/sol-tracing-utils/src/revert_trace.ts
@@ -0,0 +1,95 @@
+import { logUtils } from '@0x/utils';
+import { OpCode, StructLog } from 'ethereum-types';
+
+import * as _ from 'lodash';
+
+import { EvmCallStack } from './types';
+import { utils } from './utils';
+
+/**
+ * Converts linear trace to a call stack by following calls and returns
+ * @param structLogs Linear trace
+ * @param startAddress The address of initial context
+ */
+export function getRevertTrace(structLogs: StructLog[], startAddress: string): EvmCallStack {
+ const evmCallStack: EvmCallStack = [];
+ const addressStack = [startAddress];
+ if (_.isEmpty(structLogs)) {
+ return [];
+ }
+ const normalizedStructLogs = utils.normalizeStructLogs(structLogs);
+ // tslint:disable-next-line:prefer-for-of
+ for (let i = 0; i < normalizedStructLogs.length; i++) {
+ const structLog = normalizedStructLogs[i];
+ if (structLog.depth !== addressStack.length - 1) {
+ throw new Error("Malformed trace. Trace depth doesn't match call stack depth");
+ }
+ // After this check we are guaranteed that the address stack is never empty:
+ // if it were, addressStack.length - 1 === structLog.depth === -1,
+ // which means we can always safely pop from it
+
+ if (utils.isCallLike(structLog.op)) {
+ const currentAddress = _.last(addressStack) as string;
+ const jumpAddressOffset = 1;
+ const newAddress = utils.getAddressFromStackEntry(
+ structLog.stack[structLog.stack.length - jumpAddressOffset - 1],
+ );
+
+ // Sometimes calls don't change the execution context (current address). When we do a transfer to an
+ // externally owned account, the call returns immediately because there is no fallback
+ // function. We manually check whether the call depth changed to handle that case.
+ const nextStructLog = normalizedStructLogs[i + 1];
+ if (nextStructLog.depth !== structLog.depth) {
+ addressStack.push(newAddress);
+ evmCallStack.push({
+ address: currentAddress,
+ structLog,
+ });
+ }
+ } else if (utils.isEndOpcode(structLog.op) && structLog.op !== OpCode.Revert) {
+ // Just like with calls, sometimes returns/stops don't change the execution context (current address).
+ const nextStructLog = normalizedStructLogs[i + 1];
+ if (_.isUndefined(nextStructLog) || nextStructLog.depth !== structLog.depth) {
+ evmCallStack.pop();
+ addressStack.pop();
+ }
+ if (structLog.op === OpCode.SelfDestruct) {
+ // After contract execution, we look at all sub-calls to external contracts, and for each one, fetch
+ // the bytecode and compute the coverage for the call. If the contract is destroyed with a call
+ // to `selfdestruct`, we are unable to fetch its bytecode and compute coverage.
+ // TODO: Refactor this logic to fetch the sub-called contract bytecode before the selfdestruct is called
+ // in order to handle this edge-case.
+ logUtils.warn(
+ "Detected a selfdestruct. We currently do not support that scenario. We'll just skip the trace part for a destructed contract",
+ );
+ }
+ } else if (structLog.op === OpCode.Revert) {
+ evmCallStack.push({
+ address: _.last(addressStack) as string,
+ structLog,
+ });
+ return evmCallStack;
+ } else if (structLog.op === OpCode.Create) {
+ // TODO: Extract the new contract address from the stack and handle that scenario
+ logUtils.warn(
+ "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace",
+ );
+ return [];
+ } else {
+ if (structLog !== _.last(normalizedStructLogs)) {
+ const nextStructLog = normalizedStructLogs[i + 1];
+ if (nextStructLog.depth === structLog.depth) {
+ continue;
+ } else if (nextStructLog.depth === structLog.depth - 1) {
+ addressStack.pop();
+ } else {
+ throw new Error('Malformed trace. Unexpected call depth change');
+ }
+ }
+ }
+ }
+ if (evmCallStack.length !== 0) {
+ logUtils.warn('Malformed trace. Call stack non empty at the end. (probably out of gas)');
+ }
+ return [];
+}
diff --git a/packages/sol-tracing-utils/src/source_maps.ts b/packages/sol-tracing-utils/src/source_maps.ts
new file mode 100644
index 000000000..af0fb4035
--- /dev/null
+++ b/packages/sol-tracing-utils/src/source_maps.ts
@@ -0,0 +1,91 @@
+import * as _ from 'lodash';
+
+import { getPcToInstructionIndexMapping } from './instructions';
+import { LocationByOffset, SourceRange } from './types';
+
+const RADIX = 10;
+
+export interface SourceLocation {
+ offset: number;
+ length: number;
+ fileIndex: number;
+}
+
+/**
+ * Receives a string with newlines and returns a map of character offset to LineColumn
+ * @param str A string to process
+ */
+export function getLocationByOffset(str: string): LocationByOffset {
+ const locationByOffset: LocationByOffset = { 0: { line: 1, column: 0 } };
+ let currentOffset = 0;
+ for (const char of str.split('')) {
+ const location = locationByOffset[currentOffset];
+ const isNewline = char === '\n';
+ locationByOffset[currentOffset + 1] = {
+ line: location.line + (isNewline ? 1 : 0),
+ column: isNewline ? 0 : location.column + 1,
+ };
+ currentOffset++;
+ }
+ return locationByOffset;
+}
+
+/**
+ * Parses a sourcemap string.
+ * The solidity sourcemap format is documented here: https://github.com/ethereum/solidity/blob/develop/docs/miscellaneous.rst#source-mappings
+ * @param sourceCodes sources contents
+ * @param srcMap source map string
+ * @param bytecodeHex contract bytecode
+ * @param sources sources file names
+ */
+export function parseSourceMap(
+ sourceCodes: string[],
+ srcMap: string,
+ bytecodeHex: string,
+ sources: string[],
+): { [programCounter: number]: SourceRange } {
+ const bytecode = Uint8Array.from(Buffer.from(bytecodeHex, 'hex'));
+ const pcToInstructionIndex: { [programCounter: number]: number } = getPcToInstructionIndexMapping(bytecode);
+ const locationByOffsetByFileIndex = _.map(sourceCodes, s => (_.isUndefined(s) ? {} : getLocationByOffset(s)));
+ const entries = srcMap.split(';');
+ let lastParsedEntry: SourceLocation = {} as any;
+ const instructionIndexToSourceRange: { [instructionIndex: number]: SourceRange } = {};
+ _.each(entries, (entry: string, i: number) => {
+ // tslint:disable-next-line:no-unused-variable
+ const [instructionIndexStrIfExists, lengthStrIfExists, fileIndexStrIfExists, jumpTypeStrIfExists] = entry.split(
+ ':',
+ );
+ const instructionIndexIfExists = parseInt(instructionIndexStrIfExists, RADIX);
+ const lengthIfExists = parseInt(lengthStrIfExists, RADIX);
+ const fileIndexIfExists = parseInt(fileIndexStrIfExists, RADIX);
+ const offset = _.isNaN(instructionIndexIfExists) ? lastParsedEntry.offset : instructionIndexIfExists;
+ const length = _.isNaN(lengthIfExists) ? lastParsedEntry.length : lengthIfExists;
+ const fileIndex = _.isNaN(fileIndexIfExists) ? lastParsedEntry.fileIndex : fileIndexIfExists;
+ const parsedEntry = {
+ offset,
+ length,
+ fileIndex,
+ };
+ if (parsedEntry.fileIndex !== -1 && !_.isUndefined(locationByOffsetByFileIndex[parsedEntry.fileIndex])) {
+ const sourceRange = {
+ location: {
+ start: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset],
+ end: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset + parsedEntry.length],
+ },
+ fileName: sources[parsedEntry.fileIndex],
+ };
+ instructionIndexToSourceRange[i] = sourceRange;
+ } else {
+ // Some assembly code generated by Solidity can't be mapped back to a line of source code.
+ // Source: https://github.com/ethereum/solidity/issues/3629
+ }
+ lastParsedEntry = parsedEntry;
+ });
+ const pcsToSourceRange: { [programCounter: number]: SourceRange } = {};
+ for (const programCounterKey of _.keys(pcToInstructionIndex)) {
+ const pc = parseInt(programCounterKey, RADIX);
+ const instructionIndex: number = pcToInstructionIndex[pc];
+ pcsToSourceRange[pc] = instructionIndexToSourceRange[instructionIndex];
+ }
+ return pcsToSourceRange;
+}
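
To make the inheritance rule concrete, here is a hypothetical source map and how the per-entry loop in parseSourceMap above decodes it (empty fields inherit the previous entry's value):

// Each entry is `offset:length:fileIndex:jumpType`; entries are separated by ';'.
const srcMap = '0:120:0:-;25:8;;:12';
// Decoded instruction by instruction:
//   { offset: 0,  length: 120, fileIndex: 0 }
//   { offset: 25, length: 8,   fileIndex: 0 }   (only offset/length given)
//   { offset: 25, length: 8,   fileIndex: 0 }   (everything inherited)
//   { offset: 25, length: 12,  fileIndex: 0 }   (only length overridden)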
diff --git a/packages/sol-tracing-utils/src/trace.ts b/packages/sol-tracing-utils/src/trace.ts
new file mode 100644
index 000000000..770080af3
--- /dev/null
+++ b/packages/sol-tracing-utils/src/trace.ts
@@ -0,0 +1,104 @@
+import { logUtils } from '@0x/utils';
+import { OpCode, StructLog } from 'ethereum-types';
+import * as _ from 'lodash';
+
+import { utils } from './utils';
+
+export interface TraceByContractAddress {
+ [contractAddress: string]: StructLog[];
+}
+
+/**
+ * Converts linear stack trace to `TraceByContractAddress`.
+ * @param structLogs stack trace
+ * @param startAddress initial context address
+ */
+export function getTracesByContractAddress(structLogs: StructLog[], startAddress: string): TraceByContractAddress {
+ const traceByContractAddress: TraceByContractAddress = {};
+ let currentTraceSegment = [];
+ const addressStack = [startAddress];
+ if (_.isEmpty(structLogs)) {
+ return traceByContractAddress;
+ }
+ const normalizedStructLogs = utils.normalizeStructLogs(structLogs);
+ // tslint:disable-next-line:prefer-for-of
+ for (let i = 0; i < normalizedStructLogs.length; i++) {
+ const structLog = normalizedStructLogs[i];
+ if (structLog.depth !== addressStack.length - 1) {
+ throw new Error("Malformed trace. Trace depth doesn't match call stack depth");
+ }
+ // After this check we are guaranteed that the address stack is never empty:
+ // if it were, addressStack.length - 1 === structLog.depth === -1,
+ // which means we can always safely pop from it
+ currentTraceSegment.push(structLog);
+
+ if (utils.isCallLike(structLog.op)) {
+ const currentAddress = _.last(addressStack) as string;
+ const jumpAddressOffset = 1;
+ const newAddress = utils.getAddressFromStackEntry(
+ structLog.stack[structLog.stack.length - jumpAddressOffset - 1],
+ );
+
+ // Sometimes calls don't change the execution context (current address). When we do a transfer to an
+ // externally owned account, the call returns immediately because there is no fallback
+ // function. We manually check whether the call depth changed to handle that case.
+ const nextStructLog = normalizedStructLogs[i + 1];
+ if (nextStructLog.depth !== structLog.depth) {
+ addressStack.push(newAddress);
+ traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+ currentTraceSegment,
+ );
+ currentTraceSegment = [];
+ }
+ } else if (utils.isEndOpcode(structLog.op)) {
+ const currentAddress = addressStack.pop() as string;
+ traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+ currentTraceSegment,
+ );
+ currentTraceSegment = [];
+ if (structLog.op === OpCode.SelfDestruct) {
+ // After contract execution, we look at all sub-calls to external contracts, and for each one, fetch
+ // the bytecode and compute the coverage for the call. If the contract is destroyed with a call
+ // to `selfdestruct`, we are unable to fetch its bytecode and compute coverage.
+ // TODO: Refactor this logic to fetch the sub-called contract bytecode before the selfdestruct is called
+ // in order to handle this edge-case.
+ logUtils.warn(
+ "Detected a selfdestruct. We currently do not support that scenario. We'll just skip the trace part for a destructed contract",
+ );
+ }
+ } else if (structLog.op === OpCode.Create) {
+ // TODO: Extract the new contract address from the stack and handle that scenario
+ logUtils.warn(
+ "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace",
+ );
+ return traceByContractAddress;
+ } else {
+ if (structLog !== _.last(normalizedStructLogs)) {
+ const nextStructLog = normalizedStructLogs[i + 1];
+ if (nextStructLog.depth === structLog.depth) {
+ continue;
+ } else if (nextStructLog.depth === structLog.depth - 1) {
+ const currentAddress = addressStack.pop() as string;
+ traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+ currentTraceSegment,
+ );
+ currentTraceSegment = [];
+ } else {
+ throw new Error('Malformed trace. Unexpected call depth change');
+ }
+ }
+ }
+ }
+ if (addressStack.length !== 0) {
+ logUtils.warn('Malformed trace. Call stack non empty at the end');
+ }
+ if (currentTraceSegment.length !== 0) {
+ const currentAddress = addressStack.pop() as string;
+ traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+ currentTraceSegment,
+ );
+ currentTraceSegment = [];
+ logUtils.warn('Malformed trace. Current trace segment non empty at the end');
+ }
+ return traceByContractAddress;
+}
diff --git a/packages/sol-tracing-utils/src/trace_collection_subprovider.ts b/packages/sol-tracing-utils/src/trace_collection_subprovider.ts
new file mode 100644
index 000000000..25e38768d
--- /dev/null
+++ b/packages/sol-tracing-utils/src/trace_collection_subprovider.ts
@@ -0,0 +1,188 @@
+import { BlockchainLifecycle } from '@0x/dev-utils';
+import { Callback, ErrorCallback, NextCallback, Subprovider } from '@0x/subproviders';
+import { CallDataRPC, marshaller, Web3Wrapper } from '@0x/web3-wrapper';
+import { JSONRPCRequestPayload, Provider, TxData } from 'ethereum-types';
+import * as _ from 'lodash';
+import { Lock } from 'semaphore-async-await';
+
+import { constants } from './constants';
+import { BlockParamLiteral } from './types';
+
+interface MaybeFakeTxData extends TxData {
+ isFakeTransaction?: boolean;
+}
+
+const BLOCK_GAS_LIMIT = 6000000;
+
+export interface TraceCollectionSubproviderConfig {
+ shouldCollectTransactionTraces: boolean;
+ shouldCollectCallTraces: boolean;
+ shouldCollectGasEstimateTraces: boolean;
+}
+
+// Because there is no notion of a call trace in the Ethereum RPC, we collect them in a rather non-obvious/hacky way.
+// On each call we create a snapshot, execute the call as a transaction, get the trace, and revert the snapshot.
+// That allows us to avoid influencing test behaviour.
+
+/**
+ * This class implements the [web3-provider-engine](https://github.com/MetaMask/provider-engine) subprovider interface.
+ * It collects traces of all transactions that were sent and all calls that were executed through JSON RPC. It must
+ * be extended by implementing the _recordTxTraceAsync method which is called for every transaction.
+ */
+export abstract class TraceCollectionSubprovider extends Subprovider {
+ protected _web3Wrapper!: Web3Wrapper;
+ // The lock is used to avoid accepting normal transactions while the call/snapshot magic is in progress, because they would otherwise be reverted later
+ private readonly _lock = new Lock();
+ private readonly _defaultFromAddress: string;
+ private _isEnabled = true;
+ private readonly _config: TraceCollectionSubproviderConfig;
+ /**
+ * Instantiates a TraceCollectionSubprovider instance
+ * @param defaultFromAddress default from address to use when sending transactions
+ */
+ constructor(defaultFromAddress: string, config: TraceCollectionSubproviderConfig) {
+ super();
+ this._defaultFromAddress = defaultFromAddress;
+ this._config = config;
+ }
+ /**
+ * Starts trace collection
+ */
+ public start(): void {
+ this._isEnabled = true;
+ }
+ /**
+ * Stops trace collection
+ */
+ public stop(): void {
+ this._isEnabled = false;
+ }
+ /**
+ * This method conforms to the web3-provider-engine interface.
+ * It is called internally by the ProviderEngine when it is this subproviders
+ * turn to handle a JSON RPC request.
+ * @param payload JSON RPC payload
+ * @param next Callback to call if this subprovider decides not to handle the request
+ * @param _end Callback to call if this subprovider handled the request and wants to end it by passing back an error or result.
+ */
+ // tslint:disable-next-line:prefer-function-over-method async-suffix
+ public async handleRequest(payload: JSONRPCRequestPayload, next: NextCallback, _end: ErrorCallback): Promise<void> {
+ if (this._isEnabled) {
+ switch (payload.method) {
+ case 'eth_sendTransaction':
+ if (!this._config.shouldCollectTransactionTraces) {
+ next();
+ } else {
+ const txData = payload.params[0];
+ next(this._onTransactionSentAsync.bind(this, txData));
+ }
+ return;
+
+ case 'eth_call':
+ if (!this._config.shouldCollectCallTraces) {
+ next();
+ } else {
+ const callData = payload.params[0];
+ next(this._onCallOrGasEstimateExecutedAsync.bind(this, callData));
+ }
+ return;
+
+ case 'eth_estimateGas':
+ if (!this._config.shouldCollectGasEstimateTraces) {
+ next();
+ } else {
+ const estimateGasData = payload.params[0];
+ next(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData));
+ }
+ return;
+
+ default:
+ next();
+ return;
+ }
+ } else {
+ next();
+ return;
+ }
+ }
+ /**
+ * Sets the subprovider's engine to the ProviderEngine it is added to.
+ * This is only called within the ProviderEngine source code, do not call
+ * directly.
+ * @param engine The ProviderEngine this subprovider is added to
+ */
+ public setEngine(engine: Provider): void {
+ super.setEngine(engine);
+ this._web3Wrapper = new Web3Wrapper(engine);
+ }
+ protected abstract async _recordTxTraceAsync(
+ address: string,
+ data: string | undefined,
+ txHash: string,
+ ): Promise<void>;
+ private async _onTransactionSentAsync(
+ txData: MaybeFakeTxData,
+ err: Error | null,
+ txHash: string | undefined,
+ cb: Callback,
+ ): Promise<void> {
+ if (!txData.isFakeTransaction) {
+ // This is a regular transaction, not a call executed as one,
+ // and we don't want it to be executed within a snapshotting period
+ await this._lock.acquire();
+ }
+ const NULL_ADDRESS = '0x0';
+ if (_.isNull(err)) {
+ const toAddress =
+ _.isUndefined(txData.to) || txData.to === NULL_ADDRESS ? constants.NEW_CONTRACT : txData.to;
+ await this._recordTxTraceAsync(toAddress, txData.data, txHash as string);
+ } else {
+ const latestBlock = await this._web3Wrapper.getBlockWithTransactionDataAsync(BlockParamLiteral.Latest);
+ const transactions = latestBlock.transactions;
+ for (const transaction of transactions) {
+ const toAddress =
+ _.isUndefined(txData.to) || txData.to === NULL_ADDRESS ? constants.NEW_CONTRACT : txData.to;
+ await this._recordTxTraceAsync(toAddress, transaction.input, transaction.hash);
+ }
+ }
+ if (!txData.isFakeTransaction) {
+ // This is a regular transaction, not a call executed as one,
+ // and we don't want it to be executed within a snapshotting period
+ this._lock.release();
+ }
+ cb();
+ }
+ private async _onCallOrGasEstimateExecutedAsync(
+ callData: Partial<CallDataRPC>,
+ _err: Error | null,
+ _callResult: string,
+ cb: Callback,
+ ): Promise<void> {
+ await this._recordCallOrGasEstimateTraceAsync(callData);
+ cb();
+ }
+ private async _recordCallOrGasEstimateTraceAsync(callData: Partial<CallDataRPC>): Promise<void> {
+ // We don't want other transactions to be executed during the snapshotting period, which is why we lock
+ // transaction execution for all transactions except our fake ones.
+ await this._lock.acquire();
+ const blockchainLifecycle = new BlockchainLifecycle(this._web3Wrapper);
+ await blockchainLifecycle.startAsync();
+ const fakeTxData = {
+ gas: BLOCK_GAS_LIMIT.toString(16), // tslint:disable-line:custom-no-magic-numbers
+ isFakeTransaction: true, // This transaction (and only it) is allowed to come through when the lock is locked
+ ...callData,
+ from: callData.from || this._defaultFromAddress,
+ };
+ try {
+ const txData = marshaller.unmarshalTxData(fakeTxData);
+ const txHash = await this._web3Wrapper.sendTransactionAsync(txData);
+ await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0);
+ } catch (err) {
+ // TODO(logvinov) Check that transaction failed and not some other exception
+ // Even if this transaction failed, we've already recorded its trace.
+ _.noop();
+ }
+ await blockchainLifecycle.revertAsync();
+ this._lock.release();
+ }
+}
diff --git a/packages/sol-tracing-utils/src/trace_collector.ts b/packages/sol-tracing-utils/src/trace_collector.ts
new file mode 100644
index 000000000..943e208cf
--- /dev/null
+++ b/packages/sol-tracing-utils/src/trace_collector.ts
@@ -0,0 +1,93 @@
+import { promisify } from '@0x/utils';
+import { stripHexPrefix } from 'ethereumjs-util';
+import * as fs from 'fs';
+import { Collector } from 'istanbul';
+import * as _ from 'lodash';
+import { getLogger, levels, Logger } from 'loglevel';
+import * as mkdirp from 'mkdirp';
+
+import { AbstractArtifactAdapter } from './artifact_adapters/abstract_artifact_adapter';
+import { constants } from './constants';
+import { parseSourceMap } from './source_maps';
+import {
+ ContractData,
+ Coverage,
+ SourceRange,
+ Subtrace,
+ TraceInfo,
+ TraceInfoExistingContract,
+ TraceInfoNewContract,
+} from './types';
+import { utils } from './utils';
+
+const mkdirpAsync = promisify<undefined>(mkdirp);
+
+export type SingleFileSubtraceHandler = (
+ contractData: ContractData,
+ subtrace: Subtrace,
+ pcToSourceRange: { [programCounter: number]: SourceRange },
+ fileIndex: number,
+) => Coverage;
+
+/**
+ * TraceCollector is used by CoverageSubprovider to compute code coverage based on collected trace data.
+ */
+export class TraceCollector {
+ private readonly _artifactAdapter: AbstractArtifactAdapter;
+ private readonly _logger: Logger;
+ private _contractsData!: ContractData[];
+ private readonly _collector = new Collector();
+ private readonly _singleFileSubtraceHandler: SingleFileSubtraceHandler;
+
+ /**
+ * Instantiates a TraceCollector instance
+ * @param artifactAdapter Adapter for used artifacts format (0x, truffle, giveth, etc.)
+ * @param isVerbose If true, we will log any unknown transactions. Otherwise we will ignore them
+ * @param singleFileSubtraceHandler A handler function for computing partial coverage for a single file & subtrace
+ */
+ constructor(
+ artifactAdapter: AbstractArtifactAdapter,
+ isVerbose: boolean,
+ singleFileSubtraceHandler: SingleFileSubtraceHandler,
+ ) {
+ this._artifactAdapter = artifactAdapter;
+ this._logger = getLogger('sol-tracing-utils');
+ this._logger.setLevel(isVerbose ? levels.TRACE : levels.ERROR);
+ this._singleFileSubtraceHandler = singleFileSubtraceHandler;
+ }
+ public async writeOutputAsync(): Promise<void> {
+ const finalCoverage = this._collector.getFinalCoverage();
+ const stringifiedCoverage = JSON.stringify(finalCoverage, null, '\t');
+ await mkdirpAsync('coverage');
+ fs.writeFileSync('coverage/coverage.json', stringifiedCoverage);
+ }
+ public async computeSingleTraceCoverageAsync(traceInfo: TraceInfo): Promise<void> {
+ if (_.isUndefined(this._contractsData)) {
+ this._contractsData = await this._artifactAdapter.collectContractsDataAsync();
+ }
+ const isContractCreation = traceInfo.address === constants.NEW_CONTRACT;
+ const bytecode = isContractCreation
+ ? (traceInfo as TraceInfoNewContract).bytecode
+ : (traceInfo as TraceInfoExistingContract).runtimeBytecode;
+ const contractData = utils.getContractDataIfExists(this._contractsData, bytecode);
+ if (_.isUndefined(contractData)) {
+ const errMsg = isContractCreation
+ ? `Unknown contract creation transaction`
+ : `Transaction to an unknown address: ${traceInfo.address}`;
+ this._logger.warn(errMsg);
+ return;
+ }
+ const bytecodeHex = stripHexPrefix(bytecode);
+ const sourceMap = isContractCreation ? contractData.sourceMap : contractData.sourceMapRuntime;
+ const pcToSourceRange = parseSourceMap(contractData.sourceCodes, sourceMap, bytecodeHex, contractData.sources);
+ for (let fileIndex = 0; fileIndex < contractData.sources.length; fileIndex++) {
+ const singleFileCoverageForTrace = this._singleFileSubtraceHandler(
+ contractData,
+ traceInfo.subtrace,
+ pcToSourceRange,
+ fileIndex,
+ );
+ this._collector.add(singleFileCoverageForTrace);
+ }
+ }
+}
diff --git a/packages/sol-tracing-utils/src/trace_info_subprovider.ts b/packages/sol-tracing-utils/src/trace_info_subprovider.ts
new file mode 100644
index 000000000..635a68f58
--- /dev/null
+++ b/packages/sol-tracing-utils/src/trace_info_subprovider.ts
@@ -0,0 +1,59 @@
+import * as _ from 'lodash';
+
+import { constants } from './constants';
+import { getTracesByContractAddress } from './trace';
+import { TraceCollectionSubprovider } from './trace_collection_subprovider';
+import { TraceInfo, TraceInfoExistingContract, TraceInfoNewContract } from './types';
+
+// TraceInfoSubprovider is extended by subproviders which need to work with one
+// TraceInfo at a time. It has one abstract method: _handleTraceInfoAsync, which
+// is called for each TraceInfo.
+export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
+ protected abstract _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void>;
+ protected async _recordTxTraceAsync(address: string, data: string | undefined, txHash: string): Promise<void> {
+ await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0);
+ const trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, {
+ disableMemory: true,
+ disableStack: false,
+ disableStorage: true,
+ });
+ const tracesByContractAddress = getTracesByContractAddress(trace.structLogs, address);
+ const subcallAddresses = _.keys(tracesByContractAddress);
+ if (address === constants.NEW_CONTRACT) {
+ for (const subcallAddress of subcallAddresses) {
+ let traceInfo: TraceInfoNewContract | TraceInfoExistingContract;
+ if (subcallAddress === 'NEW_CONTRACT') {
+ const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+ traceInfo = {
+ subtrace: traceForThatSubcall,
+ txHash,
+ address: subcallAddress,
+ bytecode: data as string,
+ };
+ } else {
+ const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
+ const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+ traceInfo = {
+ subtrace: traceForThatSubcall,
+ txHash,
+ address: subcallAddress,
+ runtimeBytecode,
+ };
+ }
+ await this._handleTraceInfoAsync(traceInfo);
+ }
+ } else {
+ for (const subcallAddress of subcallAddresses) {
+ const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
+ const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+ const traceInfo: TraceInfoExistingContract = {
+ subtrace: traceForThatSubcall,
+ txHash,
+ address: subcallAddress,
+ runtimeBytecode,
+ };
+ await this._handleTraceInfoAsync(traceInfo);
+ }
+ }
+ }
+}
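
A minimal sketch of a concrete subclass, wired the way a provider-engine based test setup might use it (the logging class, the from address and the provider-engine wiring are all hypothetical; imports assume the package is consumed as @0x/sol-tracing-utils):

import { TraceInfo, TraceInfoSubprovider } from '@0x/sol-tracing-utils';

// Hypothetical subprovider that just logs every TraceInfo it receives.
class LoggingTraceSubprovider extends TraceInfoSubprovider {
    protected async _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void> {
        console.log(`Trace for ${traceInfo.address}: ${traceInfo.subtrace.length} struct logs`);
    }
}

const subprovider = new LoggingTraceSubprovider('0x5409ed021d9299bf6814279a6a1411a7e866a631', {
    shouldCollectTransactionTraces: true,
    shouldCollectCallTraces: true,
    shouldCollectGasEstimateTraces: true,
});
// The instance would then be registered with a web3-provider-engine instance
// (engine.addProvider(subprovider)) before any other RPC subproviders.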
diff --git a/packages/sol-tracing-utils/src/types.ts b/packages/sol-tracing-utils/src/types.ts
new file mode 100644
index 000000000..54ade0400
--- /dev/null
+++ b/packages/sol-tracing-utils/src/types.ts
@@ -0,0 +1,126 @@
+import { StructLog } from 'ethereum-types';
+import * as Parser from 'solidity-parser-antlr';
+
+export interface LineColumn {
+ line: number;
+ column: number;
+}
+
+export interface SourceRange {
+ location: SingleFileSourceRange;
+ fileName: string;
+}
+
+export interface SingleFileSourceRange {
+ start: LineColumn;
+ end: LineColumn;
+}
+
+export interface LocationByOffset {
+ [offset: number]: LineColumn;
+}
+
+export interface FunctionDescription {
+ name: string;
+ line: number;
+ loc: SingleFileSourceRange;
+ skip?: boolean;
+}
+
+export type StatementDescription = SingleFileSourceRange;
+
+export interface BranchDescription {
+ line: number;
+ type: 'if' | 'switch' | 'cond-expr' | 'binary-expr';
+ locations: SingleFileSourceRange[];
+}
+
+export interface FnMap {
+ [functionId: string]: FunctionDescription;
+}
+
+export interface BranchMap {
+ [branchId: string]: BranchDescription;
+}
+
+export interface StatementMap {
+ [statementId: string]: StatementDescription;
+}
+
+export interface LineCoverage {
+ [lineNo: number]: number;
+}
+
+export interface FunctionCoverage {
+ [functionId: string]: number;
+}
+
+export interface StatementCoverage {
+ [statementId: string]: number;
+}
+
+export interface BranchCoverage {
+ [branchId: string]: number[];
+}
+
+export interface Coverage {
+ [fileName: string]: {
+ l?: LineCoverage;
+ f: FunctionCoverage;
+ s: StatementCoverage;
+ b: BranchCoverage;
+ fnMap: FnMap;
+ branchMap: BranchMap;
+ statementMap: StatementMap;
+ path: string;
+ };
+}
+
+export interface ContractData {
+ bytecode: string;
+ sourceMap: string;
+ runtimeBytecode: string;
+ sourceMapRuntime: string;
+ sourceCodes: string[];
+ sources: string[];
+}
+
+// Part of the trace executed within the same context
+export type Subtrace = StructLog[];
+
+export interface TraceInfoBase {
+ subtrace: Subtrace;
+ txHash: string;
+}
+
+export interface TraceInfoNewContract extends TraceInfoBase {
+ address: 'NEW_CONTRACT';
+ bytecode: string;
+}
+
+export interface TraceInfoExistingContract extends TraceInfoBase {
+ address: string;
+ runtimeBytecode: string;
+}
+
+export type TraceInfo = TraceInfoNewContract | TraceInfoExistingContract;
+
+export enum BlockParamLiteral {
+ Latest = 'latest',
+}
+
+export interface EvmCallStackEntry {
+ structLog: StructLog;
+ address: string;
+}
+
+export type EvmCallStack = EvmCallStackEntry[];
+
+export interface SourceSnippet {
+ source: string;
+ fileName: string;
+ type: string;
+ node: Parser.ASTNode;
+ name: string | null;
+ range: SingleFileSourceRange;
+}
diff --git a/packages/sol-tracing-utils/src/utils.ts b/packages/sol-tracing-utils/src/utils.ts
new file mode 100644
index 000000000..d8bc65e73
--- /dev/null
+++ b/packages/sol-tracing-utils/src/utils.ts
@@ -0,0 +1,87 @@
+import { addressUtils, BigNumber } from '@0x/utils';
+import { OpCode, StructLog } from 'ethereum-types';
+import { addHexPrefix } from 'ethereumjs-util';
+import * as _ from 'lodash';
+
+import { ContractData, LineColumn, SingleFileSourceRange } from './types';
+
+// This is the minimum length of valid contract bytecode. The Solidity compiler
+// metadata takes up 86 hex characters; with the '0x' prefix that makes 88.
+const MIN_CONTRACT_BYTECODE_LENGTH = 88;
+
+export const utils = {
+ compareLineColumn(lhs: LineColumn, rhs: LineColumn): number {
+ return lhs.line !== rhs.line ? lhs.line - rhs.line : lhs.column - rhs.column;
+ },
+ removeHexPrefix(hex: string): string {
+ const hexPrefix = '0x';
+ return hex.startsWith(hexPrefix) ? hex.slice(hexPrefix.length) : hex;
+ },
+ isRangeInside(childRange: SingleFileSourceRange, parentRange: SingleFileSourceRange): boolean {
+ return (
+ utils.compareLineColumn(parentRange.start, childRange.start) <= 0 &&
+ utils.compareLineColumn(childRange.end, parentRange.end) <= 0
+ );
+ },
+ bytecodeToBytecodeRegex(bytecode: string): string {
+ const bytecodeRegex = bytecode
+ // Library linking placeholder: __ConvertLib____________________________
+ .replace(/_.*_/, '.*')
+ // The last 86 characters are Solidity compiler metadata that differs between compilations
+ .replace(/.{86}$/, '')
+ // Libraries contain their own address at the beginning of the code and it's impossible to know it in advance
+ .replace(/^0x730000000000000000000000000000000000000000/, '0x73........................................');
+ // HACK: Node regexes can't be longer than 32767 characters, but contract bytecode can be. We just truncate the regexes. It's safe in practice.
+ const MAX_REGEX_LENGTH = 32767;
+ const truncatedBytecodeRegex = bytecodeRegex.slice(0, MAX_REGEX_LENGTH);
+ return truncatedBytecodeRegex;
+ },
+ getContractDataIfExists(contractsData: ContractData[], bytecode: string): ContractData | undefined {
+ if (!bytecode.startsWith('0x')) {
+ throw new Error(`0x hex prefix missing: ${bytecode}`);
+ }
+ const contractData = _.find(contractsData, contractDataCandidate => {
+ const bytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.bytecode);
+ // If the bytecode is less than the minimum length, we are probably
+ // dealing with an interface. This isn't what we're looking for.
+ if (bytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
+ return false;
+ }
+ const runtimeBytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.runtimeBytecode);
+ if (runtimeBytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
+ return false;
+ }
+ // We match against either bytecode or runtimeBytecode. Both are quasi-random strings, so
+ // collisions are practically impossible, and it lets us reuse the same lookup for both.
+ return !_.isNull(bytecode.match(bytecodeRegex)) || !_.isNull(bytecode.match(runtimeBytecodeRegex));
+ });
+ return contractData;
+ },
+ isCallLike(op: OpCode): boolean {
+ return _.includes([OpCode.CallCode, OpCode.StaticCall, OpCode.Call, OpCode.DelegateCall], op);
+ },
+ isEndOpcode(op: OpCode): boolean {
+ return _.includes([OpCode.Return, OpCode.Stop, OpCode.Revert, OpCode.Invalid, OpCode.SelfDestruct], op);
+ },
+ getAddressFromStackEntry(stackEntry: string): string {
+ const hexBase = 16;
+ return addressUtils.padZeros(new BigNumber(addHexPrefix(stackEntry)).toString(hexBase));
+ },
+ normalizeStructLogs(structLogs: StructLog[]): StructLog[] {
+ if (structLogs[0].depth === 1) {
+ // Geth uses 1-indexed depth counter whilst ganache starts from 0
+ const newStructLogs = _.map(structLogs, structLog => ({
+ ...structLog,
+ depth: structLog.depth - 1,
+ }));
+ return newStructLogs;
+ }
+ return structLogs;
+ },
+ getRange(sourceCode: string, range: SingleFileSourceRange): string {
+ const lines = sourceCode.split('\n').slice(range.start.line - 1, range.end.line);
+ lines[lines.length - 1] = lines[lines.length - 1].slice(0, range.end.column);
+ lines[0] = lines[0].slice(range.start.column);
+ return lines.join('\n');
+ },
+};
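
Finally, a small sketch of how the range helpers above behave (the import path assumes @0x/sol-tracing-utils; lines are 1-indexed and columns 0-indexed, matching getLocationByOffset):

import { utils } from '@0x/sol-tracing-utils';

// Hypothetical snippet: extract the text covered by a SingleFileSourceRange.
const source = 'contract A {\n    uint public x;\n}\n';
const range = { start: { line: 2, column: 4 }, end: { line: 2, column: 18 } };
console.log(utils.getRange(source, range)); // => 'uint public x;'
console.log(utils.compareLineColumn(range.start, range.end) < 0); // => true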