author | Leonid Logvinov <logvinov.leon@gmail.com> | 2019-01-10 21:35:13 +0800 |
---|---|---|
committer | GitHub <noreply@github.com> | 2019-01-10 21:35:13 +0800 |
commit | 6c22594882c94146519ec6e3b24d558127bd092c (patch) | |
tree | 522c5ee89e4af0948b8e645237678f46ec8d3d5c /packages/sol-tracing-utils | |
parent | 686f27a96f0cd749f6315d7edd2bb56cf1819245 (diff) | |
parent | b8e3829fdbd1f516686618562172cb45fbb63bde (diff) | |
Merge pull request #1492 from 0xProject/feature/sol-cov-sol-profiler-sol-trace-divorce
Refactor out sol-cov, sol-profiler and sol-trace into their separate packages
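
The shared package exposes the artifact adapters and source-map utilities that the split-out tools (sol-coverage, sol-profiler, sol-trace) build on. As rough orientation only — this sketch is not part of the diff, and the project path and solc version are placeholders — a consumer might wire the exported pieces together like this:

```typescript
import {
    ContractData,
    parseSourceMap,
    SourceRange,
    TruffleArtifactAdapter,
} from '@0x/sol-tracing-utils';
import { stripHexPrefix } from 'ethereumjs-util';

(async () => {
    // Hypothetical project root and solc version; both are constructor params of TruffleArtifactAdapter.
    const artifactAdapter = new TruffleArtifactAdapter('/path/to/truffle-project', '0.4.24');
    // Recompiles the project via sol-compiler and returns bytecode, source maps and sources.
    const contractsData: ContractData[] = await artifactAdapter.collectContractsDataAsync();
    for (const contractData of contractsData) {
        // Map every program counter in the deployed bytecode back to a source range.
        const pcToSourceRange: { [pc: number]: SourceRange } = parseSourceMap(
            contractData.sourceCodes,
            contractData.sourceMapRuntime,
            stripHexPrefix(contractData.runtimeBytecode),
            contractData.sources,
        );
        // A coverage, profiler or revert-trace tool can now translate trace PCs into source locations.
    }
})().catch(err => console.error(err));
```

`SolCompilerArtifactAdapter` produces the same `ContractData` shape, so the sketch applies to either adapter.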
Diffstat (limited to 'packages/sol-tracing-utils')
37 files changed, 3050 insertions, 0 deletions
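
The diff also carries over the AST-based coverage-entry collection (`collect_coverage_entries.ts`, `ast_visitor.ts`). A minimal, hypothetical usage sketch — the contract source below is invented for illustration — looks like this:

```typescript
import { collectCoverageEntries } from '@0x/sol-tracing-utils';

// A made-up contract; a `/* solcov ignore next */` comment before a block would
// exclude it from the collected entries.
const contractSource = `
pragma solidity ^0.4.24;

contract SimpleStorage {
    uint256 public value;

    function set(uint256 newValue) public {
        if (newValue > 0) {
            value = newValue;
        }
    }
}
`;

const entries = collectCoverageEntries(contractSource);
// fnMap/branchMap/statementMap are keyed by an incrementing entry id and hold
// line/column ranges, i.e. the istanbul-style maps built by ASTVisitor.
console.log(Object.keys(entries.fnMap).length, 'functions');
console.log(Object.keys(entries.branchMap).length, 'branches (the `if` above)');
console.log(Object.keys(entries.statementMap).length, 'statements');
```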
diff --git a/packages/sol-tracing-utils/.npmignore b/packages/sol-tracing-utils/.npmignore new file mode 100644 index 000000000..037786e46 --- /dev/null +++ b/packages/sol-tracing-utils/.npmignore @@ -0,0 +1,6 @@ +.* +yarn-error.log +/src/ +/scripts/ +tsconfig.json +/lib/src/monorepo_scripts/ diff --git a/packages/sol-tracing-utils/CHANGELOG.json b/packages/sol-tracing-utils/CHANGELOG.json new file mode 100644 index 000000000..c39753ea1 --- /dev/null +++ b/packages/sol-tracing-utils/CHANGELOG.json @@ -0,0 +1,407 @@ +[ + { + "version": "3.0.0", + "changes": [ + { + "note": "Move out specific tools and leave just the shared parts of the codebase", + "pr": 1492 + } + ] + }, + { + "timestamp": 1547040760, + "version": "2.1.17", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "version": "2.1.16", + "changes": [ + { + "note": "Dependencies updated" + } + ], + "timestamp": 1544739608 + }, + { + "version": "2.1.15", + "changes": [ + { + "note": "Dependencies updated" + } + ], + "timestamp": 1544570656 + }, + { + "timestamp": 1543401373, + "version": "2.1.14", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1542821676, + "version": "2.1.13", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1542208198, + "version": "2.1.12", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1542134075, + "version": "2.1.11", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1542028948, + "version": "2.1.10", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "version": "2.1.9", + "changes": [ + { + "note": "Dependencies updated" + } + ], + "timestamp": 1541740904 + }, + { + "version": "2.1.8", + "changes": [ + { + "note": "Make @types/solidity-parser-antlr a 'dependency' so it's available to users of the library", + "pr": 1105 + } + ], + "timestamp": 1539871071 + }, + { + "version": "2.1.7", + "changes": [ + { + "note": "Dependencies updated" + } + ], + "timestamp": 1538693146 + }, + { + "timestamp": 1538157789, + "version": "2.1.6", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1537907159, + "version": "2.1.5", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1537875740, + "version": "2.1.4", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1537541580, + "version": "2.1.3", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1536142250, + "version": "2.1.2", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1535377027, + "version": "2.1.1", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "version": "2.1.0", + "changes": [ + { + "note": + "Export types: `JSONRPCRequestPayload`, `Provider`, `JSONRPCErrorCallback`, `JSONRPCResponsePayload`, `JSONRPCRequestPayloadWithMethod`, `NextCallback`, `ErrorCallback`, `OnNextCompleted` and `Callback`", + "pr": 924 + } + ], + "timestamp": 1535133899 + }, + { + "version": "2.0.0", + "changes": [ + { + "note": + "Fix a bug when eth_call coverage was not computed because of silent schema validation failures", + "pr": 938 + }, + { + "note": "Make `TruffleArtifactAdapter` read the `truffle.js` config for `solc` settings", + "pr": 938 + }, + { + "note": + "Change the first param of `TruffleArtifactAdapter` to be the `projectRoot` instead of `sourcesDir`", + "pr": 938 + }, + { + "note": + "Throw a helpful error 
message if truffle artifacts were generated with a different solc version than the one passed in", + "pr": 938 + } + ], + "timestamp": 1534210131 + }, + { + "version": "1.0.3", + "changes": [ + { + "note": "Dependencies updated" + } + ], + "timestamp": 1532619515 + }, + { + "timestamp": 1532605697, + "version": "1.0.2", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1532357734, + "version": "1.0.1", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1532043000, + "version": "1.0.0", + "changes": [ + { + "note": + "Add artifact adapter as a parameter for `CoverageSubprovider`. Export `AbstractArtifactAdapter`", + "pr": 589 + }, + { + "note": "Implement `SolCompilerArtifactAdapter` and `TruffleArtifactAdapter`", + "pr": 589 + }, + { + "note": "Properly parse multi-level traces", + "pr": 589 + }, + { + "note": "Add support for solidity libraries", + "pr": 589 + }, + { + "note": "Fixed a bug causing `RegExp` to crash if contract code is longer that 32767 characters", + "pr": 675 + }, + { + "note": "Fixed a bug caused by Geth debug trace depth being 1indexed", + "pr": 675 + }, + { + "note": "Fixed a bug when the tool crashed on empty traces", + "pr": 675 + }, + { + "note": "Use `BlockchainLifecycle` to support reverts on Geth", + "pr": 675 + }, + { + "note": "Add `ProfilerSubprovider` as a hacky way to profile code using coverage tools", + "pr": 675 + }, + { + "note": "Collect traces from `estimate_gas` calls", + "pr": 675 + }, + { + "note": "Fix a race condition caused by not awaiting the transaction before getting a trace", + "pr": 675 + }, + { + "note": "Add `start`/`stop` functionality to `CoverageSubprovider` and `ProfilerSubprovider`", + "pr": 675 + }, + { + "note": "Skip interface artifacts with a warning instead of failing", + "pr": 675 + }, + { + "note": "Fix `solcVersion` regex in parameter validation", + "pr": 690 + }, + { + "note": + "Fix a bug when in `TruffleArtifactsAdapter` causing it to throw if `compiler.json` is not there", + "pr": 690 + }, + { + "note": "HUGE perf improvements", + "pr": 690 + }, + { + "note": "Create `RevertTraceSubprovider` which prints a stack trace when a `REVERT` is detected", + "pr": 705 + }, + { + "note": "Add source code snippets to stack traces printed by `RevertTraceSubprovider`", + "pr": 725 + } + ] + }, + { + "timestamp": 1531919263, + "version": "0.1.3", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1531149657, + "version": "0.1.2", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1529397769, + "version": "0.1.1", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "version": "0.1.0", + "changes": [ + { + "note": "Incorrect publish that was unpublished" + } + ], + "timestamp": 1527810075 + }, + { + "timestamp": 1527009134, + "version": "0.0.11", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1525477860, + "version": "0.0.10", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1525428773, + "version": "0.0.9", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1524044013, + "version": "0.0.8", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1523462196, + "version": "0.0.7", + "changes": [ + { + "note": "Dependencies updated" + } + ] + }, + { + "timestamp": 1522673609, + "version": "0.0.6", + "changes": [ + { + "note": "Dependencies updated" + } + ] + 
}, + { + "timestamp": 1522658513, + "version": "0.0.5", + "changes": [ + { + "note": "Dependencies updated" + } + ] + } +] diff --git a/packages/sol-tracing-utils/CHANGELOG.md b/packages/sol-tracing-utils/CHANGELOG.md new file mode 100644 index 000000000..c2bc3cd01 --- /dev/null +++ b/packages/sol-tracing-utils/CHANGELOG.md @@ -0,0 +1,162 @@ +<!-- +changelogUtils.file is auto-generated using the monorepo-scripts package. Don't edit directly. +Edit the package's CHANGELOG.json file only. +--> + +CHANGELOG + +## v2.1.17 - _January 9, 2019_ + + * Dependencies updated + +## v2.1.16 - _December 13, 2018_ + + * Dependencies updated + +## v2.1.15 - _December 11, 2018_ + + * Dependencies updated + +## v2.1.14 - _November 28, 2018_ + + * Dependencies updated + +## v2.1.13 - _November 21, 2018_ + + * Dependencies updated + +## v2.1.12 - _November 14, 2018_ + + * Dependencies updated + +## v2.1.11 - _November 13, 2018_ + + * Dependencies updated + +## v2.1.10 - _November 12, 2018_ + + * Dependencies updated + +## v2.1.9 - _November 9, 2018_ + + * Dependencies updated + +## v2.1.8 - _October 18, 2018_ + + * Make @types/solidity-parser-antlr a 'dependency' so it's available to users of the library (#1105) + +## v2.1.7 - _October 4, 2018_ + + * Dependencies updated + +## v2.1.6 - _September 28, 2018_ + + * Dependencies updated + +## v2.1.5 - _September 25, 2018_ + + * Dependencies updated + +## v2.1.4 - _September 25, 2018_ + + * Dependencies updated + +## v2.1.3 - _September 21, 2018_ + + * Dependencies updated + +## v2.1.2 - _September 5, 2018_ + + * Dependencies updated + +## v2.1.1 - _August 27, 2018_ + + * Dependencies updated + +## v2.1.0 - _August 24, 2018_ + + * Export types: `JSONRPCRequestPayload`, `Provider`, `JSONRPCErrorCallback`, `JSONRPCResponsePayload`, `JSONRPCRequestPayloadWithMethod`, `NextCallback`, `ErrorCallback`, `OnNextCompleted` and `Callback` (#924) + +## v2.0.0 - _August 14, 2018_ + + * Fix a bug when eth_call coverage was not computed because of silent schema validation failures (#938) + * Make `TruffleArtifactAdapter` read the `truffle.js` config for `solc` settings (#938) + * Change the first param of `TruffleArtifactAdapter` to be the `projectRoot` instead of `sourcesDir` (#938) + * Throw a helpful error message if truffle artifacts were generated with a different solc version than the one passed in (#938) + +## v1.0.3 - _July 26, 2018_ + + * Dependencies updated + +## v1.0.2 - _July 26, 2018_ + + * Dependencies updated + +## v1.0.1 - _July 23, 2018_ + + * Dependencies updated + +## v1.0.0 - _July 19, 2018_ + + * Add artifact adapter as a parameter for `CoverageSubprovider`. 
Export `AbstractArtifactAdapter` (#589) + * Implement `SolCompilerArtifactAdapter` and `TruffleArtifactAdapter` (#589) + * Properly parse multi-level traces (#589) + * Add support for solidity libraries (#589) + * Fixed a bug causing `RegExp` to crash if contract code is longer that 32767 characters (#675) + * Fixed a bug caused by Geth debug trace depth being 1indexed (#675) + * Fixed a bug when the tool crashed on empty traces (#675) + * Use `BlockchainLifecycle` to support reverts on Geth (#675) + * Add `ProfilerSubprovider` as a hacky way to profile code using coverage tools (#675) + * Collect traces from `estimate_gas` calls (#675) + * Fix a race condition caused by not awaiting the transaction before getting a trace (#675) + * Add `start`/`stop` functionality to `CoverageSubprovider` and `ProfilerSubprovider` (#675) + * Skip interface artifacts with a warning instead of failing (#675) + * Fix `solcVersion` regex in parameter validation (#690) + * Fix a bug when in `TruffleArtifactsAdapter` causing it to throw if `compiler.json` is not there (#690) + * HUGE perf improvements (#690) + * Create `RevertTraceSubprovider` which prints a stack trace when a `REVERT` is detected (#705) + * Add source code snippets to stack traces printed by `RevertTraceSubprovider` (#725) + +## v0.1.3 - _July 18, 2018_ + + * Dependencies updated + +## v0.1.2 - _July 9, 2018_ + + * Dependencies updated + +## v0.1.1 - _June 19, 2018_ + + * Dependencies updated + +## v0.1.0 - _May 31, 2018_ + + * Incorrect publish that was unpublished + +## v0.0.11 - _May 22, 2018_ + + * Dependencies updated + +## v0.0.10 - _May 4, 2018_ + + * Dependencies updated + +## v0.0.9 - _May 4, 2018_ + + * Dependencies updated + +## v0.0.8 - _April 18, 2018_ + + * Dependencies updated + +## v0.0.7 - _April 11, 2018_ + + * Dependencies updated + +## v0.0.6 - _April 2, 2018_ + + * Dependencies updated + +## v0.0.5 - _April 2, 2018_ + + * Dependencies updated diff --git a/packages/sol-tracing-utils/README.md b/packages/sol-tracing-utils/README.md new file mode 100644 index 000000000..0a4749b8e --- /dev/null +++ b/packages/sol-tracing-utils/README.md @@ -0,0 +1,61 @@ +## @0x/sol-tracing-utils + +Common code for all solidity trace-based tools (sol-coverage, sol-profiler, sol-trace). + +## Installation + +```bash +yarn add @0x/sol-sol-tracing-utils +``` + +## Contributing + +We welcome improvements and fixes from the wider community! To report bugs within this package, please create an issue in this repository. + +Please read our [contribution guidelines](../../CONTRIBUTING.md) before getting started. 
+ +### Install dependencies + +If you don't have yarn workspaces enabled (Yarn < v1.0) - enable them: + +```bash +yarn config set workspaces-experimental true +``` + +Then install dependencies + +```bash +yarn install +``` + +### Build + +To build this package and all other monorepo packages that it depends on, run the following from the monorepo root directory: + +```bash +PKG=@0x/sol-tracing-utils yarn build +``` + +Or continuously rebuild on change: + +```bash +PKG=@0x/sol-tracing-utils yarn watch +``` + +### Clean + +```bash +yarn clean +``` + +### Lint + +```bash +yarn lint +``` + +### Run Tests + +```bash +yarn test +``` diff --git a/packages/sol-tracing-utils/compiler.json b/packages/sol-tracing-utils/compiler.json new file mode 100644 index 000000000..a6a0c6d3a --- /dev/null +++ b/packages/sol-tracing-utils/compiler.json @@ -0,0 +1,18 @@ +{ + "contracts": ["SimpleStorage"], + "contractsDir": "test/fixtures/contracts", + "artifactsDir": "test/fixtures/artifacts", + "compilerSettings": { + "outputSelection": { + "*": { + "*": [ + "abi", + "evm.bytecode.object", + "evm.bytecode.sourceMap", + "evm.deployedBytecode.object", + "evm.deployedBytecode.sourceMap" + ] + } + } + } +} diff --git a/packages/sol-tracing-utils/coverage/.gitkeep b/packages/sol-tracing-utils/coverage/.gitkeep new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/packages/sol-tracing-utils/coverage/.gitkeep diff --git a/packages/sol-tracing-utils/package.json b/packages/sol-tracing-utils/package.json new file mode 100644 index 000000000..9486ab541 --- /dev/null +++ b/packages/sol-tracing-utils/package.json @@ -0,0 +1,87 @@ +{ + "name": "@0x/sol-tracing-utils", + "version": "2.1.17", + "engines": { + "node": ">=6.12" + }, + "description": "Common part of trace based solidity tools (sol-coverage, sol-trace, sol-profiler)", + "main": "lib/src/index.js", + "types": "lib/src/index.d.ts", + "scripts": { + "build": "yarn pre_build && tsc -b", + "build:ci": "yarn build", + "pre_build": "run-s copy_test_fixtures", + "lint": "tslint --format stylish --project .", + "test": "run-s compile_test run_mocha", + "rebuild_and_test": "run-s clean build test", + "test:coverage": "nyc npm run test --all && yarn coverage:report:lcov", + "coverage:report:lcov": "nyc report --reporter=text-lcov > coverage/lcov.info", + "test:circleci": "yarn test:coverage", + "run_mocha": "mocha --require source-map-support/register --require make-promises-safe lib/test/**/*_test.js --exit", + "clean": "shx rm -rf lib test/fixtures/artifacts src/artifacts generated_docs", + "copy_test_fixtures": "copyfiles 'test/fixtures/**/*' ./lib", + "compile_test": "sol-compiler compile", + "docs:json": "typedoc --excludePrivate --excludeExternals --target ES5 --tsconfig typedoc-tsconfig.json --json $JSON_FILE_PATH $PROJECT_FILES" + }, + "config": { + "postpublish": { + "assets": [], + "docOmitExports": [ + "ProfilerSubprovider", + "RevertTraceSubprovider" + ] + } + }, + "repository": { + "type": "git", + "url": "https://github.com/0xProject/0x-monorepo.git" + }, + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/0xProject/0x-monorepo/issues" + }, + "homepage": "https://github.com/0xProject/0x-monorepo/packages/sol-tracing-utils/README.md", + "dependencies": { + "@0x/dev-utils": "^1.0.22", + "@0x/sol-compiler": "^2.0.0", + "@0x/subproviders": "^2.1.9", + "@0x/typescript-typings": "^3.0.6", + "@0x/utils": "^2.1.1", + "@0x/web3-wrapper": "^3.2.2", + "@types/solidity-parser-antlr": "^0.2.0", + "ethereum-types": "^1.1.4", + "ethereumjs-util": 
"^5.1.1", + "glob": "^7.1.2", + "istanbul": "^0.4.5", + "lodash": "^4.17.5", + "loglevel": "^1.6.1", + "mkdirp": "^0.5.1", + "rimraf": "^2.6.2", + "semaphore-async-await": "^1.5.1", + "solidity-parser-antlr": "^0.2.12" + }, + "devDependencies": { + "@0x/tslint-config": "^2.0.0", + "@types/istanbul": "^0.4.30", + "@types/loglevel": "^1.5.3", + "@types/mkdirp": "^0.5.1", + "@types/mocha": "^2.2.42", + "@types/node": "*", + "@types/rimraf": "^2.0.2", + "chai": "^4.0.1", + "copyfiles": "^2.0.0", + "dirty-chai": "^2.0.1", + "make-promises-safe": "^1.1.0", + "mocha": "^4.1.0", + "npm-run-all": "^4.1.2", + "nyc": "^11.0.1", + "shx": "^0.2.2", + "sinon": "^4.0.0", + "tslint": "5.11.0", + "typedoc": "0.13.0", + "typescript": "3.0.1" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts new file mode 100644 index 000000000..fcc6562ad --- /dev/null +++ b/packages/sol-tracing-utils/src/artifact_adapters/abstract_artifact_adapter.ts @@ -0,0 +1,5 @@ +import { ContractData } from '../types'; + +export abstract class AbstractArtifactAdapter { + public abstract async collectContractsDataAsync(): Promise<ContractData[]>; +} diff --git a/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts new file mode 100644 index 000000000..57391abbe --- /dev/null +++ b/packages/sol-tracing-utils/src/artifact_adapters/sol_compiler_artifact_adapter.ts @@ -0,0 +1,61 @@ +import { logUtils } from '@0x/utils'; +import { CompilerOptions, ContractArtifact } from 'ethereum-types'; +import * as fs from 'fs'; +import * as glob from 'glob'; +import * as _ from 'lodash'; +import * as path from 'path'; + +import { ContractData } from '../types'; + +import { AbstractArtifactAdapter } from './abstract_artifact_adapter'; + +const CONFIG_FILE = 'compiler.json'; + +export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter { + private readonly _artifactsPath: string; + private readonly _sourcesPath: string; + /** + * Instantiates a SolCompilerArtifactAdapter + * @param artifactsPath Path to your artifacts directory + * @param sourcesPath Path to your contract sources directory + */ + constructor(artifactsPath?: string, sourcesPath?: string) { + super(); + const config: CompilerOptions = fs.existsSync(CONFIG_FILE) + ? JSON.parse(fs.readFileSync(CONFIG_FILE).toString()) + : {}; + if (_.isUndefined(artifactsPath) && _.isUndefined(config.artifactsDir)) { + throw new Error(`artifactsDir not found in ${CONFIG_FILE}`); + } + this._artifactsPath = (artifactsPath || config.artifactsDir) as string; + if (_.isUndefined(sourcesPath) && _.isUndefined(config.contractsDir)) { + throw new Error(`contractsDir not found in ${CONFIG_FILE}`); + } + this._sourcesPath = (sourcesPath || config.contractsDir) as string; + } + public async collectContractsDataAsync(): Promise<ContractData[]> { + const artifactsGlob = `${this._artifactsPath}/**/*.json`; + const artifactFileNames = glob.sync(artifactsGlob, { absolute: true }); + const contractsData: ContractData[] = []; + for (const artifactFileName of artifactFileNames) { + const artifact: ContractArtifact = JSON.parse(fs.readFileSync(artifactFileName).toString()); + if (_.isUndefined(artifact.compilerOutput.evm)) { + logUtils.warn(`${artifactFileName} doesn't contain bytecode. 
Skipping...`); + continue; + } + let sources = _.keys(artifact.sources); + sources = _.map(sources, relativeFilePath => path.resolve(this._sourcesPath, relativeFilePath)); + const sourceCodes = _.map(sources, (source: string) => fs.readFileSync(source).toString()); + const contractData = { + sourceCodes, + sources, + bytecode: artifact.compilerOutput.evm.bytecode.object, + sourceMap: artifact.compilerOutput.evm.bytecode.sourceMap, + runtimeBytecode: artifact.compilerOutput.evm.deployedBytecode.object, + sourceMapRuntime: artifact.compilerOutput.evm.deployedBytecode.sourceMap, + }; + contractsData.push(contractData); + } + return contractsData; + } +} diff --git a/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts b/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts new file mode 100644 index 000000000..bb2b15153 --- /dev/null +++ b/packages/sol-tracing-utils/src/artifact_adapters/truffle_artifact_adapter.ts @@ -0,0 +1,88 @@ +import { Compiler, CompilerOptions } from '@0x/sol-compiler'; +import * as fs from 'fs'; +import * as glob from 'glob'; +import * as path from 'path'; + +import { ContractData } from '../types'; + +import { AbstractArtifactAdapter } from './abstract_artifact_adapter'; +import { SolCompilerArtifactAdapter } from './sol_compiler_artifact_adapter'; + +const DEFAULT_TRUFFLE_ARTIFACTS_DIR = './build/contracts'; + +interface TruffleConfig { + solc?: any; + contracts_build_directory?: string; +} + +export class TruffleArtifactAdapter extends AbstractArtifactAdapter { + private readonly _solcVersion: string; + private readonly _projectRoot: string; + /** + * Instantiates a TruffleArtifactAdapter + * @param projectRoot Path to the truffle project's root directory + * @param solcVersion Solidity version with which to compile all the contracts + */ + constructor(projectRoot: string, solcVersion: string) { + super(); + this._solcVersion = solcVersion; + this._projectRoot = projectRoot; + } + public async collectContractsDataAsync(): Promise<ContractData[]> { + const artifactsDir = '.0x-artifacts'; + const contractsDir = path.join(this._projectRoot, 'contracts'); + const truffleConfig = this._getTruffleConfig(); + const solcConfig = truffleConfig.solc || {}; + const truffleArtifactsDirectory = truffleConfig.contracts_build_directory || DEFAULT_TRUFFLE_ARTIFACTS_DIR; + this._assertSolidityVersionIsCorrect(truffleArtifactsDirectory); + const compilerOptions: CompilerOptions = { + contractsDir, + artifactsDir, + compilerSettings: { + ...solcConfig, + outputSelection: { + ['*']: { + ['*']: ['abi', 'evm.bytecode.object', 'evm.deployedBytecode.object'], + }, + }, + }, + contracts: '*', + solcVersion: this._solcVersion, + }; + const compiler = new Compiler(compilerOptions); + await compiler.compileAsync(); + const solCompilerArtifactAdapter = new SolCompilerArtifactAdapter(artifactsDir, contractsDir); + const contractsDataFrom0xArtifacts = await solCompilerArtifactAdapter.collectContractsDataAsync(); + return contractsDataFrom0xArtifacts; + } + private _getTruffleConfig(): TruffleConfig { + const truffleConfigFileShort = path.resolve(path.join(this._projectRoot, 'truffle.js')); + const truffleConfigFileLong = path.resolve(path.join(this._projectRoot, 'truffle-config.js')); + if (fs.existsSync(truffleConfigFileShort)) { + const truffleConfig = require(truffleConfigFileShort); + return truffleConfig; + } else if (fs.existsSync(truffleConfigFileLong)) { + const truffleConfig = require(truffleConfigFileLong); + return truffleConfig; + } else 
{ + throw new Error( + `Neither ${truffleConfigFileShort} nor ${truffleConfigFileLong} exists. Make sure the project root is correct`, + ); + } + } + private _assertSolidityVersionIsCorrect(truffleArtifactsDirectory: string): void { + const artifactsGlob = `${truffleArtifactsDirectory}/**/*.json`; + const artifactFileNames = glob.sync(artifactsGlob, { absolute: true }); + for (const artifactFileName of artifactFileNames) { + const artifact = JSON.parse(fs.readFileSync(artifactFileName).toString()); + const compilerVersion = artifact.compiler.version; + if (!compilerVersion.startsWith(this._solcVersion)) { + throw new Error( + `${artifact.contractName} was compiled with solidity ${compilerVersion} but specified version is ${ + this._solcVersion + } making it impossible to process traces`, + ); + } + } + } +} diff --git a/packages/sol-tracing-utils/src/ast_visitor.ts b/packages/sol-tracing-utils/src/ast_visitor.ts new file mode 100644 index 000000000..e55cdf6ec --- /dev/null +++ b/packages/sol-tracing-utils/src/ast_visitor.ts @@ -0,0 +1,168 @@ +import * as _ from 'lodash'; +import * as Parser from 'solidity-parser-antlr'; + +import { BranchMap, FnMap, LocationByOffset, SingleFileSourceRange, StatementMap } from './types'; + +export interface CoverageEntriesDescription { + fnMap: FnMap; + branchMap: BranchMap; + statementMap: StatementMap; + modifiersStatementIds: number[]; +} + +enum BranchType { + If = 'if', + ConditionalExpression = 'cond-expr', + BinaryExpression = 'binary-expr', +} + +export class ASTVisitor { + private _entryId = 0; + private readonly _fnMap: FnMap = {}; + private readonly _branchMap: BranchMap = {}; + private readonly _modifiersStatementIds: number[] = []; + private readonly _statementMap: StatementMap = {}; + private readonly _locationByOffset: LocationByOffset; + private readonly _ignoreRangesBeginningAt: number[]; + // keep track of contract/function ranges that are to be ignored + // so we can also ignore any children nodes within the contract/function + private readonly _ignoreRangesWithin: Array<[number, number]> = []; + constructor(locationByOffset: LocationByOffset, ignoreRangesBeginningAt: number[] = []) { + this._locationByOffset = locationByOffset; + this._ignoreRangesBeginningAt = ignoreRangesBeginningAt; + } + public getCollectedCoverageEntries(): CoverageEntriesDescription { + const coverageEntriesDescription = { + fnMap: this._fnMap, + branchMap: this._branchMap, + statementMap: this._statementMap, + modifiersStatementIds: this._modifiersStatementIds, + }; + return coverageEntriesDescription; + } + public IfStatement(ast: Parser.IfStatement): void { + this._visitStatement(ast); + this._visitBinaryBranch(ast, ast.trueBody, ast.falseBody || ast, BranchType.If); + } + public FunctionDefinition(ast: Parser.FunctionDefinition): void { + this._visitFunctionLikeDefinition(ast); + } + public ContractDefinition(ast: Parser.ContractDefinition): void { + if (this._shouldIgnoreExpression(ast)) { + this._ignoreRangesWithin.push(ast.range as [number, number]); + } + } + public ModifierDefinition(ast: Parser.ModifierDefinition): void { + this._visitFunctionLikeDefinition(ast); + } + public ForStatement(ast: Parser.ForStatement): void { + this._visitStatement(ast); + } + public ReturnStatement(ast: Parser.ReturnStatement): void { + this._visitStatement(ast); + } + public BreakStatement(ast: Parser.BreakStatement): void { + this._visitStatement(ast); + } + public ContinueStatement(ast: Parser.ContinueStatement): void { + this._visitStatement(ast); + } + public 
EmitStatement(ast: any /* TODO: Parser.EmitStatement */): void { + this._visitStatement(ast); + } + public VariableDeclarationStatement(ast: Parser.VariableDeclarationStatement): void { + this._visitStatement(ast); + } + public Statement(ast: Parser.Statement): void { + this._visitStatement(ast); + } + public WhileStatement(ast: Parser.WhileStatement): void { + this._visitStatement(ast); + } + public SimpleStatement(ast: Parser.SimpleStatement): void { + this._visitStatement(ast); + } + public ThrowStatement(ast: Parser.ThrowStatement): void { + this._visitStatement(ast); + } + public DoWhileStatement(ast: Parser.DoWhileStatement): void { + this._visitStatement(ast); + } + public ExpressionStatement(ast: Parser.ExpressionStatement): void { + this._visitStatement(ast.expression); + } + public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void { + this._visitStatement(ast); + } + public BinaryOperation(ast: Parser.BinaryOperation): void { + const BRANCHING_BIN_OPS = ['&&', '||']; + if (_.includes(BRANCHING_BIN_OPS, ast.operator)) { + this._visitBinaryBranch(ast, ast.left, ast.right, BranchType.BinaryExpression); + } + } + public Conditional(ast: Parser.Conditional): void { + this._visitBinaryBranch(ast, ast.trueExpression, ast.falseExpression, BranchType.ConditionalExpression); + } + public ModifierInvocation(ast: Parser.ModifierInvocation): void { + const BUILTIN_MODIFIERS = ['public', 'view', 'payable', 'external', 'internal', 'pure', 'constant']; + if (!_.includes(BUILTIN_MODIFIERS, ast.name)) { + if (this._shouldIgnoreExpression(ast)) { + return; + } + this._modifiersStatementIds.push(this._entryId); + this._visitStatement(ast); + } + } + private _visitBinaryBranch( + ast: Parser.ASTNode, + left: Parser.ASTNode, + right: Parser.ASTNode, + type: BranchType, + ): void { + if (this._shouldIgnoreExpression(ast)) { + return; + } + this._branchMap[this._entryId++] = { + line: this._getExpressionRange(ast).start.line, + type, + locations: [this._getExpressionRange(left), this._getExpressionRange(right)], + }; + } + private _visitStatement(ast: Parser.ASTNode): void { + if (this._shouldIgnoreExpression(ast)) { + return; + } + this._statementMap[this._entryId++] = this._getExpressionRange(ast); + } + private _getExpressionRange(ast: Parser.ASTNode): SingleFileSourceRange { + const astRange = ast.range as [number, number]; + const start = this._locationByOffset[astRange[0]]; + const end = this._locationByOffset[astRange[1] + 1]; + const range = { + start, + end, + }; + return range; + } + private _shouldIgnoreExpression(ast: Parser.ASTNode): boolean { + const [astStart, astEnd] = ast.range as [number, number]; + const isRangeIgnored = _.some( + this._ignoreRangesWithin, + ([rangeStart, rangeEnd]: [number, number]) => astStart >= rangeStart && astEnd <= rangeEnd, + ); + return this._ignoreRangesBeginningAt.includes(astStart) || isRangeIgnored; + } + private _visitFunctionLikeDefinition(ast: Parser.ModifierDefinition | Parser.FunctionDefinition): void { + if (this._shouldIgnoreExpression(ast)) { + this._ignoreRangesWithin.push(ast.range as [number, number]); + return; + } + const loc = this._getExpressionRange(ast); + this._fnMap[this._entryId++] = { + name: ast.name, + line: loc.start.line, + loc, + }; + this._visitStatement(ast); + } +} diff --git a/packages/sol-tracing-utils/src/collect_coverage_entries.ts b/packages/sol-tracing-utils/src/collect_coverage_entries.ts new file mode 100644 index 000000000..bdbcd613e --- /dev/null +++ 
b/packages/sol-tracing-utils/src/collect_coverage_entries.ts @@ -0,0 +1,41 @@ +import * as ethUtil from 'ethereumjs-util'; +import * as _ from 'lodash'; +import * as parser from 'solidity-parser-antlr'; + +import { ASTVisitor, CoverageEntriesDescription } from './ast_visitor'; +import { getLocationByOffset } from './source_maps'; + +const IGNORE_RE = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm; + +// Parsing source code for each transaction/code is slow and therefore we cache it +const coverageEntriesBySourceHash: { [sourceHash: string]: CoverageEntriesDescription } = {}; + +export const collectCoverageEntries = (contractSource: string) => { + const sourceHash = ethUtil.sha3(contractSource).toString('hex'); + if (_.isUndefined(coverageEntriesBySourceHash[sourceHash]) && !_.isUndefined(contractSource)) { + const ast = parser.parse(contractSource, { range: true }); + const locationByOffset = getLocationByOffset(contractSource); + const ignoreRangesBegingingAt = gatherRangesToIgnore(contractSource); + const visitor = new ASTVisitor(locationByOffset, ignoreRangesBegingingAt); + parser.visit(ast, visitor); + coverageEntriesBySourceHash[sourceHash] = visitor.getCollectedCoverageEntries(); + } + const coverageEntriesDescription = coverageEntriesBySourceHash[sourceHash]; + return coverageEntriesDescription; +}; + +// Gather the start index of all code blocks preceeded by "/* solcov ignore next */" +function gatherRangesToIgnore(contractSource: string): number[] { + const ignoreRangesStart = []; + + let match; + do { + match = IGNORE_RE.exec(contractSource); + if (match) { + const matchLen = match[0].length; + ignoreRangesStart.push(match.index + matchLen); + } + } while (match); + + return ignoreRangesStart; +} diff --git a/packages/sol-tracing-utils/src/constants.ts b/packages/sol-tracing-utils/src/constants.ts new file mode 100644 index 000000000..34d62b537 --- /dev/null +++ b/packages/sol-tracing-utils/src/constants.ts @@ -0,0 +1,8 @@ +// tslint:disable:number-literal-format +export const constants = { + NEW_CONTRACT: 'NEW_CONTRACT' as 'NEW_CONTRACT', + PUSH1: 0x60, + PUSH2: 0x61, + PUSH32: 0x7f, + TIMESTAMP: 0x42, +}; diff --git a/packages/sol-tracing-utils/src/get_source_range_snippet.ts b/packages/sol-tracing-utils/src/get_source_range_snippet.ts new file mode 100644 index 000000000..f578675d3 --- /dev/null +++ b/packages/sol-tracing-utils/src/get_source_range_snippet.ts @@ -0,0 +1,185 @@ +import * as ethUtil from 'ethereumjs-util'; +import * as _ from 'lodash'; +import * as Parser from 'solidity-parser-antlr'; + +import { SingleFileSourceRange, SourceRange, SourceSnippet } from './types'; +import { utils } from './utils'; + +interface ASTInfo { + type: string; + node: Parser.ASTNode; + name: string | null; + range?: SingleFileSourceRange; +} + +// Parsing source code for each transaction/code is slow and therefore we cache it +const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {}; + +/** + * Gets the source range snippet by source range to be used by revert trace. 
+ * @param sourceRange source range + * @param sourceCode source code + */ +export function getSourceRangeSnippet(sourceRange: SourceRange, sourceCode: string): SourceSnippet | null { + const sourceHash = ethUtil.sha3(sourceCode).toString('hex'); + if (_.isUndefined(parsedSourceByHash[sourceHash])) { + parsedSourceByHash[sourceHash] = Parser.parse(sourceCode, { loc: true }); + } + const astNode = parsedSourceByHash[sourceHash]; + const visitor = new ASTInfoVisitor(); + Parser.visit(astNode, visitor); + const astInfo = visitor.getASTInfoForRange(sourceRange); + if (astInfo === null) { + return null; + } + const sourceCodeInRange = utils.getRange(sourceCode, sourceRange.location); + return { + ...astInfo, + range: astInfo.range as SingleFileSourceRange, + source: sourceCodeInRange, + fileName: sourceRange.fileName, + }; +} + +// A visitor which collects ASTInfo for most nodes in the AST. +class ASTInfoVisitor { + private readonly _astInfos: ASTInfo[] = []; + public getASTInfoForRange(sourceRange: SourceRange): ASTInfo | null { + // HACK(albrow): Sometimes the source range doesn't exactly match that + // of astInfo. To work around that we try with a +/-1 offset on + // end.column. If nothing matches even with the offset, we return null. + const offset = { + start: { + line: 0, + column: 0, + }, + end: { + line: 0, + column: 0, + }, + }; + let astInfo = this._getASTInfoForRange(sourceRange, offset); + if (astInfo !== null) { + return astInfo; + } + offset.end.column += 1; + astInfo = this._getASTInfoForRange(sourceRange, offset); + if (astInfo !== null) { + return astInfo; + } + offset.end.column -= 2; + astInfo = this._getASTInfoForRange(sourceRange, offset); + if (astInfo !== null) { + return astInfo; + } + return null; + } + public ContractDefinition(ast: Parser.ContractDefinition): void { + this._visitContractDefinition(ast); + } + public IfStatement(ast: Parser.IfStatement): void { + this._visitStatement(ast); + } + public FunctionDefinition(ast: Parser.FunctionDefinition): void { + this._visitFunctionLikeDefinition(ast); + } + public ModifierDefinition(ast: Parser.ModifierDefinition): void { + this._visitFunctionLikeDefinition(ast); + } + public ForStatement(ast: Parser.ForStatement): void { + this._visitStatement(ast); + } + public ReturnStatement(ast: Parser.ReturnStatement): void { + this._visitStatement(ast); + } + public BreakStatement(ast: Parser.BreakStatement): void { + this._visitStatement(ast); + } + public ContinueStatement(ast: Parser.ContinueStatement): void { + this._visitStatement(ast); + } + public EmitStatement(ast: any /* TODO: Parser.EmitStatement */): void { + this._visitStatement(ast); + } + public VariableDeclarationStatement(ast: Parser.VariableDeclarationStatement): void { + this._visitStatement(ast); + } + public Statement(ast: Parser.Statement): void { + this._visitStatement(ast); + } + public WhileStatement(ast: Parser.WhileStatement): void { + this._visitStatement(ast); + } + public SimpleStatement(ast: Parser.SimpleStatement): void { + this._visitStatement(ast); + } + public ThrowStatement(ast: Parser.ThrowStatement): void { + this._visitStatement(ast); + } + public DoWhileStatement(ast: Parser.DoWhileStatement): void { + this._visitStatement(ast); + } + public ExpressionStatement(ast: Parser.ExpressionStatement): void { + this._visitStatement(ast.expression); + } + public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void { + this._visitStatement(ast); + } + public ModifierInvocation(ast: Parser.ModifierInvocation): void { + const 
BUILTIN_MODIFIERS = ['public', 'view', 'payable', 'external', 'internal', 'pure', 'constant']; + if (!_.includes(BUILTIN_MODIFIERS, ast.name)) { + this._visitStatement(ast); + } + } + private _visitStatement(ast: Parser.ASTNode): void { + this._astInfos.push({ + type: ast.type, + node: ast, + name: null, + range: ast.loc, + }); + } + private _visitFunctionLikeDefinition(ast: Parser.ModifierDefinition | Parser.FunctionDefinition): void { + this._astInfos.push({ + type: ast.type, + node: ast, + name: ast.name, + range: ast.loc, + }); + } + private _visitContractDefinition(ast: Parser.ContractDefinition): void { + this._astInfos.push({ + type: ast.type, + node: ast, + name: ast.name, + range: ast.loc, + }); + } + private _getASTInfoForRange(sourceRange: SourceRange, offset: SingleFileSourceRange): ASTInfo | null { + const offsetSourceRange = { + ...sourceRange, + location: { + start: { + line: sourceRange.location.start.line + offset.start.line, + column: sourceRange.location.start.column + offset.start.column, + }, + end: { + line: sourceRange.location.end.line + offset.end.line, + column: sourceRange.location.end.column + offset.end.column, + }, + }, + }; + for (const astInfo of this._astInfos) { + const astInfoRange = astInfo.range as SingleFileSourceRange; + if ( + astInfoRange.start.column === offsetSourceRange.location.start.column && + astInfoRange.start.line === offsetSourceRange.location.start.line && + astInfoRange.end.column === offsetSourceRange.location.end.column && + astInfoRange.end.line === offsetSourceRange.location.end.line + ) { + return astInfo; + } + } + return null; + } +} diff --git a/packages/sol-tracing-utils/src/globals.d.ts b/packages/sol-tracing-utils/src/globals.d.ts new file mode 100644 index 000000000..e799b3529 --- /dev/null +++ b/packages/sol-tracing-utils/src/globals.d.ts @@ -0,0 +1,7 @@ +// tslint:disable:completed-docs +declare module '*.json' { + const json: any; + /* tslint:disable */ + export default json; + /* tslint:enable */ +} diff --git a/packages/sol-tracing-utils/src/index.ts b/packages/sol-tracing-utils/src/index.ts new file mode 100644 index 000000000..413e5305e --- /dev/null +++ b/packages/sol-tracing-utils/src/index.ts @@ -0,0 +1,39 @@ +export { SolCompilerArtifactAdapter } from './artifact_adapters/sol_compiler_artifact_adapter'; +export { TruffleArtifactAdapter } from './artifact_adapters/truffle_artifact_adapter'; +export { AbstractArtifactAdapter } from './artifact_adapters/abstract_artifact_adapter'; + +export { + ContractData, + EvmCallStack, + SourceRange, + SourceSnippet, + StatementCoverage, + StatementDescription, + BranchCoverage, + BranchDescription, + Subtrace, + TraceInfo, + Coverage, + LineColumn, + LineCoverage, + FunctionCoverage, + FunctionDescription, + SingleFileSourceRange, + BranchMap, + EvmCallStackEntry, + FnMap, + LocationByOffset, + StatementMap, + TraceInfoBase, + TraceInfoExistingContract, + TraceInfoNewContract, +} from './types'; +export { collectCoverageEntries } from './collect_coverage_entries'; +export { TraceCollector, SingleFileSubtraceHandler } from './trace_collector'; +export { TraceInfoSubprovider } from './trace_info_subprovider'; +export { utils } from './utils'; +export { constants } from './constants'; +export { parseSourceMap } from './source_maps'; +export { getSourceRangeSnippet } from './get_source_range_snippet'; +export { getRevertTrace } from './revert_trace'; +export { TraceCollectionSubprovider } from './trace_collection_subprovider'; diff --git 
a/packages/sol-tracing-utils/src/instructions.ts b/packages/sol-tracing-utils/src/instructions.ts new file mode 100644 index 000000000..40987dbe5 --- /dev/null +++ b/packages/sol-tracing-utils/src/instructions.ts @@ -0,0 +1,23 @@ +import { constants } from './constants'; + +const isPush = (inst: number) => inst >= constants.PUSH1 && inst <= constants.PUSH32; + +const pushDataLength = (inst: number) => inst - constants.PUSH1 + 1; + +const instructionLength = (inst: number) => (isPush(inst) ? pushDataLength(inst) + 1 : 1); + +export const getPcToInstructionIndexMapping = (bytecode: Uint8Array) => { + const result: { + [programCounter: number]: number; + } = {}; + let byteIndex = 0; + let instructionIndex = 0; + while (byteIndex < bytecode.length) { + const instruction = bytecode[byteIndex]; + const length = instructionLength(instruction); + result[byteIndex] = instructionIndex; + byteIndex += length; + instructionIndex += 1; + } + return result; +}; diff --git a/packages/sol-tracing-utils/src/revert_trace.ts b/packages/sol-tracing-utils/src/revert_trace.ts new file mode 100644 index 000000000..4d474120c --- /dev/null +++ b/packages/sol-tracing-utils/src/revert_trace.ts @@ -0,0 +1,95 @@ +import { logUtils } from '@0x/utils'; +import { OpCode, StructLog } from 'ethereum-types'; + +import * as _ from 'lodash'; + +import { EvmCallStack } from './types'; +import { utils } from './utils'; + +/** + * Converts linear trace to a call stack by following calls and returns + * @param structLogs Linear trace + * @param startAddress The address of initial context + */ +export function getRevertTrace(structLogs: StructLog[], startAddress: string): EvmCallStack { + const evmCallStack: EvmCallStack = []; + const addressStack = [startAddress]; + if (_.isEmpty(structLogs)) { + return []; + } + const normalizedStructLogs = utils.normalizeStructLogs(structLogs); + // tslint:disable-next-line:prefer-for-of + for (let i = 0; i < normalizedStructLogs.length; i++) { + const structLog = normalizedStructLogs[i]; + if (structLog.depth !== addressStack.length - 1) { + throw new Error("Malformed trace. Trace depth doesn't match call stack depth"); + } + // After that check we have a guarantee that call stack is never empty + // If it would: callStack.length - 1 === structLog.depth === -1 + // That means that we can always safely pop from it + + if (utils.isCallLike(structLog.op)) { + const currentAddress = _.last(addressStack) as string; + const jumpAddressOffset = 1; + const newAddress = utils.getAddressFromStackEntry( + structLog.stack[structLog.stack.length - jumpAddressOffset - 1], + ); + + // Sometimes calls don't change the execution context (current address). When we do a transfer to an + // externally owned account - it does the call and immediately returns because there is no fallback + // function. We manually check if the call depth had changed to handle that case. + const nextStructLog = normalizedStructLogs[i + 1]; + if (nextStructLog.depth !== structLog.depth) { + addressStack.push(newAddress); + evmCallStack.push({ + address: currentAddress, + structLog, + }); + } + } else if (utils.isEndOpcode(structLog.op) && structLog.op !== OpCode.Revert) { + // Just like with calls, sometimes returns/stops don't change the execution context (current address). 
+ const nextStructLog = normalizedStructLogs[i + 1]; + if (_.isUndefined(nextStructLog) || nextStructLog.depth !== structLog.depth) { + evmCallStack.pop(); + addressStack.pop(); + } + if (structLog.op === OpCode.SelfDestruct) { + // After contract execution, we look at all sub-calls to external contracts, and for each one, fetch + // the bytecode and compute the coverage for the call. If the contract is destroyed with a call + // to `selfdestruct`, we are unable to fetch it's bytecode and compute coverage. + // TODO: Refactor this logic to fetch the sub-called contract bytecode before the selfdestruct is called + // in order to handle this edge-case. + logUtils.warn( + "Detected a selfdestruct. We currently do not support that scenario. We'll just skip the trace part for a destructed contract", + ); + } + } else if (structLog.op === OpCode.Revert) { + evmCallStack.push({ + address: _.last(addressStack) as string, + structLog, + }); + return evmCallStack; + } else if (structLog.op === OpCode.Create) { + // TODO: Extract the new contract address from the stack and handle that scenario + logUtils.warn( + "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace", + ); + return []; + } else { + if (structLog !== _.last(normalizedStructLogs)) { + const nextStructLog = normalizedStructLogs[i + 1]; + if (nextStructLog.depth === structLog.depth) { + continue; + } else if (nextStructLog.depth === structLog.depth - 1) { + addressStack.pop(); + } else { + throw new Error('Malformed trace. Unexpected call depth change'); + } + } + } + } + if (evmCallStack.length !== 0) { + logUtils.warn('Malformed trace. Call stack non empty at the end. (probably out of gas)'); + } + return []; +} diff --git a/packages/sol-tracing-utils/src/source_maps.ts b/packages/sol-tracing-utils/src/source_maps.ts new file mode 100644 index 000000000..af0fb4035 --- /dev/null +++ b/packages/sol-tracing-utils/src/source_maps.ts @@ -0,0 +1,91 @@ +import * as _ from 'lodash'; + +import { getPcToInstructionIndexMapping } from './instructions'; +import { LocationByOffset, SourceRange } from './types'; + +const RADIX = 10; + +export interface SourceLocation { + offset: number; + length: number; + fileIndex: number; +} + +/** + * Receives a string with newlines and returns a map of byte offset to LineColumn + * @param str A string to process + */ +export function getLocationByOffset(str: string): LocationByOffset { + const locationByOffset: LocationByOffset = { 0: { line: 1, column: 0 } }; + let currentOffset = 0; + for (const char of str.split('')) { + const location = locationByOffset[currentOffset]; + const isNewline = char === '\n'; + locationByOffset[currentOffset + 1] = { + line: location.line + (isNewline ? 1 : 0), + column: isNewline ? 0 : location.column + 1, + }; + currentOffset++; + } + return locationByOffset; +} + +/** + * Parses a sourcemap string. 
+ * The solidity sourcemap format is documented here: https://github.com/ethereum/solidity/blob/develop/docs/miscellaneous.rst#source-mappings + * @param sourceCodes sources contents + * @param srcMap source map string + * @param bytecodeHex contract bytecode + * @param sources sources file names + */ +export function parseSourceMap( + sourceCodes: string[], + srcMap: string, + bytecodeHex: string, + sources: string[], +): { [programCounter: number]: SourceRange } { + const bytecode = Uint8Array.from(Buffer.from(bytecodeHex, 'hex')); + const pcToInstructionIndex: { [programCounter: number]: number } = getPcToInstructionIndexMapping(bytecode); + const locationByOffsetByFileIndex = _.map(sourceCodes, s => (_.isUndefined(s) ? {} : getLocationByOffset(s))); + const entries = srcMap.split(';'); + let lastParsedEntry: SourceLocation = {} as any; + const instructionIndexToSourceRange: { [instructionIndex: number]: SourceRange } = {}; + _.each(entries, (entry: string, i: number) => { + // tslint:disable-next-line:no-unused-variable + const [instructionIndexStrIfExists, lengthStrIfExists, fileIndexStrIfExists, jumpTypeStrIfExists] = entry.split( + ':', + ); + const instructionIndexIfExists = parseInt(instructionIndexStrIfExists, RADIX); + const lengthIfExists = parseInt(lengthStrIfExists, RADIX); + const fileIndexIfExists = parseInt(fileIndexStrIfExists, RADIX); + const offset = _.isNaN(instructionIndexIfExists) ? lastParsedEntry.offset : instructionIndexIfExists; + const length = _.isNaN(lengthIfExists) ? lastParsedEntry.length : lengthIfExists; + const fileIndex = _.isNaN(fileIndexIfExists) ? lastParsedEntry.fileIndex : fileIndexIfExists; + const parsedEntry = { + offset, + length, + fileIndex, + }; + if (parsedEntry.fileIndex !== -1 && !_.isUndefined(locationByOffsetByFileIndex[parsedEntry.fileIndex])) { + const sourceRange = { + location: { + start: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset], + end: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset + parsedEntry.length], + }, + fileName: sources[parsedEntry.fileIndex], + }; + instructionIndexToSourceRange[i] = sourceRange; + } else { + // Some assembly code generated by Solidity can't be mapped back to a line of source code. + // Source: https://github.com/ethereum/solidity/issues/3629 + } + lastParsedEntry = parsedEntry; + }); + const pcsToSourceRange: { [programCounter: number]: SourceRange } = {}; + for (const programCounterKey of _.keys(pcToInstructionIndex)) { + const pc = parseInt(programCounterKey, RADIX); + const instructionIndex: number = pcToInstructionIndex[pc]; + pcsToSourceRange[pc] = instructionIndexToSourceRange[instructionIndex]; + } + return pcsToSourceRange; +} diff --git a/packages/sol-tracing-utils/src/trace.ts b/packages/sol-tracing-utils/src/trace.ts new file mode 100644 index 000000000..770080af3 --- /dev/null +++ b/packages/sol-tracing-utils/src/trace.ts @@ -0,0 +1,104 @@ +import { logUtils } from '@0x/utils'; +import { OpCode, StructLog } from 'ethereum-types'; +import * as _ from 'lodash'; + +import { utils } from './utils'; + +export interface TraceByContractAddress { + [contractAddress: string]: StructLog[]; +} + +/** + * Converts linear stack trace to `TraceByContractAddress`. 
+ * @param structLogs stack trace + * @param startAddress initial context address + */ +export function getTracesByContractAddress(structLogs: StructLog[], startAddress: string): TraceByContractAddress { + const traceByContractAddress: TraceByContractAddress = {}; + let currentTraceSegment = []; + const addressStack = [startAddress]; + if (_.isEmpty(structLogs)) { + return traceByContractAddress; + } + const normalizedStructLogs = utils.normalizeStructLogs(structLogs); + // tslint:disable-next-line:prefer-for-of + for (let i = 0; i < normalizedStructLogs.length; i++) { + const structLog = normalizedStructLogs[i]; + if (structLog.depth !== addressStack.length - 1) { + throw new Error("Malformed trace. Trace depth doesn't match call stack depth"); + } + // After that check we have a guarantee that call stack is never empty + // If it would: callStack.length - 1 === structLog.depth === -1 + // That means that we can always safely pop from it + currentTraceSegment.push(structLog); + + if (utils.isCallLike(structLog.op)) { + const currentAddress = _.last(addressStack) as string; + const jumpAddressOffset = 1; + const newAddress = utils.getAddressFromStackEntry( + structLog.stack[structLog.stack.length - jumpAddressOffset - 1], + ); + + // Sometimes calls don't change the execution context (current address). When we do a transfer to an + // externally owned account - it does the call and immediately returns because there is no fallback + // function. We manually check if the call depth had changed to handle that case. + const nextStructLog = normalizedStructLogs[i + 1]; + if (nextStructLog.depth !== structLog.depth) { + addressStack.push(newAddress); + traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat( + currentTraceSegment, + ); + currentTraceSegment = []; + } + } else if (utils.isEndOpcode(structLog.op)) { + const currentAddress = addressStack.pop() as string; + traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat( + currentTraceSegment, + ); + currentTraceSegment = []; + if (structLog.op === OpCode.SelfDestruct) { + // After contract execution, we look at all sub-calls to external contracts, and for each one, fetch + // the bytecode and compute the coverage for the call. If the contract is destroyed with a call + // to `selfdestruct`, we are unable to fetch it's bytecode and compute coverage. + // TODO: Refactor this logic to fetch the sub-called contract bytecode before the selfdestruct is called + // in order to handle this edge-case. + logUtils.warn( + "Detected a selfdestruct. We currently do not support that scenario. We'll just skip the trace part for a destructed contract", + ); + } + } else if (structLog.op === OpCode.Create) { + // TODO: Extract the new contract address from the stack and handle that scenario + logUtils.warn( + "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace", + ); + return traceByContractAddress; + } else { + if (structLog !== _.last(normalizedStructLogs)) { + const nextStructLog = normalizedStructLogs[i + 1]; + if (nextStructLog.depth === structLog.depth) { + continue; + } else if (nextStructLog.depth === structLog.depth - 1) { + const currentAddress = addressStack.pop() as string; + traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat( + currentTraceSegment, + ); + currentTraceSegment = []; + } else { + throw new Error('Malformed trace. 
Unexpected call depth change'); + } + } + } + } + if (addressStack.length !== 0) { + logUtils.warn('Malformed trace. Call stack non empty at the end'); + } + if (currentTraceSegment.length !== 0) { + const currentAddress = addressStack.pop() as string; + traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat( + currentTraceSegment, + ); + currentTraceSegment = []; + logUtils.warn('Malformed trace. Current trace segment non empty at the end'); + } + return traceByContractAddress; +} diff --git a/packages/sol-tracing-utils/src/trace_collection_subprovider.ts b/packages/sol-tracing-utils/src/trace_collection_subprovider.ts new file mode 100644 index 000000000..25e38768d --- /dev/null +++ b/packages/sol-tracing-utils/src/trace_collection_subprovider.ts @@ -0,0 +1,188 @@ +import { BlockchainLifecycle } from '@0x/dev-utils'; +import { Callback, ErrorCallback, NextCallback, Subprovider } from '@0x/subproviders'; +import { CallDataRPC, marshaller, Web3Wrapper } from '@0x/web3-wrapper'; +import { JSONRPCRequestPayload, Provider, TxData } from 'ethereum-types'; +import * as _ from 'lodash'; +import { Lock } from 'semaphore-async-await'; + +import { constants } from './constants'; +import { BlockParamLiteral } from './types'; + +interface MaybeFakeTxData extends TxData { + isFakeTransaction?: boolean; +} + +const BLOCK_GAS_LIMIT = 6000000; + +export interface TraceCollectionSubproviderConfig { + shouldCollectTransactionTraces: boolean; + shouldCollectCallTraces: boolean; + shouldCollectGasEstimateTraces: boolean; +} + +// Because there is no notion of a call trace in the Ethereum rpc - we collect them in a rather non-obvious/hacky way. +// On each call - we create a snapshot, execute the call as a transaction, get the trace, revert the snapshot. +// That allows us to avoid influencing test behaviour. + +/** + * This class implements the [web3-provider-engine](https://github.com/MetaMask/provider-engine) subprovider interface. + * It collects traces of all transactions that were sent and all calls that were executed through JSON RPC. It must + * be extended by implementing the _recordTxTraceAsync method which is called for every transaction. + */ +export abstract class TraceCollectionSubprovider extends Subprovider { + protected _web3Wrapper!: Web3Wrapper; + // Lock is used to not accept normal transactions while doing call/snapshot magic because they'll be reverted later otherwise + private readonly _lock = new Lock(); + private readonly _defaultFromAddress: string; + private _isEnabled = true; + private readonly _config: TraceCollectionSubproviderConfig; + /** + * Instantiates a TraceCollectionSubprovider instance + * @param defaultFromAddress default from address to use when sending transactions + */ + constructor(defaultFromAddress: string, config: TraceCollectionSubproviderConfig) { + super(); + this._defaultFromAddress = defaultFromAddress; + this._config = config; + } + /** + * Starts trace collection + */ + public start(): void { + this._isEnabled = true; + } + /** + * Stops trace collection + */ + public stop(): void { + this._isEnabled = false; + } + /** + * This method conforms to the web3-provider-engine interface. + * It is called internally by the ProviderEngine when it is this subproviders + * turn to handle a JSON RPC request. 
+     * @param payload JSON RPC payload
+     * @param next Callback to call if this subprovider decides not to handle the request
+     * @param _end Callback to call if this subprovider handled the request and wants to pass back the response.
+     */
+    // tslint:disable-next-line:prefer-function-over-method async-suffix
+    public async handleRequest(payload: JSONRPCRequestPayload, next: NextCallback, _end: ErrorCallback): Promise<void> {
+        if (this._isEnabled) {
+            switch (payload.method) {
+                case 'eth_sendTransaction':
+                    if (!this._config.shouldCollectTransactionTraces) {
+                        next();
+                    } else {
+                        const txData = payload.params[0];
+                        next(this._onTransactionSentAsync.bind(this, txData));
+                    }
+                    return;
+
+                case 'eth_call':
+                    if (!this._config.shouldCollectCallTraces) {
+                        next();
+                    } else {
+                        const callData = payload.params[0];
+                        next(this._onCallOrGasEstimateExecutedAsync.bind(this, callData));
+                    }
+                    return;
+
+                case 'eth_estimateGas':
+                    if (!this._config.shouldCollectGasEstimateTraces) {
+                        next();
+                    } else {
+                        const estimateGasData = payload.params[0];
+                        next(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData));
+                    }
+                    return;
+
+                default:
+                    next();
+                    return;
+            }
+        } else {
+            next();
+            return;
+        }
+    }
+    /**
+     * Sets the subprovider's engine to the ProviderEngine it is added to.
+     * This is only called within the ProviderEngine source code, do not call
+     * directly.
+     * @param engine The ProviderEngine this subprovider is added to
+     */
+    public setEngine(engine: Provider): void {
+        super.setEngine(engine);
+        this._web3Wrapper = new Web3Wrapper(engine);
+    }
+    protected abstract async _recordTxTraceAsync(
+        address: string,
+        data: string | undefined,
+        txHash: string,
+    ): Promise<void>;
+    private async _onTransactionSentAsync(
+        txData: MaybeFakeTxData,
+        err: Error | null,
+        txHash: string | undefined,
+        cb: Callback,
+    ): Promise<void> {
+        if (!txData.isFakeTransaction) {
+            // This transaction is a regular transaction, not a call executed as one.
+            // And we don't want it to be executed within a snapshotting period
+            await this._lock.acquire();
+        }
+        const NULL_ADDRESS = '0x0';
+        if (_.isNull(err)) {
+            const toAddress =
+                _.isUndefined(txData.to) || txData.to === NULL_ADDRESS ? constants.NEW_CONTRACT : txData.to;
+            await this._recordTxTraceAsync(toAddress, txData.data, txHash as string);
+        } else {
+            const latestBlock = await this._web3Wrapper.getBlockWithTransactionDataAsync(BlockParamLiteral.Latest);
+            const transactions = latestBlock.transactions;
+            for (const transaction of transactions) {
+                const toAddress =
+                    _.isUndefined(txData.to) || txData.to === NULL_ADDRESS ? constants.NEW_CONTRACT : txData.to;
+                await this._recordTxTraceAsync(toAddress, transaction.input, transaction.hash);
+            }
+        }
+        if (!txData.isFakeTransaction) {
+            // This transaction is a regular transaction, not a call executed as one.
+            // And we don't want it to be executed within a snapshotting period
+            this._lock.release();
+        }
+        cb();
+    }
+    private async _onCallOrGasEstimateExecutedAsync(
+        callData: Partial<CallDataRPC>,
+        _err: Error | null,
+        _callResult: string,
+        cb: Callback,
+    ): Promise<void> {
+        await this._recordCallOrGasEstimateTraceAsync(callData);
+        cb();
+    }
+    private async _recordCallOrGasEstimateTraceAsync(callData: Partial<CallDataRPC>): Promise<void> {
+        // We don't want other transactions to be executed during the snapshotting period, that's why we lock the
+        // transaction execution for all transactions except our fake ones. 
+ await this._lock.acquire(); + const blockchainLifecycle = new BlockchainLifecycle(this._web3Wrapper); + await blockchainLifecycle.startAsync(); + const fakeTxData = { + gas: BLOCK_GAS_LIMIT.toString(16), // tslint:disable-line:custom-no-magic-numbers + isFakeTransaction: true, // This transaction (and only it) is allowed to come through when the lock is locked + ...callData, + from: callData.from || this._defaultFromAddress, + }; + try { + const txData = marshaller.unmarshalTxData(fakeTxData); + const txHash = await this._web3Wrapper.sendTransactionAsync(txData); + await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0); + } catch (err) { + // TODO(logvinov) Check that transaction failed and not some other exception + // Even if this transaction failed - we've already recorded it's trace. + _.noop(); + } + await blockchainLifecycle.revertAsync(); + this._lock.release(); + } +} diff --git a/packages/sol-tracing-utils/src/trace_collector.ts b/packages/sol-tracing-utils/src/trace_collector.ts new file mode 100644 index 000000000..943e208cf --- /dev/null +++ b/packages/sol-tracing-utils/src/trace_collector.ts @@ -0,0 +1,93 @@ +import { promisify } from '@0x/utils'; +import { stripHexPrefix } from 'ethereumjs-util'; +import * as fs from 'fs'; +import { Collector } from 'istanbul'; +import * as _ from 'lodash'; +import { getLogger, levels, Logger } from 'loglevel'; +import * as mkdirp from 'mkdirp'; + +import { AbstractArtifactAdapter } from './artifact_adapters/abstract_artifact_adapter'; +import { constants } from './constants'; +import { parseSourceMap } from './source_maps'; +import { + ContractData, + Coverage, + SourceRange, + Subtrace, + TraceInfo, + TraceInfoExistingContract, + TraceInfoNewContract, +} from './types'; +import { utils } from './utils'; + +const mkdirpAsync = promisify<undefined>(mkdirp); + +export type SingleFileSubtraceHandler = ( + contractData: ContractData, + subtrace: Subtrace, + pcToSourceRange: { [programCounter: number]: SourceRange }, + fileIndex: number, +) => Coverage; + +/** + * TraceCollector is used by CoverageSubprovider to compute code coverage based on collected trace data. + */ +export class TraceCollector { + private readonly _artifactAdapter: AbstractArtifactAdapter; + private readonly _logger: Logger; + private _contractsData!: ContractData[]; + private readonly _collector = new Collector(); + private readonly _singleFileSubtraceHandler: SingleFileSubtraceHandler; + + /** + * Instantiates a TraceCollector instance + * @param artifactAdapter Adapter for used artifacts format (0x, truffle, giveth, etc.) + * @param isVerbose If true, we will log any unknown transactions. Otherwise we will ignore them + * @param singleFileSubtraceHandler A handler function for computing partial coverage for a single file & subtrace + */ + constructor( + artifactAdapter: AbstractArtifactAdapter, + isVerbose: boolean, + singleFileSubtraceHandler: SingleFileSubtraceHandler, + ) { + this._artifactAdapter = artifactAdapter; + this._logger = getLogger('sol-tracing-utils'); + this._logger.setLevel(isVerbose ? 
levels.TRACE : levels.ERROR); + this._singleFileSubtraceHandler = singleFileSubtraceHandler; + } + public async writeOutputAsync(): Promise<void> { + const finalCoverage = this._collector.getFinalCoverage(); + const stringifiedCoverage = JSON.stringify(finalCoverage, null, '\t'); + await mkdirpAsync('coverage'); + fs.writeFileSync('coverage/coverage.json', stringifiedCoverage); + } + public async computeSingleTraceCoverageAsync(traceInfo: TraceInfo): Promise<void> { + if (_.isUndefined(this._contractsData)) { + this._contractsData = await this._artifactAdapter.collectContractsDataAsync(); + } + const isContractCreation = traceInfo.address === constants.NEW_CONTRACT; + const bytecode = isContractCreation + ? (traceInfo as TraceInfoNewContract).bytecode + : (traceInfo as TraceInfoExistingContract).runtimeBytecode; + const contractData = utils.getContractDataIfExists(this._contractsData, bytecode); + if (_.isUndefined(contractData)) { + const errMsg = isContractCreation + ? `Unknown contract creation transaction` + : `Transaction to an unknown address: ${traceInfo.address}`; + this._logger.warn(errMsg); + return; + } + const bytecodeHex = stripHexPrefix(bytecode); + const sourceMap = isContractCreation ? contractData.sourceMap : contractData.sourceMapRuntime; + const pcToSourceRange = parseSourceMap(contractData.sourceCodes, sourceMap, bytecodeHex, contractData.sources); + for (let fileIndex = 0; fileIndex < contractData.sources.length; fileIndex++) { + const singleFileCoverageForTrace = this._singleFileSubtraceHandler( + contractData, + traceInfo.subtrace, + pcToSourceRange, + fileIndex, + ); + this._collector.add(singleFileCoverageForTrace); + } + } +} diff --git a/packages/sol-tracing-utils/src/trace_info_subprovider.ts b/packages/sol-tracing-utils/src/trace_info_subprovider.ts new file mode 100644 index 000000000..635a68f58 --- /dev/null +++ b/packages/sol-tracing-utils/src/trace_info_subprovider.ts @@ -0,0 +1,59 @@ +import * as _ from 'lodash'; + +import { constants } from './constants'; +import { getTracesByContractAddress } from './trace'; +import { TraceCollectionSubprovider } from './trace_collection_subprovider'; +import { TraceInfo, TraceInfoExistingContract, TraceInfoNewContract } from './types'; + +// TraceInfoSubprovider is extended by subproviders which need to work with one +// TraceInfo at a time. It has one abstract method: _handleTraceInfoAsync, which +// is called for each TraceInfo. 
+export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider { + protected abstract _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void>; + protected async _recordTxTraceAsync(address: string, data: string | undefined, txHash: string): Promise<void> { + await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0); + const trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, { + disableMemory: true, + disableStack: false, + disableStorage: true, + }); + const tracesByContractAddress = getTracesByContractAddress(trace.structLogs, address); + const subcallAddresses = _.keys(tracesByContractAddress); + if (address === constants.NEW_CONTRACT) { + for (const subcallAddress of subcallAddresses) { + let traceInfo: TraceInfoNewContract | TraceInfoExistingContract; + if (subcallAddress === 'NEW_CONTRACT') { + const traceForThatSubcall = tracesByContractAddress[subcallAddress]; + traceInfo = { + subtrace: traceForThatSubcall, + txHash, + address: subcallAddress, + bytecode: data as string, + }; + } else { + const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress); + const traceForThatSubcall = tracesByContractAddress[subcallAddress]; + traceInfo = { + subtrace: traceForThatSubcall, + txHash, + address: subcallAddress, + runtimeBytecode, + }; + } + await this._handleTraceInfoAsync(traceInfo); + } + } else { + for (const subcallAddress of subcallAddresses) { + const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress); + const traceForThatSubcall = tracesByContractAddress[subcallAddress]; + const traceInfo: TraceInfoExistingContract = { + subtrace: traceForThatSubcall, + txHash, + address: subcallAddress, + runtimeBytecode, + }; + await this._handleTraceInfoAsync(traceInfo); + } + } + } +} diff --git a/packages/sol-tracing-utils/src/types.ts b/packages/sol-tracing-utils/src/types.ts new file mode 100644 index 000000000..54ade0400 --- /dev/null +++ b/packages/sol-tracing-utils/src/types.ts @@ -0,0 +1,126 @@ +import { StructLog } from 'ethereum-types'; +import * as Parser from 'solidity-parser-antlr'; + +export interface LineColumn { + line: number; + column: number; +} + +export interface SourceRange { + location: SingleFileSourceRange; + fileName: string; +} + +export interface SingleFileSourceRange { + start: LineColumn; + end: LineColumn; +} + +export interface LocationByOffset { + [offset: number]: LineColumn; +} + +export interface FunctionDescription { + name: string; + line: number; + loc: SingleFileSourceRange; + skip?: boolean; +} + +export type StatementDescription = SingleFileSourceRange; + +export interface BranchDescription { + line: number; + type: 'if' | 'switch' | 'cond-expr' | 'binary-expr'; + locations: SingleFileSourceRange[]; +} + +export interface FnMap { + [functionId: string]: FunctionDescription; +} + +export interface BranchMap { + [branchId: string]: BranchDescription; +} + +export interface StatementMap { + [statementId: string]: StatementDescription; +} + +export interface LineCoverage { + [lineNo: number]: number; +} + +export interface FunctionCoverage { + [functionId: string]: number; +} + +export interface StatementCoverage { + [statementId: string]: number; +} + +export interface BranchCoverage { + [branchId: string]: number[]; +} + +export interface Coverage { + [fineName: string]: { + l?: LineCoverage; + f: FunctionCoverage; + s: StatementCoverage; + b: BranchCoverage; + fnMap: FnMap; + branchMap: BranchMap; + statementMap: StatementMap; + path: string; + }; +} + +export 
interface ContractData {
+    bytecode: string;
+    sourceMap: string;
+    runtimeBytecode: string;
+    sourceMapRuntime: string;
+    sourceCodes: string[];
+    sources: string[];
+}
+
+// Part of the trace executed within the same context
+export type Subtrace = StructLog[];
+
+export interface TraceInfoBase {
+    subtrace: Subtrace;
+    txHash: string;
+}
+
+export interface TraceInfoNewContract extends TraceInfoBase {
+    address: 'NEW_CONTRACT';
+    bytecode: string;
+}
+
+export interface TraceInfoExistingContract extends TraceInfoBase {
+    address: string;
+    runtimeBytecode: string;
+}
+
+export type TraceInfo = TraceInfoNewContract | TraceInfoExistingContract;
+
+export enum BlockParamLiteral {
+    Latest = 'latest',
+}
+
+export interface EvmCallStackEntry {
+    structLog: StructLog;
+    address: string;
+}
+
+export type EvmCallStack = EvmCallStackEntry[];
+
+export interface SourceSnippet {
+    source: string;
+    fileName: string;
+    type: string;
+    node: Parser.ASTNode;
+    name: string | null;
+    range: SingleFileSourceRange;
+}
diff --git a/packages/sol-tracing-utils/src/utils.ts b/packages/sol-tracing-utils/src/utils.ts
new file mode 100644
index 000000000..d8bc65e73
--- /dev/null
+++ b/packages/sol-tracing-utils/src/utils.ts
@@ -0,0 +1,87 @@
+import { addressUtils, BigNumber } from '@0x/utils';
+import { OpCode, StructLog } from 'ethereum-types';
+import { addHexPrefix } from 'ethereumjs-util';
+import * as _ from 'lodash';
+
+import { ContractData, LineColumn, SingleFileSourceRange } from './types';
+
+// This is the minimum length of valid contract bytecode. The Solidity compiler
+// metadata takes up the last 86 hex characters. With the '0x' prefix, we get 88.
+const MIN_CONTRACT_BYTECODE_LENGTH = 88;
+
+export const utils = {
+    compareLineColumn(lhs: LineColumn, rhs: LineColumn): number {
+        return lhs.line !== rhs.line ? lhs.line - rhs.line : lhs.column - rhs.column;
+    },
+    removeHexPrefix(hex: string): string {
+        const hexPrefix = '0x';
+        return hex.startsWith(hexPrefix) ? hex.slice(hexPrefix.length) : hex;
+    },
+    isRangeInside(childRange: SingleFileSourceRange, parentRange: SingleFileSourceRange): boolean {
+        return (
+            utils.compareLineColumn(parentRange.start, childRange.start) <= 0 &&
+            utils.compareLineColumn(childRange.end, parentRange.end) <= 0
+        );
+    },
+    bytecodeToBytecodeRegex(bytecode: string): string {
+        const bytecodeRegex = bytecode
+            // Library linking placeholder: __ConvertLib____________________________
+            .replace(/_.*_/, '.*')
+            // The last 86 characters are Solidity compiler metadata that differs between compilations
+            .replace(/.{86}$/, '')
+            // Libraries contain their own address at the beginning of the code and it's impossible to know it in advance
+            .replace(/^0x730000000000000000000000000000000000000000/, '0x73........................................');
+        // HACK: Node regexes can't be longer than 32767 characters. Contract bytecode can be. We just truncate the regexes. It's safe in practice.
+        const MAX_REGEX_LENGTH = 32767;
+        const truncatedBytecodeRegex = bytecodeRegex.slice(0, MAX_REGEX_LENGTH);
+        return truncatedBytecodeRegex;
+    },
+    getContractDataIfExists(contractsData: ContractData[], bytecode: string): ContractData | undefined {
+        if (!bytecode.startsWith('0x')) {
+            throw new Error(`0x hex prefix missing: ${bytecode}`);
+        }
+        const contractData = _.find(contractsData, contractDataCandidate => {
+            const bytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.bytecode);
+            // If the bytecode is less than the minimum length, we are probably
+            // dealing with an interface. 
This isn't what we're looking for. + if (bytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) { + return false; + } + const runtimeBytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.runtimeBytecode); + if (runtimeBytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) { + return false; + } + // We use that function to find by bytecode or runtimeBytecode. Those are quasi-random strings so + // collisions are practically impossible and it allows us to reuse that code + return !_.isNull(bytecode.match(bytecodeRegex)) || !_.isNull(bytecode.match(runtimeBytecodeRegex)); + }); + return contractData; + }, + isCallLike(op: OpCode): boolean { + return _.includes([OpCode.CallCode, OpCode.StaticCall, OpCode.Call, OpCode.DelegateCall], op); + }, + isEndOpcode(op: OpCode): boolean { + return _.includes([OpCode.Return, OpCode.Stop, OpCode.Revert, OpCode.Invalid, OpCode.SelfDestruct], op); + }, + getAddressFromStackEntry(stackEntry: string): string { + const hexBase = 16; + return addressUtils.padZeros(new BigNumber(addHexPrefix(stackEntry)).toString(hexBase)); + }, + normalizeStructLogs(structLogs: StructLog[]): StructLog[] { + if (structLogs[0].depth === 1) { + // Geth uses 1-indexed depth counter whilst ganache starts from 0 + const newStructLogs = _.map(structLogs, structLog => ({ + ...structLog, + depth: structLog.depth - 1, + })); + return newStructLogs; + } + return structLogs; + }, + getRange(sourceCode: string, range: SingleFileSourceRange): string { + const lines = sourceCode.split('\n').slice(range.start.line - 1, range.end.line); + lines[lines.length - 1] = lines[lines.length - 1].slice(0, range.end.column); + lines[0] = lines[0].slice(range.start.column); + return lines.join('\n'); + }, +}; diff --git a/packages/sol-tracing-utils/test/collect_coverage_entries_test.ts b/packages/sol-tracing-utils/test/collect_coverage_entries_test.ts new file mode 100644 index 000000000..7832ec316 --- /dev/null +++ b/packages/sol-tracing-utils/test/collect_coverage_entries_test.ts @@ -0,0 +1,155 @@ +import * as chai from 'chai'; +import * as fs from 'fs'; +import * as _ from 'lodash'; +import 'mocha'; +import * as path from 'path'; + +import { collectCoverageEntries } from '../src/collect_coverage_entries'; +import { utils } from '../src/utils'; + +const expect = chai.expect; + +describe('Collect coverage entries', () => { + describe('#collectCoverageEntries', () => { + it('correctly collects coverage entries for Simplest contract', () => { + const simplestContractBaseName = 'Simplest.sol'; + const simplestContractFileName = path.resolve(__dirname, 'fixtures/contracts', simplestContractBaseName); + const simplestContract = fs.readFileSync(simplestContractFileName).toString(); + const coverageEntries = collectCoverageEntries(simplestContract); + expect(coverageEntries.fnMap).to.be.deep.equal({}); + expect(coverageEntries.branchMap).to.be.deep.equal({}); + expect(coverageEntries.statementMap).to.be.deep.equal({}); + expect(coverageEntries.modifiersStatementIds).to.be.deep.equal([]); + }); + it('correctly collects coverage entries for SimpleStorage contract', () => { + const simpleStorageContractBaseName = 'SimpleStorage.sol'; + const simpleStorageContractFileName = path.resolve( + __dirname, + 'fixtures/contracts', + simpleStorageContractBaseName, + ); + const simpleStorageContract = fs.readFileSync(simpleStorageContractFileName).toString(); + const coverageEntries = collectCoverageEntries(simpleStorageContract); + const fnIds = _.keys(coverageEntries.fnMap); + 
expect(coverageEntries.fnMap[fnIds[0]].name).to.be.equal('set'); + // tslint:disable-next-line:custom-no-magic-numbers + expect(coverageEntries.fnMap[fnIds[0]].line).to.be.equal(5); + const setFunction = `function set(uint x) { + storedData = x; + }`; + expect(utils.getRange(simpleStorageContract, coverageEntries.fnMap[fnIds[0]].loc)).to.be.equal(setFunction); + expect(coverageEntries.fnMap[fnIds[1]].name).to.be.equal('get'); + // tslint:disable-next-line:custom-no-magic-numbers + expect(coverageEntries.fnMap[fnIds[1]].line).to.be.equal(8); + const getFunction = `function get() constant returns (uint retVal) { + return storedData; + }`; + expect(utils.getRange(simpleStorageContract, coverageEntries.fnMap[fnIds[1]].loc)).to.be.equal(getFunction); + expect(coverageEntries.branchMap).to.be.deep.equal({}); + const statementIds = _.keys(coverageEntries.statementMap); + expect(utils.getRange(simpleStorageContract, coverageEntries.statementMap[statementIds[1]])).to.be.equal( + 'storedData = x', + ); + expect(utils.getRange(simpleStorageContract, coverageEntries.statementMap[statementIds[3]])).to.be.equal( + 'return storedData;', + ); + expect(coverageEntries.modifiersStatementIds).to.be.deep.equal([]); + }); + it('correctly collects coverage entries for AllSolidityFeatures contract', () => { + const simpleStorageContractBaseName = 'AllSolidityFeatures.sol'; + const simpleStorageContractFileName = path.resolve( + __dirname, + 'fixtures/contracts', + simpleStorageContractBaseName, + ); + const simpleStorageContract = fs.readFileSync(simpleStorageContractFileName).toString(); + const coverageEntries = collectCoverageEntries(simpleStorageContract); + const fnDescriptions = _.values(coverageEntries.fnMap); + const fnNames = _.map(fnDescriptions, fnDescription => fnDescription.name); + const expectedFnNames = [ + 'f', + 'c', + 'test', + 'getChoice', + 'Base', + 'Derived', + 'f', + 'f', + '', + 'g', + 'setData', + 'getData', + 'sendHalf', + 'insert', + 'remove', + 'contains', + 'iterate_start', + 'iterate_valid', + 'iterate_advance', + 'iterate_get', + 'insert', + 'sum', + 'restricted', + 'DualIndex', + 'set', + 'transfer_ownership', + 'lookup', + '', + '', + 'sum', + 'someFunction', + 'fun', + 'at', + 'test', + 'get', + 'returnNumber', + 'alloc', + 'ham', + 'getMyTuple', + 'ham', + 'abstain', + 'foobar', + 'foobar', + 'a', + ]; + expect(fnNames).to.be.deep.equal(expectedFnNames); + + const branchDescriptions = _.values(coverageEntries.branchMap); + const branchLines = _.map(branchDescriptions, branchDescription => branchDescription.line); + // tslint:disable-next-line:custom-no-magic-numbers + expect(branchLines).to.be.deep.equal([94, 115, 119, 130, 151, 187]); + const branchTypes = _.map(branchDescriptions, branchDescription => branchDescription.type); + expect(branchTypes).to.be.deep.equal(['if', 'if', 'if', 'if', 'binary-expr', 'if']); + }); + + it('correctly ignores all coverage entries for Ignore contract', () => { + const solcovIgnoreContractBaseName = 'SolcovIgnore.sol'; + const solcovIgnoreContractFileName = path.resolve( + __dirname, + 'fixtures/contracts', + solcovIgnoreContractBaseName, + ); + const solcovIgnoreContract = fs.readFileSync(solcovIgnoreContractFileName).toString(); + const coverageEntries = collectCoverageEntries(solcovIgnoreContract); + const fnIds = _.keys(coverageEntries.fnMap); + + expect(fnIds.length).to.be.equal(1); + expect(coverageEntries.fnMap[fnIds[0]].name).to.be.equal('set'); + // tslint:disable-next-line:custom-no-magic-numbers + 
expect(coverageEntries.fnMap[fnIds[0]].line).to.be.equal(6); + const setFunction = `function set(uint x) public { + /* solcov ignore next */ + storedData = x; + }`; + expect(utils.getRange(solcovIgnoreContract, coverageEntries.fnMap[fnIds[0]].loc)).to.be.equal(setFunction); + + expect(coverageEntries.branchMap).to.be.deep.equal({}); + const statementIds = _.keys(coverageEntries.statementMap); + expect(utils.getRange(solcovIgnoreContract, coverageEntries.statementMap[statementIds[0]])).to.be.equal( + setFunction, + ); + expect(statementIds.length).to.be.equal(1); + expect(coverageEntries.modifiersStatementIds.length).to.be.equal(0); + }); + }); +}); diff --git a/packages/sol-tracing-utils/test/fixtures/contracts/AllSolidityFeatures.sol b/packages/sol-tracing-utils/test/fixtures/contracts/AllSolidityFeatures.sol new file mode 100644 index 000000000..21137347e --- /dev/null +++ b/packages/sol-tracing-utils/test/fixtures/contracts/AllSolidityFeatures.sol @@ -0,0 +1,413 @@ +// Examples taken from the Solidity documentation online. + +// for pragma version numbers, see https://docs.npmjs.com/misc/semver#versions +pragma solidity 0.4.0; +pragma solidity ^0.4.0; + +import "SomeFile.sol"; +import "SomeFile.sol" as SomeOtherFile; +import * as SomeSymbol from "AnotherFile.sol"; +import {symbol1 as alias, symbol2} from "File.sol"; + +interface i { + function f(); +} + +contract c { + function c() + { + val1 = 1 wei; // 1 + val2 = 1 szabo; // 1 * 10 ** 12 + val3 = 1 finney; // 1 * 10 ** 15 + val4 = 1 ether; // 1 * 10 ** 18 + } + uint256 val1; + uint256 val2; + uint256 val3; + uint256 val4; +} + +contract test { + enum ActionChoices { GoLeft, GoRight, GoStraight, SitStill } + + function test() + { + choices = ActionChoices.GoStraight; + } + function getChoice() returns (uint d) + { + d = uint256(choices); + } + ActionChoices choices; +} + +contract Base { + function Base(uint i) + { + m_i = i; + } + uint public m_i; +} +contract Derived is Base(0) { + function Derived(uint i) Base(i) {} +} + +contract C { + uint248 x; // 31 bytes: slot 0, offset 0 + uint16 y; // 2 bytes: slot 1, offset 0 (does not fit in slot 0) + uint240 z; // 30 bytes: slot 1, offset 2 bytes + uint8 a; // 1 byte: slot 2, offset 0 bytes + struct S { + uint8 a; // 1 byte, slot +0, offset 0 bytes + uint256 b; // 32 bytes, slot +1, offset 0 bytes (does not fit) + } + S structData; // 2 slots, slot 3, offset 0 bytes (does not really apply) + uint8 alpha; // 1 byte, slot 4 (start new slot after struct) + uint16[3] beta; // 3*16 bytes, slots 5+6 (start new slot for array) + uint8 gamma; // 1 byte, slot 7 (start new slot after array) +} + +contract test { + function f(uint x, uint y) returns (uint z) { + var c = x + 3; + var b = 7 + (c * (8 - 7)) - x; + return -(-b | 0); + } +} + +contract test { + function f(uint x, uint y) returns (uint z) { + return 10; + } +} + +contract c { + function () returns (uint) { return g(8); } + function g(uint pos) internal returns (uint) { setData(pos, 8); return getData(pos); } + function setData(uint pos, uint value) internal { data[pos] = value; } + function getData(uint pos) internal { return data[pos]; } + mapping(uint => uint) data; +} + +contract Sharer { + function sendHalf(address addr) returns (uint balance) { + if (!addr.send(msg.value/2)) + throw; // also reverts the transfer to Sharer + return address(this).balance; + } +} + +/// @dev Models a modifiable and iterable set of uint values. 
+library IntegerSet +{ + struct data + { + /// Mapping item => index (or zero if not present) + mapping(uint => uint) index; + /// Items by index (index 0 is invalid), items with index[item] == 0 are invalid. + uint[] items; + /// Number of stored items. + uint size; + } + function insert(data storage self, uint value) returns (bool alreadyPresent) + { + uint index = self.index[value]; + if (index > 0) + return true; + else + { + if (self.items.length == 0) self.items.length = 1; + index = self.items.length++; + self.items[index] = value; + self.index[value] = index; + self.size++; + return false; + } + } + function remove(data storage self, uint value) returns (bool success) + { + uint index = self.index[value]; + if (index == 0) + return false; + delete self.index[value]; + delete self.items[index]; + self.size --; + } + function contains(data storage self, uint value) returns (bool) + { + return self.index[value] > 0; + } + function iterate_start(data storage self) returns (uint index) + { + return iterate_advance(self, 0); + } + function iterate_valid(data storage self, uint index) returns (bool) + { + return index < self.items.length; + } + function iterate_advance(data storage self, uint index) returns (uint r_index) + { + index++; + while (iterate_valid(self, index) && self.index[self.items[index]] == index) + index++; + return index; + } + function iterate_get(data storage self, uint index) returns (uint value) + { + return self.items[index]; + } +} + +/// How to use it: +contract User +{ + /// Just a struct holding our data. + IntegerSet.data data; + /// Insert something + function insert(uint v) returns (uint size) + { + /// Sends `data` via reference, so IntegerSet can modify it. + IntegerSet.insert(data, v); + /// We can access members of the struct - but we should take care not to mess with them. + return data.size; + } + /// Computes the sum of all stored data. + function sum() returns (uint s) + { + for (var i = IntegerSet.iterate_start(data); IntegerSet.iterate_valid(data, i); i = IntegerSet.iterate_advance(data, i)) + s += IntegerSet.iterate_get(data, i); + } +} + +// This broke it at one point (namely the modifiers). +contract DualIndex { + mapping(uint => mapping(uint => uint)) data; + address public admin; + + modifier restricted { if (msg.sender == admin) _; } + + function DualIndex() { + admin = msg.sender; + } + + function set(uint key1, uint key2, uint value) restricted { + uint[2][4] memory defaults; // "memory" broke things at one time. + data[key1][key2] = value; + } + + function transfer_ownership(address _admin) restricted { + admin = _admin; + } + + function lookup(uint key1, uint key2) returns(uint) { + return data[key1][key2]; + } +} + +contract A { + +} + +contract B { + +} + +contract C is A, B { + +} + +contract TestPrivate +{ + uint private value; +} + +contract TestInternal +{ + uint internal value; +} + +contract FromSolparse is A, B, TestPrivate, TestInternal { + function() { + uint a = 6 ** 9; + var (x) = 100; + uint y = 2 days; + } +} + +contract CommentedOutFunction { + // FYI: This empty function, as well as the commented + // out function below (bad code) is important to this test. 
+ function() { + + } + + // function something() + // uint x = 10; + // } +} + +library VarHasBrackets { + string constant specialRight = "}"; + //string storage specialLeft = "{"; +} + +library UsingExampleLibrary { + function sum(uint[] storage self) returns (uint s) { + for (uint i = 0; i < self.length; i++) + s += self[i]; + } +} + +contract UsingExampleContract { + using UsingExampleLibrary for uint[]; +} + +contract NewStuff { + uint[] b; + + function someFunction() payable { + string storage a = hex"ab1248fe"; + b[2+2]; + } +} + +// modifier with expression +contract MyContract { + function fun() mymodifier(foo.bar()) {} +} + +library GetCode { + function at(address _addr) returns (bytes o_code) { + assembly { + // retrieve the size of the code, this needs assembly + let size := extcodesize(_addr) + // allocate output byte array - this could also be done without assembly + // by using o_code = new bytes(size) + o_code := mload(0x40) + // new "memory end" including padding + mstore(0x40, add(o_code, and(add(add(size, 0x20), 0x1f), not(0x1f)))) + // store length in memory + mstore(o_code, size) + // actually retrieve the code, this needs assembly + extcodecopy(_addr, add(o_code, 0x20), 0, size) + } + } +} + +contract assemblyLocalBinding { + function test(){ + assembly { + let v := 1 + let x := 0x00 + let y := x + let z := "hello" + } + } +} + +contract assemblyReturn { + uint a = 10; + + function get() constant returns(uint) { + assembly { + mstore(0x40, sload(0)) + byte(0) + address(0) + return(0x40,32) + } + } +} + +contract usesConst { + uint const = 0; +} + +contract memoryArrays { + uint seven = 7; + + function returnNumber(uint number) returns (uint){ + return number; + } + + function alloc() { + uint[] memory a = new uint[](7); + uint[] memory b = new uint[](returnNumber(seven)); + } +} + +contract DeclarativeExpressions { + uint a; + uint b = 7; + uint b2=0; + uint public c; + uint constant public d; + uint public constant e; + uint private constant f = 7; + struct S { uint q;} + + function ham(S storage s1, uint[] storage arr) internal { + uint x; + uint y = 7; + S storage s2 = s1; + uint[] memory stor; + uint[] storage stor2 = arr; + } +} + +contract VariableDeclarationTuple { + function getMyTuple() returns (bool, bool){ + return (true, false); + } + + function ham (){ + var (x, y) = (10, 20); + var (a, b) = getMyTuple(); + var (,c) = (10, 20); + var (d,,) = (10, 20, 30); + var (,e,,f,) = (10, 20, 30, 40, 50); + + var ( + num1, num2, + num3, ,num5 + ) = (10, 20, 30, 40, 50); + } +} + +contract TypeIndexSpacing { + uint [ 7 ] x; + uint [] y; +} + +contract Ballot { + + struct Voter { + uint weight; + bool voted; + } + + function abstain() returns (bool) { + return false; + } + + function foobar() payable owner (myPrice) returns (uint[], address myAdd, string[] names) {} + function foobar() payable owner (myPrice) returns (uint[], address myAdd, string[] names); + + Voter you = Voter(1, true); + + Voter me = Voter({ + weight: 2, + voted: abstain() + }); + + Voter airbnb = Voter({ + weight: 2, + voted: true, + }); +} + +contract multilineReturn { + function a() returns (uint x) { + return + 5; + } +} diff --git a/packages/sol-tracing-utils/test/fixtures/contracts/SimpleStorage.sol b/packages/sol-tracing-utils/test/fixtures/contracts/SimpleStorage.sol new file mode 100644 index 000000000..e4b4ac246 --- /dev/null +++ b/packages/sol-tracing-utils/test/fixtures/contracts/SimpleStorage.sol @@ -0,0 +1,11 @@ +pragma solidity ^0.4.21; + +contract SimpleStorage { + uint public 
storedData; + function set(uint x) { + storedData = x; + } + function get() constant returns (uint retVal) { + return storedData; + } +} diff --git a/packages/sol-tracing-utils/test/fixtures/contracts/Simplest.sol b/packages/sol-tracing-utils/test/fixtures/contracts/Simplest.sol new file mode 100644 index 000000000..d71016e07 --- /dev/null +++ b/packages/sol-tracing-utils/test/fixtures/contracts/Simplest.sol @@ -0,0 +1,2 @@ +contract Simplest { +} diff --git a/packages/sol-tracing-utils/test/fixtures/contracts/SolcovIgnore.sol b/packages/sol-tracing-utils/test/fixtures/contracts/SolcovIgnore.sol new file mode 100644 index 000000000..a7977ffb4 --- /dev/null +++ b/packages/sol-tracing-utils/test/fixtures/contracts/SolcovIgnore.sol @@ -0,0 +1,22 @@ +pragma solidity ^0.4.21; + +contract SolcovIgnore { + uint public storedData; + + function set(uint x) public { + /* solcov ignore next */ + storedData = x; + } + + /* solcov ignore next */ + function get() constant public returns (uint retVal) { + return storedData; + } +} + +/* solcov ignore next */ +contract Ignore { + function ignored() public returns (bool) { + return false; + } +} diff --git a/packages/sol-tracing-utils/test/instructions_test.ts b/packages/sol-tracing-utils/test/instructions_test.ts new file mode 100644 index 000000000..058053cf9 --- /dev/null +++ b/packages/sol-tracing-utils/test/instructions_test.ts @@ -0,0 +1,19 @@ +import * as chai from 'chai'; +import 'mocha'; + +import { constants } from '../src/constants'; +import { getPcToInstructionIndexMapping } from '../src/instructions'; + +const expect = chai.expect; + +describe('instructions', () => { + describe('#getPcToInstructionIndexMapping', () => { + it('correctly maps pcs to instruction indexed', () => { + // tslint:disable-next-line:custom-no-magic-numbers + const bytecode = new Uint8Array([constants.PUSH1, 42, constants.PUSH2, 1, 2, constants.TIMESTAMP]); + const pcToInstruction = getPcToInstructionIndexMapping(bytecode); + const expectedPcToInstruction = { '0': 0, '2': 1, '5': 2 }; + expect(pcToInstruction).to.be.deep.equal(expectedPcToInstruction); + }); + }); +}); diff --git a/packages/sol-tracing-utils/test/sol_compiler_artifact_adapter_test.ts b/packages/sol-tracing-utils/test/sol_compiler_artifact_adapter_test.ts new file mode 100644 index 000000000..9c58d2cef --- /dev/null +++ b/packages/sol-tracing-utils/test/sol_compiler_artifact_adapter_test.ts @@ -0,0 +1,29 @@ +import * as chai from 'chai'; +import * as _ from 'lodash'; +import 'mocha'; +import * as path from 'path'; + +import { SolCompilerArtifactAdapter } from '../src/artifact_adapters/sol_compiler_artifact_adapter'; + +const expect = chai.expect; + +describe('SolCompilerArtifactAdapter', () => { + describe('#collectContractsData', () => { + it('correctly collects contracts data', async () => { + const artifactsPath = path.resolve(__dirname, 'fixtures/artifacts'); + const sourcesPath = path.resolve(__dirname, 'fixtures/contracts'); + const zeroExArtifactsAdapter = new SolCompilerArtifactAdapter(artifactsPath, sourcesPath); + const contractsData = await zeroExArtifactsAdapter.collectContractsDataAsync(); + _.forEach(contractsData, contractData => { + expect(contractData).to.have.keys([ + 'sourceCodes', + 'sources', + 'sourceMap', + 'sourceMapRuntime', + 'bytecode', + 'runtimeBytecode', + ]); + }); + }); + }); +}); diff --git a/packages/sol-tracing-utils/test/source_maps_test.ts b/packages/sol-tracing-utils/test/source_maps_test.ts new file mode 100644 index 000000000..5820bedd7 --- /dev/null +++ 
b/packages/sol-tracing-utils/test/source_maps_test.ts @@ -0,0 +1,71 @@ +import * as chai from 'chai'; +import * as fs from 'fs'; +import * as _ from 'lodash'; +import 'mocha'; +import * as path from 'path'; + +import { getLocationByOffset, parseSourceMap } from '../src/source_maps'; + +const expect = chai.expect; + +const simplestContractBaseName = 'Simplest.sol'; +const simplestContractFileName = path.resolve(__dirname, 'fixtures/contracts', simplestContractBaseName); +const simplestContract = fs.readFileSync(simplestContractFileName).toString(); + +describe('source maps', () => { + describe('#getLocationByOffset', () => { + it('correctly computes location by offset', () => { + const locationByOffset = getLocationByOffset(simplestContract); + const expectedLocationByOffset = { + '0': { line: 1, column: 0 }, + '1': { line: 1, column: 1 }, + '2': { line: 1, column: 2 }, + '3': { line: 1, column: 3 }, + '4': { line: 1, column: 4 }, + '5': { line: 1, column: 5 }, + '6': { line: 1, column: 6 }, + '7': { line: 1, column: 7 }, + '8': { line: 1, column: 8 }, + '9': { line: 1, column: 9 }, + '10': { line: 1, column: 10 }, + '11': { line: 1, column: 11 }, + '12': { line: 1, column: 12 }, + '13': { line: 1, column: 13 }, + '14': { line: 1, column: 14 }, + '15': { line: 1, column: 15 }, + '16': { line: 1, column: 16 }, + '17': { line: 1, column: 17 }, + '18': { line: 1, column: 18 }, + '19': { line: 1, column: 19 }, + '20': { line: 2, column: 0 }, + '21': { line: 2, column: 1 }, + '22': { line: 3, column: 0 }, + }; + expect(locationByOffset).to.be.deep.equal(expectedLocationByOffset); + }); + }); + describe('#parseSourceMap', () => { + it('correctly parses the source map', () => { + // This is the source map and bytecode for an empty contract like Example.sol + const srcMap = '0:21:0:-;;;;;;;;;;;;;;;;;'; + const bytecodeHex = + '60606040523415600e57600080fd5b603580601b6000396000f3006060604052600080fd00a165627a7a72305820377cdef690e46589f40efeef14d8ef73504af059fb3fd46f1da3cd2fc52ef7890029'; + const sources = [simplestContractBaseName]; + const pcToSourceRange = parseSourceMap([simplestContract], srcMap, bytecodeHex, sources); + const expectedSourceRange = { + location: { + start: { line: 1, column: 0 }, + end: { line: 2, column: 1 }, + }, + fileName: simplestContractBaseName, + }; + _.forEach(pcToSourceRange, sourceRange => { + // Solidity source maps are too short and we map some instructions to undefined + // Source: https://github.com/ethereum/solidity/issues/3741 + if (!_.isUndefined(sourceRange)) { + expect(sourceRange).to.be.deep.equal(expectedSourceRange); + } + }); + }); + }); +}); diff --git a/packages/sol-tracing-utils/test/trace_test.ts b/packages/sol-tracing-utils/test/trace_test.ts new file mode 100644 index 000000000..7a034362c --- /dev/null +++ b/packages/sol-tracing-utils/test/trace_test.ts @@ -0,0 +1,55 @@ +import * as chai from 'chai'; +import { OpCode, StructLog } from 'ethereum-types'; +import * as _ from 'lodash'; +import 'mocha'; + +import { getTracesByContractAddress } from '../src/trace'; + +const expect = chai.expect; + +const DEFAULT_STRUCT_LOG: StructLog = { + depth: 0, + error: '', + gas: 0, + gasCost: 0, + memory: [], + op: OpCode.Invalid, + pc: 0, + stack: [], + storage: {}, +}; + +function addDefaultStructLogFields(compactStructLog: Partial<StructLog> & { op: OpCode; depth: number }): StructLog { + return { ...DEFAULT_STRUCT_LOG, ...compactStructLog }; +} + +describe('Trace', () => { + describe('#getTracesByContractAddress', () => { + it('correctly splits trace by 
contract address', () => { + const delegateCallAddress = '0x0000000000000000000000000000000000000002'; + const trace = [ + { + op: OpCode.DelegateCall, + stack: [delegateCallAddress, '0x'], + depth: 0, + }, + { + op: OpCode.Return, + depth: 1, + }, + { + op: OpCode.Return, + depth: 0, + }, + ]; + const fullTrace = _.map(trace, compactStructLog => addDefaultStructLogFields(compactStructLog)); + const startAddress = '0x0000000000000000000000000000000000000001'; + const traceByContractAddress = getTracesByContractAddress(fullTrace, startAddress); + const expectedTraceByContractAddress = { + [startAddress]: [fullTrace[0], fullTrace[2]], + [delegateCallAddress]: [fullTrace[1]], + }; + expect(traceByContractAddress).to.be.deep.equal(expectedTraceByContractAddress); + }); + }); +}); diff --git a/packages/sol-tracing-utils/test/utils_test.ts b/packages/sol-tracing-utils/test/utils_test.ts new file mode 100644 index 000000000..6fc8fcfe1 --- /dev/null +++ b/packages/sol-tracing-utils/test/utils_test.ts @@ -0,0 +1,53 @@ +import * as chai from 'chai'; +import * as dirtyChai from 'dirty-chai'; +import 'mocha'; + +import { utils } from '../src/utils'; + +chai.use(dirtyChai); +const expect = chai.expect; + +describe('utils', () => { + describe('#compareLineColumn', () => { + it('correctly compares LineColumns', () => { + expect(utils.compareLineColumn({ line: 1, column: 3 }, { line: 1, column: 4 })).to.be.lessThan(0); + expect(utils.compareLineColumn({ line: 1, column: 4 }, { line: 1, column: 3 })).to.be.greaterThan(0); + expect(utils.compareLineColumn({ line: 1, column: 3 }, { line: 1, column: 3 })).to.be.equal(0); + expect(utils.compareLineColumn({ line: 0, column: 2 }, { line: 1, column: 0 })).to.be.lessThan(0); + expect(utils.compareLineColumn({ line: 1, column: 0 }, { line: 0, column: 2 })).to.be.greaterThan(0); + }); + }); + + describe('#isRangeInside', () => { + it('returns true if inside', () => { + expect( + utils.isRangeInside( + { start: { line: 1, column: 3 }, end: { line: 1, column: 4 } }, + { start: { line: 1, column: 2 }, end: { line: 1, column: 5 } }, + ), + ).to.be.true(); + }); + it('returns true if the same', () => { + expect( + utils.isRangeInside( + { start: { line: 1, column: 3 }, end: { line: 1, column: 4 } }, + { start: { line: 1, column: 3 }, end: { line: 1, column: 4 } }, + ), + ).to.be.true(); + }); + it('returns false if not inside', () => { + expect( + utils.isRangeInside( + { start: { line: 1, column: 3 }, end: { line: 1, column: 4 } }, + { start: { line: 1, column: 4 }, end: { line: 1, column: 4 } }, + ), + ).to.be.false(); + expect( + utils.isRangeInside( + { start: { line: 1, column: 3 }, end: { line: 1, column: 4 } }, + { start: { line: 1, column: 4 }, end: { line: 1, column: 5 } }, + ), + ).to.be.false(); + }); + }); +}); diff --git a/packages/sol-tracing-utils/tsconfig.json b/packages/sol-tracing-utils/tsconfig.json new file mode 100644 index 000000000..2ee711adc --- /dev/null +++ b/packages/sol-tracing-utils/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig", + "compilerOptions": { + "outDir": "lib", + "rootDir": "." + }, + "include": ["./src/**/*", "./test/**/*"] +} diff --git a/packages/sol-tracing-utils/tslint.json b/packages/sol-tracing-utils/tslint.json new file mode 100644 index 000000000..dd9053357 --- /dev/null +++ b/packages/sol-tracing-utils/tslint.json @@ -0,0 +1,3 @@ +{ + "extends": ["@0x/tslint-config"] +} |
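For orientation, below is a minimal sketch (not part of the diff above) of how a downstream tool might extend the abstract TraceInfoSubprovider introduced in this package. The class name TraceLoggingSubprovider and the import path are illustrative assumptions; only the constructor signature, the config shape and the _handleTraceInfoAsync hook come from the sources shown in this diff.

import { TraceInfo, TraceInfoSubprovider } from '@0x/sol-tracing-utils'; // assumes the package index re-exports these

// Illustrative only: a subprovider that logs one line per collected trace segment.
class TraceLoggingSubprovider extends TraceInfoSubprovider {
    constructor(defaultFromAddress: string) {
        // Collect traces for transactions, calls and gas estimates alike.
        super(defaultFromAddress, {
            shouldCollectTransactionTraces: true,
            shouldCollectCallTraces: true,
            shouldCollectGasEstimateTraces: true,
        });
    }
    protected async _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void> {
        // Each TraceInfo covers one contract address touched by the transaction or call.
        console.log(`${traceInfo.address}: ${traceInfo.subtrace.length} struct logs`);
    }
}

Added to a provider engine ahead of the RPC subprovider, such a class would be handed one TraceInfo per contract address for every eth_sendTransaction, eth_call and eth_estimateGas request, which is the same hook the coverage and profiler tools split out of this package are built on.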