author | chriseth <chris@ethereum.org> | 2018-11-14 02:33:35 +0800
---|---|---
committer | GitHub <noreply@github.com> | 2018-11-14 02:33:35 +0800
commit | 1d4f565a64988a3400847d2655ca24f73f234bc6 (patch) |
tree | caaa6c26e307513505349b50ca4f2a8a9506752b /scripts |
parent | 59dbf8f1085b8b92e8b7eb0ce380cbeb642e97eb (diff) |
parent | 91b6b8a88e76016e0324036cb7a7f9300a1e2439 (diff) |
Merge pull request #5416 from ethereum/develop
Merge develop into release for 0.5.0
Diffstat (limited to 'scripts')
-rw-r--r-- | scripts/Dockerfile | 39
-rw-r--r-- | scripts/bytecodecompare/storebytecode.bat | 1
-rwxr-xr-x | scripts/bytecodecompare/storebytecode.sh | 20
-rwxr-xr-x | scripts/check_style.sh | 29
-rw-r--r-- | scripts/codespell_whitelist.txt | 4
-rwxr-xr-x | scripts/create_source_tarball.sh | 1
-rwxr-xr-x | scripts/docs.sh | 1
-rwxr-xr-x | scripts/extract_test_cases.py | 2
-rwxr-xr-x | scripts/install_deps.sh | 14
-rwxr-xr-x | scripts/install_obsolete_jsoncpp_1_7_4.sh | 16
-rwxr-xr-x | scripts/isolate_tests.py | 93
-rwxr-xr-x | scripts/release_ppa.sh | 6
-rwxr-xr-x | scripts/tests.sh | 125
-rwxr-xr-x | scripts/update_bugs_by_version.py | 4
14 files changed, 244 insertions, 111 deletions
diff --git a/scripts/Dockerfile b/scripts/Dockerfile
index 654a9f29..2b2de1e2 100644
--- a/scripts/Dockerfile
+++ b/scripts/Dockerfile
@@ -1,18 +1,39 @@
-FROM alpine
+FROM alpine AS build
 MAINTAINER chriseth <chris@ethereum.org>
 #Official solidity docker image
 
 #Establish working directory as solidity
 WORKDIR /solidity
+
+# Build dependencies
+ADD /scripts/install_deps.sh /solidity/scripts/install_deps.sh
+RUN ./scripts/install_deps.sh
+
 #Copy working directory on travis to the image
 COPY / $WORKDIR
 
-#Install dependencies, eliminate annoying warnings, and build release, delete all remaining points and statically link.
-RUN ./scripts/install_deps.sh && sed -i -E -e 's/include <sys\/poll.h>/include <poll.h>/' /usr/include/boost/asio/detail/socket_types.hpp &&\
-cmake -DCMAKE_BUILD_TYPE=Release -DTESTS=0 -DSOLC_LINK_STATIC=1 &&\
-make solc && install -s solc/solc /usr/bin &&\
-cd / && rm -rf solidity &&\
-apk del sed build-base git make cmake gcc g++ musl-dev curl-dev boost-dev &&\
-rm -rf /var/cache/apk/*
+# Number of parallel jobs during build
+# or 0 for auto-computing (max(1, CPU_core_count * 2/3), a greedy value)
+ARG BUILD_CONCURRENCY="0"
+
+#Install dependencies, eliminate annoying warnings
+RUN sed -i -E -e 's/include <sys\/poll.h>/include <poll.h>/' /usr/include/boost/asio/detail/socket_types.hpp
+RUN cmake -DCMAKE_BUILD_TYPE=Release -DTESTS=0 -DSOLC_LINK_STATIC=1
+RUN make solc \
+    -j$(awk "BEGIN { \
+        if (${BUILD_CONCURRENCY} != 0) { \
+            print(${BUILD_CONCURRENCY}); \
+        } else { \
+            x=($(grep -c ^processor /proc/cpuinfo) * 2/3); \
+            if (x > 1) { \
+                printf(\"%d\n\", x); \
+            } else { \
+                print(1); \
+            } \
+        } \
+    }")
+RUN strip solc/solc
 
-ENTRYPOINT ["/usr/bin/solc"]
\ No newline at end of file
+FROM scratch
+COPY --from=build /solidity/solc/solc /usr/bin/solc
+ENTRYPOINT ["/usr/bin/solc"]
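The BUILD_CONCURRENCY argument introduced above defaults to "0", in which case the awk expression picks max(1, CPU_core_count * 2/3) parallel jobs. A minimal local sketch of the same computation, passing the result in explicitly (the explicit invocation here is only an example, not part of this change):

```bash
# Compute max(1, CPU_core_count * 2/3) and hand it to the image build;
# leaving BUILD_CONCURRENCY at its default "0" lets the Dockerfile compute it itself.
cores=$(grep -c ^processor /proc/cpuinfo)
jobs=$(( cores * 2 / 3 ))
[ "$jobs" -lt 1 ] && jobs=1
docker build -f scripts/Dockerfile --build-arg BUILD_CONCURRENCY="$jobs" .
```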
diff --git a/scripts/bytecodecompare/storebytecode.bat b/scripts/bytecodecompare/storebytecode.bat
index e64e9276..ef20a320 100644
--- a/scripts/bytecodecompare/storebytecode.bat
+++ b/scripts/bytecodecompare/storebytecode.bat
@@ -39,4 +39,5 @@ set REPORT=%DIRECTORY%/windows.txt
 cp ../report.txt %REPORT%
 git add %REPORT%
 git commit -a -m "Added report."
+git pull --rebase
 git push origin 2>&1
diff --git a/scripts/bytecodecompare/storebytecode.sh b/scripts/bytecodecompare/storebytecode.sh
index 557e3275..ccf6e60e 100755
--- a/scripts/bytecodecompare/storebytecode.sh
+++ b/scripts/bytecodecompare/storebytecode.sh
@@ -40,14 +40,16 @@ TMPDIR=$(mktemp -d)
 
 if [[ "$SOLC_EMSCRIPTEN" = "On" ]]
 then
-    cp "$REPO_ROOT/build/libsolc/soljson.js" .
-    npm install solc
+    # npm install solc
+    git clone --depth 1 https://github.com/ethereum/solc-js.git solc-js
+    ( cd solc-js; npm install )
+    cp "$REPO_ROOT/build/libsolc/soljson.js" solc-js/
     cat > solc <<EOF
 #!/usr/bin/env node
 
 var process = require('process')
 var fs = require('fs')
-var compiler = require('solc/wrapper.js')(require('./soljson.js'))
+var compiler = require('./solc-js/wrapper.js')(require('./solc-js/soljson.js'))
 
 for (var optimize of [false, true])
 {
@@ -57,7 +59,15 @@ for (var optimize of [false, true])
         {
             var inputs = {}
             inputs[filename] = fs.readFileSync(filename).toString()
-            var result = compiler.compile({sources: inputs}, optimize)
+            var input = {
+                language: 'Solidity',
+                sources: inputs,
+                settings: {
+                    optimizer: { enabled: optimize },
+                    outputSelection: { '*': { '*': ['evm.bytecode.object', 'metadata'] } }
+                }
+            }
+            var result = JSON.parse(compiler.compile(JSON.stringify(input)))
             if (!('contracts' in result) || Object.keys(result['contracts']).length === 0)
             {
                 console.log(filename + ': ERROR')
@@ -66,7 +76,7 @@ for (var optimize of [false, true])
             {
                 for (var contractName in result['contracts'])
                 {
-                    console.log(contractName + ' ' + result['contracts'][contractName].bytecode)
+                    console.log(contractName + ' ' + result['contracts'][contractName].evm.bytecode.object)
                     console.log(contractName + ' ' + result['contracts'][contractName].metadata)
                 }
             }
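The comparison script now drives the compiler through the standard-JSON interface instead of the legacy compile() call. For reference, a roughly equivalent one-off compilation can be done with the commandline compiler, which accepts the same input document on stdin (a sketch, assuming a locally installed solc binary; the contract source is just an example):

```bash
# Compile an inline source via the standard-JSON interface and print the result JSON on stdout.
solc --standard-json <<'EOF'
{
  "language": "Solidity",
  "sources": { "Test.sol": { "content": "pragma solidity ^0.5.0; contract C {}" } },
  "settings": {
    "optimizer": { "enabled": true },
    "outputSelection": { "*": { "*": ["evm.bytecode.object", "metadata"] } }
  }
}
EOF
```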
mkdir "$SOLDIR" # Store the current source git checkout-index -a --prefix="$SOLDIR" - git submodule foreach 'git checkout-index -a --prefix="'"$SOLDIR"'/$path/"' # Store the commit hash echo "$commithash" > "$SOLDIR/commit_hash.txt" if [ -e prerelease.txt -a ! -s prerelease.txt ] diff --git a/scripts/docs.sh b/scripts/docs.sh index 42400bc7..2c08a82b 100755 --- a/scripts/docs.sh +++ b/scripts/docs.sh @@ -28,5 +28,6 @@ set -e cd docs +pip install -r requirements.txt sphinx-build -nW -b html -d _build/doctrees . _build/html cd .. diff --git a/scripts/extract_test_cases.py b/scripts/extract_test_cases.py index 07ef9a96..47c53f3c 100755 --- a/scripts/extract_test_cases.py +++ b/scripts/extract_test_cases.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python2 # # This script reads C++ or RST source files and writes all # multi-line strings into individual files. diff --git a/scripts/install_deps.sh b/scripts/install_deps.sh index fa5551bf..b107f7c9 100755 --- a/scripts/install_deps.sh +++ b/scripts/install_deps.sh @@ -87,9 +87,12 @@ case $(uname -s) in 10.13) echo "Installing solidity dependencies on macOS 10.13 High Sierra." ;; + 10.14) + echo "Installing solidity dependencies on macOS 10.14 Mojave." + ;; *) echo "Unsupported macOS version." - echo "We only support Mavericks, Yosemite, El Capitan, Sierra and High Sierra." + echo "We only support Mavericks, Yosemite, El Capitan, Sierra, High Sierra and Mojave." exit 1 ;; esac @@ -133,19 +136,18 @@ case $(uname -s) in # Arch Linux #------------------------------------------------------------------------------ - Arch) + Arch*|ManjaroLinux) #Arch echo "Installing solidity dependencies on Arch Linux." # All our dependencies can be found in the Arch Linux official repositories. # See https://wiki.archlinux.org/index.php/Official_repositories - # Also adding ethereum-git to allow for testing with the `eth` client sudo pacman -Syu \ base-devel \ boost \ cmake \ git \ - ethereum-git \ + cvc4 ;; #------------------------------------------------------------------------------ @@ -160,7 +162,7 @@ case $(uname -s) in # See https://pkgs.alpinelinux.org/ apk update - apk add boost-dev build-base cmake + apk add boost-dev build-base cmake git ;; @@ -329,7 +331,7 @@ case $(uname -s) in "$install_z3" if [ "$CI" = true ]; then # install Z3 from PPA if the distribution does not provide it - if ! dpkg -l libz3-dev > /dev/null 2>&1 + if ! dpkg -l libz3-dev > /dev/null 2>&1 then sudo apt-add-repository -y ppa:hvr/z3 sudo apt-get -y update diff --git a/scripts/install_obsolete_jsoncpp_1_7_4.sh b/scripts/install_obsolete_jsoncpp_1_7_4.sh new file mode 100755 index 00000000..0ae7b34c --- /dev/null +++ b/scripts/install_obsolete_jsoncpp_1_7_4.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh +set -e + +TEMPDIR=$(mktemp -d) +( + cd $TEMPDIR + wget https://github.com/open-source-parsers/jsoncpp/archive/1.7.4.tar.gz + tar xvzf "1.7.4.tar.gz" + cd "jsoncpp-1.7.4" + mkdir -p build + cd build + cmake -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" .. + make + make install +) +rm -rf $TEMPDIR diff --git a/scripts/isolate_tests.py b/scripts/isolate_tests.py index 5bf577d3..8a9aa0a7 100755 --- a/scripts/isolate_tests.py +++ b/scripts/isolate_tests.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python2 # # This script reads C++ or RST source files and writes all # multi-line strings into individual files. 
diff --git a/scripts/extract_test_cases.py b/scripts/extract_test_cases.py
index 07ef9a96..47c53f3c 100755
--- a/scripts/extract_test_cases.py
+++ b/scripts/extract_test_cases.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python2
 #
 # This script reads C++ or RST source files and writes all
 # multi-line strings into individual files.
diff --git a/scripts/install_deps.sh b/scripts/install_deps.sh
index fa5551bf..b107f7c9 100755
--- a/scripts/install_deps.sh
+++ b/scripts/install_deps.sh
@@ -87,9 +87,12 @@ case $(uname -s) in
         10.13)
             echo "Installing solidity dependencies on macOS 10.13 High Sierra."
             ;;
+        10.14)
+            echo "Installing solidity dependencies on macOS 10.14 Mojave."
+            ;;
         *)
             echo "Unsupported macOS version."
-            echo "We only support Mavericks, Yosemite, El Capitan, Sierra and High Sierra."
+            echo "We only support Mavericks, Yosemite, El Capitan, Sierra, High Sierra and Mojave."
             exit 1
             ;;
     esac
@@ -133,19 +136,18 @@ case $(uname -s) in
 # Arch Linux
 #------------------------------------------------------------------------------
 
-    Arch)
+    Arch*|ManjaroLinux) #Arch
         echo "Installing solidity dependencies on Arch Linux."
 
         # All our dependencies can be found in the Arch Linux official repositories.
         # See https://wiki.archlinux.org/index.php/Official_repositories
-        # Also adding ethereum-git to allow for testing with the `eth` client
 
         sudo pacman -Syu \
            base-devel \
            boost \
            cmake \
            git \
-           ethereum-git \
+           cvc4
        ;;
 
 #------------------------------------------------------------------------------
@@ -160,7 +162,7 @@ case $(uname -s) in
         # See https://pkgs.alpinelinux.org/
 
         apk update
-        apk add boost-dev build-base cmake
+        apk add boost-dev build-base cmake git
 
         ;;
 
@@ -329,7 +331,7 @@ case $(uname -s) in
         "$install_z3"
         if [ "$CI" = true ]; then
             # install Z3 from PPA if the distribution does not provide it
-            if ! dpkg -l libz3-dev > /dev/null 2>&1 
+            if ! dpkg -l libz3-dev > /dev/null 2>&1
             then
                 sudo apt-add-repository -y ppa:hvr/z3
                 sudo apt-get -y update
diff --git a/scripts/install_obsolete_jsoncpp_1_7_4.sh b/scripts/install_obsolete_jsoncpp_1_7_4.sh
new file mode 100755
index 00000000..0ae7b34c
--- /dev/null
+++ b/scripts/install_obsolete_jsoncpp_1_7_4.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+set -e
+
+TEMPDIR=$(mktemp -d)
+(
+    cd $TEMPDIR
+    wget https://github.com/open-source-parsers/jsoncpp/archive/1.7.4.tar.gz
+    tar xvzf "1.7.4.tar.gz"
+    cd "jsoncpp-1.7.4"
+    mkdir -p build
+    cd build
+    cmake -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ..
+    make
+    make install
+)
+rm -rf $TEMPDIR
diff --git a/scripts/isolate_tests.py b/scripts/isolate_tests.py
index 5bf577d3..8a9aa0a7 100755
--- a/scripts/isolate_tests.py
+++ b/scripts/isolate_tests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python2
 #
 # This script reads C++ or RST source files and writes all
 # multi-line strings into individual files.
@@ -10,7 +10,7 @@ import sys
 import re
 import os
 import hashlib
-from os.path import join
+from os.path import join, isfile
 
 def extract_test_cases(path):
     lines = open(path, 'rb').read().splitlines()
@@ -35,47 +35,42 @@ def extract_test_cases(path):
     return tests
 
 # Contract sources are indented by 4 spaces.
-# Look for `pragma solidity` and abort a line not indented properly.
-# If the comment `// This will not compile` is above the pragma,
-# the code is skipped.
+# Look for `pragma solidity`, `contract`, `library` or `interface`
+# and abort a line not indented properly.
 def extract_docs_cases(path):
-    # Note: this code works, because splitlines() removes empty new lines
-    # and thus even if the empty new lines are missing indentation
-    lines = open(path, 'rb').read().splitlines()
-
-    ignore = False
     inside = False
     tests = []
 
-    for l in lines:
-        if inside:
-            # Abort if indentation is missing
-            m = re.search(r'^[^ ]+', l)
-            if m:
-                inside = False
-            else:
-                tests[-1] += l + '\n'
-        else:
-            m = re.search(r'^    // This will not compile', l)
-            if m:
-                ignore = True
+    # Collect all snippets of indented blocks
+    for l in open(path, 'rb').read().splitlines():
+        if l != '':
+            if not inside and l.startswith(' '):
+                # start new test
+                tests += ['']
+            inside = l.startswith(' ')
+        if inside:
+            tests[-1] += l + '\n'
+
+    # Filter all tests that do not contain Solidity
+    return [
+        test for test in tests
+        if re.search(r'^    [ ]*(pragma solidity|contract |library |interface )', test, re.MULTILINE)
+    ]
 
-            if ignore:
-                # Abort if indentation is missing
-                m = re.search(r'^[^ ]+', l)
-                if m:
-                    ignore = False
-            else:
-                m = re.search(r'^    pragma solidity .*[0-9]+\.[0-9]+\.[0-9]+;$', l)
-                if m:
-                    inside = True
-                    tests += [l]
+def write_cases(f, tests):
+    cleaned_filename = f.replace(".","_").replace("-","_").replace(" ","_").lower()
+    for test in tests:
+        open('test_%s_%s.sol' % (hashlib.sha256(test).hexdigest(), cleaned_filename), 'wb').write(test)
 
-    return tests
-def write_cases(tests):
-    for test in tests:
-        open('test_%s.sol' % hashlib.sha256(test).hexdigest(), 'wb').write(test)
+def extract_and_write(f, path):
+    if docs:
+        cases = extract_docs_cases(path)
+    else:
+        if f.endswith('.sol'):
+            cases = [open(path, 'r').read()]
+        else:
+            cases = extract_test_cases(path)
+    write_cases(f, cases)
 
 if __name__ == '__main__':
     path = sys.argv[1]
@@ -83,18 +78,14 @@ if __name__ == '__main__':
     if len(sys.argv) > 2 and sys.argv[2] == 'docs':
         docs = True
 
-    for root, subdirs, files in os.walk(path):
-        if '_build' in subdirs:
-            subdirs.remove('_build')
-        if 'compilationTests' in subdirs:
-            subdirs.remove('compilationTests')
-        for f in files:
-            path = join(root, f)
-            if docs:
-                cases = extract_docs_cases(path)
-            else:
-                if f.endswith(".sol"):
-                    cases = [open(path, "r").read()]
-                else:
-                    cases = extract_test_cases(path)
-            write_cases(cases)
+    if isfile(path):
+        extract_and_write(path, path)
+    else:
+        for root, subdirs, files in os.walk(path):
+            if '_build' in subdirs:
+                subdirs.remove('_build')
+            if 'compilationTests' in subdirs:
+                subdirs.remove('compilationTests')
+            for f in files:
+                path = join(root, f)
+                extract_and_write(f, path)
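A brief usage sketch for the reworked isolate_tests.py (hypothetical invocations from the repository root; the script writes test_<sha256>_<filename>.sol files into the current directory):

```bash
# Extract the indented Solidity snippets from the documentation (the second
# argument switches to docs mode) ...
python2 scripts/isolate_tests.py docs/ docs
# ... or extract the multi-line string test cases from a single C++ test file,
# which the new isfile() branch above now accepts directly.
python2 scripts/isolate_tests.py test/libsolidity/SolidityEndToEndTest.cpp
```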
diff --git a/scripts/release_ppa.sh b/scripts/release_ppa.sh
index b1601336..36a8ef7f 100755
--- a/scripts/release_ppa.sh
+++ b/scripts/release_ppa.sh
@@ -22,7 +22,7 @@
 ## method = ftp
 ## incoming = ~ethereum/ethereum-dev
 ## login = anonymous
-## 
+##
 ## [ethereum]
 ## fqdn = ppa.launchpad.net
 ## method = ftp
@@ -50,11 +50,11 @@ else
     ppafilesurl=https://launchpad.net/~ethereum/+archive/ubuntu/ethereum/+files
 fi
 
-keyid=703F83D0
+keyid=70D110489D66E2F6
 email=builds@ethereum.org
 packagename=solc
 
-for distribution in trusty vivid xenial zesty artful bionic
+for distribution in trusty xenial bionic cosmic
 do
 cd /tmp/
 rm -rf $distribution
diff --git a/scripts/tests.sh b/scripts/tests.sh
index d63c1fe4..c284c05c 100755
--- a/scripts/tests.sh
+++ b/scripts/tests.sh
@@ -30,7 +30,11 @@ set -e
 
 REPO_ROOT="$(dirname "$0")"/..
 
+WORKDIR=`mktemp -d`
 IPC_ENABLED=true
+ALETH_PID=
+CMDLINE_PID=
+
 if [[ "$OSTYPE" == "darwin"* ]]
 then
     SMT_FLAGS="--no-smt"
@@ -41,6 +45,49 @@ then
     fi
 fi
 
+safe_kill() {
+    local PID=${1}
+    local NAME=${2:-${1}}
+    local n=1
+
+    # only proceed if $PID does exist
+    kill -0 $PID 2>/dev/null || return
+
+    echo "Sending SIGTERM to ${NAME} (${PID}) ..."
+    kill $PID
+
+    # wait until process terminated gracefully
+    while kill -0 $PID 2>/dev/null && [[ $n -le 4 ]]; do
+        echo "Waiting ($n) ..."
+        sleep 1
+        n=$[n + 1]
+    done
+
+    # process still alive? then hard-kill
+    if kill -0 $PID 2>/dev/null; then
+        echo "Sending SIGKILL to ${NAME} (${PID}) ..."
+        kill -9 $PID
+    fi
+}
+
+cleanup() {
+    # ensure failing commands don't cause termination during cleanup (especially within safe_kill)
+    set +e
+
+    if [[ "$IPC_ENABLED" = true ]] && [[ -n "${ALETH_PID}" ]]
+    then
+        safe_kill $ALETH_PID $ALETH_PATH
+    fi
+    if [[ -n "$CMDLINE_PID" ]]
+    then
+        safe_kill $CMDLINE_PID "Commandline tests"
+    fi
+
+    echo "Cleaning up working directory ${WORKDIR} ..."
+    rm -rf "$WORKDIR" || true
+}
+trap cleanup INT TERM
+
 if [ "$1" = --junit_report ]
 then
     if [ -z "$2" ]
@@ -53,66 +100,82 @@ else
     log_directory=""
 fi
 
-function printError() { echo "$(tput setaf 1)$1$(tput sgr0)"; }
-function printTask() { echo "$(tput bold)$(tput setaf 2)$1$(tput sgr0)"; }
-
+if [ "$CIRCLECI" ]
+then
+    function printTask() { echo "$(tput bold)$(tput setaf 2)$1$(tput setaf 7)"; }
+    function printError() { echo "$(tput setaf 1)$1$(tput setaf 7)"; }
+else
+    function printTask() { echo "$(tput bold)$(tput setaf 2)$1$(tput sgr0)"; }
+    function printError() { echo "$(tput setaf 1)$1$(tput sgr0)"; }
+fi
 printTask "Running commandline tests..."
-"$REPO_ROOT/test/cmdlineTests.sh" &
-CMDLINE_PID=$!
 # Only run in parallel if this is run on CI infrastructure
-if [ -z "$CI" ]
+if [[ -n "$CI" ]]
 then
-    if ! wait $CMDLINE_PID
+    "$REPO_ROOT/test/cmdlineTests.sh" &
+    CMDLINE_PID=$!
+else
+    if ! $REPO_ROOT/test/cmdlineTests.sh
     then
         printError "Commandline tests FAILED"
         exit 1
     fi
 fi
 
-function download_eth()
+function download_aleth()
 {
     if [[ "$OSTYPE" == "darwin"* ]]; then
-        ETH_PATH="$REPO_ROOT/eth"
+        ALETH_PATH="$REPO_ROOT/aleth"
     elif [ -z $CI ]; then
-        ETH_PATH="eth"
+        ALETH_PATH="aleth"
     else
+        # Any time the hash is updated here, the "Running compiler tests" section should also be updated.
        mkdir -p /tmp/test
        if grep -i trusty /etc/lsb-release >/dev/null 2>&1
        then
-            # built from 5ac09111bd0b6518365fe956e1bdb97a2db82af1 at 2018-04-05
-            ETH_BINARY=eth_2018-04-05_trusty
-            ETH_HASH="1e5e178b005e5b51f9d347df4452875ba9b53cc6"
+            # built from d661ac4fec0aeffbedcdc195f67f5ded0c798278 at 2018-06-20
+            ALETH_BINARY=aleth_2018-06-20_trusty
+            ALETH_HASH="54b8a5455e45b295e3a962f353ff8f1580ed106c"
        else
-            # built from 5ac09111bd0b6518365fe956e1bdb97a2db82af1 at 2018-04-05
-            ETH_BINARY=eth_2018-04-05_artful
-            ETH_HASH="eb2d0df022753bb2b442ba73e565a9babf6828d6"
+            # built from d661ac4fec0aeffbedcdc195f67f5ded0c798278 at 2018-06-20
+            ALETH_BINARY=aleth_2018-06-20_artful
+            ALETH_HASH="02e6c4b3d98299885e73f7db6c9e3fbe3d66d444"
        fi
-        wget -q -O /tmp/test/eth https://github.com/ethereum/cpp-ethereum/releases/download/solidityTester/$ETH_BINARY
-        test "$(shasum /tmp/test/eth)" = "$ETH_HASH  /tmp/test/eth"
+        ALETH_PATH="/tmp/test/aleth"
+        wget -q -O $ALETH_PATH https://github.com/ethereum/cpp-ethereum/releases/download/solidityTester/$ALETH_BINARY
+        test "$(shasum $ALETH_PATH)" = "$ALETH_HASH  $ALETH_PATH"
        sync
-        chmod +x /tmp/test/eth
+        chmod +x $ALETH_PATH
        sync # Otherwise we might get a "text file busy" error
-        ETH_PATH="/tmp/test/eth"
    fi
 }
 
 # $1: data directory
 # echos the PID
-function run_eth()
+function run_aleth()
 {
-    $ETH_PATH --test -d "$1" >/dev/null 2>&1 &
+    $ALETH_PATH --test -d "${WORKDIR}" >/dev/null 2>&1 &
     echo $!
     # Wait until the IPC endpoint is available.
-    while [ ! -S "$1"/geth.ipc ] ; do sleep 1; done
+    while [ ! -S "${WORKDIR}/geth.ipc" ] ; do sleep 1; done
     sleep 2
 }
 
+function check_aleth() {
+    printTask "Running IPC tests with $ALETH_PATH..."
+    if ! hash $ALETH_PATH 2>/dev/null; then
+        printError "$ALETH_PATH not found"
+        exit 1
+    fi
+}
+
 if [ "$IPC_ENABLED" = true ]; then
-    download_eth
-    ETH_PID=$(run_eth /tmp/test)
+    download_aleth
+    check_aleth
+    ALETH_PID=$(run_aleth)
 fi
 
 progress="--show-progress"
@@ -145,19 +208,15 @@ do
             log=--logger=JUNIT,test_suite,$log_directory/noopt_$vm.xml $testargs_no_opt
         fi
     fi
-    "$REPO_ROOT"/build/test/soltest $progress $log -- --testpath "$REPO_ROOT"/test "$optimize" --evm-version "$vm" $SMT_FLAGS $IPC_FLAGS --ipcpath /tmp/test/geth.ipc
+    "$REPO_ROOT"/build/test/soltest $progress $log -- --testpath "$REPO_ROOT"/test "$optimize" --evm-version "$vm" $SMT_FLAGS $IPC_FLAGS --ipcpath "${WORKDIR}/geth.ipc"
 done
 done
 
-if ! wait $CMDLINE_PID
+if [[ -n $CMDLINE_PID ]] && ! wait $CMDLINE_PID
 then
     printError "Commandline tests FAILED"
+    CMDLINE_PID=
     exit 1
 fi
 
-if [ "$IPC_ENABLED" = true ]
-then
-    pkill "$ETH_PID" || true
-    sleep 4
-    pgrep "$ETH_PID" && pkill -9 "$ETH_PID" || true
-fi
+cleanup
diff --git a/scripts/update_bugs_by_version.py b/scripts/update_bugs_by_version.py
index cbedf1a5..655ffe23 100755
--- a/scripts/update_bugs_by_version.py
+++ b/scripts/update_bugs_by_version.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2
 #
 # This script is used to generate the list of bugs per compiler version
 # from the list of bugs.
@@ -35,7 +35,7 @@ for v in versions:
         continue
     versions[v]['bugs'] += [bug['name']]
 
-new_contents = json.dumps(versions, sort_keys=True, indent=4)
+new_contents = json.dumps(versions, sort_keys=True, indent=4, separators=(',', ': '))
 with open(path + '/../docs/bugs_by_version.json', 'r') as bugs_by_version:
     old_contents = bugs_by_version.read()
 with open(path + '/../docs/bugs_by_version.json', 'w') as bugs_by_version:
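For completeness, a local invocation sketch for the reworked tests.sh above (the report directory is only an example):

```bash
# Run the full test script with JUnit XML reports written to an example directory.
# The cleanup trap added above removes the temporary WORKDIR and stops the aleth
# and commandline-test processes via safe_kill if the run is interrupted.
mkdir -p /tmp/test-reports
./scripts/tests.sh --junit_report /tmp/test-reports
```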