author     Péter Szilágyi <peterke@gmail.com>  2016-12-14 17:15:21 +0800
committer  GitHub <noreply@github.com>         2016-12-14 17:15:21 +0800
commit     fdb8edf5ea0cc6e8e9b6f3e64c8c4c7f5324eea3 (patch)
tree       d0d743ca77665e16bcb63b27e0361ce210ded0be /eth
parent     157a4bd9266bb3ae7c4617a4a7159fd309b2f7eb (diff)
parent     9ba9fe818d252ee9770371f0ccad68e3d09fbf5c (diff)
Merge pull request #3427 from Arachnid/gzipdump
cmd/utils, eth: Add gzip support for chain dump and restore
Diffstat (limited to 'eth')
-rw-r--r--  eth/api.go  19
1 file changed, 17 insertions(+), 2 deletions(-)
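For context, the two admin endpoints touched by this change are exposed over RPC as admin_exportChain and admin_importChain. The sketch below shows how the gzip path would typically be exercised from a Go client; it is illustrative only — the IPC socket path and dump file name are assumptions, not part of the commit.

package main

import (
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/rpc"
)

func main() {
	// Assumes a locally running node with the admin API enabled over IPC.
	client, err := rpc.Dial("/tmp/geth.ipc") // hypothetical endpoint
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	var ok bool
	// The ".gz" suffix is what triggers the new gzip.Writer wrapping in ExportChain.
	if err := client.Call(&ok, "admin_exportChain", "/tmp/chain.rlp.gz"); err != nil {
		log.Fatal(err)
	}
	fmt.Println("export succeeded:", ok)

	// ImportChain likewise detects the ".gz" suffix and decompresses on read.
	if err := client.Call(&ok, "admin_importChain", "/tmp/chain.rlp.gz"); err != nil {
		log.Fatal(err)
	}
	fmt.Println("import succeeded:", ok)
}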
diff --git a/eth/api.go b/eth/api.go
index a86ed95cf..0a1d097e3 100644
--- a/eth/api.go
+++ b/eth/api.go
@@ -18,6 +18,7 @@ package eth
import (
"bytes"
+ "compress/gzip"
"errors"
"fmt"
"io"
@@ -25,6 +26,7 @@ import (
"math/big"
"os"
"runtime"
+ "strings"
"time"
"github.com/ethereum/ethash"
@@ -217,8 +219,14 @@ func (api *PrivateAdminAPI) ExportChain(file string) (bool, error) {
}
defer out.Close()
+ var writer io.Writer = out
+ if strings.HasSuffix(file, ".gz") {
+ writer = gzip.NewWriter(writer)
+ defer writer.(*gzip.Writer).Close()
+ }
+
// Export the blockchain
- if err := api.eth.BlockChain().Export(out); err != nil {
+ if err := api.eth.BlockChain().Export(writer); err != nil {
return false, err
}
return true, nil
@@ -243,8 +251,15 @@ func (api *PrivateAdminAPI) ImportChain(file string) (bool, error) {
}
defer in.Close()
+ var reader io.Reader = in
+ if strings.HasSuffix(file, ".gz") {
+ if reader, err = gzip.NewReader(reader); err != nil {
+ return false, err
+ }
+ }
+
// Run actual the import in pre-configured batches
- stream := rlp.NewStream(in, 0)
+ stream := rlp.NewStream(reader, 0)
blocks, index := make([]*types.Block, 0, 2500), 0
for batch := 0; ; batch++ {
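For illustration, here is the suffix-check-and-wrap technique from the two hunks above lifted into a self-contained sketch. The writeDump/readDump helpers and the file names are invented and are not part of the commit; the point is the ".gz" suffix check and the close ordering — deferred calls run last-in, first-out, so the gzip.Writer is closed (flushing its footer) before the underlying file.

package main

import (
	"compress/gzip"
	"io"
	"log"
	"os"
	"strings"
)

// writeDump copies data into path, gzip-compressing when path ends in ".gz".
func writeDump(path string, data io.Reader) error {
	out, err := os.Create(path)
	if err != nil {
		return err
	}
	defer out.Close()

	var writer io.Writer = out
	if strings.HasSuffix(path, ".gz") {
		gz := gzip.NewWriter(out)
		defer gz.Close() // runs before out.Close(), flushing the gzip footer
		writer = gz
	}
	_, err = io.Copy(writer, data)
	return err
}

// readDump opens path, transparently decompressing when it ends in ".gz".
func readDump(path string) (io.ReadCloser, error) {
	in, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	var reader io.Reader = in
	if strings.HasSuffix(path, ".gz") {
		if reader, err = gzip.NewReader(in); err != nil {
			in.Close()
			return nil, err
		}
	}
	// Closing the file releases the descriptor; the gzip layer holds no OS
	// resources of its own, so its Close is omitted here for brevity.
	return struct {
		io.Reader
		io.Closer
	}{reader, in}, nil
}

func main() {
	// Round-trip a small payload through a gzipped dump file.
	if err := writeDump("/tmp/demo.rlp.gz", strings.NewReader("hello")); err != nil {
		log.Fatal(err)
	}
	r, err := readDump("/tmp/demo.rlp.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer r.Close()
	if _, err := io.Copy(os.Stdout, r); err != nil {
		log.Fatal(err)
	}
}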