author    Péter Szilágyi <peterke@gmail.com>  2018-03-26 18:34:21 +0800
committer Péter Szilágyi <peterke@gmail.com>  2018-03-26 19:08:01 +0800
commit    495bdb0c713ce6deafa51fa25cb7ea66426b6b2e
tree      648283a32470db5018d6efba60b5f7cb0a60f0ae /cmd
parent    b6b6f52ec8608e1a694357357c3f1fde669f1e6d
cmd: export preimages in RLP, support GZIP, uniform with block export
Diffstat (limited to 'cmd')
-rw-r--r--  cmd/geth/chaincmd.go | 175
-rw-r--r--  cmd/geth/main.go     |   4
-rw-r--r--  cmd/utils/cmd.go     |  94
3 files changed, 154 insertions(+), 119 deletions(-)
diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go
index 692cc2d8d..d3086921b 100644
--- a/cmd/geth/chaincmd.go
+++ b/cmd/geth/chaincmd.go
@@ -41,11 +41,6 @@ import (
)
var (
- // secureKeyPrefix is the database key prefix used to store trie node preimages.
- secureKeyPrefix = []byte("secure-key-")
-)
-
-var (
initCommand = cli.Command{
Action: utils.MigrateFlags(initGenesis),
Name: "init",
@@ -101,6 +96,34 @@ Optional second and third arguments control the first and
last block to write. In this mode, the file will be appended
if already existing.`,
}
+ importPreimagesCommand = cli.Command{
+ Action: utils.MigrateFlags(importPreimages),
+ Name: "import-preimages",
+ Usage: "Import the preimage database from an RLP stream",
+ ArgsUsage: "<datafile>",
+ Flags: []cli.Flag{
+ utils.DataDirFlag,
+ utils.CacheFlag,
+ utils.LightModeFlag,
+ },
+ Category: "BLOCKCHAIN COMMANDS",
+ Description: `
+ The import-preimages command imports hash preimages from an RLP encoded stream.`,
+ }
+ exportPreimagesCommand = cli.Command{
+ Action: utils.MigrateFlags(exportPreimages),
+ Name: "export-preimages",
+ Usage: "Export the preimage database into an RLP stream",
+ ArgsUsage: "<dumpfile>",
+ Flags: []cli.Flag{
+ utils.DataDirFlag,
+ utils.CacheFlag,
+ utils.LightModeFlag,
+ },
+ Category: "BLOCKCHAIN COMMANDS",
+ Description: `
+The export-preimages command exports hash preimages to an RLP encoded stream.`,
+ }
copydbCommand = cli.Command{
Action: utils.MigrateFlags(copyDb),
Name: "copydb",
@@ -146,34 +169,6 @@ Remove blockchain and state databases`,
The arguments are interpreted as block numbers or hashes.
Use "ethereum dump 0" to dump the genesis block.`,
}
- preimageDumpCommand = cli.Command{
- Action: utils.MigrateFlags(dumpPreimage),
- Name: "preimagedump",
- Usage: "Dump the preimage database in json format",
- ArgsUsage: "<dumpfile>",
- Flags: []cli.Flag{
- utils.DataDirFlag,
- utils.CacheFlag,
- utils.LightModeFlag,
- },
- Category: "BLOCKCHAIN COMMANDS",
- Description: `
-Dump the preimage database in json format`,
- }
- preimageImportCommand = cli.Command{
- Action: utils.MigrateFlags(importPreimage),
- Name: "preimageimport",
- Usage: "Import the preimage data from the specified file",
- ArgsUsage: "<datafile>",
- Flags: []cli.Flag{
- utils.DataDirFlag,
- utils.CacheFlag,
- utils.LightModeFlag,
- },
- Category: "BLOCKCHAIN COMMANDS",
- Description: `
-Import the preimage data from the specified file`,
- }
)
// initGenesis will initialise the given JSON format genesis file and writes it as
@@ -332,7 +327,39 @@ func exportChain(ctx *cli.Context) error {
if err != nil {
utils.Fatalf("Export error: %v\n", err)
}
- fmt.Printf("Export done in %v", time.Since(start))
+ fmt.Printf("Export done in %v\n", time.Since(start))
+ return nil
+}
+
+// importPreimages imports preimage data from the specified file.
+func importPreimages(ctx *cli.Context) error {
+ if len(ctx.Args()) < 1 {
+ utils.Fatalf("This command requires an argument.")
+ }
+ stack := makeFullNode(ctx)
+ diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)
+
+ start := time.Now()
+ if err := utils.ImportPreimages(diskdb, ctx.Args().First()); err != nil {
+ utils.Fatalf("Export error: %v\n", err)
+ }
+ fmt.Printf("Export done in %v\n", time.Since(start))
+ return nil
+}
+
+// exportPreimages dumps the preimage data to the specified RLP dump file in a streaming way.
+func exportPreimages(ctx *cli.Context) error {
+ if len(ctx.Args()) < 1 {
+ utils.Fatalf("This command requires an argument.")
+ }
+ stack := makeFullNode(ctx)
+ diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)
+
+ start := time.Now()
+ if err := utils.ExportPreimages(diskdb, ctx.Args().First()); err != nil {
+ utils.Fatalf("Export error: %v\n", err)
+ }
+ fmt.Printf("Export done in %v\n", time.Since(start))
return nil
}
@@ -439,86 +466,6 @@ func dump(ctx *cli.Context) error {
return nil
}
-// PreimageEntry represents a map between preimage and hash.
-type PreimageEntry struct {
- Hash string `json:"hash"`
- Preimage string `json:"preimage"`
-}
-
-// dumpPreimage dumps the preimage data to specified json file in streaming way.
-func dumpPreimage(ctx *cli.Context) error {
- // Make sure the export json file has been specified.
- if len(ctx.Args()) < 1 {
- utils.Fatalf("This command requires an argument.")
- }
-
- // Encode preimage data to json file in streaming way.
- file, err := os.Create(ctx.Args().First())
- if err != nil {
- return err
- }
- encoder := json.NewEncoder(file)
-
- stack := makeFullNode(ctx)
- db := utils.MakeChainDatabase(ctx, stack)
-
- // Dump all preimage entries.
- it := db.(*ethdb.LDBDatabase).NewIteratorByPrefix(secureKeyPrefix)
- for it.Next() {
- hash := it.Key()[len(secureKeyPrefix):]
- if err := encoder.Encode(PreimageEntry{common.Bytes2Hex(hash), common.Bytes2Hex(it.Value())}); err != nil {
- return err
- }
- }
- return nil
-}
-
-// importPreimages imports preimage data from the specified file.
-func importPreimage(ctx *cli.Context) error {
- // Make sure the export json file has been specified.
- if len(ctx.Args()) < 1 {
- utils.Fatalf("This command requires an argument.")
- }
-
- // Decode the preimage data in streaming way.
- file, err := os.Open(ctx.Args().First())
- if err != nil {
- return err
- }
- decoder := json.NewDecoder(file)
-
- stack := makeFullNode(ctx)
- db := utils.MakeChainDatabase(ctx, stack)
-
- var (
- entry PreimageEntry
- preimages = make(map[common.Hash][]byte)
- )
-
- for decoder.More() {
- if err := decoder.Decode(&entry); err != nil {
- return err
- }
- preimages[common.HexToHash(entry.Hash)] = common.Hex2Bytes(entry.Preimage)
- // Flush to database in batch
- if len(preimages) > 1024 {
- err := core.WritePreimages(db, 0, preimages)
- if err != nil {
- return err
- }
- preimages = make(map[common.Hash][]byte)
- }
- }
- // Flush the last batch preimage data
- if len(preimages) > 0 {
- err := core.WritePreimages(db, 0, preimages)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
// hashish returns true for strings that look like hashes.
func hashish(x string) bool {
_, err := strconv.Atoi(x)
diff --git a/cmd/geth/main.go b/cmd/geth/main.go
index 6e234a704..061384d1b 100644
--- a/cmd/geth/main.go
+++ b/cmd/geth/main.go
@@ -155,11 +155,11 @@ func init() {
initCommand,
importCommand,
exportCommand,
+ importPreimagesCommand,
+ exportPreimagesCommand,
copydbCommand,
removedbCommand,
dumpCommand,
- preimageDumpCommand,
- preimageImportCommand,
// See monitorcmd.go:
monitorCommand,
// See accountcmd.go:
diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go
index 186d18d8f..c0af4c13e 100644
--- a/cmd/utils/cmd.go
+++ b/cmd/utils/cmd.go
@@ -27,8 +27,11 @@ import (
"strings"
"syscall"
+ "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
+ "github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/internal/debug"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/node"
@@ -105,6 +108,8 @@ func ImportChain(chain *core.BlockChain, fn string) error {
}
log.Info("Importing blockchain", "file", fn)
+
+ // Open the file handle and potentially unwrap the gzip stream
fh, err := os.Open(fn)
if err != nil {
return err
@@ -180,8 +185,12 @@ func missingBlocks(chain *core.BlockChain, blocks []*types.Block) []*types.Block
return nil
}
+// ExportChain exports a blockchain into the specified file, truncating any data
+// already present in the file.
func ExportChain(blockchain *core.BlockChain, fn string) error {
log.Info("Exporting blockchain", "file", fn)
+
+ // Open the file handle and potentially wrap with a gzip stream
fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
@@ -193,7 +202,7 @@ func ExportChain(blockchain *core.BlockChain, fn string) error {
writer = gzip.NewWriter(writer)
defer writer.(*gzip.Writer).Close()
}
-
+ // Iterate over the blocks and export them
if err := blockchain.Export(writer); err != nil {
return err
}
@@ -202,9 +211,12 @@ func ExportChain(blockchain *core.BlockChain, fn string) error {
return nil
}
+// ExportAppendChain exports a blockchain into the specified file, appending to
+// the file if data already exists in it.
func ExportAppendChain(blockchain *core.BlockChain, fn string, first uint64, last uint64) error {
log.Info("Exporting blockchain", "file", fn)
- // TODO verify mode perms
+
+ // Open the file handle and potentially wrap with a gzip stream
fh, err := os.OpenFile(fn, os.O_CREATE|os.O_APPEND|os.O_WRONLY, os.ModePerm)
if err != nil {
return err
@@ -216,10 +228,86 @@ func ExportAppendChain(blockchain *core.BlockChain, fn string, first uint64, las
writer = gzip.NewWriter(writer)
defer writer.(*gzip.Writer).Close()
}
-
+ // Iterate over the blocks and export them
if err := blockchain.ExportN(writer, first, last); err != nil {
return err
}
log.Info("Exported blockchain to", "file", fn)
return nil
}
+
+// ImportPreimages imports a batch of exported hash preimages into the database.
+func ImportPreimages(db *ethdb.LDBDatabase, fn string) error {
+ log.Info("Importing preimages", "file", fn)
+
+ // Open the file handle and potentially unwrap the gzip stream
+ fh, err := os.Open(fn)
+ if err != nil {
+ return err
+ }
+ defer fh.Close()
+
+ var reader io.Reader = fh
+ if strings.HasSuffix(fn, ".gz") {
+ if reader, err = gzip.NewReader(reader); err != nil {
+ return err
+ }
+ }
+ stream := rlp.NewStream(reader, 0)
+
+ // Import the preimages in batches to prevent disk thrashing
+ preimages := make(map[common.Hash][]byte)
+
+ for {
+ // Read the next entry and ensure it's not junk
+ var blob []byte
+
+ if err := stream.Decode(&blob); err != nil {
+ if err == io.EOF {
+ break
+ }
+ return err
+ }
+ // Accumulate the preimages and flush when enough data was gathered
+ preimages[crypto.Keccak256Hash(blob)] = common.CopyBytes(blob)
+ if len(preimages) > 1024 {
+ if err := core.WritePreimages(db, 0, preimages); err != nil {
+ return err
+ }
+ preimages = make(map[common.Hash][]byte)
+ }
+ }
+ // Flush the last batch of preimage data
+ if len(preimages) > 0 {
+ return core.WritePreimages(db, 0, preimages)
+ }
+ return nil
+}
+
+// ExportPreimages exports all known hash preimages into the specified file,
+// truncating any data already present in the file.
+func ExportPreimages(db *ethdb.LDBDatabase, fn string) error {
+ log.Info("Exporting preimages", "file", fn)
+
+ // Open the file handle and potentially wrap with a gzip stream
+ fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm)
+ if err != nil {
+ return err
+ }
+ defer fh.Close()
+
+ var writer io.Writer = fh
+ if strings.HasSuffix(fn, ".gz") {
+ writer = gzip.NewWriter(writer)
+ defer writer.(*gzip.Writer).Close()
+ }
+ // Iterate over the preimages and export them
+ it := db.NewIteratorWithPrefix([]byte("secure-key-"))
+ for it.Next() {
+ if err := rlp.Encode(writer, it.Value()); err != nil {
+ return err
+ }
+ }
+ log.Info("Exported preimages", "file", fn)
+ return nil
+}
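
For context (not part of the diff above): the exported file is just a flat RLP stream of byte strings, one raw preimage per entry, optionally gzip-wrapped when the file name ends in ".gz", and the hashes are never stored because import recomputes them with Keccak-256. Under that assumption a dump can be checked outside of geth; the following is a minimal reader sketch, where the file name "preimages.rlp.gz" and the standalone main wrapper are illustrative and only the rlp and crypto packages mirror the code in this commit.

// Minimal verification sketch (illustrative, not part of this commit): decode
// each RLP byte string from an exported preimage dump and recompute its key,
// mirroring the loop in ImportPreimages above.
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/rlp"
)

func main() {
	fn := "preimages.rlp.gz" // example path, not mandated by the commit

	fh, err := os.Open(fn)
	if err != nil {
		panic(err)
	}
	defer fh.Close()

	// Same convention as the block exporter: a ".gz" suffix means gzip.
	var reader io.Reader = fh
	if strings.HasSuffix(fn, ".gz") {
		if reader, err = gzip.NewReader(reader); err != nil {
			panic(err)
		}
	}
	// The dump is a flat RLP stream of byte strings: one raw preimage per entry.
	stream := rlp.NewStream(reader, 0)
	for {
		var blob []byte
		if err := stream.Decode(&blob); err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}
		// The hash is not stored in the file; import recomputes it.
		fmt.Printf("%x -> %d byte preimage\n", crypto.Keccak256Hash(blob), len(blob))
	}
}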