-rw-r--r--  .github/CODEOWNERS | 2
-rw-r--r--  .github/CONTRIBUTING.md | 2
-rw-r--r--  .github/no-response.yml | 11
-rw-r--r--  .github/stale.yml | 17
-rw-r--r--  .gitignore | 6
-rw-r--r--  .mailmap | 14
-rw-r--r--  .travis.yml | 22
-rw-r--r--  AUTHORS | 92
-rw-r--r--  Dockerfile | 2
-rw-r--r--  Dockerfile.alltools | 2
-rw-r--r--  README.md | 31
-rw-r--r--  VERSION | 2
-rw-r--r--  accounts/abi/abi.go | 7
-rw-r--r--  accounts/abi/abi_test.go | 6
-rw-r--r--  accounts/abi/argument.go | 130
-rw-r--r--  accounts/abi/bind/backend.go | 27
-rw-r--r--  accounts/abi/bind/backends/simulated.go | 148
-rw-r--r--  accounts/abi/bind/base.go | 121
-rw-r--r--  accounts/abi/bind/bind.go | 286
-rw-r--r--  accounts/abi/bind/bind_test.go | 311
-rw-r--r--  accounts/abi/bind/template.go | 169
-rw-r--r--  accounts/abi/bind/topics.go | 189
-rw-r--r--  accounts/abi/event.go | 11
-rw-r--r--  accounts/abi/pack_test.go | 7
-rw-r--r--  accounts/abi/unpack.go | 64
-rw-r--r--  accounts/abi/unpack_test.go | 119
-rw-r--r--  accounts/errors.go | 2
-rw-r--r--  appveyor.yml | 4
-rw-r--r--  build/ci-notes.md | 13
-rw-r--r--  build/ci.go | 9
-rw-r--r--  build/deb.control | 2
-rw-r--r--  build/deb.rules | 2
-rw-r--r--  build/update-license.go | 5
-rw-r--r--  cmd/bootnode/main.go | 7
-rw-r--r--  cmd/ethkey/inspect.go | 16
-rw-r--r--  cmd/ethkey/message_test.go | 2
-rw-r--r--  cmd/ethkey/run_test.go | 2
-rw-r--r--  cmd/evm/json_logger.go | 14
-rw-r--r--  cmd/evm/main.go | 5
-rw-r--r--  cmd/evm/runner.go | 9
-rw-r--r--  cmd/faucet/faucet.go | 28
-rw-r--r--  cmd/geth/chaincmd.go | 20
-rw-r--r--  cmd/geth/config.go | 17
-rw-r--r--  cmd/geth/consolecmd.go | 9
-rw-r--r--  cmd/geth/main.go | 5
-rw-r--r--  cmd/geth/usage.go | 7
-rw-r--r--  cmd/p2psim/main.go | 16
-rw-r--r--  cmd/puppeth/genesis.go | 28
-rw-r--r--  cmd/puppeth/module_dashboard.go | 79
-rw-r--r--  cmd/puppeth/module_faucet.go | 8
-rw-r--r--  cmd/puppeth/module_node.go | 54
-rw-r--r--  cmd/puppeth/module_wallet.go | 2
-rw-r--r--  cmd/puppeth/wizard.go | 3
-rw-r--r--  cmd/puppeth/wizard_explorer.go | 2
-rw-r--r--  cmd/puppeth/wizard_faucet.go | 8
-rw-r--r--  cmd/puppeth/wizard_intro.go | 7
-rw-r--r--  cmd/puppeth/wizard_netstats.go | 14
-rw-r--r--  cmd/puppeth/wizard_node.go | 10
-rw-r--r--  cmd/puppeth/wizard_wallet.go | 2
-rw-r--r--  cmd/swarm/config.go | 58
-rw-r--r--  cmd/swarm/config_test.go | 95
-rw-r--r--  cmd/swarm/main.go | 82
-rw-r--r--  cmd/swarm/manifest.go | 6
-rw-r--r--  cmd/swarm/run_test.go | 2
-rw-r--r--  cmd/swarm/upload_test.go | 2
-rw-r--r--  cmd/utils/cmd.go | 31
-rw-r--r--  cmd/utils/flags.go | 100
-rw-r--r--  cmd/wnode/main.go | 258
-rw-r--r--  common/big.go | 2
-rw-r--r--  common/compiler/solidity.go | 1
-rw-r--r--  common/fdlimit/fdlimit_freebsd.go | 14
-rw-r--r--  common/fdlimit/fdlimit_test.go | 14
-rw-r--r--  common/fdlimit/fdlimit_unix.go | 14
-rw-r--r--  common/fdlimit/fdlimit_windows.go | 16
-rw-r--r--  common/size.go | 27
-rw-r--r--  consensus/errors.go | 4
-rw-r--r--  consensus/ethash/algorithm.go | 43
-rw-r--r--  consensus/ethash/algorithm_go1.7.go | 47
-rw-r--r--  consensus/ethash/algorithm_go1.8.go | 63
-rw-r--r--  consensus/ethash/algorithm_go1.8_test.go | 37
-rw-r--r--  consensus/ethash/algorithm_test.go | 16
-rw-r--r--  consensus/ethash/consensus.go | 12
-rw-r--r--  consensus/ethash/ethash.go | 2
-rw-r--r--  console/console.go | 3
-rw-r--r--  containers/docker/master-alpine/Dockerfile | 2
-rw-r--r--  containers/docker/master-ubuntu/Dockerfile | 2
-rw-r--r--  containers/vagrant/.gitignore | 1
-rw-r--r--  containers/vagrant/Vagrantfile | 38
-rwxr-xr-x  containers/vagrant/provisioners/shell/centos.sh | 11
-rwxr-xr-x  containers/vagrant/provisioners/shell/debian.sh | 11
-rwxr-xr-x  containers/vagrant/provisioners/shell/ubuntu.sh | 11
-rw-r--r--  contracts/chequebook/cheque_test.go | 4
-rw-r--r--  contracts/chequebook/contract/chequebook.go | 155
-rw-r--r--  contracts/chequebook/contract/code.go | 2
-rw-r--r--  contracts/ens/contract/ens.go | 574
-rw-r--r--  contracts/ens/contract/fifsregistrar.go | 31
-rw-r--r--  contracts/ens/contract/publicresolver.go | 849
-rw-r--r--  contracts/release/contract.go | 432
-rw-r--r--  contracts/release/contract.sol | 251
-rw-r--r--  contracts/release/contract_test.go | 374
-rw-r--r--  contracts/release/release.go | 164
-rw-r--r--  core/asm/compiler.go | 2
-rw-r--r--  core/asm/lexer.go | 6
-rw-r--r--  core/bench_test.go | 4
-rw-r--r--  core/block_validator.go | 9
-rw-r--r--  core/block_validator_test.go | 8
-rw-r--r--  core/blockchain.go | 462
-rw-r--r--  core/blockchain_test.go | 351
-rw-r--r--  core/chain_indexer.go | 3
-rw-r--r--  core/chain_makers.go | 9
-rw-r--r--  core/chain_makers_test.go | 2
-rw-r--r--  core/dao_test.go | 24
-rw-r--r--  core/database_util.go | 24
-rw-r--r--  core/fees.go | 23
-rw-r--r--  core/genesis.go | 24
-rw-r--r--  core/genesis_test.go | 12
-rw-r--r--  core/headerchain.go | 39
-rw-r--r--  core/state/database.go | 51
-rw-r--r--  core/state/iterator_test.go | 17
-rw-r--r--  core/state/state_object.go | 5
-rw-r--r--  core/state/state_test.go | 6
-rw-r--r--  core/state/statedb.go | 38
-rw-r--r--  core/state/statedb_test.go | 14
-rw-r--r--  core/state/sync_test.go | 44
-rw-r--r--  core/state_transition.go | 3
-rw-r--r--  core/tx_pool.go | 31
-rw-r--r--  core/tx_pool_test.go | 163
-rw-r--r--  core/types/block.go | 9
-rw-r--r--  core/types/receipt.go | 13
-rw-r--r--  core/types/transaction.go | 2
-rw-r--r--  core/vm/contracts.go | 31
-rw-r--r--  core/vm/contracts_test.go | 16
-rw-r--r--  core/vm/instructions.go | 131
-rw-r--r--  core/vm/instructions_test.go | 184
-rw-r--r--  core/vm/interpreter.go | 24
-rw-r--r--  core/vm/jump_table.go | 33
-rw-r--r--  core/vm/opcodes.go | 9
-rw-r--r--  crypto/bn256/bn256_amd64.go | 63
-rw-r--r--  crypto/bn256/bn256_other.go | 63
-rw-r--r--  crypto/bn256/cloudflare/bn256.go | 481
-rw-r--r--  crypto/bn256/cloudflare/bn256_test.go | 118
-rw-r--r--  crypto/bn256/cloudflare/constants.go | 59
-rw-r--r--  crypto/bn256/cloudflare/curve.go | 229
-rw-r--r--  crypto/bn256/cloudflare/example_test.go | 45
-rw-r--r--  crypto/bn256/cloudflare/gfp.go | 81
-rw-r--r--  crypto/bn256/cloudflare/gfp.h | 32
-rw-r--r--  crypto/bn256/cloudflare/gfp12.go | 160
-rw-r--r--  crypto/bn256/cloudflare/gfp2.go | 156
-rw-r--r--  crypto/bn256/cloudflare/gfp6.go | 213
-rw-r--r--  crypto/bn256/cloudflare/gfp_amd64.go | 15
-rw-r--r--  crypto/bn256/cloudflare/gfp_amd64.s | 97
-rw-r--r--  crypto/bn256/cloudflare/gfp_pure.go | 19
-rw-r--r--  crypto/bn256/cloudflare/gfp_test.go | 62
-rw-r--r--  crypto/bn256/cloudflare/main_test.go | 73
-rw-r--r--  crypto/bn256/cloudflare/mul.h | 181
-rw-r--r--  crypto/bn256/cloudflare/mul_bmi2.h | 112
-rw-r--r--  crypto/bn256/cloudflare/optate.go | 271
-rw-r--r--  crypto/bn256/cloudflare/twist.go | 204
-rw-r--r--  crypto/bn256/google/bn256.go (renamed from crypto/bn256/bn256.go) | 49
-rw-r--r--  crypto/bn256/google/bn256_test.go (renamed from crypto/bn256/bn256_test.go) | 35
-rw-r--r--  crypto/bn256/google/constants.go (renamed from crypto/bn256/constants.go) | 0
-rw-r--r--  crypto/bn256/google/curve.go (renamed from crypto/bn256/curve.go) | 0
-rw-r--r--  crypto/bn256/google/example_test.go (renamed from crypto/bn256/example_test.go) | 0
-rw-r--r--  crypto/bn256/google/gfp12.go (renamed from crypto/bn256/gfp12.go) | 0
-rw-r--r--  crypto/bn256/google/gfp2.go (renamed from crypto/bn256/gfp2.go) | 0
-rw-r--r--  crypto/bn256/google/gfp6.go (renamed from crypto/bn256/gfp6.go) | 0
-rw-r--r--  crypto/bn256/google/main_test.go (renamed from crypto/bn256/main_test.go) | 0
-rw-r--r--  crypto/bn256/google/optate.go (renamed from crypto/bn256/optate.go) | 0
-rw-r--r--  crypto/bn256/google/twist.go (renamed from crypto/bn256/twist.go) | 8
-rw-r--r--  dashboard/README.md | 15
-rw-r--r--  dashboard/assets.go | 4257
-rw-r--r--  dashboard/assets/components/CustomTooltip.jsx | 6
-rw-r--r--  dashboard/assets/components/Dashboard.jsx | 28
-rw-r--r--  dashboard/assets/components/Footer.jsx | 158
-rw-r--r--  dashboard/assets/components/Header.jsx | 50
-rw-r--r--  dashboard/assets/components/Main.jsx | 3
-rw-r--r--  dashboard/assets/components/SideBar.jsx | 4
-rw-r--r--  dashboard/assets/index.html (renamed from dashboard/assets/dashboard.html) | 0
-rw-r--r--  dashboard/assets/package-lock.json | 7678
-rw-r--r--  dashboard/assets/package.json | 13
-rw-r--r--  dashboard/assets/types/content.jsx | 34
-rw-r--r--  dashboard/assets/webpack.config.js | 3
-rw-r--r--  dashboard/assets/yarn.lock | 6551
-rw-r--r--  dashboard/config.go | 4
-rw-r--r--  dashboard/dashboard.go | 46
-rw-r--r--  dashboard/message.go | 32
-rw-r--r--  eth/api.go | 4
-rw-r--r--  eth/api_backend.go | 12
-rw-r--r--  eth/api_test.go | 2
-rw-r--r--  eth/api_tracer.go | 135
-rw-r--r--  eth/backend.go | 14
-rw-r--r--  eth/bind.go | 136
-rw-r--r--  eth/config.go | 10
-rw-r--r--  eth/downloader/downloader.go | 372
-rw-r--r--  eth/downloader/downloader_test.go | 205
-rw-r--r--  eth/downloader/metrics.go | 28
-rw-r--r--  eth/downloader/queue.go | 171
-rw-r--r--  eth/downloader/statesync.go | 35
-rw-r--r--  eth/fetcher/fetcher.go | 2
-rw-r--r--  eth/fetcher/metrics.go | 28
-rw-r--r--  eth/filters/api.go | 7
-rw-r--r--  eth/filters/filter.go | 19
-rw-r--r--  eth/filters/filter_system.go | 33
-rw-r--r--  eth/filters/filter_system_test.go | 34
-rw-r--r--  eth/handler.go | 19
-rw-r--r--  eth/handler_test.go | 16
-rw-r--r--  eth/helper_test.go | 22
-rw-r--r--  eth/metrics.go | 64
-rw-r--r--  eth/protocol_test.go | 20
-rw-r--r--  eth/sync.go | 19
-rw-r--r--  eth/sync_test.go | 4
-rw-r--r--  eth/tracers/tracer.go | 2
-rw-r--r--  eth/tracers/tracer_test.go | 2
-rw-r--r--  ethdb/database.go | 125
-rw-r--r--  ethdb/interface.go | 2
-rw-r--r--  ethdb/memory_database.go | 5
-rw-r--r--  internal/build/env.go | 2
-rw-r--r--  internal/cmdtest/test_cmd.go | 16
-rw-r--r--  internal/debug/api.go | 27
-rw-r--r--  internal/debug/flags.go | 6
-rw-r--r--  internal/ethapi/addrlock.go | 2
-rw-r--r--  internal/ethapi/api.go | 120
-rw-r--r--  internal/web3ext/web3ext.go | 21
-rw-r--r--  les/api_backend.go | 4
-rw-r--r--  les/backend.go | 5
-rw-r--r--  les/fetcher.go | 33
-rw-r--r--  les/handler.go | 241
-rw-r--r--  les/handler_test.go | 162
-rw-r--r--  les/helper_test.go | 39
-rw-r--r--  les/metrics.go | 8
-rw-r--r--  les/odr_requests.go | 26
-rw-r--r--  les/odr_test.go | 1
-rw-r--r--  les/peer.go | 5
-rw-r--r--  les/protocol.go | 4
-rw-r--r--  les/retrieve.go | 2
-rw-r--r--  les/server.go | 34
-rw-r--r--  light/lightchain.go | 41
-rw-r--r--  light/nodeset.go | 37
-rw-r--r--  light/odr_test.go | 4
-rw-r--r--  light/odr_util.go | 56
-rw-r--r--  light/postprocess.go | 103
-rw-r--r--  light/trie.go | 18
-rw-r--r--  light/trie_test.go | 2
-rw-r--r--  light/txpool_test.go | 2
-rw-r--r--  metrics/FORK.md | 1
-rw-r--r--  metrics/LICENSE (renamed from vendor/github.com/rcrowley/go-metrics/LICENSE) | 0
-rw-r--r--  metrics/README.md (renamed from vendor/github.com/rcrowley/go-metrics/README.md) | 37
-rw-r--r--  metrics/counter.go (renamed from vendor/github.com/rcrowley/go-metrics/counter.go) | 2
-rw-r--r--  metrics/counter_test.go | 77
-rw-r--r--  metrics/debug.go (renamed from vendor/github.com/rcrowley/go-metrics/debug.go) | 6
-rw-r--r--  metrics/debug_test.go | 48
-rw-r--r--  metrics/ewma.go (renamed from vendor/github.com/rcrowley/go-metrics/ewma.go) | 2
-rw-r--r--  metrics/ewma_test.go | 225
-rw-r--r--  metrics/exp/exp.go (renamed from vendor/github.com/rcrowley/go-metrics/exp/exp.go) | 46
-rw-r--r--  metrics/gauge.go (renamed from vendor/github.com/rcrowley/go-metrics/gauge.go) | 4
-rw-r--r--  metrics/gauge_float64.go (renamed from vendor/github.com/rcrowley/go-metrics/gauge_float64.go) | 4
-rw-r--r--  metrics/gauge_float64_test.go | 59
-rw-r--r--  metrics/gauge_test.go | 68
-rw-r--r--  metrics/graphite.go (renamed from vendor/github.com/rcrowley/go-metrics/graphite.go) | 2
-rw-r--r--  metrics/graphite_test.go | 22
-rw-r--r--  metrics/healthcheck.go (renamed from vendor/github.com/rcrowley/go-metrics/healthcheck.go) | 2
-rw-r--r--  metrics/histogram.go (renamed from vendor/github.com/rcrowley/go-metrics/histogram.go) | 2
-rw-r--r--  metrics/histogram_test.go | 95
-rw-r--r--  metrics/influxdb/LICENSE | 19
-rw-r--r--  metrics/influxdb/README.md | 30
-rw-r--r--  metrics/influxdb/influxdb.go | 227
-rw-r--r--  metrics/init_test.go | 5
-rw-r--r--  metrics/json.go | 31
-rw-r--r--  metrics/json_test.go | 28
-rw-r--r--  metrics/librato/client.go | 102
-rw-r--r--  metrics/librato/librato.go | 235
-rw-r--r--  metrics/log.go (renamed from vendor/github.com/rcrowley/go-metrics/log.go) | 2
-rw-r--r--  metrics/memory.md (renamed from vendor/github.com/rcrowley/go-metrics/memory.md) | 0
-rw-r--r--  metrics/meter.go (renamed from vendor/github.com/rcrowley/go-metrics/meter.go) | 50
-rw-r--r--  metrics/meter_test.go | 73
-rw-r--r--  metrics/metrics.go | 76
-rw-r--r--  metrics/metrics_test.go | 125
-rw-r--r--  metrics/opentsdb.go (renamed from vendor/github.com/rcrowley/go-metrics/opentsdb.go) | 2
-rw-r--r--  metrics/opentsdb_test.go | 21
-rw-r--r--  metrics/registry.go (renamed from vendor/github.com/rcrowley/go-metrics/registry.go) | 88
-rw-r--r--  metrics/registry_test.go | 305
-rw-r--r--  metrics/resetting_timer.go | 237
-rw-r--r--  metrics/resetting_timer_test.go | 106
-rw-r--r--  metrics/runtime.go (renamed from vendor/github.com/rcrowley/go-metrics/runtime.go) | 2
-rw-r--r--  metrics/runtime_cgo.go (renamed from vendor/github.com/rcrowley/go-metrics/runtime_cgo.go) | 0
-rw-r--r--  metrics/runtime_gccpufraction.go (renamed from vendor/github.com/rcrowley/go-metrics/runtime_gccpufraction.go) | 0
-rw-r--r--  metrics/runtime_no_cgo.go (renamed from vendor/github.com/rcrowley/go-metrics/runtime_no_cgo.go) | 0
-rw-r--r--  metrics/runtime_no_gccpufraction.go (renamed from vendor/github.com/rcrowley/go-metrics/runtime_no_gccpufraction.go) | 0
-rw-r--r--  metrics/runtime_test.go | 88
-rw-r--r--  metrics/sample.go (renamed from vendor/github.com/rcrowley/go-metrics/sample.go) | 4
-rw-r--r--  metrics/sample_test.go | 363
-rw-r--r--  metrics/syslog.go (renamed from vendor/github.com/rcrowley/go-metrics/syslog.go) | 2
-rw-r--r--  metrics/timer.go (renamed from vendor/github.com/rcrowley/go-metrics/timer.go) | 22
-rw-r--r--  metrics/timer_test.go | 101
-rwxr-xr-x  metrics/validate.sh (renamed from vendor/github.com/rcrowley/go-metrics/validate.sh) | 2
-rw-r--r--  metrics/writer.go (renamed from vendor/github.com/rcrowley/go-metrics/writer.go) | 2
-rw-r--r--  metrics/writer_test.go | 22
-rw-r--r--  miner/miner.go | 3
-rw-r--r--  miner/worker.go | 2
-rw-r--r--  mobile/android_test.go | 15
-rw-r--r--  mobile/bind.go | 20
-rw-r--r--  node/api.go | 24
-rw-r--r--  node/config.go | 11
-rw-r--r--  node/defaults.go | 11
-rw-r--r--  node/node.go | 29
-rw-r--r--  p2p/dial.go | 13
-rw-r--r--  p2p/dial_test.go | 44
-rw-r--r--  p2p/discover/database.go | 17
-rw-r--r--  p2p/discover/database_test.go | 18
-rw-r--r--  p2p/discover/node.go | 6
-rw-r--r--  p2p/discover/table.go | 486
-rw-r--r--  p2p/discover/table_test.go | 171
-rw-r--r--  p2p/discover/udp.go | 88
-rw-r--r--  p2p/discover/udp_test.go | 29
-rw-r--r--  p2p/discv5/net.go | 40
-rw-r--r--  p2p/discv5/ticket.go | 12
-rw-r--r--  p2p/discv5/udp.go | 20
-rw-r--r--  p2p/message.go | 26
-rw-r--r--  p2p/metrics.go | 8
-rw-r--r--  p2p/netutil/net.go | 131
-rw-r--r--  p2p/netutil/net_test.go | 89
-rw-r--r--  p2p/peer.go | 6
-rw-r--r--  p2p/protocols/protocol.go | 311
-rw-r--r--  p2p/protocols/protocol_test.go | 389
-rw-r--r--  p2p/rlpx.go | 18
-rw-r--r--  p2p/rlpx_test.go | 38
-rw-r--r--  p2p/server.go | 83
-rw-r--r--  p2p/simulations/adapters/state.go | 1
-rw-r--r--  p2p/testing/peerpool.go | 67
-rw-r--r--  p2p/testing/protocolsession.go | 280
-rw-r--r--  p2p/testing/protocoltester.go | 269
-rw-r--r--  params/bootnodes.go | 15
-rw-r--r--  params/config.go | 83
-rw-r--r--  params/version.go | 2
-rw-r--r--  rpc/http.go | 76
-rw-r--r--  rpc/http_test.go | 2
-rw-r--r--  rpc/json.go | 46
-rw-r--r--  rpc/server.go | 2
-rw-r--r--  rpc/types_test.go | 2
-rw-r--r--  rpc/websocket.go | 30
-rw-r--r--  swarm/api/api.go | 129
-rw-r--r--  swarm/api/api_test.go | 125
-rw-r--r--  swarm/api/config.go | 4
-rw-r--r--  swarm/api/http/error.go | 52
-rw-r--r--  swarm/api/http/error_templates.go | 31
-rw-r--r--  swarm/api/http/error_test.go | 42
-rw-r--r--  swarm/api/http/server.go | 102
-rw-r--r--  swarm/api/http/server_test.go | 6
-rw-r--r--  swarm/api/http/templates.go | 146
-rw-r--r--  swarm/fuse/swarmfs_util.go | 7
-rw-r--r--  swarm/metrics/flags.go | 91
-rw-r--r--  swarm/network/depo.go | 15
-rw-r--r--  swarm/network/hive.go | 10
-rw-r--r--  swarm/network/kademlia/kademlia.go | 28
-rw-r--r--  swarm/network/protocol.go | 24
-rw-r--r--  swarm/storage/chunker.go | 14
-rw-r--r--  swarm/storage/chunker_test.go | 2
-rw-r--r--  swarm/storage/dbstore.go | 9
-rw-r--r--  swarm/storage/localstore.go | 16
-rw-r--r--  swarm/storage/memstore.go | 14
-rw-r--r--  swarm/swarm.go | 156
-rw-r--r--  swarm/swarm_test.go | 119
-rw-r--r--  tests/block_test_util.go | 4
-rw-r--r--  tests/difficulty_test.go | 1
-rw-r--r--  tests/difficulty_test_util.go | 1
-rw-r--r--  tests/init.go | 2
-rw-r--r--  tests/init_test.go | 2
-rw-r--r--  tests/state_test.go | 2
-rw-r--r--  tests/state_test_util.go | 8
-rw-r--r--  trie/database.go | 355
-rw-r--r--  trie/hasher.go | 61
-rw-r--r--  trie/iterator_test.go | 125
-rw-r--r--  trie/proof.go | 47
-rw-r--r--  trie/secure_trie.go | 62
-rw-r--r--  trie/secure_trie_test.go | 20
-rw-r--r--  trie/sync.go | 14
-rw-r--r--  trie/sync_test.go | 103
-rw-r--r--  trie/trie.go | 92
-rw-r--r--  trie/trie_test.go | 104
-rw-r--r--  vendor/github.com/influxdata/influxdb/LICENSE | 20
-rw-r--r--  vendor/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md | 62
-rw-r--r--  vendor/github.com/influxdata/influxdb/client/README.md | 306
-rw-r--r--  vendor/github.com/influxdata/influxdb/client/influxdb.go | 840
-rw-r--r--  vendor/github.com/influxdata/influxdb/client/v2/client.go | 635
-rw-r--r--  vendor/github.com/influxdata/influxdb/client/v2/udp.go | 112
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/consistency.go | 48
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/inline_fnv.go | 32
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/inline_strconv_parse.go | 44
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/points.go | 2337
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/rows.go | 62
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/statistic.go | 42
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/time.go | 74
-rw-r--r--  vendor/github.com/influxdata/influxdb/models/uint_support.go | 7
-rw-r--r--  vendor/github.com/influxdata/influxdb/pkg/escape/bytes.go | 115
-rw-r--r--  vendor/github.com/influxdata/influxdb/pkg/escape/strings.go | 21
-rw-r--r--  vendor/github.com/rcrowley/go-metrics/json.go | 87
-rw-r--r--  vendor/github.com/rcrowley/go-metrics/metrics.go | 13
-rw-r--r--  vendor/github.com/rjeczalik/notify/watcher_fsevents_cgo.go | 6
-rw-r--r--  vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.10.go | 9
-rw-r--r--  vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.9.go | 14
-rw-r--r--  vendor/github.com/syndtr/goleveldb/.travis.yml | 12
-rw-r--r--  vendor/github.com/syndtr/goleveldb/README.md | 4
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/db.go | 23
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/db_write.go | 10
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/iterator/iter.go | 2
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/memdb/memdb.go | 2
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/session.go | 4
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/storage.go | 63
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/storage/file_storage_plan9.go | 4
-rw-r--r--  vendor/github.com/syndtr/goleveldb/leveldb/util/util.go | 2
-rw-r--r--  vendor/gopkg.in/olebedev/go-duktape.v3/api.go | 4
-rw-r--r--  vendor/gopkg.in/olebedev/go-duktape.v3/duktape.go | 1
-rw-r--r--  vendor/vendor.json | 106
-rw-r--r--  whisper/mailserver/mailserver.go | 52
-rw-r--r--  whisper/mailserver/server_test.go | 27
-rw-r--r--  whisper/whisperv2/api.go | 402
-rw-r--r--  whisper/whisperv2/doc.go | 32
-rw-r--r--  whisper/whisperv2/envelope.go | 150
-rw-r--r--  whisper/whisperv2/envelope_test.go | 158
-rw-r--r--  whisper/whisperv2/filter.go | 129
-rw-r--r--  whisper/whisperv2/filter_test.go | 215
-rw-r--r--  whisper/whisperv2/main.go | 106
-rw-r--r--  whisper/whisperv2/message.go | 158
-rw-r--r--  whisper/whisperv2/message_test.go | 158
-rw-r--r--  whisper/whisperv2/peer.go | 174
-rw-r--r--  whisper/whisperv2/peer_test.go | 261
-rw-r--r--  whisper/whisperv2/topic.go | 140
-rw-r--r--  whisper/whisperv2/topic_test.go | 215
-rw-r--r--  whisper/whisperv2/whisper.go | 378
-rw-r--r--  whisper/whisperv2/whisper_test.go | 216
-rw-r--r--  whisper/whisperv5/api.go | 25
-rw-r--r--  whisper/whisperv5/filter.go | 8
-rw-r--r--  whisper/whisperv5/filter_test.go | 44
-rw-r--r--  whisper/whisperv6/api.go | 49
-rw-r--r--  whisper/whisperv6/config.go | 2
-rw-r--r--  whisper/whisperv6/doc.go | 21
-rw-r--r--  whisper/whisperv6/envelope.go | 41
-rw-r--r--  whisper/whisperv6/filter.go | 131
-rw-r--r--  whisper/whisperv6/filter_test.go | 75
-rw-r--r--  whisper/whisperv6/gen_criteria_json.go | 2
-rw-r--r--  whisper/whisperv6/gen_message_json.go | 2
-rw-r--r--  whisper/whisperv6/gen_newmessage_json.go | 2
-rw-r--r--  whisper/whisperv6/message.go | 214
-rw-r--r--  whisper/whisperv6/message_test.go | 57
-rw-r--r--  whisper/whisperv6/peer.go | 113
-rw-r--r--  whisper/whisperv6/peer_test.go | 186
-rw-r--r--  whisper/whisperv6/topic.go | 4
-rw-r--r--  whisper/whisperv6/whisper.go | 505
-rw-r--r--  whisper/whisperv6/whisper_test.go | 47
449 files changed, 31348 insertions, 18696 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6076fe46a..a7b617655 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,5 +5,7 @@ accounts/usbwallet @karalabe
consensus @karalabe
core/ @karalabe @holiman
eth/ @karalabe
+les/ @zsfelfoldi
+light/ @zsfelfoldi
mobile/ @karalabe
p2p/ @fjl @zsfelfoldi
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index a332b815d..9f2dbfcb8 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -2,7 +2,7 @@
Before you do a feature request please check and make sure that it isn't possible
through some other means. The JavaScript enabled console is a powerful feature
-in the right hands. Please check our [Bitchin' tricks](https://github.com/ethereum/go-ethereum/wiki/bitchin-tricks) wiki page for more info
+in the right hands. Please check our [Wiki page](https://github.com/ethereum/go-ethereum/wiki) for more info
and help.
## Contributing
diff --git a/.github/no-response.yml b/.github/no-response.yml
new file mode 100644
index 000000000..a6227159d
--- /dev/null
+++ b/.github/no-response.yml
@@ -0,0 +1,11 @@
+# Number of days of inactivity before an Issue is closed for lack of response
+daysUntilClose: 30
+# Label requiring a response
+responseRequiredLabel: more-information-needed
+# Comment to post when closing an Issue for lack of response. Set to `false` to disable
+closeComment: >
+ This issue has been automatically closed because there has been no response
+ to our request for more information from the original author. With only the
+ information that is currently in the issue, we don't have enough information
+ to take action. Please reach out if you have or find the answers we need so
+ that we can investigate further.
diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 000000000..c621939c3
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 366
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 42
+# Issues with these labels will never be considered stale
+exemptLabels:
+ - pinned
+ - security
+# Label to use when marking an issue as stale
+staleLabel: stale
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. It will be closed if no further activity occurs. Thank you
+ for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
diff --git a/.gitignore b/.gitignore
index 0763d8492..3e0009e16 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,8 +34,14 @@ profile.cov
# IdeaIDE
.idea
+# VS Code
+.vscode
+
# dashboard
/dashboard/assets/flow-typed
/dashboard/assets/node_modules
/dashboard/assets/stats.json
/dashboard/assets/bundle.js
+/dashboard/assets/package-lock.json
+
+**/yarn-error.log
diff --git a/.mailmap b/.mailmap
index d51c7b609..cc4b871a3 100644
--- a/.mailmap
+++ b/.mailmap
@@ -65,7 +65,8 @@ Enrique Fynn <enriquefynn@gmail.com>
Vincent G <caktux@gmail.com>
-RJ Catalano <rj@erisindustries.com>
+RJ Catalano <catalanor0220@gmail.com>
+RJ Catalano <catalanor0220@gmail.com> <rj@erisindustries.com>
Nchinda Nchinda <nchinda2@gmail.com>
@@ -109,3 +110,14 @@ Frank Wang <eternnoir@gmail.com>
Gary Rong <garyrong0905@gmail.com>
Guillaume Nicolas <guin56@gmail.com>
+
+Sorin Neacsu <sorin.neacsu@gmail.com>
+Sorin Neacsu <sorin.neacsu@gmail.com> <sorin@users.noreply.github.com>
+
+Valentin Wüstholz <wuestholz@gmail.com>
+Valentin Wüstholz <wuestholz@gmail.com> <wuestholz@users.noreply.github.com>
+
+Armin Braun <me@obrown.io>
+
+Ernesto del Toro <ernesto.deltoro@gmail.com>
+Ernesto del Toro <ernesto.deltoro@gmail.com> <ernestodeltoro@users.noreply.github.com>
diff --git a/.travis.yml b/.travis.yml
index ba62b87bf..b3757ff7d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,7 @@ matrix:
- os: linux
dist: trusty
sudo: required
- go: 1.7.x
+ go: 1.8.x
script:
- sudo modprobe fuse
- sudo chmod 666 /dev/fuse
@@ -17,7 +17,7 @@ matrix:
- os: linux
dist: trusty
sudo: required
- go: 1.8.x
+ go: 1.9.x
script:
- sudo modprobe fuse
- sudo chmod 666 /dev/fuse
@@ -29,7 +29,7 @@ matrix:
- os: linux
dist: trusty
sudo: required
- go: 1.9.x
+ go: "1.10"
script:
- sudo modprobe fuse
- sudo chmod 666 /dev/fuse
@@ -38,7 +38,7 @@ matrix:
- go run build/ci.go test -coverage
- os: osx
- go: 1.9.x
+ go: "1.10"
script:
- unset -f cd # workaround for https://github.com/travis-ci/travis-ci/issues/8703
- brew update
@@ -50,7 +50,7 @@ matrix:
# This builder only tests code linters on latest version of Go
- os: linux
dist: trusty
- go: 1.9.x
+ go: "1.10"
env:
- lint
git:
@@ -62,7 +62,7 @@ matrix:
- os: linux
dist: trusty
sudo: required
- go: 1.9.x
+ go: "1.10"
env:
- ubuntu-ppa
- azure-linux
@@ -102,7 +102,7 @@ matrix:
dist: trusty
services:
- docker
- go: 1.9.x
+ go: "1.10"
env:
- azure-linux-mips
git:
@@ -146,7 +146,7 @@ matrix:
git:
submodules: false # avoid cloning ethereum/tests
before_install:
- - curl https://storage.googleapis.com/golang/go1.9.2.linux-amd64.tar.gz | tar -xz
+ - curl https://storage.googleapis.com/golang/go1.10.linux-amd64.tar.gz | tar -xz
- export PATH=`pwd`/go/bin:$PATH
- export GOROOT=`pwd`/go
- export GOPATH=$HOME/go
@@ -163,7 +163,7 @@ matrix:
# This builder does the OSX Azure, iOS CocoaPods and iOS Azure uploads
- os: osx
- go: 1.9.x
+ go: "1.10"
env:
- azure-osx
- azure-ios
@@ -185,13 +185,15 @@ matrix:
- xctool -version
- xcrun simctl list
+ # Workaround for https://github.com/golang/go/issues/23749
+ - export CGO_CFLAGS_ALLOW='-fmodules|-fblocks|-fobjc-arc'
- go run build/ci.go xcode -signer IOS_SIGNING_KEY -deploy trunk -upload gethstore/builds
# This builder does the Azure archive purges to avoid accumulating junk
- os: linux
dist: trusty
sudo: required
- go: 1.9.x
+ go: "1.10"
env:
- azure-purge
git:
diff --git a/AUTHORS b/AUTHORS
index faa19d281..bd44a3de5 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -1,85 +1,173 @@
# This is the official list of go-ethereum authors for copyright purposes.
+Afri Schoedon <5chdn@users.noreply.github.com>
+Agustin Armellini Fischer <armellini13@gmail.com>
+Airead <fgh1987168@gmail.com>
+Alan Chen <alanchchen@users.noreply.github.com>
+Alejandro Isaza <alejandro.isaza@gmail.com>
Ales Katona <ales@coinbase.com>
Alex Leverington <alex@ethdev.com>
+Alex Wu <wuyiding@gmail.com>
Alexandre Van de Sande <alex.vandesande@ethdev.com>
+Ali Hajimirza <Ali92hm@users.noreply.github.com>
+Anton Evangelatov <anton.evangelatov@gmail.com>
+Arba Sasmoyo <arba.sasmoyo@gmail.com>
+Armani Ferrante <armaniferrante@berkeley.edu>
+Armin Braun <me@obrown.io>
Aron Fischer <github@aron.guru>
Bas van Kervel <bas@ethdev.com>
Benjamin Brent <benjamin@benjaminbrent.com>
+Benoit Verkindt <benoit.verkindt@gmail.com>
+Bo <bohende@gmail.com>
+Bo Ye <boy.e.computer.1982@outlook.com>
+Bob Glickstein <bobg@users.noreply.github.com>
Brian Schroeder <bts@gmail.com>
Casey Detrio <cdetrio@gmail.com>
+Chase Wright <mysticryuujin@gmail.com>
Christoph Jentzsch <jentzsch.software@gmail.com>
Daniel A. Nagy <nagy.da@gmail.com>
+Daniel Sloof <goapsychadelic@gmail.com>
+Darrel Herbst <dherbst@gmail.com>
+Dave Appleton <calistralabs@gmail.com>
Diego Siqueira <DiSiqueira@users.noreply.github.com>
+Dmitry Shulyak <yashulyak@gmail.com>
+Egon Elbre <egonelbre@gmail.com>
+Elias Naur <elias.naur@gmail.com>
Elliot Shepherd <elliot@identitii.com>
Enrique Fynn <enriquefynn@gmail.com>
+Ernesto del Toro <ernesto.deltoro@gmail.com>
Ethan Buchman <ethan@coinculture.info>
+Eugene Valeyev <evgen.povt@gmail.com>
+Evangelos Pappas <epappas@evalonlabs.com>
+Evgeny Danilenko <6655321@bk.ru>
Fabian Vogelsteller <fabian@frozeman.de>
+Fabio Barone <fabio.barone.co@gmail.com>
Fabio Berger <fabioberger1991@gmail.com>
+FaceHo <facehoshi@gmail.com>
Felix Lange <fjl@twurst.com>
+Fiisio <liangcszzu@163.com>
Frank Wang <eternnoir@gmail.com>
+Furkan KAMACI <furkankamaci@gmail.com>
Gary Rong <garyrong0905@gmail.com>
+George Ornbo <george@shapeshed.com>
Gregg Dourgarian <greggd@tempworks.com>
+Guillaume Ballet <gballet@gmail.com>
Guillaume Nicolas <guin56@gmail.com>
Gustav Simonsson <gustav.simonsson@gmail.com>
Hao Bryan Cheng <haobcheng@gmail.com>
Henning Diedrich <hd@eonblast.com>
Isidoro Ghezzi <isidoro.ghezzi@icloud.com>
+Ivan Daniluk <ivan.daniluk@gmail.com>
Jae Kwon <jkwon.work@gmail.com>
Jamie Pitts <james.pitts@gmail.com>
+Janoš Guljaš <janos@users.noreply.github.com>
Jason Carver <jacarver@linkedin.com>
+Jay Guo <guojiannan1101@gmail.com>
Jeff R. Allen <jra@nella.org>
Jeffrey Wilcke <jeffrey@ethereum.org>
Jens Agerberg <github@agerberg.me>
+Jia Chenhui <jiachenhui1989@gmail.com>
+Jim McDonald <Jim@mcdee.net>
+Joel Burget <joelburget@gmail.com>
Jonathan Brown <jbrown@bluedroplet.com>
Joseph Chow <ethereum@outlook.com>
Justin Clark-Casey <justincc@justincc.org>
Justin Drake <drakefjustin@gmail.com>
Kenji Siu <kenji@isuntv.com>
Kobi Gurkan <kobigurk@gmail.com>
+Konrad Feldmeier <konrad@brainbot.com>
+Kurkó Mihály <kurkomisi@users.noreply.github.com>
+Kyuntae Ethan Kim <ethan.kyuntae.kim@gmail.com>
Lefteris Karapetsas <lefteris@refu.co>
Leif Jurvetson <leijurv@gmail.com>
+Leo Shklovskii <leo@thermopylae.net>
Lewis Marshall <lewis@lmars.net>
+Lio李欧 <lionello@users.noreply.github.com>
Louis Holbrook <dev@holbrook.no>
Luca Zeug <luclu@users.noreply.github.com>
+Magicking <s@6120.eu>
Maran Hidskes <maran.hidskes@gmail.com>
Marek Kotewicz <marek.kotewicz@gmail.com>
+Mark <markya0616@gmail.com>
Martin Holst Swende <martin@swende.se>
Matthew Di Ferrante <mattdf@users.noreply.github.com>
Matthew Wampler-Doty <matthew.wampler.doty@gmail.com>
+Maximilian Meister <mmeister@suse.de>
Micah Zoltu <micah@zoltu.net>
+Michael Ruminer <michael.ruminer+github@gmail.com>
+Miguel Mota <miguelmota2@gmail.com>
+Miya Chen <miyatlchen@gmail.com>
Nchinda Nchinda <nchinda2@gmail.com>
Nick Dodson <silentcicero@outlook.com>
Nick Johnson <arachnid@notdot.net>
+Nicolas Guillaume <gunicolas@sqli.com>
+Noman <noman@noman.land>
+Oli Bye <olibye@users.noreply.github.com>
+Paul Litvak <litvakpol@012.net.il>
Paulo L F Casaretto <pcasaretto@gmail.com>
+Paweł Bylica <chfast@gmail.com>
Peter Pratscher <pratscher@gmail.com>
+Petr Mikusek <petr@mikusek.info>
Péter Szilágyi <peterke@gmail.com>
-RJ Catalano <rj@erisindustries.com>
+RJ Catalano <catalanor0220@gmail.com>
Ramesh Nair <ram@hiddentao.com>
Ricardo Catalinas Jiménez <r@untroubled.be>
+Ricardo Domingos <ricardohsd@gmail.com>
+Richard Hart <richardhart92@gmail.com>
+Rob <robert@rojotek.com>
+Robert Zaremba <robert.zaremba@scale-it.pl>
+Russ Cox <rsc@golang.org>
Rémy Roy <remyroy@remyroy.com>
+S. Matthew English <s-matthew-english@users.noreply.github.com>
Shintaro Kaneko <kaneshin0120@gmail.com>
+Sorin Neacsu <sorin.neacsu@gmail.com>
Stein Dekker <dekker.stein@gmail.com>
+Steve Waldman <swaldman@mchange.com>
Steven Roose <stevenroose@gmail.com>
Taylor Gerring <taylor.gerring@gmail.com>
Thomas Bocek <tom@tomp2p.net>
+Ti Zhou <tizhou1986@gmail.com>
Tosh Camille <tochecamille@gmail.com>
-Valentin Wüstholz <wuestholz@users.noreply.github.com>
+Valentin Wüstholz <wuestholz@gmail.com>
Victor Farazdagi <simple.square@gmail.com>
Victor Tran <vu.tran54@gmail.com>
Viktor Trón <viktor.tron@gmail.com>
Ville Sundell <github@solarius.fi>
Vincent G <caktux@gmail.com>
Vitalik Buterin <v@buterin.com>
+Vitaly V <vvelikodny@gmail.com>
Vivek Anand <vivekanand1101@users.noreply.github.com>
Vlad Gluhovsky <gluk256@users.noreply.github.com>
Yohann Léon <sybiload@gmail.com>
Yoichi Hirai <i@yoichihirai.com>
+Yondon Fu <yondon.fu@gmail.com>
+Zach <zach.ramsay@gmail.com>
Zahoor Mohamed <zahoor@zahoor.in>
+Zoe Nolan <github@zoenolan.org>
Zsolt Felföldi <zsfelfoldi@gmail.com>
+am2rican5 <am2rican5@gmail.com>
+ayeowch <ayeowch@gmail.com>
+b00ris <b00ris@mail.ru>
+bailantaotao <Edwin@maicoin.com>
+baizhenxuan <nkbai@163.com>
+bloonfield <bloonfield@163.com>
+changhong <changhong.yu@shanbay.com>
+evgk <evgeniy.kamyshev@gmail.com>
+ferhat elmas <elmas.ferhat@gmail.com>
holisticode <holistic.computing@gmail.com>
+jtakalai <juuso.takalainen@streamr.com>
ken10100147 <sunhongping@kanjian.com>
ligi <ligi@ligi.de>
+mark.lin <mark@maicoin.com>
+necaremus <necaremus@gmail.com>
+njupt-moon <1015041018@njupt.edu.cn>
+nkbai <nkbai@163.com>
+rhaps107 <dod-source@yandex.ru>
+slumber1122 <slumber1122@gmail.com>
+sunxiaojun2014 <sunxiaojun-xy@360.cn>
+terasum <terasum@163.com>
+tsarpaul <Litvakpol@012.net.il>
xiekeyang <xiekeyang@users.noreply.github.com>
+yoza <yoza.is12s@gmail.com>
ΞTHΞЯSPHΞЯΞ <{viktor.tron,nagydani,zsfelfoldi}@gmail.com>
Максим Чусовлянов <mchusovlianov@gmail.com>
diff --git a/Dockerfile b/Dockerfile
index f4396fcf2..29cdc80f9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
# Build Geth in a stock Go builder container
-FROM golang:1.9-alpine as builder
+FROM golang:1.10-alpine as builder
RUN apk add --no-cache make gcc musl-dev linux-headers
diff --git a/Dockerfile.alltools b/Dockerfile.alltools
index 79bf0f8d5..1047738d2 100644
--- a/Dockerfile.alltools
+++ b/Dockerfile.alltools
@@ -1,5 +1,5 @@
# Build Geth in a stock Go builder container
-FROM golang:1.9-alpine as builder
+FROM golang:1.10-alpine as builder
RUN apk add --no-cache make gcc musl-dev linux-headers
diff --git a/README.md b/README.md
index 61e36afec..3d0d4d35d 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,8 @@ Official golang implementation of the Ethereum protocol.
[![API Reference](
https://camo.githubusercontent.com/915b7be44ada53c290eb157634330494ebe3e30a/68747470733a2f2f676f646f632e6f72672f6769746875622e636f6d2f676f6c616e672f6764646f3f7374617475732e737667
)](https://godoc.org/github.com/ethereum/go-ethereum)
+[![Go Report Card](https://goreportcard.com/badge/github.com/ethereum/go-ethereum)](https://goreportcard.com/report/github.com/ethereum/go-ethereum)
+[![Travis](https://travis-ci.org/ethereum/go-ethereum.svg?branch=master)](https://travis-ci.org/ethereum/go-ethereum)
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ethereum/go-ethereum?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
Automated builds are available for stable releases and the unstable master branch.
@@ -56,16 +58,14 @@ the user doesn't care about years-old historical data, so we can fast-sync quick
state of the network. To do so:
```
-$ geth --fast --cache=512 console
+$ geth console
```
This command will:
- * Start geth in fast sync mode (`--fast`), causing it to download more data in exchange for avoiding
- processing the entire history of the Ethereum network, which is very CPU intensive.
- * Bump the memory allowance of the database to 512MB (`--cache=512`), which can help significantly in
- sync times especially for HDD users. This flag is optional and you can set it as high or as low as
- you'd like, though we'd recommend the 512MB - 2GB range.
+ * Start geth in fast sync mode (default, can be changed with the `--syncmode` flag), causing it to
+ download more data in exchange for avoiding processing the entire history of the Ethereum network,
+ which is very CPU intensive.
* Start up Geth's built-in interactive [JavaScript console](https://github.com/ethereum/go-ethereum/wiki/JavaScript-Console),
(via the trailing `console` subcommand) through which you can invoke all official [`web3` methods](https://github.com/ethereum/wiki/wiki/JavaScript-API)
as well as Geth's own [management APIs](https://github.com/ethereum/go-ethereum/wiki/Management-APIs).
@@ -80,12 +80,11 @@ entire system. In other words, instead of attaching to the main network, you wan
network with your node, which is fully equivalent to the main network, but with play-Ether only.
```
-$ geth --testnet --fast --cache=512 console
+$ geth --testnet console
```
-The `--fast`, `--cache` flags and `console` subcommand have the exact same meaning as above and they
-are equally useful on the testnet too. Please see above for their explanations if you've skipped to
-here.
+The `console` subcommand have the exact same meaning as above and they are equally useful on the
+testnet too. Please see above for their explanations if you've skipped to here.
Specifying the `--testnet` flag however will reconfigure your Geth instance a bit:
@@ -102,6 +101,14 @@ over between the main network and test network, you should make sure to always u
for play-money and real-money. Unless you manually move accounts, Geth will by default correctly
separate the two networks and will not make any accounts available between them.*
+### Full node on the Rinkeby test network
+
+The above test network is a cross client one based on the ethash proof-of-work consensus algorithm. As such, it has certain extra overhead and is more susceptible to reorganization attacks due to the network's low difficulty / security. Go Ethereum also supports connecting to a proof-of-authority based test network called [*Rinkeby*](https://www.rinkeby.io) (operated by members of the community). This network is lighter, more secure, but is only supported by go-ethereum.
+
+```
+$ geth --rinkeby console
+```
+
### Configuration
As an alternative to passing the numerous flags to the `geth` binary, you can also pass a configuration file via:
@@ -125,10 +132,10 @@ One of the quickest ways to get Ethereum up and running on your machine is by us
```
docker run -d --name ethereum-node -v /Users/alice/ethereum:/root \
-p 8545:8545 -p 30303:30303 \
- ethereum/client-go --fast --cache=512
+ ethereum/client-go
```
-This will start geth in fast sync mode with a DB memory allowance of 512MB just as the above command does. It will also create a persistent volume in your home directory for saving your blockchain as well as map the default ports. There is also an `alpine` tag available for a slim version of the image.
+This will start geth in fast-sync mode with a DB memory allowance of 1GB just as the above command does. It will also create a persistent volume in your home directory for saving your blockchain as well as map the default ports. There is also an `alpine` tag available for a slim version of the image.
Do not forget `--rpcaddr 0.0.0.0`, if you want to access RPC from other containers and/or hosts. By default, `geth` binds to the local interface and RPC endpoints is not accessible from the outside.
diff --git a/VERSION b/VERSION
index 27f9cd322..a7ee35a3e 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.8.0
+1.8.3
diff --git a/accounts/abi/abi.go b/accounts/abi/abi.go
index cbcf4ca92..254b1f7fb 100644
--- a/accounts/abi/abi.go
+++ b/accounts/abi/abi.go
@@ -97,7 +97,6 @@ func (abi *ABI) UnmarshalJSON(data []byte) error {
Type string
Name string
Constant bool
- Indexed bool
Anonymous bool
Inputs []Argument
Outputs []Argument
@@ -137,11 +136,11 @@ func (abi *ABI) UnmarshalJSON(data []byte) error {
// MethodById looks up a method by the 4-byte id
// returns nil if none found
-func (abi *ABI) MethodById(sigdata []byte) *Method {
+func (abi *ABI) MethodById(sigdata []byte) (*Method, error) {
for _, method := range abi.Methods {
if bytes.Equal(method.Id(), sigdata[:4]) {
- return &method
+ return &method, nil
}
}
- return nil
+ return nil, fmt.Errorf("no method with id: %#x", sigdata[:4])
}
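
Note (editorial, not part of the commit): a minimal sketch of how a caller might consume the changed `MethodById` signature above, which now returns an error instead of a silent nil. The single-method ABI JSON and the `package main` wrapper are made up for illustration; only `abi.JSON`, `Method.Id`, `Method.Sig` and the new two-value `MethodById` are assumed from the accounts/abi package.

```
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// Hypothetical single-method ABI, used only to demonstrate the lookup.
	const def = `[{"type":"function","name":"transfer","inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"}]}]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	// Calldata starts with the 4-byte selector; after this change an unknown
	// selector yields an explicit error instead of a nil *Method.
	selector := parsed.Methods["transfer"].Id()
	method, err := parsed.MethodById(selector)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(method.Sig()) // transfer(address,uint256)
}
```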
diff --git a/accounts/abi/abi_test.go b/accounts/abi/abi_test.go
index 2d43b631c..35e0094dd 100644
--- a/accounts/abi/abi_test.go
+++ b/accounts/abi/abi_test.go
@@ -689,7 +689,11 @@ func TestABI_MethodById(t *testing.T) {
}
for name, m := range abi.Methods {
a := fmt.Sprintf("%v", m)
- b := fmt.Sprintf("%v", abi.MethodById(m.Id()))
+ m2, err := abi.MethodById(m.Id())
+ if err != nil {
+ t.Fatalf("Failed to look up ABI method: %v", err)
+ }
+ b := fmt.Sprintf("%v", m2)
if a != b {
t.Errorf("Method %v (id %v) not 'findable' by id in ABI", name, common.ToHex(m.Id()))
}
diff --git a/accounts/abi/argument.go b/accounts/abi/argument.go
index 04ca6150a..1b480da60 100644
--- a/accounts/abi/argument.go
+++ b/accounts/abi/argument.go
@@ -67,6 +67,17 @@ func (arguments Arguments) LengthNonIndexed() int {
return out
}
+// NonIndexed returns the arguments with indexed arguments filtered out
+func (arguments Arguments) NonIndexed() Arguments {
+ var ret []Argument
+ for _, arg := range arguments {
+ if !arg.Indexed {
+ ret = append(ret, arg)
+ }
+ }
+ return ret
+}
+
// isTuple returns true for non-atomic constructs, like (uint,uint) or uint[]
func (arguments Arguments) isTuple() bool {
return len(arguments) > 1
@@ -74,21 +85,25 @@ func (arguments Arguments) isTuple() bool {
// Unpack performs the operation hexdata -> Go format
func (arguments Arguments) Unpack(v interface{}, data []byte) error {
- if arguments.isTuple() {
- return arguments.unpackTuple(v, data)
- }
- return arguments.unpackAtomic(v, data)
-}
-func (arguments Arguments) unpackTuple(v interface{}, output []byte) error {
// make sure the passed value is arguments pointer
- valueOf := reflect.ValueOf(v)
- if reflect.Ptr != valueOf.Kind() {
+ if reflect.Ptr != reflect.ValueOf(v).Kind() {
return fmt.Errorf("abi: Unpack(non-pointer %T)", v)
}
+ marshalledValues, err := arguments.UnpackValues(data)
+ if err != nil {
+ return err
+ }
+ if arguments.isTuple() {
+ return arguments.unpackTuple(v, marshalledValues)
+ }
+ return arguments.unpackAtomic(v, marshalledValues)
+}
+
+func (arguments Arguments) unpackTuple(v interface{}, marshalledValues []interface{}) error {
var (
- value = valueOf.Elem()
+ value = reflect.ValueOf(v).Elem()
typ = value.Type()
kind = value.Kind()
)
@@ -110,30 +125,9 @@ func (arguments Arguments) unpackTuple(v interface{}, output []byte) error {
exists[field] = true
}
}
- // `i` counts the nonindexed arguments.
- // `j` counts the number of complex types.
- // both `i` and `j` are used to to correctly compute `data` offset.
+ for i, arg := range arguments.NonIndexed() {
- i, j := -1, 0
- for _, arg := range arguments {
-
- if arg.Indexed {
- // can't read, continue
- continue
- }
- i++
- marshalledValue, err := toGoType((i+j)*32, arg.Type, output)
- if err != nil {
- return err
- }
-
- if arg.Type.T == ArrayTy {
- // combined index ('i' + 'j') need to be adjusted only by size of array, thus
- // we need to decrement 'j' because 'i' was incremented
- j += arg.Type.Size - 1
- }
-
- reflectValue := reflect.ValueOf(marshalledValue)
+ reflectValue := reflect.ValueOf(marshalledValues[i])
switch kind {
case reflect.Struct:
@@ -166,34 +160,72 @@ func (arguments Arguments) unpackTuple(v interface{}, output []byte) error {
}
// unpackAtomic unpacks ( hexdata -> go ) a single value
-func (arguments Arguments) unpackAtomic(v interface{}, output []byte) error {
- // make sure the passed value is arguments pointer
- valueOf := reflect.ValueOf(v)
- if reflect.Ptr != valueOf.Kind() {
- return fmt.Errorf("abi: Unpack(non-pointer %T)", v)
- }
- arg := arguments[0]
- if arg.Indexed {
- return fmt.Errorf("abi: attempting to unpack indexed variable into element.")
+func (arguments Arguments) unpackAtomic(v interface{}, marshalledValues []interface{}) error {
+ if len(marshalledValues) != 1 {
+ return fmt.Errorf("abi: wrong length, expected single value, got %d", len(marshalledValues))
}
+ elem := reflect.ValueOf(v).Elem()
+ reflectValue := reflect.ValueOf(marshalledValues[0])
+ return set(elem, reflectValue, arguments.NonIndexed()[0])
+}
- value := valueOf.Elem()
+// Computes the full size of an array;
+// i.e. counting nested arrays, which count towards size for unpacking.
+func getArraySize(arr *Type) int {
+ size := arr.Size
+ // Arrays can be nested, with each element being the same size
+ arr = arr.Elem
+ for arr.T == ArrayTy {
+ // Keep multiplying by elem.Size while the elem is an array.
+ size *= arr.Size
+ arr = arr.Elem
+ }
+ // Now we have the full array size, including its children.
+ return size
+}
- marshalledValue, err := toGoType(0, arg.Type, output)
- if err != nil {
- return err
+// UnpackValues can be used to unpack ABI-encoded hexdata according to the ABI-specification,
+// without supplying a struct to unpack into. Instead, this method returns a list containing the
+// values. An atomic argument will be a list with one element.
+func (arguments Arguments) UnpackValues(data []byte) ([]interface{}, error) {
+ retval := make([]interface{}, 0, arguments.LengthNonIndexed())
+ virtualArgs := 0
+ for index, arg := range arguments.NonIndexed() {
+ marshalledValue, err := toGoType((index+virtualArgs)*32, arg.Type, data)
+ if arg.Type.T == ArrayTy {
+ // If we have a static array, like [3]uint256, these are coded as
+ // just like uint256,uint256,uint256.
+ // This means that we need to add two 'virtual' arguments when
+ // we count the index from now on.
+ //
+ // Array values nested multiple levels deep are also encoded inline:
+ // [2][3]uint256: uint256,uint256,uint256,uint256,uint256,uint256
+ //
+ // Calculate the full array size to get the correct offset for the next argument.
+ // Decrement it by 1, as the normal index increment is still applied.
+ virtualArgs += getArraySize(&arg.Type) - 1
+ }
+ if err != nil {
+ return nil, err
+ }
+ retval = append(retval, marshalledValue)
}
- return set(value, reflect.ValueOf(marshalledValue), arg)
+ return retval, nil
}
-// Unpack performs the operation Go format -> Hexdata
+// PackValues performs the operation Go format -> Hexdata
+// It is the semantic opposite of UnpackValues
+func (arguments Arguments) PackValues(args []interface{}) ([]byte, error) {
+ return arguments.Pack(args...)
+}
+
+// Pack performs the operation Go format -> Hexdata
func (arguments Arguments) Pack(args ...interface{}) ([]byte, error) {
// Make sure arguments match up and pack them
abiArgs := arguments
if len(args) != len(abiArgs) {
return nil, fmt.Errorf("argument count mismatch: %d for %d", len(args), len(abiArgs))
}
-
// variable input is the output appended at the end of packed
// output. This is used for strings and bytes types input.
var variableInput []byte
@@ -207,7 +239,6 @@ func (arguments Arguments) Pack(args ...interface{}) ([]byte, error) {
inputOffset += 32
}
}
-
var ret []byte
for i, a := range args {
input := abiArgs[i]
@@ -216,7 +247,6 @@ func (arguments Arguments) Pack(args ...interface{}) ([]byte, error) {
if err != nil {
return nil, err
}
-
// check for a slice type (string, bytes, slice)
if input.Type.requiresLengthPrefix() {
// calculate the offset
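
Note (editorial, not part of the commit): a short sketch of the new `PackValues`/`UnpackValues` helpers added above, exercising the static-array case that the `getArraySize`/`virtualArgs` bookkeeping accounts for. The method definition is hypothetical; the layout comment (a `uint256[3]` packed as three inline 32-byte words) follows the comments in the diff itself.

```
package main

import (
	"fmt"
	"log"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// Hypothetical method with a static array input: uint256[3] is encoded
	// inline as three consecutive 32-byte words.
	const def = `[{"type":"function","name":"demo","inputs":[{"name":"a","type":"uint256"},{"name":"b","type":"uint256[3]"}]}]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	args := parsed.Methods["demo"].Inputs

	packed, err := args.PackValues([]interface{}{
		big.NewInt(1),
		[3]*big.Int{big.NewInt(2), big.NewInt(3), big.NewInt(4)},
	})
	if err != nil {
		log.Fatal(err)
	}
	// UnpackValues returns the decoded values as a plain slice, with no
	// target struct needed: here []interface{}{*big.Int, [3]*big.Int}.
	values, err := args.UnpackValues(packed)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(packed), values) // 128 bytes: 1 word for "a" + 3 inline words for "b"
}
```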
diff --git a/accounts/abi/bind/backend.go b/accounts/abi/bind/backend.go
index a7ca7bfc0..ca60cc1b4 100644
--- a/accounts/abi/bind/backend.go
+++ b/accounts/abi/bind/backend.go
@@ -52,12 +52,6 @@ type ContractCaller interface {
CallContract(ctx context.Context, call ethereum.CallMsg, blockNumber *big.Int) ([]byte, error)
}
-// DeployBackend wraps the operations needed by WaitMined and WaitDeployed.
-type DeployBackend interface {
- TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error)
- CodeAt(ctx context.Context, account common.Address, blockNumber *big.Int) ([]byte, error)
-}
-
// PendingContractCaller defines methods to perform contract calls on the pending state.
// Call will try to discover this interface when access to the pending state is requested.
// If the backend does not support the pending state, Call returns ErrNoPendingState.
@@ -90,8 +84,29 @@ type ContractTransactor interface {
SendTransaction(ctx context.Context, tx *types.Transaction) error
}
+// ContractFilterer defines the methods needed to access log events using one-off
+// queries or continuous event subscriptions.
+type ContractFilterer interface {
+ // FilterLogs executes a log filter operation, blocking during execution and
+ // returning all the results in one batch.
+ //
+ // TODO(karalabe): Deprecate when the subscription one can return past data too.
+ FilterLogs(ctx context.Context, query ethereum.FilterQuery) ([]types.Log, error)
+
+ // SubscribeFilterLogs creates a background log filtering operation, returning
+ // a subscription immediately, which can be used to stream the found events.
+ SubscribeFilterLogs(ctx context.Context, query ethereum.FilterQuery, ch chan<- types.Log) (ethereum.Subscription, error)
+}
+
+// DeployBackend wraps the operations needed by WaitMined and WaitDeployed.
+type DeployBackend interface {
+ TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error)
+ CodeAt(ctx context.Context, account common.Address, blockNumber *big.Int) ([]byte, error)
+}
+
// ContractBackend defines the methods needed to work with contracts on a read-write basis.
type ContractBackend interface {
ContractCaller
ContractTransactor
+ ContractFilterer
}
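
Note (editorial, not part of the commit): any backend that exposes these two methods satisfies the new `ContractFilterer` interface; `ethclient.Client` is assumed here as one such implementation. The endpoint and contract address below are placeholders, and the `FilterQuery` fields mirror the ones the bound-contract code later in this diff fills in (Addresses, Topics, From/ToBlock).

```
package main

import (
	"context"
	"fmt"
	"log"
	"math/big"

	ethereum "github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// Placeholder endpoint; any node exposing the log-filtering RPCs will do.
	client, err := ethclient.Dial("ws://127.0.0.1:8546")
	if err != nil {
		log.Fatal(err)
	}
	// Placeholder contract address; query all of its logs from genesis onwards.
	query := ethereum.FilterQuery{
		FromBlock: big.NewInt(0),
		Addresses: []common.Address{common.HexToAddress("0x0000000000000000000000000000000000000000")},
	}
	logs, err := client.FilterLogs(context.Background(), query) // one-off batch query
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("matched logs:", len(logs))
}
```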
diff --git a/accounts/abi/bind/backends/simulated.go b/accounts/abi/bind/backends/simulated.go
index 5d8aa48a4..2b5c5fc4a 100644
--- a/accounts/abi/bind/backends/simulated.go
+++ b/accounts/abi/bind/backends/simulated.go
@@ -30,11 +30,15 @@ import (
"github.com/ethereum/go-ethereum/common/math"
"github.com/ethereum/go-ethereum/consensus/ethash"
"github.com/ethereum/go-ethereum/core"
+ "github.com/ethereum/go-ethereum/core/bloombits"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/core/vm"
+ "github.com/ethereum/go-ethereum/eth/filters"
"github.com/ethereum/go-ethereum/ethdb"
+ "github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/params"
+ "github.com/ethereum/go-ethereum/rpc"
)
// This nil assignment ensures compile time that SimulatedBackend implements bind.ContractBackend.
@@ -53,6 +57,8 @@ type SimulatedBackend struct {
pendingBlock *types.Block // Currently pending block that will be imported on request
pendingState *state.StateDB // Currently pending state that will be the active on on request
+ events *filters.EventSystem // Event system for filtering log events live
+
config *params.ChainConfig
}
@@ -62,8 +68,14 @@ func NewSimulatedBackend(alloc core.GenesisAlloc) *SimulatedBackend {
database, _ := ethdb.NewMemDatabase()
genesis := core.Genesis{Config: params.AllEthashProtocolChanges, Alloc: alloc}
genesis.MustCommit(database)
- blockchain, _ := core.NewBlockChain(database, genesis.Config, ethash.NewFaker(), vm.Config{})
- backend := &SimulatedBackend{database: database, blockchain: blockchain, config: genesis.Config}
+ blockchain, _ := core.NewBlockChain(database, nil, genesis.Config, ethash.NewFaker(), vm.Config{})
+
+ backend := &SimulatedBackend{
+ database: database,
+ blockchain: blockchain,
+ config: genesis.Config,
+ events: filters.NewEventSystem(new(event.TypeMux), &filterBackend{database, blockchain}, false),
+ }
backend.rollback()
return backend
}
@@ -90,8 +102,10 @@ func (b *SimulatedBackend) Rollback() {
func (b *SimulatedBackend) rollback() {
blocks, _ := core.GenerateChain(b.config, b.blockchain.CurrentBlock(), ethash.NewFaker(), b.database, 1, func(int, *core.BlockGen) {})
+ statedb, _ := b.blockchain.State()
+
b.pendingBlock = blocks[0]
- b.pendingState, _ = state.New(b.pendingBlock.Root(), state.NewDatabase(b.database))
+ b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
}
// CodeAt returns the code associated with a certain account in the blockchain.
@@ -248,7 +262,7 @@ func (b *SimulatedBackend) EstimateGas(ctx context.Context, call ethereum.CallMs
return hi, nil
}
-// callContract implemens common code between normal and pending contract calls.
+// callContract implements common code between normal and pending contract calls.
// state is modified during execution, make sure to copy it if necessary.
func (b *SimulatedBackend) callContract(ctx context.Context, call ethereum.CallMsg, block *types.Block, statedb *state.StateDB) ([]byte, uint64, bool, error) {
// Ensure message is initialized properly.
@@ -297,12 +311,76 @@ func (b *SimulatedBackend) SendTransaction(ctx context.Context, tx *types.Transa
}
block.AddTxWithChain(b.blockchain, tx)
})
+ statedb, _ := b.blockchain.State()
+
b.pendingBlock = blocks[0]
- b.pendingState, _ = state.New(b.pendingBlock.Root(), state.NewDatabase(b.database))
+ b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
return nil
}
-// JumpTimeInSeconds adds skip seconds to the clock
+// FilterLogs executes a log filter operation, blocking during execution and
+// returning all the results in one batch.
+//
+// TODO(karalabe): Deprecate when the subscription one can return past data too.
+func (b *SimulatedBackend) FilterLogs(ctx context.Context, query ethereum.FilterQuery) ([]types.Log, error) {
+ // Initialize unset filter boundaried to run from genesis to chain head
+ from := int64(0)
+ if query.FromBlock != nil {
+ from = query.FromBlock.Int64()
+ }
+ to := int64(-1)
+ if query.ToBlock != nil {
+ to = query.ToBlock.Int64()
+ }
+ // Construct and execute the filter
+ filter := filters.New(&filterBackend{b.database, b.blockchain}, from, to, query.Addresses, query.Topics)
+
+ logs, err := filter.Logs(ctx)
+ if err != nil {
+ return nil, err
+ }
+ res := make([]types.Log, len(logs))
+ for i, log := range logs {
+ res[i] = *log
+ }
+ return res, nil
+}
+
+// SubscribeFilterLogs creates a background log filtering operation, returning a
+// subscription immediately, which can be used to stream the found events.
+func (b *SimulatedBackend) SubscribeFilterLogs(ctx context.Context, query ethereum.FilterQuery, ch chan<- types.Log) (ethereum.Subscription, error) {
+ // Subscribe to contract events
+ sink := make(chan []*types.Log)
+
+ sub, err := b.events.SubscribeLogs(query, sink)
+ if err != nil {
+ return nil, err
+ }
+ // Since we're getting logs in batches, we need to flatten them into a plain stream
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case logs := <-sink:
+ for _, log := range logs {
+ select {
+ case ch <- *log:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
+// AdjustTime adds a time shift to the simulated clock.
func (b *SimulatedBackend) AdjustTime(adjustment time.Duration) error {
b.mu.Lock()
defer b.mu.Unlock()
@@ -312,8 +390,10 @@ func (b *SimulatedBackend) AdjustTime(adjustment time.Duration) error {
}
block.OffsetTime(int64(adjustment.Seconds()))
})
+ statedb, _ := b.blockchain.State()
+
b.pendingBlock = blocks[0]
- b.pendingState, _ = state.New(b.pendingBlock.Root(), state.NewDatabase(b.database))
+ b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
return nil
}
@@ -331,3 +411,57 @@ func (m callmsg) GasPrice() *big.Int { return m.CallMsg.GasPrice }
func (m callmsg) Gas() uint64 { return m.CallMsg.Gas }
func (m callmsg) Value() *big.Int { return m.CallMsg.Value }
func (m callmsg) Data() []byte { return m.CallMsg.Data }
+
+// filterBackend implements filters.Backend to support filtering for logs without
+// taking bloom-bits acceleration structures into account.
+type filterBackend struct {
+ db ethdb.Database
+ bc *core.BlockChain
+}
+
+func (fb *filterBackend) ChainDb() ethdb.Database { return fb.db }
+func (fb *filterBackend) EventMux() *event.TypeMux { panic("not supported") }
+
+func (fb *filterBackend) HeaderByNumber(ctx context.Context, block rpc.BlockNumber) (*types.Header, error) {
+ if block == rpc.LatestBlockNumber {
+ return fb.bc.CurrentHeader(), nil
+ }
+ return fb.bc.GetHeaderByNumber(uint64(block.Int64())), nil
+}
+
+func (fb *filterBackend) GetReceipts(ctx context.Context, hash common.Hash) (types.Receipts, error) {
+ return core.GetBlockReceipts(fb.db, hash, core.GetBlockNumber(fb.db, hash)), nil
+}
+
+func (fb *filterBackend) GetLogs(ctx context.Context, hash common.Hash) ([][]*types.Log, error) {
+ receipts := core.GetBlockReceipts(fb.db, hash, core.GetBlockNumber(fb.db, hash))
+ if receipts == nil {
+ return nil, nil
+ }
+ logs := make([][]*types.Log, len(receipts))
+ for i, receipt := range receipts {
+ logs[i] = receipt.Logs
+ }
+ return logs, nil
+}
+
+func (fb *filterBackend) SubscribeTxPreEvent(ch chan<- core.TxPreEvent) event.Subscription {
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ <-quit
+ return nil
+ })
+}
+func (fb *filterBackend) SubscribeChainEvent(ch chan<- core.ChainEvent) event.Subscription {
+ return fb.bc.SubscribeChainEvent(ch)
+}
+func (fb *filterBackend) SubscribeRemovedLogsEvent(ch chan<- core.RemovedLogsEvent) event.Subscription {
+ return fb.bc.SubscribeRemovedLogsEvent(ch)
+}
+func (fb *filterBackend) SubscribeLogsEvent(ch chan<- []*types.Log) event.Subscription {
+ return fb.bc.SubscribeLogsEvent(ch)
+}
+
+func (fb *filterBackend) BloomStatus() (uint64, uint64) { return 4096, 0 }
+func (fb *filterBackend) ServiceFilter(ctx context.Context, ms *bloombits.MatcherSession) {
+ panic("not supported")
+}
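
Note (editorial, not part of the commit): a sketch of the log-filtering surface the simulated backend gains in this diff. The key generation and funding values are illustrative; only `NewSimulatedBackend`, `FilterLogs` and `SubscribeFilterLogs` are taken from the code above, and the constructor signature matches the one shown in the diff.

```
package main

import (
	"context"
	"fmt"
	"log"
	"math/big"

	ethereum "github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/accounts/abi/bind/backends"
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	// Illustrative key and balance for the genesis allocation.
	key, _ := crypto.GenerateKey()
	addr := crypto.PubkeyToAddress(key.PublicKey)
	sim := backends.NewSimulatedBackend(core.GenesisAlloc{addr: {Balance: big.NewInt(1000000000)}})

	// An empty FilterQuery subscribes to every new log the backend produces.
	logs := make(chan types.Log, 16)
	sub, err := sim.SubscribeFilterLogs(context.Background(), ethereum.FilterQuery{}, logs)
	if err != nil {
		log.Fatal(err)
	}
	defer sub.Unsubscribe()

	// Deploying a contract and calling sim.Commit() here would push each
	// emitted log onto the channel, flattened one event at a time.
	select {
	case l := <-logs:
		fmt.Println("log from", l.Address.Hex())
	default:
		fmt.Println("no logs yet")
	}
}
```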
diff --git a/accounts/abi/bind/base.go b/accounts/abi/bind/base.go
index 2bd683f22..83ad1c8ae 100644
--- a/accounts/abi/bind/base.go
+++ b/accounts/abi/bind/base.go
@@ -27,6 +27,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/event"
)
// SignerFn is a signer function callback when a contract requires a method to
@@ -55,6 +56,22 @@ type TransactOpts struct {
Context context.Context // Network context to support cancellation and timeouts (nil = no timeout)
}
+// FilterOpts is the collection of options to fine tune filtering for events
+// within a bound contract.
+type FilterOpts struct {
+ Start uint64 // Start of the queried range
+ End *uint64 // End of the range (nil = latest)
+
+ Context context.Context // Network context to support cancellation and timeouts (nil = no timeout)
+}
+
+// WatchOpts is the collection of options to fine tune subscribing for events
+// within a bound contract.
+type WatchOpts struct {
+ Start *uint64 // Start of the queried range (nil = latest)
+ Context context.Context // Network context to support cancellation and timeouts (nil = no timeout)
+}
+
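Both option types use optional (pointer) fields to mean "latest". From a caller's perspective the intended usage looks roughly like this (illustrative values only):

	// Filter a fixed historical range: blocks 1 through 100 inclusive.
	end := uint64(100)
	filterOpts := &bind.FilterOpts{Start: 1, End: &end, Context: context.Background()}

	// Watch for new events only: a nil Start means "from the latest block".
	watchOpts := &bind.WatchOpts{Context: context.Background()}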
// BoundContract is the base wrapper object that reflects a contract on the
// Ethereum network. It contains a collection of methods that are used by the
// higher level contract bindings to operate.
@@ -63,16 +80,18 @@ type BoundContract struct {
abi abi.ABI // Reflect based ABI to access the correct Ethereum methods
caller ContractCaller // Read interface to interact with the blockchain
transactor ContractTransactor // Write interface to interact with the blockchain
+ filterer ContractFilterer // Event filtering to interact with the blockchain
}
// NewBoundContract creates a low level contract interface through which calls
// and transactions may be made through.
-func NewBoundContract(address common.Address, abi abi.ABI, caller ContractCaller, transactor ContractTransactor) *BoundContract {
+func NewBoundContract(address common.Address, abi abi.ABI, caller ContractCaller, transactor ContractTransactor, filterer ContractFilterer) *BoundContract {
return &BoundContract{
address: address,
abi: abi,
caller: caller,
transactor: transactor,
+ filterer: filterer,
}
}
@@ -80,7 +99,7 @@ func NewBoundContract(address common.Address, abi abi.ABI, caller ContractCaller
// deployment address with a Go wrapper.
func DeployContract(opts *TransactOpts, abi abi.ABI, bytecode []byte, backend ContractBackend, params ...interface{}) (common.Address, *types.Transaction, *BoundContract, error) {
// Otherwise try to deploy the contract
- c := NewBoundContract(common.Address{}, abi, backend, backend)
+ c := NewBoundContract(common.Address{}, abi, backend, backend, backend)
input, err := c.abi.Pack("", params...)
if err != nil {
@@ -225,6 +244,104 @@ func (c *BoundContract) transact(opts *TransactOpts, contract *common.Address, i
return signedTx, nil
}
+// FilterLogs filters contract logs for past blocks, returning the necessary
+// channels to construct a strongly typed bound iterator on top of them.
+func (c *BoundContract) FilterLogs(opts *FilterOpts, name string, query ...[]interface{}) (chan types.Log, event.Subscription, error) {
+ // Don't crash on a lazy user
+ if opts == nil {
+ opts = new(FilterOpts)
+ }
+ // Append the event selector to the query parameters and construct the topic set
+ query = append([][]interface{}{{c.abi.Events[name].Id()}}, query...)
+
+ topics, err := makeTopics(query...)
+ if err != nil {
+ return nil, nil, err
+ }
+ // Start the background filtering
+ logs := make(chan types.Log, 128)
+
+ config := ethereum.FilterQuery{
+ Addresses: []common.Address{c.address},
+ Topics: topics,
+ FromBlock: new(big.Int).SetUint64(opts.Start),
+ }
+ if opts.End != nil {
+ config.ToBlock = new(big.Int).SetUint64(*opts.End)
+ }
+ /* TODO(karalabe): Replace the rest of the method below with this when supported
+ sub, err := c.filterer.SubscribeFilterLogs(ensureContext(opts.Context), config, logs)
+ */
+ buff, err := c.filterer.FilterLogs(ensureContext(opts.Context), config)
+ if err != nil {
+ return nil, nil, err
+ }
+ sub, err := event.NewSubscription(func(quit <-chan struct{}) error {
+ for _, log := range buff {
+ select {
+ case logs <- log:
+ case <-quit:
+ return nil
+ }
+ }
+ return nil
+ }), nil
+
+ if err != nil {
+ return nil, nil, err
+ }
+ return logs, sub, nil
+}
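FilterLogs is the primitive that the generated Filter<Event> methods (see the template changes below) build their iterators on: it returns a pre-filled, buffered log channel plus a subscription whose Err channel fires once all buffered logs have been queued. A hedged sketch of using it directly, assuming a bound contract `c` whose ABI declares a `Transfer(address indexed from, ...)` event and a helper function returning an error:

	logs, sub, err := c.FilterLogs(&bind.FilterOpts{Start: 0}, "Transfer", []interface{}{fromAddress})
	if err != nil {
		return err
	}
	defer sub.Unsubscribe()

	for {
		select {
		case l := <-logs:
			fmt.Println("matched log in tx", l.TxHash.Hex())
		case <-sub.Err():
			// The producer has finished; drain whatever is still buffered,
			// mirroring the "done" handling of the generated iterators.
			for {
				select {
				case l := <-logs:
					fmt.Println("matched log in tx", l.TxHash.Hex())
				default:
					return nil
				}
			}
		}
	}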
+
+// WatchLogs subscribes to contract logs for future blocks, returning a
+// subscription object that can be used to tear down the watcher.
+func (c *BoundContract) WatchLogs(opts *WatchOpts, name string, query ...[]interface{}) (chan types.Log, event.Subscription, error) {
+ // Don't crash on a lazy user
+ if opts == nil {
+ opts = new(WatchOpts)
+ }
+ // Append the event selector to the query parameters and construct the topic set
+ query = append([][]interface{}{{c.abi.Events[name].Id()}}, query...)
+
+ topics, err := makeTopics(query...)
+ if err != nil {
+ return nil, nil, err
+ }
+ // Start the background filtering
+ logs := make(chan types.Log, 128)
+
+ config := ethereum.FilterQuery{
+ Addresses: []common.Address{c.address},
+ Topics: topics,
+ }
+ if opts.Start != nil {
+ config.FromBlock = new(big.Int).SetUint64(*opts.Start)
+ }
+ sub, err := c.filterer.SubscribeFilterLogs(ensureContext(opts.Context), config, logs)
+ if err != nil {
+ return nil, nil, err
+ }
+ return logs, sub, nil
+}
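WatchLogs is the streaming counterpart used by the generated Watch<Event> methods. A minimal sketch of consuming it directly (same assumptions as above; a nil WatchOpts starts at the latest block):

	logs, sub, err := c.WatchLogs(nil, "Transfer")
	if err != nil {
		return err
	}
	defer sub.Unsubscribe()

	for {
		select {
		case l := <-logs:
			fmt.Println("new log at block", l.BlockNumber)
		case err := <-sub.Err():
			return err
		}
	}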
+
+// UnpackLog unpacks a retrieved log into the provided output structure.
+func (c *BoundContract) UnpackLog(out interface{}, event string, log types.Log) error {
+ if len(log.Data) > 0 {
+ if err := c.abi.Unpack(out, event, log.Data); err != nil {
+ return err
+ }
+ }
+ var indexed abi.Arguments
+ for _, arg := range c.abi.Events[event].Inputs {
+ if arg.Indexed {
+ indexed = append(indexed, arg)
+ }
+ }
+ return parseTopics(out, indexed, log.Topics[1:])
+}
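UnpackLog decodes the two halves of a log separately: the ABI-encoded data blob fills the non-indexed fields, while parseTopics (in the new topics.go below) reconstructs the indexed fields from log.Topics[1:], topic 0 being the event signature. A hedged sketch against the `mixed` event of the EventChecker ABI used in the tests below (`c` and `rawLog` are assumed):

	// mixed(address indexed addr, int256 num)
	type mixedEvent struct {
		Addr common.Address // reconstructed from log.Topics[1]
		Num  *big.Int       // decoded from log.Data
	}

	var ev mixedEvent
	if err := c.UnpackLog(&ev, "mixed", rawLog); err != nil {
		return err
	}
	fmt.Println(ev.Addr.Hex(), ev.Num)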
+
+// ensureContext is a helper method to ensure a context is not nil, even if the
+// user passed in a nil one.
func ensureContext(ctx context.Context) context.Context {
if ctx == nil {
return context.TODO()
diff --git a/accounts/abi/bind/bind.go b/accounts/abi/bind/bind.go
index 8175e3cb9..411177057 100644
--- a/accounts/abi/bind/bind.go
+++ b/accounts/abi/bind/bind.go
@@ -63,10 +63,11 @@ func Bind(types []string, abis []string, bytecodes []string, pkg string, lang La
return r
}, abis[i])
- // Extract the call and transact methods, and sort them alphabetically
+		// Extract the call and transact methods, and the events, sorting them alphabetically
var (
calls = make(map[string]*tmplMethod)
transacts = make(map[string]*tmplMethod)
+ events = make(map[string]*tmplEvent)
)
for _, original := range evmABI.Methods {
// Normalize the method for capital cases and non-anonymous inputs/outputs
@@ -89,11 +90,33 @@ func Bind(types []string, abis []string, bytecodes []string, pkg string, lang La
}
// Append the methods to the call or transact lists
if original.Const {
- calls[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original)}
+ calls[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original.Outputs)}
} else {
- transacts[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original)}
+ transacts[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original.Outputs)}
}
}
+ for _, original := range evmABI.Events {
+ // Skip anonymous events as they don't support explicit filtering
+ if original.Anonymous {
+ continue
+ }
+			// Normalize the event for capital cases and non-anonymous inputs
+ normalized := original
+ normalized.Name = methodNormalizer[lang](original.Name)
+
+ normalized.Inputs = make([]abi.Argument, len(original.Inputs))
+ copy(normalized.Inputs, original.Inputs)
+ for j, input := range normalized.Inputs {
+				// Indexed fields become filter arguments, so give unnamed ones a name
+ if input.Indexed {
+ if input.Name == "" {
+ normalized.Inputs[j].Name = fmt.Sprintf("arg%d", j)
+ }
+ }
+ }
+ // Append the event to the accumulator list
+ events[original.Name] = &tmplEvent{Original: original, Normalized: normalized}
+ }
contracts[types[i]] = &tmplContract{
Type: capitalise(types[i]),
InputABI: strings.Replace(strippedABI, "\"", "\\\"", -1),
@@ -101,6 +124,7 @@ func Bind(types []string, abis []string, bytecodes []string, pkg string, lang La
Constructor: evmABI.Constructor,
Calls: calls,
Transacts: transacts,
+ Events: events,
}
}
// Generate the contract template data content and render it
@@ -111,10 +135,11 @@ func Bind(types []string, abis []string, bytecodes []string, pkg string, lang La
buffer := new(bytes.Buffer)
funcs := map[string]interface{}{
- "bindtype": bindType[lang],
- "namedtype": namedType[lang],
- "capitalise": capitalise,
- "decapitalise": decapitalise,
+ "bindtype": bindType[lang],
+ "bindtopictype": bindTopicType[lang],
+ "namedtype": namedType[lang],
+ "capitalise": capitalise,
+ "decapitalise": decapitalise,
}
tmpl := template.Must(template.New("").Funcs(funcs).Parse(tmplSource[lang]))
if err := tmpl.Execute(buffer, data); err != nil {
@@ -133,125 +158,181 @@ func Bind(types []string, abis []string, bytecodes []string, pkg string, lang La
}
// bindType is a set of type binders that convert Solidity types to some supported
-// programming language.
+// programming language types.
var bindType = map[Lang]func(kind abi.Type) string{
LangGo: bindTypeGo,
LangJava: bindTypeJava,
}
+// wrapArray is a helper for the binding generators. It reads the unmatched
+// characters after the inner type match (the inner type is a prefix of the full
+// type declaration), looks for valid arrays (possibly dynamic ones) wrapping the
+// inner type, and returns the inner mapping together with the array sizes.
+//
+// Returned array sizes are in the same order as in the Solidity signature, inner
+// array size first. A size may also be "", indicating a dynamic array.
+func wrapArray(stringKind string, innerLen int, innerMapping string) (string, []string) {
+ remainder := stringKind[innerLen:]
+ //find all the sizes
+ matches := regexp.MustCompile(`\[(\d*)\]`).FindAllStringSubmatch(remainder, -1)
+ parts := make([]string, 0, len(matches))
+ for _, match := range matches {
+ //get group 1 from the regex match
+ parts = append(parts, match[1])
+ }
+ return innerMapping, parts
+}
+
+// arrayBindingGo translates the array sizes to a Go declaration of a (nested)
+// array of the inner type. It simply returns the inner type if arraySizes is empty.
+func arrayBindingGo(inner string, arraySizes []string) string {
+ out := ""
+ //prepend all array sizes, from outer (end arraySizes) to inner (start arraySizes)
+ for i := len(arraySizes) - 1; i >= 0; i-- {
+ out += "[" + arraySizes[i] + "]"
+ }
+ out += inner
+ return out
+}
+
// bindTypeGo converts a Solidity type to a Go one. Since there is no clear mapping
// from all Solidity types to Go ones (e.g. uint17), those that cannot be exactly
// mapped will use an upscaled type (e.g. *big.Int).
func bindTypeGo(kind abi.Type) string {
stringKind := kind.String()
+ innerLen, innerMapping := bindUnnestedTypeGo(stringKind)
+ return arrayBindingGo(wrapArray(stringKind, innerLen, innerMapping))
+}
+
+// bindUnnestedTypeGo is the inner function of bindTypeGo: it finds the inner type
+// of stringKind (or the type itself if it is not an array or slice).
+// The length of the matched part is returned along with the translated type.
+func bindUnnestedTypeGo(stringKind string) (int, string) {
switch {
case strings.HasPrefix(stringKind, "address"):
- parts := regexp.MustCompile(`address(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 2 {
- return stringKind
- }
- return fmt.Sprintf("%scommon.Address", parts[1])
+ return len("address"), "common.Address"
case strings.HasPrefix(stringKind, "bytes"):
- parts := regexp.MustCompile(`bytes([0-9]*)(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 3 {
- return stringKind
- }
- return fmt.Sprintf("%s[%s]byte", parts[2], parts[1])
+ parts := regexp.MustCompile(`bytes([0-9]*)`).FindStringSubmatch(stringKind)
+ return len(parts[0]), fmt.Sprintf("[%s]byte", parts[1])
case strings.HasPrefix(stringKind, "int") || strings.HasPrefix(stringKind, "uint"):
- parts := regexp.MustCompile(`(u)?int([0-9]*)(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 4 {
- return stringKind
- }
+ parts := regexp.MustCompile(`(u)?int([0-9]*)`).FindStringSubmatch(stringKind)
switch parts[2] {
case "8", "16", "32", "64":
- return fmt.Sprintf("%s%sint%s", parts[3], parts[1], parts[2])
+ return len(parts[0]), fmt.Sprintf("%sint%s", parts[1], parts[2])
}
- return fmt.Sprintf("%s*big.Int", parts[3])
+ return len(parts[0]), "*big.Int"
- case strings.HasPrefix(stringKind, "bool") || strings.HasPrefix(stringKind, "string"):
- parts := regexp.MustCompile(`([a-z]+)(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 3 {
- return stringKind
- }
- return fmt.Sprintf("%s%s", parts[2], parts[1])
+ case strings.HasPrefix(stringKind, "bool"):
+ return len("bool"), "bool"
+
+ case strings.HasPrefix(stringKind, "string"):
+ return len("string"), "string"
default:
- return stringKind
+ return len(stringKind), stringKind
}
}
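Taken together, bindUnnestedTypeGo translates the innermost element type and arrayBindingGo re-applies the dimensions with the outermost Solidity dimension first. A few hand-checked spot tests in the style of an in-package test (a sketch, not part of this diff; it assumes abi.NewType still accepts a single type string at this revision, and the cases match the DeeplyNestedArray fixture further down):

	for in, want := range map[string]string{
		"address":         "common.Address",
		"uint256":         "*big.Int",
		"bytes32[2]":      "[2][32]byte",
		"string[]":        "[]string",
		"uint64[3][4][5]": "[5][4][3]uint64", // outermost Solidity dimension becomes the first Go dimension
	} {
		kind, err := abi.NewType(in)
		if err != nil {
			t.Fatalf("failed to parse type %q: %v", in, err)
		}
		if got := bindTypeGo(kind); got != want {
			t.Errorf("bindTypeGo(%q) = %q, want %q", in, got, want)
		}
	}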
+// arrayBindingJava translates the array sizes to a Java declaration of a (nested)
+// array of the inner type. It simply returns the inner type if arraySizes is empty.
+func arrayBindingJava(inner string, arraySizes []string) string {
+ // Java array type declarations do not include the length.
+ return inner + strings.Repeat("[]", len(arraySizes))
+}
+
// bindTypeJava converts a Solidity type to a Java one. Since there is no clear mapping
// from all Solidity types to Java ones (e.g. uint17), those that cannot be exactly
// mapped will use an upscaled type (e.g. BigDecimal).
func bindTypeJava(kind abi.Type) string {
stringKind := kind.String()
+ innerLen, innerMapping := bindUnnestedTypeJava(stringKind)
+ return arrayBindingJava(wrapArray(stringKind, innerLen, innerMapping))
+}
+
+// bindUnnestedTypeJava is the inner function of bindTypeJava: it finds the inner
+// type of stringKind (or the type itself if it is not an array or slice).
+// The length of the matched part is returned along with the translated type.
+func bindUnnestedTypeJava(stringKind string) (int, string) {
switch {
case strings.HasPrefix(stringKind, "address"):
parts := regexp.MustCompile(`address(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
if len(parts) != 2 {
- return stringKind
+ return len(stringKind), stringKind
}
if parts[1] == "" {
- return fmt.Sprintf("Address")
+ return len("address"), "Address"
}
- return fmt.Sprintf("Addresses")
+ return len(parts[0]), "Addresses"
case strings.HasPrefix(stringKind, "bytes"):
- parts := regexp.MustCompile(`bytes([0-9]*)(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 3 {
- return stringKind
- }
- if parts[2] != "" {
- return "byte[][]"
+ parts := regexp.MustCompile(`bytes([0-9]*)`).FindStringSubmatch(stringKind)
+ if len(parts) != 2 {
+ return len(stringKind), stringKind
}
- return "byte[]"
+ return len(parts[0]), "byte[]"
case strings.HasPrefix(stringKind, "int") || strings.HasPrefix(stringKind, "uint"):
- parts := regexp.MustCompile(`(u)?int([0-9]*)(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 4 {
- return stringKind
- }
- switch parts[2] {
- case "8", "16", "32", "64":
- if parts[1] == "" {
- if parts[3] == "" {
- return fmt.Sprintf("int%s", parts[2])
- }
- return fmt.Sprintf("int%s[]", parts[2])
- }
+		// Note that uint and int (without digits) are also matched;
+		// these are 256 bits wide and translate to BigInt (the default).
+ parts := regexp.MustCompile(`(u)?int([0-9]*)`).FindStringSubmatch(stringKind)
+ if len(parts) != 3 {
+ return len(stringKind), stringKind
}
- if parts[3] == "" {
- return fmt.Sprintf("BigInt")
+
+ namedSize := map[string]string{
+ "8": "byte",
+ "16": "short",
+ "32": "int",
+ "64": "long",
+ }[parts[2]]
+
+ //default to BigInt
+ if namedSize == "" {
+ namedSize = "BigInt"
}
- return fmt.Sprintf("BigInts")
+ return len(parts[0]), namedSize
case strings.HasPrefix(stringKind, "bool"):
- parts := regexp.MustCompile(`bool(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 2 {
- return stringKind
- }
- if parts[1] == "" {
- return fmt.Sprintf("bool")
- }
- return fmt.Sprintf("bool[]")
+ return len("bool"), "boolean"
case strings.HasPrefix(stringKind, "string"):
- parts := regexp.MustCompile(`string(\[[0-9]*\])?`).FindStringSubmatch(stringKind)
- if len(parts) != 2 {
- return stringKind
- }
- if parts[1] == "" {
- return fmt.Sprintf("String")
- }
- return fmt.Sprintf("String[]")
+ return len("string"), "String"
default:
- return stringKind
+ return len(stringKind), stringKind
+ }
+}
+
+// bindTopicType is a set of type binders that convert Solidity types to some
+// supported programming language topic types.
+var bindTopicType = map[Lang]func(kind abi.Type) string{
+ LangGo: bindTopicTypeGo,
+ LangJava: bindTopicTypeJava,
+}
+
+// bindTopicTypeGo converts a Solidity topic type to a Go one. It is almost the
+// same functionality as for simple types, but dynamic types get converted to hashes.
+func bindTopicTypeGo(kind abi.Type) string {
+ bound := bindTypeGo(kind)
+ if bound == "string" || bound == "[]byte" {
+ bound = "common.Hash"
}
+ return bound
+}
+
+// bindTopicTypeJava converts a Solidity topic type to a Java one. It is almost the
+// same functionality as for simple types, but dynamic types get converted to hashes.
+func bindTopicTypeJava(kind abi.Type) string {
+ bound := bindTypeJava(kind)
+ if bound == "String" || bound == "Bytes" {
+ bound = "Hash"
+ }
+ return bound
}
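Because only the Keccak hash of a dynamic indexed value is stored in a topic, the topic binders degrade string and bytes to a hash type. For the Eventer contract's DynamicEvent used in the tests below, the template therefore generates a struct along these lines (a sketch, not verbatim generator output):

	type EventerDynamicEvent struct {
		IndexedString    common.Hash // keccak256 of the original string
		IndexedBytes     common.Hash // keccak256 of the original bytes
		NonIndexedString string      // recovered verbatim from the data section
		NonIndexedBytes  []byte      // recovered verbatim from the data section
		Raw              types.Log   // blockchain specific contextual infos
	}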
// namedType is a set of functions that transform language specific types to
@@ -273,11 +354,13 @@ func namedTypeJava(javaKind string, solKind abi.Type) string {
return "String"
case "string[]":
return "Strings"
- case "bool":
+ case "boolean":
return "Bool"
- case "bool[]":
+ case "boolean[]":
return "Bools"
- case "BigInt":
+ case "BigInt[]":
+ return "BigInts"
+ default:
parts := regexp.MustCompile(`(u)?int([0-9]*)(\[[0-9]*\])?`).FindStringSubmatch(solKind.String())
if len(parts) != 4 {
return javaKind
@@ -292,8 +375,6 @@ func namedTypeJava(javaKind string, solKind abi.Type) string {
default:
return javaKind
}
- default:
- return javaKind
}
}
@@ -304,8 +385,7 @@ var methodNormalizer = map[Lang]func(string) string{
LangJava: decapitalise,
}
-// capitalise makes the first character of a string upper case, also removing any
-// prefixing underscores from the variable names.
+// capitalise makes a camel-case string which starts with an upper case character.
func capitalise(input string) string {
for len(input) > 0 && input[0] == '_' {
input = input[1:]
@@ -313,22 +393,52 @@ func capitalise(input string) string {
if len(input) == 0 {
return ""
}
- return strings.ToUpper(input[:1]) + input[1:]
+ return toCamelCase(strings.ToUpper(input[:1]) + input[1:])
}
-// decapitalise makes the first character of a string lower case.
+// decapitalise makes a camel-case string which starts with a lower case character.
func decapitalise(input string) string {
- return strings.ToLower(input[:1]) + input[1:]
+ for len(input) > 0 && input[0] == '_' {
+ input = input[1:]
+ }
+ if len(input) == 0 {
+ return ""
+ }
+ return toCamelCase(strings.ToLower(input[:1]) + input[1:])
+}
+
+// toCamelCase converts an underscore-separated string to a camel-case string
+func toCamelCase(input string) string {
+ toupper := false
+
+ result := ""
+ for k, v := range input {
+ switch {
+ case k == 0:
+ result = strings.ToUpper(string(input[0]))
+
+ case toupper:
+ result += strings.ToUpper(string(v))
+ toupper = false
+
+ case v == '_':
+ toupper = true
+
+ default:
+ result += string(v)
+ }
+ }
+ return result
}
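A few hand-traced expectations for the new normalizer, matching the Underscorer and DeeplyNestedArray fixtures below (a test-style fragment for illustration, not part of this diff):

	for in, want := range map[string]string{
		"_under_scored_func": "UnderScoredFunc",
		"retrieveDeepArray":  "RetrieveDeepArray",
		"addr":               "Addr",
	} {
		if got := capitalise(in); got != want {
			t.Errorf("capitalise(%q) = %q, want %q", in, got, want)
		}
	}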
-// structured checks whether a method has enough information to return a proper
-// Go struct or if flat returns are needed.
-func structured(method abi.Method) bool {
- if len(method.Outputs) < 2 {
+// structured checks whether a list of ABI data types has enough information to
+// operate through a proper Go struct or if flat returns are needed.
+func structured(args abi.Arguments) bool {
+ if len(args) < 2 {
return false
}
exists := make(map[string]bool)
- for _, out := range method.Outputs {
+ for _, out := range args {
// If the name is anonymous, we can't organize into a struct
if out.Name == "" {
return false
diff --git a/accounts/abi/bind/bind_test.go b/accounts/abi/bind/bind_test.go
index b56477e0c..2a5a88648 100644
--- a/accounts/abi/bind/bind_test.go
+++ b/accounts/abi/bind/bind_test.go
@@ -148,6 +148,64 @@ var bindTests = []struct {
fmt.Println(str1, str2, res.Str1, res.Str2, err)
}`,
},
+ // Tests that named, anonymous and indexed events are handled correctly
+ {
+ `EventChecker`, ``, ``,
+ `
+ [
+ {"type":"event","name":"empty","inputs":[]},
+ {"type":"event","name":"indexed","inputs":[{"name":"addr","type":"address","indexed":true},{"name":"num","type":"int256","indexed":true}]},
+ {"type":"event","name":"mixed","inputs":[{"name":"addr","type":"address","indexed":true},{"name":"num","type":"int256"}]},
+ {"type":"event","name":"anonymous","anonymous":true,"inputs":[]},
+ {"type":"event","name":"dynamic","inputs":[{"name":"idxStr","type":"string","indexed":true},{"name":"idxDat","type":"bytes","indexed":true},{"name":"str","type":"string"},{"name":"dat","type":"bytes"}]}
+ ]
+ `,
+ `if e, err := NewEventChecker(common.Address{}, nil); e == nil || err != nil {
+ t.Fatalf("binding (%v) nil or error (%v) not nil", e, nil)
+ } else if false { // Don't run, just compile and test types
+ var (
+ err error
+ res bool
+ str string
+ dat []byte
+ hash common.Hash
+ )
+ _, err = e.FilterEmpty(nil)
+ _, err = e.FilterIndexed(nil, []common.Address{}, []*big.Int{})
+
+ mit, err := e.FilterMixed(nil, []common.Address{})
+
+ res = mit.Next() // Make sure the iterator has a Next method
+ err = mit.Error() // Make sure the iterator has an Error method
+ err = mit.Close() // Make sure the iterator has a Close method
+
+ fmt.Println(mit.Event.Raw.BlockHash) // Make sure the raw log is contained within the results
+ fmt.Println(mit.Event.Num) // Make sure the unpacked non-indexed fields are present
+ fmt.Println(mit.Event.Addr) // Make sure the reconstructed indexed fields are present
+
+ dit, err := e.FilterDynamic(nil, []string{}, [][]byte{})
+
+ str = dit.Event.Str // Make sure non-indexed strings retain their type
+ dat = dit.Event.Dat // Make sure non-indexed bytes retain their type
+ hash = dit.Event.IdxStr // Make sure indexed strings turn into hashes
+ hash = dit.Event.IdxDat // Make sure indexed bytes turn into hashes
+
+ sink := make(chan *EventCheckerMixed)
+ sub, err := e.WatchMixed(nil, sink, []common.Address{})
+ defer sub.Unsubscribe()
+
+ event := <-sink
+ fmt.Println(event.Raw.BlockHash) // Make sure the raw log is contained within the results
+ fmt.Println(event.Num) // Make sure the unpacked non-indexed fields are present
+ fmt.Println(event.Addr) // Make sure the reconstructed indexed fields are present
+
+ fmt.Println(res, str, dat, hash, err)
+ }
+ // Run a tiny reflection test to ensure disallowed methods don't appear
+ if _, ok := reflect.TypeOf(&EventChecker{}).MethodByName("FilterAnonymous"); ok {
+ t.Errorf("binding has disallowed method (FilterAnonymous)")
+ }`,
+ },
// Test that contract interactions (deploy, transact and call) generate working code
{
`Interactor`,
@@ -367,7 +425,7 @@ var bindTests = []struct {
}
`,
},
- // Tests that gas estimation works for contracts with weird gas mechanics too.
+ // Tests that gas estimation works for contracts with weird gas mechanics too.
{
`FunkyGasPattern`,
`
@@ -448,6 +506,7 @@ var bindTests = []struct {
}
`,
},
+ // Tests that methods and returns with underscores inside work correctly.
{
`Underscorer`,
`
@@ -460,7 +519,7 @@ var bindTests = []struct {
}
function LowerUpperCollision() constant returns (int _res, int Res) {
return (1, 2);
- }
+ }
function UpperLowerCollision() constant returns (int _Res, int res) {
return (1, 2);
}
@@ -473,9 +532,12 @@ var bindTests = []struct {
function AllPurelyUnderscoredOutput() constant returns (int _, int __) {
return (1, 2);
}
+ function _under_scored_func() constant returns (int _int) {
+ return 0;
+ }
}
- `, `6060604052341561000f57600080fd5b6103498061001e6000396000f300606060405260043610610083576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806303a592131461008857806367e6633d146100b85780639df484851461014d578063af7486ab1461017d578063b564b34d146101ad578063e02ab24d146101dd578063e409ca451461020d575b600080fd5b341561009357600080fd5b61009b61023d565b604051808381526020018281526020019250505060405180910390f35b34156100c357600080fd5b6100cb610252565b6040518083815260200180602001828103825283818151815260200191508051906020019080838360005b838110156101115780820151818401526020810190506100f6565b50505050905090810190601f16801561013e5780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b341561015857600080fd5b6101606102a0565b604051808381526020018281526020019250505060405180910390f35b341561018857600080fd5b6101906102b5565b604051808381526020018281526020019250505060405180910390f35b34156101b857600080fd5b6101c06102ca565b604051808381526020018281526020019250505060405180910390f35b34156101e857600080fd5b6101f06102df565b604051808381526020018281526020019250505060405180910390f35b341561021857600080fd5b6102206102f4565b604051808381526020018281526020019250505060405180910390f35b60008060016002819150809050915091509091565b600061025c610309565b61013a8090506040805190810160405280600281526020017f7069000000000000000000000000000000000000000000000000000000000000815250915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b6020604051908101604052806000815250905600a165627a7a72305820c11dcfa136fc7d182ee4d34f0b12d988496228f7e2d02d2b5376d996ca1743d00029`,
- `[{"constant":true,"inputs":[],"name":"LowerUpperCollision","outputs":[{"name":"_res","type":"int256"},{"name":"Res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UnderscoredOutput","outputs":[{"name":"_int","type":"int256"},{"name":"_string","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"PurelyUnderscoredOutput","outputs":[{"name":"_","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UpperLowerCollision","outputs":[{"name":"_Res","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"AllPurelyUnderscoredOutput","outputs":[{"name":"_","type":"int256"},{"name":"__","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UpperUpperCollision","outputs":[{"name":"_Res","type":"int256"},{"name":"Res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"LowerLowerCollision","outputs":[{"name":"_res","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"}]`,
+ `, `6060604052341561000f57600080fd5b6103858061001e6000396000f30060606040526004361061008e576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806303a592131461009357806346546dbe146100c357806367e6633d146100ec5780639df4848514610181578063af7486ab146101b1578063b564b34d146101e1578063e02ab24d14610211578063e409ca4514610241575b600080fd5b341561009e57600080fd5b6100a6610271565b604051808381526020018281526020019250505060405180910390f35b34156100ce57600080fd5b6100d6610286565b6040518082815260200191505060405180910390f35b34156100f757600080fd5b6100ff61028e565b6040518083815260200180602001828103825283818151815260200191508051906020019080838360005b8381101561014557808201518184015260208101905061012a565b50505050905090810190601f1680156101725780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b341561018c57600080fd5b6101946102dc565b604051808381526020018281526020019250505060405180910390f35b34156101bc57600080fd5b6101c46102f1565b604051808381526020018281526020019250505060405180910390f35b34156101ec57600080fd5b6101f4610306565b604051808381526020018281526020019250505060405180910390f35b341561021c57600080fd5b61022461031b565b604051808381526020018281526020019250505060405180910390f35b341561024c57600080fd5b610254610330565b604051808381526020018281526020019250505060405180910390f35b60008060016002819150809050915091509091565b600080905090565b6000610298610345565b61013a8090506040805190810160405280600281526020017f7069000000000000000000000000000000000000000000000000000000000000815250915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b60008060016002819150809050915091509091565b6020604051908101604052806000815250905600a165627a7a72305820d1a53d9de9d1e3d55cb3dc591900b63c4f1ded79114f7b79b332684840e186a40029`,
+ `[{"constant":true,"inputs":[],"name":"LowerUpperCollision","outputs":[{"name":"_res","type":"int256"},{"name":"Res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"_under_scored_func","outputs":[{"name":"_int","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UnderscoredOutput","outputs":[{"name":"_int","type":"int256"},{"name":"_string","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"PurelyUnderscoredOutput","outputs":[{"name":"_","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UpperLowerCollision","outputs":[{"name":"_Res","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"AllPurelyUnderscoredOutput","outputs":[{"name":"_","type":"int256"},{"name":"__","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"UpperUpperCollision","outputs":[{"name":"_Res","type":"int256"},{"name":"Res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"LowerLowerCollision","outputs":[{"name":"_res","type":"int256"},{"name":"res","type":"int256"}],"payable":false,"stateMutability":"view","type":"function"}]`,
`
// Generate a new random account and a funded simulator
key, _ := crypto.GenerateKey()
@@ -504,10 +566,249 @@ var bindTests = []struct {
a, b, _ = underscorer.UpperUpperCollision(nil)
a, b, _ = underscorer.PurelyUnderscoredOutput(nil)
a, b, _ = underscorer.AllPurelyUnderscoredOutput(nil)
+ a, _ = underscorer.UnderScoredFunc(nil)
fmt.Println(a, b, err)
`,
},
+ // Tests that logs can be successfully filtered and decoded.
+ {
+ `Eventer`,
+ `
+ contract Eventer {
+ event SimpleEvent (
+ address indexed Addr,
+ bytes32 indexed Id,
+ bool indexed Flag,
+ uint Value
+ );
+ function raiseSimpleEvent(address addr, bytes32 id, bool flag, uint value) {
+ SimpleEvent(addr, id, flag, value);
+ }
+
+ event NodataEvent (
+ uint indexed Number,
+ int16 indexed Short,
+ uint32 indexed Long
+ );
+ function raiseNodataEvent(uint number, int16 short, uint32 long) {
+ NodataEvent(number, short, long);
+ }
+
+ event DynamicEvent (
+ string indexed IndexedString,
+ bytes indexed IndexedBytes,
+ string NonIndexedString,
+ bytes NonIndexedBytes
+ );
+ function raiseDynamicEvent(string str, bytes blob) {
+ DynamicEvent(str, blob, str, blob);
+ }
+ }
+ `,
+ `6060604052341561000f57600080fd5b61042c8061001e6000396000f300606060405260043610610057576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063528300ff1461005c578063630c31e2146100fc578063c7d116dd14610156575b600080fd5b341561006757600080fd5b6100fa600480803590602001908201803590602001908080601f0160208091040260200160405190810160405280939291908181526020018383808284378201915050505050509190803590602001908201803590602001908080601f01602080910402602001604051908101604052809392919081815260200183838082843782019150505050505091905050610194565b005b341561010757600080fd5b610154600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091908035600019169060200190919080351515906020019091908035906020019091905050610367565b005b341561016157600080fd5b610192600480803590602001909190803560010b90602001909190803563ffffffff169060200190919050506103c3565b005b806040518082805190602001908083835b6020831015156101ca57805182526020820191506020810190506020830392506101a5565b6001836020036101000a0380198251168184511680821785525050505050509050019150506040518091039020826040518082805190602001908083835b60208310151561022d5780518252602082019150602081019050602083039250610208565b6001836020036101000a03801982511681845116808217855250505050505090500191505060405180910390207f3281fd4f5e152dd3385df49104a3f633706e21c9e80672e88d3bcddf33101f008484604051808060200180602001838103835285818151815260200191508051906020019080838360005b838110156102c15780820151818401526020810190506102a6565b50505050905090810190601f1680156102ee5780820380516001836020036101000a031916815260200191505b50838103825284818151815260200191508051906020019080838360005b8381101561032757808201518184015260208101905061030c565b50505050905090810190601f1680156103545780820380516001836020036101000a031916815260200191505b5094505050505060405180910390a35050565b81151583600019168573ffffffffffffffffffffffffffffffffffffffff167f1f097de4289df643bd9c11011cc61367aa12983405c021056e706eb5ba1250c8846040518082815260200191505060405180910390a450505050565b8063ffffffff168260010b847f3ca7f3a77e5e6e15e781850bc82e32adfa378a2a609370db24b4d0fae10da2c960405160405180910390a45050505600a165627a7a72305820d1f8a8bbddbc5bb29f285891d6ae1eef8420c52afdc05e1573f6114d8e1714710029`,
+ `[{"constant":false,"inputs":[{"name":"str","type":"string"},{"name":"blob","type":"bytes"}],"name":"raiseDynamicEvent","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"addr","type":"address"},{"name":"id","type":"bytes32"},{"name":"flag","type":"bool"},{"name":"value","type":"uint256"}],"name":"raiseSimpleEvent","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"number","type":"uint256"},{"name":"short","type":"int16"},{"name":"long","type":"uint32"}],"name":"raiseNodataEvent","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"Addr","type":"address"},{"indexed":true,"name":"Id","type":"bytes32"},{"indexed":true,"name":"Flag","type":"bool"},{"indexed":false,"name":"Value","type":"uint256"}],"name":"SimpleEvent","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"Number","type":"uint256"},{"indexed":true,"name":"Short","type":"int16"},{"indexed":true,"name":"Long","type":"uint32"}],"name":"NodataEvent","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"IndexedString","type":"string"},{"indexed":true,"name":"IndexedBytes","type":"bytes"},{"indexed":false,"name":"NonIndexedString","type":"string"},{"indexed":false,"name":"NonIndexedBytes","type":"bytes"}],"name":"DynamicEvent","type":"event"}]`,
+ `
+ // Generate a new random account and a funded simulator
+ key, _ := crypto.GenerateKey()
+ auth := bind.NewKeyedTransactor(key)
+ sim := backends.NewSimulatedBackend(core.GenesisAlloc{auth.From: {Balance: big.NewInt(10000000000)}})
+
+ // Deploy an eventer contract
+ _, _, eventer, err := DeployEventer(auth, sim)
+ if err != nil {
+ t.Fatalf("Failed to deploy eventer contract: %v", err)
+ }
+ sim.Commit()
+
+ // Inject a few events into the contract, gradually more in each block
+ for i := 1; i <= 3; i++ {
+ for j := 1; j <= i; j++ {
+ if _, err := eventer.RaiseSimpleEvent(auth, common.Address{byte(j)}, [32]byte{byte(j)}, true, big.NewInt(int64(10*i+j))); err != nil {
+ t.Fatalf("block %d, event %d: raise failed: %v", i, j, err)
+ }
+ }
+ sim.Commit()
+ }
+ // Test filtering for certain events and ensure they can be found
+ sit, err := eventer.FilterSimpleEvent(nil, []common.Address{common.Address{1}, common.Address{3}}, [][32]byte{{byte(1)}, {byte(2)}, {byte(3)}}, []bool{true})
+ if err != nil {
+ t.Fatalf("failed to filter for simple events: %v", err)
+ }
+ defer sit.Close()
+
+ sit.Next()
+ if sit.Event.Value.Uint64() != 11 || !sit.Event.Flag {
+ t.Errorf("simple log content mismatch: have %v, want {11, true}", sit.Event)
+ }
+ sit.Next()
+ if sit.Event.Value.Uint64() != 21 || !sit.Event.Flag {
+ t.Errorf("simple log content mismatch: have %v, want {21, true}", sit.Event)
+ }
+ sit.Next()
+ if sit.Event.Value.Uint64() != 31 || !sit.Event.Flag {
+ t.Errorf("simple log content mismatch: have %v, want {31, true}", sit.Event)
+ }
+ sit.Next()
+ if sit.Event.Value.Uint64() != 33 || !sit.Event.Flag {
+ t.Errorf("simple log content mismatch: have %v, want {33, true}", sit.Event)
+ }
+
+ if sit.Next() {
+ t.Errorf("unexpected simple event found: %+v", sit.Event)
+ }
+ if err = sit.Error(); err != nil {
+ t.Fatalf("simple event iteration failed: %v", err)
+ }
+ // Test raising and filtering for an event with no data component
+ if _, err := eventer.RaiseNodataEvent(auth, big.NewInt(314), 141, 271); err != nil {
+ t.Fatalf("failed to raise nodata event: %v", err)
+ }
+ sim.Commit()
+
+ nit, err := eventer.FilterNodataEvent(nil, []*big.Int{big.NewInt(314)}, []int16{140, 141, 142}, []uint32{271})
+ if err != nil {
+ t.Fatalf("failed to filter for nodata events: %v", err)
+ }
+ defer nit.Close()
+
+ if !nit.Next() {
+ t.Fatalf("nodata log not found: %v", nit.Error())
+ }
+ if nit.Event.Number.Uint64() != 314 {
+ t.Errorf("nodata log content mismatch: have %v, want 314", nit.Event.Number)
+ }
+ if nit.Next() {
+ t.Errorf("unexpected nodata event found: %+v", nit.Event)
+ }
+ if err = nit.Error(); err != nil {
+ t.Fatalf("nodata event iteration failed: %v", err)
+ }
+ // Test raising and filtering for events with dynamic indexed components
+ if _, err := eventer.RaiseDynamicEvent(auth, "Hello", []byte("World")); err != nil {
+ t.Fatalf("failed to raise dynamic event: %v", err)
+ }
+ sim.Commit()
+
+ dit, err := eventer.FilterDynamicEvent(nil, []string{"Hi", "Hello", "Bye"}, [][]byte{[]byte("World")})
+ if err != nil {
+ t.Fatalf("failed to filter for dynamic events: %v", err)
+ }
+ defer dit.Close()
+
+ if !dit.Next() {
+ t.Fatalf("dynamic log not found: %v", dit.Error())
+ }
+ if dit.Event.NonIndexedString != "Hello" || string(dit.Event.NonIndexedBytes) != "World" || dit.Event.IndexedString != common.HexToHash("0x06b3dfaec148fb1bb2b066f10ec285e7c9bf402ab32aa78a5d38e34566810cd2") || dit.Event.IndexedBytes != common.HexToHash("0xf2208c967df089f60420785795c0a9ba8896b0f6f1867fa7f1f12ad6f79c1a18") {
+ t.Errorf("dynamic log content mismatch: have %v, want {'0x06b3dfaec148fb1bb2b066f10ec285e7c9bf402ab32aa78a5d38e34566810cd2, '0xf2208c967df089f60420785795c0a9ba8896b0f6f1867fa7f1f12ad6f79c1a18', 'Hello', 'World'}", dit.Event)
+ }
+ if dit.Next() {
+ t.Errorf("unexpected dynamic event found: %+v", dit.Event)
+ }
+ if err = dit.Error(); err != nil {
+ t.Fatalf("dynamic event iteration failed: %v", err)
+ }
+ // Test subscribing to an event and raising it afterwards
+ ch := make(chan *EventerSimpleEvent, 16)
+ sub, err := eventer.WatchSimpleEvent(nil, ch, nil, nil, nil)
+ if err != nil {
+ t.Fatalf("failed to subscribe to simple events: %v", err)
+ }
+ if _, err := eventer.RaiseSimpleEvent(auth, common.Address{255}, [32]byte{255}, true, big.NewInt(255)); err != nil {
+ t.Fatalf("failed to raise subscribed simple event: %v", err)
+ }
+ sim.Commit()
+
+ select {
+ case event := <-ch:
+ if event.Value.Uint64() != 255 {
+ t.Errorf("simple log content mismatch: have %v, want 255", event)
+ }
+ case <-time.After(250 * time.Millisecond):
+ t.Fatalf("subscribed simple event didn't arrive")
+ }
+ // Unsubscribe from the event and make sure we're not delivered more
+ sub.Unsubscribe()
+
+ if _, err := eventer.RaiseSimpleEvent(auth, common.Address{254}, [32]byte{254}, true, big.NewInt(254)); err != nil {
+ t.Fatalf("failed to raise subscribed simple event: %v", err)
+ }
+ sim.Commit()
+
+ select {
+ case event := <-ch:
+ t.Fatalf("unsubscribed simple event arrived: %v", event)
+ case <-time.After(250 * time.Millisecond):
+ }
+ `,
+ },
+ {
+ `DeeplyNestedArray`,
+ `
+ contract DeeplyNestedArray {
+ uint64[3][4][5] public deepUint64Array;
+ function storeDeepUintArray(uint64[3][4][5] arr) public {
+ deepUint64Array = arr;
+ }
+ function retrieveDeepArray() public view returns (uint64[3][4][5]) {
+ return deepUint64Array;
+ }
+ }
+ `,
+ `6060604052341561000f57600080fd5b6106438061001e6000396000f300606060405260043610610057576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063344248551461005c5780638ed4573a1461011457806398ed1856146101ab575b600080fd5b341561006757600080fd5b610112600480806107800190600580602002604051908101604052809291906000905b828210156101055783826101800201600480602002604051908101604052809291906000905b828210156100f25783826060020160038060200260405190810160405280929190826003602002808284378201915050505050815260200190600101906100b0565b505050508152602001906001019061008a565b5050505091905050610208565b005b341561011f57600080fd5b61012761021d565b604051808260056000925b8184101561019b578284602002015160046000925b8184101561018d5782846020020151600360200280838360005b8381101561017c578082015181840152602081019050610161565b505050509050019260010192610147565b925050509260010192610132565b9250505091505060405180910390f35b34156101b657600080fd5b6101de6004808035906020019091908035906020019091908035906020019091905050610309565b604051808267ffffffffffffffff1667ffffffffffffffff16815260200191505060405180910390f35b80600090600561021992919061035f565b5050565b6102256103b0565b6000600580602002604051908101604052809291906000905b8282101561030057838260040201600480602002604051908101604052809291906000905b828210156102ed578382016003806020026040519081016040528092919082600380156102d9576020028201916000905b82829054906101000a900467ffffffffffffffff1667ffffffffffffffff16815260200190600801906020826007010492830192600103820291508084116102945790505b505050505081526020019060010190610263565b505050508152602001906001019061023e565b50505050905090565b60008360058110151561031857fe5b600402018260048110151561032957fe5b018160038110151561033757fe5b6004918282040191900660080292509250509054906101000a900467ffffffffffffffff1681565b826005600402810192821561039f579160200282015b8281111561039e5782518290600461038e9291906103df565b5091602001919060040190610375565b5b5090506103ac919061042d565b5090565b610780604051908101604052806005905b6103c9610459565b8152602001906001900390816103c15790505090565b826004810192821561041c579160200282015b8281111561041b5782518290600361040b929190610488565b50916020019190600101906103f2565b5b5090506104299190610536565b5090565b61045691905b8082111561045257600081816104499190610562565b50600401610433565b5090565b90565b610180604051908101604052806004905b6104726105a7565b81526020019060019003908161046a5790505090565b82600380016004900481019282156105255791602002820160005b838211156104ef57835183826101000a81548167ffffffffffffffff021916908367ffffffffffffffff16021790555092602001926008016020816007010492830192600103026104a3565b80156105235782816101000a81549067ffffffffffffffff02191690556008016020816007010492830192600103026104ef565b505b50905061053291906105d9565b5090565b61055f91905b8082111561055b57600081816105529190610610565b5060010161053c565b5090565b90565b50600081816105719190610610565b50600101600081816105839190610610565b50600101600081816105959190610610565b5060010160006105a59190610610565b565b6060604051908101604052806003905b600067ffffffffffffffff168152602001906001900390816105b75790505090565b61060d91905b8082111561060957600081816101000a81549067ffffffffffffffff0219169055506001016105df565b5090565b90565b50600090555600a165627a7a7230582087e5a43f6965ab6ef7a4ff056ab80ed78fd8c15cff57715a1bf34ec76a93661c0029`,
+ `[{"constant":false,"inputs":[{"name":"arr","type":"uint64[3][4][5]"}],"name":"storeDeepUintArray","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"retrieveDeepArray","outputs":[{"name":"","type":"uint64[3][4][5]"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"uint256"},{"name":"","type":"uint256"},{"name":"","type":"uint256"}],"name":"deepUint64Array","outputs":[{"name":"","type":"uint64"}],"payable":false,"stateMutability":"view","type":"function"}]`,
+ `
+ // Generate a new random account and a funded simulator
+ key, _ := crypto.GenerateKey()
+ auth := bind.NewKeyedTransactor(key)
+ sim := backends.NewSimulatedBackend(core.GenesisAlloc{auth.From: {Balance: big.NewInt(10000000000)}})
+
+ //deploy the test contract
+ _, _, testContract, err := DeployDeeplyNestedArray(auth, sim)
+ if err != nil {
+ t.Fatalf("Failed to deploy test contract: %v", err)
+ }
+
+ // Finish deploy.
+ sim.Commit()
+
+ //Create coordinate-filled array, for testing purposes.
+ testArr := [5][4][3]uint64{}
+ for i := 0; i < 5; i++ {
+ testArr[i] = [4][3]uint64{}
+ for j := 0; j < 4; j++ {
+ testArr[i][j] = [3]uint64{}
+ for k := 0; k < 3; k++ {
+ //pack the coordinates, each array value will be unique, and can be validated easily.
+ testArr[i][j][k] = uint64(i) << 16 | uint64(j) << 8 | uint64(k)
+ }
+ }
+ }
+
+ if _, err := testContract.StoreDeepUintArray(&bind.TransactOpts{
+ From: auth.From,
+ Signer: auth.Signer,
+ }, testArr); err != nil {
+ t.Fatalf("Failed to store nested array in test contract: %v", err)
+ }
+
+ sim.Commit()
+
+ retrievedArr, err := testContract.RetrieveDeepArray(&bind.CallOpts{
+ From: auth.From,
+ Pending: false,
+ })
+ if err != nil {
+ t.Fatalf("Failed to retrieve nested array from test contract: %v", err)
+ }
+
+ //quick check to see if contents were copied
+ // (See accounts/abi/unpack_test.go for more extensive testing)
+ if retrievedArr[4][3][2] != testArr[4][3][2] {
+ t.Fatalf("Retrieved value does not match expected value! got: %d, expected: %d. %v", retrievedArr[4][3][2], testArr[4][3][2], err)
+ }
+ `,
+ },
}
// Tests that packages generated by the binder can be successfully compiled and
@@ -559,7 +860,7 @@ func TestBindings(t *testing.T) {
}
}
// Test the entire package and report any failures
- cmd := exec.Command(gocmd, "test", "-v")
+ cmd := exec.Command(gocmd, "test", "-v", "-count", "1")
cmd.Dir = pkg
if out, err := cmd.CombinedOutput(); err != nil {
t.Fatalf("failed to run binding test: %v\n%s", err, out)
diff --git a/accounts/abi/bind/template.go b/accounts/abi/bind/template.go
index d07610e7c..7202ee67a 100644
--- a/accounts/abi/bind/template.go
+++ b/accounts/abi/bind/template.go
@@ -32,6 +32,7 @@ type tmplContract struct {
Constructor abi.Method // Contract constructor for deploy parametrization
Calls map[string]*tmplMethod // Contract calls that only read state data
Transacts map[string]*tmplMethod // Contract calls that write state data
+	Events      map[string]*tmplEvent  // Contract event accessors
}
// tmplMethod is a wrapper around an abi.Method that contains a few preprocessed
@@ -39,7 +40,13 @@ type tmplContract struct {
type tmplMethod struct {
Original abi.Method // Original method as parsed by the abi package
Normalized abi.Method // Normalized version of the parsed method (capitalized names, non-anonymous args/returns)
- Structured bool // Whether the returns should be accumulated into a contract
+ Structured bool // Whether the returns should be accumulated into a struct
+}
+
+// tmplEvent is a wrapper around an abi.Event that holds the original event along
+// with its normalized version used for code generation.
+type tmplEvent struct {
+ Original abi.Event // Original event as parsed by the abi package
+ Normalized abi.Event // Normalized version of the parsed fields
}
// tmplSource is language to template mapping containing all the supported
@@ -75,7 +82,7 @@ package {{.Package}}
if err != nil {
return common.Address{}, nil, nil, err
}
- return address, tx, &{{.Type}}{ {{.Type}}Caller: {{.Type}}Caller{contract: contract}, {{.Type}}Transactor: {{.Type}}Transactor{contract: contract} }, nil
+ return address, tx, &{{.Type}}{ {{.Type}}Caller: {{.Type}}Caller{contract: contract}, {{.Type}}Transactor: {{.Type}}Transactor{contract: contract}, {{.Type}}Filterer: {{.Type}}Filterer{contract: contract} }, nil
}
{{end}}
@@ -83,6 +90,7 @@ package {{.Package}}
type {{.Type}} struct {
{{.Type}}Caller // Read-only binding to the contract
{{.Type}}Transactor // Write-only binding to the contract
+ {{.Type}}Filterer // Log filterer for contract events
}
// {{.Type}}Caller is an auto generated read-only Go binding around an Ethereum contract.
@@ -95,6 +103,11 @@ package {{.Package}}
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
+	// {{.Type}}Filterer is an auto generated log filtering Go binding around the events of an Ethereum contract.
+ type {{.Type}}Filterer struct {
+ contract *bind.BoundContract // Generic contract wrapper for the low level calls
+ }
+
// {{.Type}}Session is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type {{.Type}}Session struct {
@@ -134,16 +147,16 @@ package {{.Package}}
// New{{.Type}} creates a new instance of {{.Type}}, bound to a specific deployed contract.
func New{{.Type}}(address common.Address, backend bind.ContractBackend) (*{{.Type}}, error) {
- contract, err := bind{{.Type}}(address, backend, backend)
+ contract, err := bind{{.Type}}(address, backend, backend, backend)
if err != nil {
return nil, err
}
- return &{{.Type}}{ {{.Type}}Caller: {{.Type}}Caller{contract: contract}, {{.Type}}Transactor: {{.Type}}Transactor{contract: contract} }, nil
+ return &{{.Type}}{ {{.Type}}Caller: {{.Type}}Caller{contract: contract}, {{.Type}}Transactor: {{.Type}}Transactor{contract: contract}, {{.Type}}Filterer: {{.Type}}Filterer{contract: contract} }, nil
}
// New{{.Type}}Caller creates a new read-only instance of {{.Type}}, bound to a specific deployed contract.
func New{{.Type}}Caller(address common.Address, caller bind.ContractCaller) (*{{.Type}}Caller, error) {
- contract, err := bind{{.Type}}(address, caller, nil)
+ contract, err := bind{{.Type}}(address, caller, nil, nil)
if err != nil {
return nil, err
}
@@ -152,20 +165,29 @@ package {{.Package}}
// New{{.Type}}Transactor creates a new write-only instance of {{.Type}}, bound to a specific deployed contract.
func New{{.Type}}Transactor(address common.Address, transactor bind.ContractTransactor) (*{{.Type}}Transactor, error) {
- contract, err := bind{{.Type}}(address, nil, transactor)
+ contract, err := bind{{.Type}}(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &{{.Type}}Transactor{contract: contract}, nil
}
+ // New{{.Type}}Filterer creates a new log filterer instance of {{.Type}}, bound to a specific deployed contract.
+ func New{{.Type}}Filterer(address common.Address, filterer bind.ContractFilterer) (*{{.Type}}Filterer, error) {
+ contract, err := bind{{.Type}}(address, nil, nil, filterer)
+ if err != nil {
+ return nil, err
+ }
+ return &{{.Type}}Filterer{contract: contract}, nil
+ }
+
// bind{{.Type}} binds a generic wrapper to an already deployed contract.
- func bind{{.Type}}(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
+ func bind{{.Type}}(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := abi.JSON(strings.NewReader({{.Type}}ABI))
if err != nil {
return nil, err
}
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
+ return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
@@ -263,6 +285,137 @@ package {{.Package}}
return _{{$contract.Type}}.Contract.{{.Normalized.Name}}(&_{{$contract.Type}}.TransactOpts {{range $i, $_ := .Normalized.Inputs}}, {{.Name}}{{end}})
}
{{end}}
+
+ {{range .Events}}
+ // {{$contract.Type}}{{.Normalized.Name}}Iterator is returned from Filter{{.Normalized.Name}} and is used to iterate over the raw logs and unpacked data for {{.Normalized.Name}} events raised by the {{$contract.Type}} contract.
+ type {{$contract.Type}}{{.Normalized.Name}}Iterator struct {
+ Event *{{$contract.Type}}{{.Normalized.Name}} // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+ }
+ // Next advances the iterator to the subsequent event, returning whether there
+ // are any more events found. In case of a retrieval or parsing error, false is
+ // returned and Error() can be queried for the exact failure.
+ func (it *{{$contract.Type}}{{.Normalized.Name}}Iterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if (it.fail != nil) {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if (it.done) {
+ select {
+ case log := <-it.logs:
+ it.Event = new({{$contract.Type}}{{.Normalized.Name}})
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new({{$contract.Type}}{{.Normalized.Name}})
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+ }
+	// Error returns any retrieval or parsing error that occurred during filtering.
+ func (it *{{$contract.Type}}{{.Normalized.Name}}Iterator) Error() error {
+ return it.fail
+ }
+ // Close terminates the iteration process, releasing any pending underlying
+ // resources.
+ func (it *{{$contract.Type}}{{.Normalized.Name}}Iterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+ }
+
+ // {{$contract.Type}}{{.Normalized.Name}} represents a {{.Normalized.Name}} event raised by the {{$contract.Type}} contract.
+ type {{$contract.Type}}{{.Normalized.Name}} struct { {{range .Normalized.Inputs}}
+ {{capitalise .Name}} {{if .Indexed}}{{bindtopictype .Type}}{{else}}{{bindtype .Type}}{{end}}; {{end}}
+ Raw types.Log // Blockchain specific contextual infos
+ }
+
+ // Filter{{.Normalized.Name}} is a free log retrieval operation binding the contract event 0x{{printf "%x" .Original.Id}}.
+ //
+ // Solidity: {{.Original.String}}
+ func (_{{$contract.Type}} *{{$contract.Type}}Filterer) Filter{{.Normalized.Name}}(opts *bind.FilterOpts{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}} []{{bindtype .Type}}{{end}}{{end}}) (*{{$contract.Type}}{{.Normalized.Name}}Iterator, error) {
+ {{range .Normalized.Inputs}}
+ {{if .Indexed}}var {{.Name}}Rule []interface{}
+ for _, {{.Name}}Item := range {{.Name}} {
+ {{.Name}}Rule = append({{.Name}}Rule, {{.Name}}Item)
+ }{{end}}{{end}}
+
+ logs, sub, err := _{{$contract.Type}}.contract.FilterLogs(opts, "{{.Original.Name}}"{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}}Rule{{end}}{{end}})
+ if err != nil {
+ return nil, err
+ }
+ return &{{$contract.Type}}{{.Normalized.Name}}Iterator{contract: _{{$contract.Type}}.contract, event: "{{.Original.Name}}", logs: logs, sub: sub}, nil
+ }
+
+ // Watch{{.Normalized.Name}} is a free log subscription operation binding the contract event 0x{{printf "%x" .Original.Id}}.
+ //
+ // Solidity: {{.Original.String}}
+ func (_{{$contract.Type}} *{{$contract.Type}}Filterer) Watch{{.Normalized.Name}}(opts *bind.WatchOpts, sink chan<- *{{$contract.Type}}{{.Normalized.Name}}{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}} []{{bindtype .Type}}{{end}}{{end}}) (event.Subscription, error) {
+ {{range .Normalized.Inputs}}
+ {{if .Indexed}}var {{.Name}}Rule []interface{}
+ for _, {{.Name}}Item := range {{.Name}} {
+ {{.Name}}Rule = append({{.Name}}Rule, {{.Name}}Item)
+ }{{end}}{{end}}
+
+ logs, sub, err := _{{$contract.Type}}.contract.WatchLogs(opts, "{{.Original.Name}}"{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}}Rule{{end}}{{end}})
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new({{$contract.Type}}{{.Normalized.Name}})
+ if err := _{{$contract.Type}}.contract.UnpackLog(event, "{{.Original.Name}}", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+ }
+ {{end}}
{{end}}
`
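For orientation, the event portion of this template expands to code of roughly the following shape for the Eventer contract's SimpleEvent used in the tests above (signatures only, bodies elided; a sketch rather than verbatim generator output):

	// type EventerSimpleEvent struct {
	// 	Addr  common.Address
	// 	Id    [32]byte
	// 	Flag  bool
	// 	Value *big.Int
	// 	Raw   types.Log
	// }
	//
	// func (_Eventer *EventerFilterer) FilterSimpleEvent(opts *bind.FilterOpts,
	// 	Addr []common.Address, Id [][32]byte, Flag []bool) (*EventerSimpleEventIterator, error)
	//
	// func (_Eventer *EventerFilterer) WatchSimpleEvent(opts *bind.WatchOpts, sink chan<- *EventerSimpleEvent,
	// 	Addr []common.Address, Id [][32]byte, Flag []bool) (event.Subscription, error)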
diff --git a/accounts/abi/bind/topics.go b/accounts/abi/bind/topics.go
new file mode 100644
index 000000000..600dfcda9
--- /dev/null
+++ b/accounts/abi/bind/topics.go
@@ -0,0 +1,189 @@
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package bind
+
+import (
+ "errors"
+ "fmt"
+ "math/big"
+ "reflect"
+
+ "github.com/ethereum/go-ethereum/accounts/abi"
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/crypto"
+)
+
+// makeTopics converts a filter query argument list into a filter topic set.
+func makeTopics(query ...[]interface{}) ([][]common.Hash, error) {
+ topics := make([][]common.Hash, len(query))
+ for i, filter := range query {
+ for _, rule := range filter {
+ var topic common.Hash
+
+ // Try to generate the topic based on simple types
+ switch rule := rule.(type) {
+ case common.Hash:
+ copy(topic[:], rule[:])
+ case common.Address:
+ copy(topic[common.HashLength-common.AddressLength:], rule[:])
+ case *big.Int:
+ blob := rule.Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case bool:
+ if rule {
+ topic[common.HashLength-1] = 1
+ }
+ case int8:
+ blob := big.NewInt(int64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case int16:
+ blob := big.NewInt(int64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case int32:
+ blob := big.NewInt(int64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case int64:
+ blob := big.NewInt(rule).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case uint8:
+ blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case uint16:
+ blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case uint32:
+ blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case uint64:
+ blob := new(big.Int).SetUint64(rule).Bytes()
+ copy(topic[common.HashLength-len(blob):], blob)
+ case string:
+ hash := crypto.Keccak256Hash([]byte(rule))
+ copy(topic[:], hash[:])
+ case []byte:
+ hash := crypto.Keccak256Hash(rule)
+ copy(topic[:], hash[:])
+
+ default:
+ // Attempt to generate the topic from funky types
+ val := reflect.ValueOf(rule)
+
+ switch {
+ case val.Kind() == reflect.Array && reflect.TypeOf(rule).Elem().Kind() == reflect.Uint8:
+ reflect.Copy(reflect.ValueOf(topic[common.HashLength-val.Len():]), val)
+
+ default:
+ return nil, fmt.Errorf("unsupported indexed type: %T", rule)
+ }
+ }
+ topics[i] = append(topics[i], topic)
+ }
+ }
+ return topics, nil
+}
+
+// Big batch of reflect types for topic reconstruction.
+var (
+ reflectHash = reflect.TypeOf(common.Hash{})
+ reflectAddress = reflect.TypeOf(common.Address{})
+ reflectBigInt = reflect.TypeOf(new(big.Int))
+)
+
+// parseTopics converts the indexed topic fields into actual log field values.
+//
+// Note, dynamic types cannot be reconstructed since they get mapped to Keccak256
+// hashes as the topic value!
+func parseTopics(out interface{}, fields abi.Arguments, topics []common.Hash) error {
+ // Sanity check that the fields and topics match up
+ if len(fields) != len(topics) {
+ return errors.New("topic/field count mismatch")
+ }
+ // Iterate over all the fields and reconstruct them from topics
+ for _, arg := range fields {
+ if !arg.Indexed {
+ return errors.New("non-indexed field in topic reconstruction")
+ }
+ field := reflect.ValueOf(out).Elem().FieldByName(capitalise(arg.Name))
+
+ // Try to parse the topic back into the fields based on primitive types
+ switch field.Kind() {
+ case reflect.Bool:
+ if topics[0][common.HashLength-1] == 1 {
+ field.Set(reflect.ValueOf(true))
+ }
+ case reflect.Int8:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(int8(num.Int64())))
+
+ case reflect.Int16:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(int16(num.Int64())))
+
+ case reflect.Int32:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(int32(num.Int64())))
+
+ case reflect.Int64:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(num.Int64()))
+
+ case reflect.Uint8:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(uint8(num.Uint64())))
+
+ case reflect.Uint16:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(uint16(num.Uint64())))
+
+ case reflect.Uint32:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(uint32(num.Uint64())))
+
+ case reflect.Uint64:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(num.Uint64()))
+
+ default:
+ // Ran out of plain primitive types, try custom types
+ switch field.Type() {
+ case reflectHash: // Also covers all dynamic types
+ field.Set(reflect.ValueOf(topics[0]))
+
+ case reflectAddress:
+ var addr common.Address
+ copy(addr[:], topics[0][common.HashLength-common.AddressLength:])
+ field.Set(reflect.ValueOf(addr))
+
+ case reflectBigInt:
+ num := new(big.Int).SetBytes(topics[0][:])
+ field.Set(reflect.ValueOf(num))
+
+ default:
+ // Ran out of custom types, try the crazies
+ switch {
+ case arg.Type.T == abi.FixedBytesTy:
+ reflect.Copy(field, reflect.ValueOf(topics[0][common.HashLength-arg.Type.Size:]))
+
+ default:
+ return fmt.Errorf("unsupported indexed type: %v", arg.Type)
+ }
+ }
+ }
+ topics = topics[1:]
+ }
+ return nil
+}
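
For reference, makeTopics above left-pads value types into the 32-byte topic word and Keccak256-hashes dynamic types (string, []byte), which is also why parseTopics can only reconstruct the fixed-size kinds. A small self-contained sketch of the padding rule; padTopic is an illustrative helper, not part of the package:

    package main

    import (
        "fmt"
        "math/big"

        "github.com/ethereum/go-ethereum/common"
    )

    // padTopic mimics the left-padding used by makeTopics for value types:
    // the significant bytes land in the tail of a zeroed 32-byte word.
    func padTopic(blob []byte) common.Hash {
        var topic common.Hash
        copy(topic[common.HashLength-len(blob):], blob)
        return topic
    }

    func main() {
        addr := common.HexToAddress("0x0102030405060708090a0b0c0d0e0f1011121314")
        fmt.Println(padTopic(addr[:]).Hex())                // 12 zero bytes, then the 20 address bytes
        fmt.Println(padTopic(big.NewInt(42).Bytes()).Hex()) // 31 zero bytes, then 0x2a
    }
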
diff --git a/accounts/abi/event.go b/accounts/abi/event.go
index 726bac90e..595f169f3 100644
--- a/accounts/abi/event.go
+++ b/accounts/abi/event.go
@@ -33,6 +33,17 @@ type Event struct {
Inputs Arguments
}
+func (event Event) String() string {
+ inputs := make([]string, len(event.Inputs))
+ for i, input := range event.Inputs {
+ inputs[i] = fmt.Sprintf("%v %v", input.Name, input.Type)
+ if input.Indexed {
+ inputs[i] = fmt.Sprintf("%v indexed %v", input.Name, input.Type)
+ }
+ }
+ return fmt.Sprintf("event %v(%v)", event.Name, strings.Join(inputs, ", "))
+}
+
// Id returns the canonical representation of the event's signature used by the
// abi definition to identify event names and types.
func (e Event) Id() common.Hash {
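
The new String method renders an event in a Solidity-like form, which is what the binding template above embeds in its "Solidity:" doc comments. A short sketch of the output, assuming the single-string abi.NewType constructor used by this vintage of the package (the Transfer event itself is illustrative):

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/accounts/abi"
    )

    func main() {
        // Errors from NewType are ignored for brevity in this sketch.
        addrT, _ := abi.NewType("address")
        uintT, _ := abi.NewType("uint256")

        transfer := abi.Event{
            Name: "Transfer",
            Inputs: abi.Arguments{
                {Name: "from", Type: addrT, Indexed: true},
                {Name: "to", Type: addrT, Indexed: true},
                {Name: "value", Type: uintT},
            },
        }
        // Prints: event Transfer(from indexed address, to indexed address, value uint256)
        fmt.Println(transfer.String())
    }
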
diff --git a/accounts/abi/pack_test.go b/accounts/abi/pack_test.go
index 36401ee67..58a5b7a58 100644
--- a/accounts/abi/pack_test.go
+++ b/accounts/abi/pack_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -300,6 +300,11 @@ func TestPack(t *testing.T) {
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
},
{
+ "uint32[2][3][4]",
+ [4][3][2]uint32{{{1, 2}, {3, 4}, {5, 6}}, {{7, 8}, {9, 10}, {11, 12}}, {{13, 14}, {15, 16}, {17, 18}}, {{19, 20}, {21, 22}, {23, 24}}},
+ common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000d000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000f000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001300000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000015000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000170000000000000000000000000000000000000000000000000000000000000018"),
+ },
+ {
"address[]",
[]common.Address{{1}, {2}},
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000"),
diff --git a/accounts/abi/unpack.go b/accounts/abi/unpack.go
index 80efb3f7e..793d515ad 100644
--- a/accounts/abi/unpack.go
+++ b/accounts/abi/unpack.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -93,15 +93,28 @@ func readFixedBytes(t Type, word []byte) (interface{}, error) {
}
+func getFullElemSize(elem *Type) int {
+ // all others should be counted as 32 (slices have pointers to their respective elements)
+ size := 32
+ //arrays wrap it, each element being the same size
+ for elem.T == ArrayTy {
+ size *= elem.Size
+ elem = elem.Elem
+ }
+ return size
+}
+
// iteratively unpack elements
func forEachUnpack(t Type, output []byte, start, size int) (interface{}, error) {
+ if size < 0 {
+ return nil, fmt.Errorf("cannot marshal input to array, size is negative (%d)", size)
+ }
if start+32*size > len(output) {
return nil, fmt.Errorf("abi: cannot marshal in to go array: offset %d would go over slice boundary (len=%d)", start+32*size, len(output))
}
// this value will become our slice or our array, depending on the type
var refSlice reflect.Value
- slice := output[start : start+size*32]
if t.T == SliceTy {
// declare our slice
@@ -113,15 +126,20 @@ func forEachUnpack(t Type, output []byte, start, size int) (interface{}, error)
return nil, fmt.Errorf("abi: invalid type in array/slice unpacking stage")
}
- for i, j := start, 0; j*32 < len(slice); i, j = i+32, j+1 {
- // this corrects the arrangement so that we get all the underlying array values
- if t.Elem.T == ArrayTy && j != 0 {
- i = start + t.Elem.Size*32*j
- }
+ // Arrays have packed elements, resulting in longer unpack steps.
+ // Slices have just 32 bytes per element (pointing to the contents).
+ elemSize := 32
+ if t.T == ArrayTy {
+ elemSize = getFullElemSize(t.Elem)
+ }
+
+ for i, j := start, 0; j < size; i, j = i+elemSize, j+1 {
+
inter, err := toGoType(i, *t.Elem, output)
if err != nil {
return nil, err
}
+
// append the item to our reflect slice
refSlice.Index(j).Set(reflect.ValueOf(inter))
}
@@ -181,16 +199,32 @@ func toGoType(index int, t Type, output []byte) (interface{}, error) {
// interprets a 32 byte slice as an offset and then determines which index to look at to decode the type.
func lengthPrefixPointsTo(index int, output []byte) (start int, length int, err error) {
- offset := int(binary.BigEndian.Uint64(output[index+24 : index+32]))
- if offset+32 > len(output) {
- return 0, 0, fmt.Errorf("abi: cannot marshal in to go slice: offset %d would go over slice boundary (len=%d)", len(output), offset+32)
+ bigOffsetEnd := big.NewInt(0).SetBytes(output[index : index+32])
+ bigOffsetEnd.Add(bigOffsetEnd, common.Big32)
+ outputLength := big.NewInt(int64(len(output)))
+
+ if bigOffsetEnd.Cmp(outputLength) > 0 {
+ return 0, 0, fmt.Errorf("abi: cannot marshal in to go slice: offset %v would go over slice boundary (len=%v)", bigOffsetEnd, outputLength)
}
- length = int(binary.BigEndian.Uint64(output[offset+24 : offset+32]))
- if offset+32+length > len(output) {
- return 0, 0, fmt.Errorf("abi: cannot marshal in to go type: length insufficient %d require %d", len(output), offset+32+length)
+
+ if bigOffsetEnd.BitLen() > 63 {
+ return 0, 0, fmt.Errorf("abi offset larger than int64: %v", bigOffsetEnd)
}
- start = offset + 32
- //fmt.Printf("LENGTH PREFIX INFO: \nsize: %v\noffset: %v\nstart: %v\n", length, offset, start)
+ offsetEnd := int(bigOffsetEnd.Uint64())
+ lengthBig := big.NewInt(0).SetBytes(output[offsetEnd-32 : offsetEnd])
+
+ totalSize := big.NewInt(0)
+ totalSize.Add(totalSize, bigOffsetEnd)
+ totalSize.Add(totalSize, lengthBig)
+ if totalSize.BitLen() > 63 {
+ return 0, 0, fmt.Errorf("abi length larger than int64: %v", totalSize)
+ }
+
+ if totalSize.Cmp(outputLength) > 0 {
+ return 0, 0, fmt.Errorf("abi: cannot marshal in to go type: length insufficient %v require %v", outputLength, totalSize)
+ }
+ start = int(bigOffsetEnd.Uint64())
+ length = int(lengthBig.Uint64())
return
}
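
The stride change above matters because nested static arrays are laid out inline: an outer element whose type is itself a static array occupies 32 bytes times the product of the nested lengths, not a single 32-byte word. A worked sketch of the offsets for the uint64[3][2][4] return value exercised by the new test below, using plain arithmetic rather than the abi types:

    package main

    import "fmt"

    func main() {
        // uint64[3][2][4]: four outer elements, each of Go type [2][3]uint64.
        // Every leaf value takes one 32-byte word, so an outer element spans 32*3*2 bytes.
        const word = 32
        elemSize := word * 3 * 2 // 192 bytes per outer element

        for j := 0; j < 4; j++ {
            fmt.Printf("outer element %d starts at byte offset %d\n", j, j*elemSize)
        }
        // A trailing scalar return value then starts at 4*192 = 768, which is what
        // the old fixed 32-byte stride misread.
    }
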
diff --git a/accounts/abi/unpack_test.go b/accounts/abi/unpack_test.go
index 4d7fe638c..ee6256709 100644
--- a/accounts/abi/unpack_test.go
+++ b/accounts/abi/unpack_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -130,7 +130,7 @@ var unpackTests = []unpackTest{
{
def: `[{"type": "bytes32"}]`,
enc: "0100000000000000000000000000000000000000000000000000000000000000",
- want: common.HexToHash("0100000000000000000000000000000000000000000000000000000000000000"),
+ want: [32]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
},
{
def: `[{"type": "function"}]`,
@@ -190,6 +190,11 @@ var unpackTests = []unpackTest{
want: [2]uint32{1, 2},
},
{
+ def: `[{"type": "uint32[2][3][4]"}]`,
+ enc: "000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000d000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000f000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001300000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000015000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000170000000000000000000000000000000000000000000000000000000000000018",
+ want: [4][3][2]uint32{{{1, 2}, {3, 4}, {5, 6}}, {{7, 8}, {9, 10}, {11, 12}}, {{13, 14}, {15, 16}, {17, 18}}, {{19, 20}, {21, 22}, {23, 24}}},
+ },
+ {
def: `[{"type": "uint64[]"}]`,
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
want: []uint64{1, 2},
@@ -435,6 +440,46 @@ func TestMultiReturnWithArray(t *testing.T) {
}
}
+func TestMultiReturnWithDeeplyNestedArray(t *testing.T) {
+ // Similar to TestMultiReturnWithArray, but with a special case in mind:
+ // values of nested static arrays count towards the size as well, and any element following
+ // such a nested array argument should be read with the correct offset,
+ // so that it does not read content from the previous array argument.
+ const definition = `[{"name" : "multi", "outputs": [{"type": "uint64[3][2][4]"}, {"type": "uint64"}]}]`
+ abi, err := JSON(strings.NewReader(definition))
+ if err != nil {
+ t.Fatal(err)
+ }
+ buff := new(bytes.Buffer)
+ // construct the test array, each 3 char element is joined with 61 '0' chars,
+ // to form the ((3 + 61) * 0.5) = 32 byte elements in the array.
+ buff.Write(common.Hex2Bytes(strings.Join([]string{
+ "", //empty, to apply the 61-char separator to the first element as well.
+ "111", "112", "113", "121", "122", "123",
+ "211", "212", "213", "221", "222", "223",
+ "311", "312", "313", "321", "322", "323",
+ "411", "412", "413", "421", "422", "423",
+ }, "0000000000000000000000000000000000000000000000000000000000000")))
+ buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000009876"))
+
+ ret1, ret1Exp := new([4][2][3]uint64), [4][2][3]uint64{
+ {{0x111, 0x112, 0x113}, {0x121, 0x122, 0x123}},
+ {{0x211, 0x212, 0x213}, {0x221, 0x222, 0x223}},
+ {{0x311, 0x312, 0x313}, {0x321, 0x322, 0x323}},
+ {{0x411, 0x412, 0x413}, {0x421, 0x422, 0x423}},
+ }
+ ret2, ret2Exp := new(uint64), uint64(0x9876)
+ if err := abi.Unpack(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(*ret1, ret1Exp) {
+ t.Error("array result", *ret1, "!= Expected", ret1Exp)
+ }
+ if *ret2 != ret2Exp {
+ t.Error("int result", *ret2, "!= Expected", ret2Exp)
+ }
+}
+
func TestUnmarshal(t *testing.T) {
const definition = `[
{ "name" : "int", "constant" : false, "outputs": [ { "type": "uint256" } ] },
@@ -683,3 +728,73 @@ func TestUnmarshal(t *testing.T) {
t.Fatal("expected error:", err)
}
}
+
+func TestOOMMaliciousInput(t *testing.T) {
+ oomTests := []unpackTest{
+ {
+ def: `[{"type": "uint8[]"}]`,
+ enc: "0000000000000000000000000000000000000000000000000000000000000020" + // offset
+ "0000000000000000000000000000000000000000000000000000000000000003" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ { // Length larger than 64 bits
+ def: `[{"type": "uint8[]"}]`,
+ enc: "0000000000000000000000000000000000000000000000000000000000000020" + // offset
+ "00ffffffffffffffffffffffffffffffffffffffffffffff0000000000000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ { // Offset very large (over 64 bits)
+ def: `[{"type": "uint8[]"}]`,
+ enc: "00ffffffffffffffffffffffffffffffffffffffffffffff0000000000000020" + // offset
+ "0000000000000000000000000000000000000000000000000000000000000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ { // Offset very large (below 64 bits)
+ def: `[{"type": "uint8[]"}]`,
+ enc: "0000000000000000000000000000000000000000000000007ffffffffff00020" + // offset
+ "0000000000000000000000000000000000000000000000000000000000000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ { // Offset negative (as 64 bit)
+ def: `[{"type": "uint8[]"}]`,
+ enc: "000000000000000000000000000000000000000000000000f000000000000020" + // offset
+ "0000000000000000000000000000000000000000000000000000000000000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+
+ { // Negative length
+ def: `[{"type": "uint8[]"}]`,
+ enc: "0000000000000000000000000000000000000000000000000000000000000020" + // offset
+ "000000000000000000000000000000000000000000000000f000000000000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ { // Very large length
+ def: `[{"type": "uint8[]"}]`,
+ enc: "0000000000000000000000000000000000000000000000000000000000000020" + // offset
+ "0000000000000000000000000000000000000000000000007fffffffff000002" + // num elems
+ "0000000000000000000000000000000000000000000000000000000000000001" + // elem 1
+ "0000000000000000000000000000000000000000000000000000000000000002", // elem 2
+ },
+ }
+ for i, test := range oomTests {
+ def := fmt.Sprintf(`[{ "name" : "method", "outputs": %s}]`, test.def)
+ abi, err := JSON(strings.NewReader(def))
+ if err != nil {
+ t.Fatalf("invalid ABI definition %s: %v", def, err)
+ }
+ encb, err := hex.DecodeString(test.enc)
+ if err != nil {
+ t.Fatalf("invalid hex: %s", test.enc)
+ }
+ _, err = abi.Methods["method"].Outputs.UnpackValues(encb)
+ if err == nil {
+ t.Fatalf("Expected error on malicious input, test %d", i)
+ }
+ }
+}
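
The malicious inputs above target lengthPrefixPointsTo: the previous implementation read only the last 8 bytes of the offset and length words, so oversized or high-bit values were silently truncated rather than rejected, whereas the big.Int path sees the whole word. A small illustrative sketch of the difference on the oversized-offset case:

    package main

    import (
        "encoding/binary"
        "fmt"
        "math/big"
    )

    func main() {
        // Shape of the "Offset very large (over 64 bits)" word above:
        // junk in the high-order bytes, 0x20 in the low-order bytes.
        word := make([]byte, 32)
        for i := 1; i < 24; i++ {
            word[i] = 0xff
        }
        word[31] = 0x20

        // Old behaviour: only the last 8 bytes were read, the high bytes vanish.
        fmt.Println("truncated offset:", binary.BigEndian.Uint64(word[24:])) // 32

        // New behaviour: the full word becomes a big.Int, so the 63-bit check fires.
        full := new(big.Int).SetBytes(word)
        fmt.Println("full offset bit length:", full.BitLen()) // far beyond 63
    }
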
diff --git a/accounts/errors.go b/accounts/errors.go
index 64da8821c..40b21ed17 100644
--- a/accounts/errors.go
+++ b/accounts/errors.go
@@ -62,7 +62,7 @@ func NewAuthNeededError(needed string) error {
}
}
-// Error implements the standard error interfacel.
+// Error implements the standard error interface.
func (err *AuthNeededError) Error() string {
return fmt.Sprintf("authentication needed: %s", err.Needed)
}
diff --git a/appveyor.yml b/appveyor.yml
index 99029f553..45475d166 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -23,8 +23,8 @@ environment:
install:
- git submodule update --init
- rmdir C:\go /s /q
- - appveyor DownloadFile https://storage.googleapis.com/golang/go1.9.2.windows-%GETH_ARCH%.zip
- - 7z x go1.9.2.windows-%GETH_ARCH%.zip -y -oC:\ > NUL
+ - appveyor DownloadFile https://storage.googleapis.com/golang/go1.10.windows-%GETH_ARCH%.zip
+ - 7z x go1.10.windows-%GETH_ARCH%.zip -y -oC:\ > NUL
- go version
- gcc --version
diff --git a/build/ci-notes.md b/build/ci-notes.md
index 78e9575c0..f5b0e869d 100644
--- a/build/ci-notes.md
+++ b/build/ci-notes.md
@@ -2,12 +2,7 @@
Tagged releases and develop branch commits are available as installable Debian packages
for Ubuntu. Packages are built for all Ubuntu versions which are supported by
-Canonical:
-
-- Trusty Tahr (14.04 LTS)
-- Xenial Xerus (16.04 LTS)
-- Yakkety Yak (16.10)
-- Zesty Zapus (17.04)
+Canonical.
Packages of develop branch commits have suffix -unstable and cannot be installed alongside
the stable version. Switching between release streams requires user intervention.
@@ -21,18 +16,18 @@ variable which Travis CI makes available to certain builds.
We want to build go-ethereum with the most recent version of Go, irrespective of the Go
version that is available in the main Ubuntu repository. In order to make this possible,
our PPA depends on the ~gophers/ubuntu/archive PPA. Our source package build-depends on
-golang-1.9, which is co-installable alongside the regular golang package. PPA dependencies
+golang-1.10, which is co-installable alongside the regular golang package. PPA dependencies
can be edited at https://launchpad.net/%7Eethereum/+archive/ubuntu/ethereum/+edit-dependencies
## Building Packages Locally (for testing)
You need to run Ubuntu to do test packaging.
-Add the gophers PPA and install Go 1.9 and Debian packaging tools:
+Add the gophers PPA and install Go 1.10 and Debian packaging tools:
$ sudo apt-add-repository ppa:gophers/ubuntu/archive
$ sudo apt-get update
- $ sudo apt-get install build-essential golang-1.9 devscripts debhelper
+ $ sudo apt-get install build-essential golang-1.10 devscripts debhelper
Create the source packages:
diff --git a/build/ci.go b/build/ci.go
index 1f98bb843..24b58c1ae 100644
--- a/build/ci.go
+++ b/build/ci.go
@@ -121,7 +121,8 @@ var (
// Note: vivid is unsupported because there is no golang-1.6 package for it.
// Note: wily is unsupported because it was officially deprecated on Launchpad.
// Note: yakkety is unsupported because it was officially deprecated on Launchpad.
- debDistros = []string{"trusty", "xenial", "zesty", "artful"}
+ // Note: zesty is unsupported because it was officially deprecated on Launchpad.
+ debDistros = []string{"trusty", "xenial", "artful", "bionic"}
)
var GOBIN, _ = filepath.Abs(filepath.Join("build", "bin"))
@@ -181,13 +182,13 @@ func doInstall(cmdline []string) {
// Check Go version. People regularly open issues about compilation
// failure with outdated Go. This should save them the trouble.
if !strings.Contains(runtime.Version(), "devel") {
- // Figure out the minor version number since we can't textually compare (1.10 < 1.7)
+ // Figure out the minor version number since we can't textually compare (1.10 < 1.8)
var minor int
fmt.Sscanf(strings.TrimPrefix(runtime.Version(), "go1."), "%d", &minor)
- if minor < 7 {
+ if minor < 8 {
log.Println("You have Go version", runtime.Version())
- log.Println("go-ethereum requires at least Go version 1.7 and cannot")
+ log.Println("go-ethereum requires at least Go version 1.8 and cannot")
log.Println("be compiled with an earlier version. Please upgrade your Go installation.")
os.Exit(1)
}
diff --git a/build/deb.control b/build/deb.control
index 5c9ce6705..33c1a779f 100644
--- a/build/deb.control
+++ b/build/deb.control
@@ -2,7 +2,7 @@ Source: {{.Name}}
Section: science
Priority: extra
Maintainer: {{.Author}}
-Build-Depends: debhelper (>= 8.0.0), golang-1.9
+Build-Depends: debhelper (>= 8.0.0), golang-1.10
Standards-Version: 3.9.5
Homepage: https://ethereum.org
Vcs-Git: git://github.com/ethereum/go-ethereum.git
diff --git a/build/deb.rules b/build/deb.rules
index 7a7852513..7f286569e 100644
--- a/build/deb.rules
+++ b/build/deb.rules
@@ -5,7 +5,7 @@
#export DH_VERBOSE=1
override_dh_auto_build:
- build/env.sh /usr/lib/go-1.9/bin/go run build/ci.go install -git-commit={{.Env.Commit}} -git-branch={{.Env.Branch}} -git-tag={{.Env.Tag}} -buildnum={{.Env.Buildnum}} -pull-request={{.Env.IsPullRequest}}
+ build/env.sh /usr/lib/go-1.10/bin/go run build/ci.go install -git-commit={{.Env.Commit}} -git-branch={{.Env.Branch}} -git-tag={{.Env.Tag}} -buildnum={{.Env.Buildnum}} -pull-request={{.Env.IsPullRequest}}
override_dh_auto_test:
diff --git a/build/update-license.go b/build/update-license.go
index 3d69598b7..22e403342 100644
--- a/build/update-license.go
+++ b/build/update-license.go
@@ -55,10 +55,9 @@ var (
"crypto/sha3/",
"internal/jsre/deps",
"log/",
+ "common/bitutil/bitutil",
// don't license generated files
- "contracts/chequebook/contract/",
- "contracts/ens/contract/",
- "contracts/release/contract.go",
+ "contracts/chequebook/contract/code.go",
}
// paths with this prefix are licensed as GPL. all other files are LGPL.
diff --git a/cmd/bootnode/main.go b/cmd/bootnode/main.go
index ecfc6fc24..2e93cc04d 100644
--- a/cmd/bootnode/main.go
+++ b/cmd/bootnode/main.go
@@ -122,7 +122,12 @@ func main() {
utils.Fatalf("%v", err)
}
} else {
- if _, err := discover.ListenUDP(nodeKey, conn, realaddr, nil, "", restrictList); err != nil {
+ cfg := discover.Config{
+ PrivateKey: nodeKey,
+ AnnounceAddr: realaddr,
+ NetRestrict: restrictList,
+ }
+ if _, err := discover.ListenUDP(conn, cfg); err != nil {
utils.Fatalf("%v", err)
}
}
diff --git a/cmd/ethkey/inspect.go b/cmd/ethkey/inspect.go
index 219a5460b..dbf5afc0c 100644
--- a/cmd/ethkey/inspect.go
+++ b/cmd/ethkey/inspect.go
@@ -1,3 +1,19 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of go-ethereum.
+//
+// go-ethereum is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// go-ethereum is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+
package main
import (
diff --git a/cmd/ethkey/message_test.go b/cmd/ethkey/message_test.go
index fb16f03d0..39352b1d2 100644
--- a/cmd/ethkey/message_test.go
+++ b/cmd/ethkey/message_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2018 The go-ethereum Authors
// This file is part of go-ethereum.
//
// go-ethereum is free software: you can redistribute it and/or modify
diff --git a/cmd/ethkey/run_test.go b/cmd/ethkey/run_test.go
index 8ce4fe5cd..6006f6b5b 100644
--- a/cmd/ethkey/run_test.go
+++ b/cmd/ethkey/run_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2018 The go-ethereum Authors
// This file is part of go-ethereum.
//
// go-ethereum is free software: you can redistribute it and/or modify
diff --git a/cmd/evm/json_logger.go b/cmd/evm/json_logger.go
index 47daf7dbb..0e7a91189 100644
--- a/cmd/evm/json_logger.go
+++ b/cmd/evm/json_logger.go
@@ -1,18 +1,18 @@
// Copyright 2017 The go-ethereum Authors
-// This file is part of the go-ethereum library.
+// This file is part of go-ethereum.
//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
+// go-ethereum is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// The go-ethereum library is distributed in the hope that it will be useful,
+// go-ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
+// GNU General Public License for more details.
//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU General Public License
+// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
package main
diff --git a/cmd/evm/main.go b/cmd/evm/main.go
index 6c39cf8b8..a59cb1fb8 100644
--- a/cmd/evm/main.go
+++ b/cmd/evm/main.go
@@ -86,10 +86,6 @@ var (
Name: "create",
Usage: "indicates the action should be create rather than call",
}
- DisableGasMeteringFlag = cli.BoolFlag{
- Name: "nogasmetering",
- Usage: "disable gas metering",
- }
GenesisFlag = cli.StringFlag{
Name: "prestate",
Usage: "JSON file with prestate (genesis) config",
@@ -128,7 +124,6 @@ func init() {
ValueFlag,
DumpFlag,
InputFlag,
- DisableGasMeteringFlag,
MemProfileFlag,
CPUProfileFlag,
StatDumpFlag,
diff --git a/cmd/evm/runner.go b/cmd/evm/runner.go
index 96de0c76a..8a7399840 100644
--- a/cmd/evm/runner.go
+++ b/cmd/evm/runner.go
@@ -96,7 +96,9 @@ func runCmd(ctx *cli.Context) error {
}
if ctx.GlobalString(GenesisFlag.Name) != "" {
gen := readGenesis(ctx.GlobalString(GenesisFlag.Name))
- _, statedb = gen.ToBlock()
+ db, _ := ethdb.NewMemDatabase()
+ genesis := gen.ToBlock(db)
+ statedb, _ = state.New(genesis.Root(), state.NewDatabase(db))
chainConfig = gen.Config
} else {
db, _ := ethdb.NewMemDatabase()
@@ -159,9 +161,8 @@ func runCmd(ctx *cli.Context) error {
GasPrice: utils.GlobalBig(ctx, PriceFlag.Name),
Value: utils.GlobalBig(ctx, ValueFlag.Name),
EVMConfig: vm.Config{
- Tracer: tracer,
- Debug: ctx.GlobalBool(DebugFlag.Name) || ctx.GlobalBool(MachineFlag.Name),
- DisableGasMetering: ctx.GlobalBool(DisableGasMeteringFlag.Name),
+ Tracer: tracer,
+ Debug: ctx.GlobalBool(DebugFlag.Name) || ctx.GlobalBool(MachineFlag.Name),
},
}
diff --git a/cmd/faucet/faucet.go b/cmd/faucet/faucet.go
index 99527f9d1..5bad09bbd 100644
--- a/cmd/faucet/faucet.go
+++ b/cmd/faucet/faucet.go
@@ -533,9 +533,11 @@ func (f *faucet) loop() {
}
defer sub.Unsubscribe()
- for {
- select {
- case head := <-heads:
+ // Start a goroutine to update the state from head notifications in the background
+ update := make(chan *types.Header)
+
+ go func() {
+ for head := range update {
// New chain head arrived, query the current stats and stream to clients
var (
balance *big.Int
@@ -588,6 +590,17 @@ func (f *faucet) loop() {
}
}
f.lock.RUnlock()
+ }
+ }()
+ // Wait for various events and assign them to the appropriate background threads
+ for {
+ select {
+ case head := <-heads:
+ // New head arrived, send it for a state update if there's none running
+ select {
+ case update <- head:
+ default:
+ }
case <-f.update:
// Pending requests updated, stream to clients
@@ -686,8 +699,6 @@ func authTwitter(url string) (string, string, common.Address, error) {
if len(parts) < 4 || parts[len(parts)-2] != "status" {
return "", "", common.Address{}, errors.New("Invalid Twitter status URL")
}
- username := parts[len(parts)-3]
-
// Twitter's API isn't really friendly with direct links. Still, we don't
// want to ask for read permissions from users, so just load the public posts and
// scrape it for the Ethereum address and profile URL.
@@ -697,6 +708,13 @@ func authTwitter(url string) (string, string, common.Address, error) {
}
defer res.Body.Close()
+ // Resolve the username from the final redirect, no intermediate junk
+ parts = strings.Split(res.Request.URL.String(), "/")
+ if len(parts) < 4 || parts[len(parts)-2] != "status" {
+ return "", "", common.Address{}, errors.New("Invalid Twitter status URL")
+ }
+ username := parts[len(parts)-3]
+
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return "", "", common.Address{}, err
diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go
index 4a9a7b11b..c9ab72b6d 100644
--- a/cmd/geth/chaincmd.go
+++ b/cmd/geth/chaincmd.go
@@ -67,6 +67,9 @@ It expects the genesis file as argument.`,
utils.DataDirFlag,
utils.CacheFlag,
utils.LightModeFlag,
+ utils.GCModeFlag,
+ utils.CacheDatabaseFlag,
+ utils.CacheGCFlag,
},
Category: "BLOCKCHAIN COMMANDS",
Description: `
@@ -202,7 +205,7 @@ func importChain(ctx *cli.Context) error {
if len(ctx.Args()) == 1 {
if err := utils.ImportChain(chain, ctx.Args().First()); err != nil {
- utils.Fatalf("Import error: %v", err)
+ log.Error("Import error", "err", err)
}
} else {
for _, arg := range ctx.Args() {
@@ -211,7 +214,7 @@ func importChain(ctx *cli.Context) error {
}
}
}
-
+ chain.Stop()
fmt.Printf("Import done in %v.\n\n", time.Since(start))
// Output pre-compaction stats mostly to see the import thrashing
@@ -222,6 +225,13 @@ func importChain(ctx *cli.Context) error {
utils.Fatalf("Failed to read database stats: %v", err)
}
fmt.Println(stats)
+
+ ioStats, err := db.LDB().GetProperty("leveldb.iostats")
+ if err != nil {
+ utils.Fatalf("Failed to read database iostats: %v", err)
+ }
+ fmt.Println(ioStats)
+
fmt.Printf("Trie cache misses: %d\n", trie.CacheMisses())
fmt.Printf("Trie cache unloads: %d\n\n", trie.CacheUnloads())
@@ -252,6 +262,12 @@ func importChain(ctx *cli.Context) error {
}
fmt.Println(stats)
+ ioStats, err = db.LDB().GetProperty("leveldb.iostats")
+ if err != nil {
+ utils.Fatalf("Failed to read database iostats: %v", err)
+ }
+ fmt.Println(ioStats)
+
return nil
}
diff --git a/cmd/geth/config.go b/cmd/geth/config.go
index 9c703758e..50e4de2e7 100644
--- a/cmd/geth/config.go
+++ b/cmd/geth/config.go
@@ -18,7 +18,6 @@ package main
import (
"bufio"
- "encoding/hex"
"errors"
"fmt"
"io"
@@ -29,7 +28,6 @@ import (
cli "gopkg.in/urfave/cli.v1"
"github.com/ethereum/go-ethereum/cmd/utils"
- "github.com/ethereum/go-ethereum/contracts/release"
"github.com/ethereum/go-ethereum/dashboard"
"github.com/ethereum/go-ethereum/eth"
"github.com/ethereum/go-ethereum/node"
@@ -177,21 +175,6 @@ func makeFullNode(ctx *cli.Context) *node.Node {
if cfg.Ethstats.URL != "" {
utils.RegisterEthStatsService(stack, cfg.Ethstats.URL)
}
-
- // Add the release oracle service so it boots along with node.
- if err := stack.Register(func(ctx *node.ServiceContext) (node.Service, error) {
- config := release.Config{
- Oracle: relOracle,
- Major: uint32(params.VersionMajor),
- Minor: uint32(params.VersionMinor),
- Patch: uint32(params.VersionPatch),
- }
- commit, _ := hex.DecodeString(gitCommit)
- copy(config.Commit[:], commit)
- return release.NewReleaseService(ctx, config)
- }); err != nil {
- utils.Fatalf("Failed to register the Geth release oracle service: %v", err)
- }
return stack
}
diff --git a/cmd/geth/consolecmd.go b/cmd/geth/consolecmd.go
index 9d5cc38a1..2500a969c 100644
--- a/cmd/geth/consolecmd.go
+++ b/cmd/geth/consolecmd.go
@@ -22,6 +22,7 @@ import (
"os/signal"
"path/filepath"
"strings"
+ "syscall"
"github.com/ethereum/go-ethereum/cmd/utils"
"github.com/ethereum/go-ethereum/console"
@@ -42,7 +43,7 @@ var (
Description: `
The Geth console is an interactive shell for the JavaScript runtime environment
which exposes a node admin interface as well as the Ðapp JavaScript API.
-See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Console.`,
+See https://github.com/ethereum/go-ethereum/wiki/JavaScript-Console.`,
}
attachCommand = cli.Command{
@@ -55,7 +56,7 @@ See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Console.`,
Description: `
The Geth console is an interactive shell for the JavaScript runtime environment
which exposes a node admin interface as well as the Ðapp JavaScript API.
-See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Console.
+See https://github.com/ethereum/go-ethereum/wiki/JavaScript-Console.
This command allows you to open a console on a running geth node.`,
}
@@ -68,7 +69,7 @@ This command allows to open a console on a running geth node.`,
Category: "CONSOLE COMMANDS",
Description: `
The JavaScript VM exposes a node admin interface as well as the Ðapp
-JavaScript API. See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Console`,
+JavaScript API. See https://github.com/ethereum/go-ethereum/wiki/JavaScript-Console`,
}
)
@@ -207,7 +208,7 @@ func ephemeralConsole(ctx *cli.Context) error {
}
// Wait for pending callbacks, but stop for Ctrl-C.
abort := make(chan os.Signal, 1)
- signal.Notify(abort, os.Interrupt)
+ signal.Notify(abort, syscall.SIGINT, syscall.SIGTERM)
go func() {
<-abort
diff --git a/cmd/geth/main.go b/cmd/geth/main.go
index b955bd243..f5a3fa941 100644
--- a/cmd/geth/main.go
+++ b/cmd/geth/main.go
@@ -65,7 +65,6 @@ var (
utils.DashboardAddrFlag,
utils.DashboardPortFlag,
utils.DashboardRefreshFlag,
- utils.DashboardAssetsFlag,
utils.EthashCacheDirFlag,
utils.EthashCachesInMemoryFlag,
utils.EthashCachesOnDiskFlag,
@@ -85,10 +84,13 @@ var (
utils.FastSyncFlag,
utils.LightModeFlag,
utils.SyncModeFlag,
+ utils.GCModeFlag,
utils.LightServFlag,
utils.LightPeersFlag,
utils.LightKDFFlag,
utils.CacheFlag,
+ utils.CacheDatabaseFlag,
+ utils.CacheGCFlag,
utils.TrieCacheGenFlag,
utils.ListenPortFlag,
utils.MaxPeersFlag,
@@ -111,6 +113,7 @@ var (
utils.VMEnableDebugFlag,
utils.NetworkIdFlag,
utils.RPCCORSDomainFlag,
+ utils.RPCVirtualHostsFlag,
utils.EthStatsURLFlag,
utils.MetricsEnabledFlag,
utils.FakePoWFlag,
diff --git a/cmd/geth/usage.go b/cmd/geth/usage.go
index a834d5b7a..a1558c233 100644
--- a/cmd/geth/usage.go
+++ b/cmd/geth/usage.go
@@ -22,10 +22,11 @@ import (
"io"
"sort"
+ "strings"
+
"github.com/ethereum/go-ethereum/cmd/utils"
"github.com/ethereum/go-ethereum/internal/debug"
"gopkg.in/urfave/cli.v1"
- "strings"
)
// AppHelpTemplate is the text template for the default, global app help topic.
@@ -74,6 +75,7 @@ var AppHelpFlagGroups = []flagGroup{
utils.TestnetFlag,
utils.RinkebyFlag,
utils.SyncModeFlag,
+ utils.GCModeFlag,
utils.EthStatsURLFlag,
utils.IdentityFlag,
utils.LightServFlag,
@@ -127,6 +129,8 @@ var AppHelpFlagGroups = []flagGroup{
Name: "PERFORMANCE TUNING",
Flags: []cli.Flag{
utils.CacheFlag,
+ utils.CacheDatabaseFlag,
+ utils.CacheGCFlag,
utils.TrieCacheGenFlag,
},
},
@@ -152,6 +156,7 @@ var AppHelpFlagGroups = []flagGroup{
utils.IPCDisabledFlag,
utils.IPCPathFlag,
utils.RPCCORSDomainFlag,
+ utils.RPCVirtualHostsFlag,
utils.JSpathFlag,
utils.ExecFlag,
utils.PreloadJSFlag,
diff --git a/cmd/p2psim/main.go b/cmd/p2psim/main.go
index 56b74d135..0c8ed038d 100644
--- a/cmd/p2psim/main.go
+++ b/cmd/p2psim/main.go
@@ -1,3 +1,19 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of go-ethereum.
+//
+// go-ethereum is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// go-ethereum is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+
// p2psim provides a command-line client for a simulation HTTP API.
//
// Here is an example of creating a 2 node network with the first node
diff --git a/cmd/puppeth/genesis.go b/cmd/puppeth/genesis.go
index f747f4739..1974a94aa 100644
--- a/cmd/puppeth/genesis.go
+++ b/cmd/puppeth/genesis.go
@@ -168,19 +168,18 @@ type parityChainSpec struct {
Engine struct {
Ethash struct {
Params struct {
- MinimumDifficulty *hexutil.Big `json:"minimumDifficulty"`
- DifficultyBoundDivisor *hexutil.Big `json:"difficultyBoundDivisor"`
- GasLimitBoundDivisor hexutil.Uint64 `json:"gasLimitBoundDivisor"`
- DurationLimit *hexutil.Big `json:"durationLimit"`
- BlockReward *hexutil.Big `json:"blockReward"`
- HomesteadTransition uint64 `json:"homesteadTransition"`
- EIP150Transition uint64 `json:"eip150Transition"`
- EIP160Transition uint64 `json:"eip160Transition"`
- EIP161abcTransition uint64 `json:"eip161abcTransition"`
- EIP161dTransition uint64 `json:"eip161dTransition"`
- EIP649Reward *hexutil.Big `json:"eip649Reward"`
- EIP100bTransition uint64 `json:"eip100bTransition"`
- EIP649Transition uint64 `json:"eip649Transition"`
+ MinimumDifficulty *hexutil.Big `json:"minimumDifficulty"`
+ DifficultyBoundDivisor *hexutil.Big `json:"difficultyBoundDivisor"`
+ DurationLimit *hexutil.Big `json:"durationLimit"`
+ BlockReward *hexutil.Big `json:"blockReward"`
+ HomesteadTransition uint64 `json:"homesteadTransition"`
+ EIP150Transition uint64 `json:"eip150Transition"`
+ EIP160Transition uint64 `json:"eip160Transition"`
+ EIP161abcTransition uint64 `json:"eip161abcTransition"`
+ EIP161dTransition uint64 `json:"eip161dTransition"`
+ EIP649Reward *hexutil.Big `json:"eip649Reward"`
+ EIP100bTransition uint64 `json:"eip100bTransition"`
+ EIP649Transition uint64 `json:"eip649Transition"`
} `json:"params"`
} `json:"Ethash"`
} `json:"engine"`
@@ -188,6 +187,7 @@ type parityChainSpec struct {
Params struct {
MaximumExtraDataSize hexutil.Uint64 `json:"maximumExtraDataSize"`
MinGasLimit hexutil.Uint64 `json:"minGasLimit"`
+ GasLimitBoundDivisor hexutil.Uint64 `json:"gasLimitBoundDivisor"`
NetworkID hexutil.Uint64 `json:"networkID"`
MaxCodeSize uint64 `json:"maxCodeSize"`
EIP155Transition uint64 `json:"eip155Transition"`
@@ -270,7 +270,6 @@ func newParityChainSpec(network string, genesis *core.Genesis, bootnodes []strin
}
spec.Engine.Ethash.Params.MinimumDifficulty = (*hexutil.Big)(params.MinimumDifficulty)
spec.Engine.Ethash.Params.DifficultyBoundDivisor = (*hexutil.Big)(params.DifficultyBoundDivisor)
- spec.Engine.Ethash.Params.GasLimitBoundDivisor = (hexutil.Uint64)(params.GasLimitBoundDivisor)
spec.Engine.Ethash.Params.DurationLimit = (*hexutil.Big)(params.DurationLimit)
spec.Engine.Ethash.Params.BlockReward = (*hexutil.Big)(ethash.FrontierBlockReward)
spec.Engine.Ethash.Params.HomesteadTransition = genesis.Config.HomesteadBlock.Uint64()
@@ -284,6 +283,7 @@ func newParityChainSpec(network string, genesis *core.Genesis, bootnodes []strin
spec.Params.MaximumExtraDataSize = (hexutil.Uint64)(params.MaximumExtraDataSize)
spec.Params.MinGasLimit = (hexutil.Uint64)(params.MinGasLimit)
+ spec.Params.GasLimitBoundDivisor = (hexutil.Uint64)(params.GasLimitBoundDivisor)
spec.Params.NetworkID = (hexutil.Uint64)(genesis.Config.ChainId.Uint64())
spec.Params.MaxCodeSize = params.MaxCodeSize
spec.Params.EIP155Transition = genesis.Config.EIP155Block.Uint64()
diff --git a/cmd/puppeth/module_dashboard.go b/cmd/puppeth/module_dashboard.go
index 1092c4c88..3832b247f 100644
--- a/cmd/puppeth/module_dashboard.go
+++ b/cmd/puppeth/module_dashboard.go
@@ -117,7 +117,7 @@ var dashboardContent = `
<br/>
<p>To run an archive node, download <a href="/{{.GethGenesis}}"><code>{{.GethGenesis}}</code></a> and start Geth with:
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
- <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=1024 --syncmode=full{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFullFlat}}</pre>
+ <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=1024 --syncmode=full{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFlat}}</pre>
</p>
<br/>
<p>You can download Geth from <a href="https://geth.ethereum.org/downloads/" target="about:blank">https://geth.ethereum.org/downloads/</a>.</p>
@@ -136,7 +136,7 @@ var dashboardContent = `
<br/>
<p>To run a full node, download <a href="/{{.GethGenesis}}"><code>{{.GethGenesis}}</code></a> and start Geth with:
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
- <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=512{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFullFlat}}</pre>
+ <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=512{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFlat}}</pre>
</p>
<br/>
<p>You can download Geth from <a href="https://geth.ethereum.org/downloads/" target="about:blank">https://geth.ethereum.org/downloads/</a>.</p>
@@ -158,7 +158,7 @@ var dashboardContent = `
<br/>
<p>To run a light node, download <a href="/{{.GethGenesis}}"><code>{{.GethGenesis}}</code></a> and start Geth with:
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
- <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --syncmode=light{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesLightFlat}}</pre>
+ <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --syncmode=light{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFlat}}</pre>
</p>
<br/>
<p>You can download Geth from <a href="https://geth.ethereum.org/downloads/" target="about:blank">https://geth.ethereum.org/downloads/</a>.</p>
@@ -177,7 +177,7 @@ var dashboardContent = `
<br/>
<p>To run an embedded node, download <a href="/{{.GethGenesis}}"><code>{{.GethGenesis}}</code></a> and start Geth with:
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
- <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=16 --ethash.cachesinmem=1 --syncmode=light{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesLightFlat}}</pre>
+ <pre>geth --networkid={{.NetworkID}} --datadir=$HOME/.{{.Network}} --cache=16 --ethash.cachesinmem=1 --syncmode=light{{if .Ethstats}} --ethstats='{{.Ethstats}}'{{end}} --bootnodes={{.BootnodesFlat}}</pre>
</p>
<br/>
<p>You can download Geth from <a href="https://geth.ethereum.org/downloads/" target="about:blank">https://geth.ethereum.org/downloads/</a>.</p>
@@ -208,7 +208,7 @@ var dashboardContent = `
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
</p>
<p>With your local chain initialized, you can start the Ethereum Wallet:
- <pre>ethereumwallet --rpc $HOME/.{{.Network}}/geth.ipc --node-networkid={{.NetworkID}} --node-datadir=$HOME/.{{.Network}}{{if .Ethstats}} --node-ethstats='{{.Ethstats}}'{{end}} --node-bootnodes={{.BootnodesFullFlat}}</pre>
+ <pre>ethereumwallet --rpc $HOME/.{{.Network}}/geth.ipc --node-networkid={{.NetworkID}} --node-datadir=$HOME/.{{.Network}}{{if .Ethstats}} --node-ethstats='{{.Ethstats}}'{{end}} --node-bootnodes={{.BootnodesFlat}}</pre>
<p>
<br/>
<p>You can download the Ethereum Wallet from <a href="https://github.com/ethereum/mist/releases" target="about:blank">https://github.com/ethereum/mist/releases</a>.</p>
@@ -229,7 +229,7 @@ var dashboardContent = `
<pre>geth --datadir=$HOME/.{{.Network}} init {{.GethGenesis}}</pre>
</p>
<p>With your local chain initialized, you can start Mist:
- <pre>mist --rpc $HOME/.{{.Network}}/geth.ipc --node-networkid={{.NetworkID}} --node-datadir=$HOME/.{{.Network}}{{if .Ethstats}} --node-ethstats='{{.Ethstats}}'{{end}} --node-bootnodes={{.BootnodesFullFlat}}</pre>
+ <pre>mist --rpc $HOME/.{{.Network}}/geth.ipc --node-networkid={{.NetworkID}} --node-datadir=$HOME/.{{.Network}}{{if .Ethstats}} --node-ethstats='{{.Ethstats}}'{{end}} --node-bootnodes={{.BootnodesFlat}}</pre>
<p>
<br/>
<p>You can download the Mist browser from <a href="https://github.com/ethereum/mist/releases" target="about:blank">https://github.com/ethereum/mist/releases</a>.</p>
@@ -261,7 +261,7 @@ var dashboardContent = `
<p>Inside your Java code you can now import the geth archive and connect to Ethereum:
<pre>import org.ethereum.geth.*;</pre>
<pre>
-Enodes bootnodes = new Enodes();{{range .BootnodesLight}}
+Enodes bootnodes = new Enodes();{{range .Bootnodes}}
bootnodes.append(new Enode("{{.}}"));{{end}}
NodeConfig config = new NodeConfig();
@@ -294,7 +294,7 @@ node.start();
<pre>
var error: NSError?
-let bootnodes = GethNewEnodesEmpty(){{range .BootnodesLight}}
+let bootnodes = GethNewEnodesEmpty(){{range .Bootnodes}}
bootnodes?.append(GethNewEnode("{{.}}", &error)){{end}}
let config = GethNewNodeConfig()
@@ -595,44 +595,43 @@ func deployDashboard(client *sshClient, network string, conf *config, config *da
statsLogin = ""
}
indexfile := new(bytes.Buffer)
- bootCpp := make([]string, len(conf.bootFull))
- for i, boot := range conf.bootFull {
+ bootCpp := make([]string, len(conf.bootnodes))
+ for i, boot := range conf.bootnodes {
bootCpp[i] = "required:" + strings.TrimPrefix(boot, "enode://")
}
- bootHarmony := make([]string, len(conf.bootFull))
- for i, boot := range conf.bootFull {
+ bootHarmony := make([]string, len(conf.bootnodes))
+ for i, boot := range conf.bootnodes {
bootHarmony[i] = fmt.Sprintf("-Dpeer.active.%d.url=%s", i, boot)
}
- bootPython := make([]string, len(conf.bootFull))
- for i, boot := range conf.bootFull {
+ bootPython := make([]string, len(conf.bootnodes))
+ for i, boot := range conf.bootnodes {
bootPython[i] = "'" + boot + "'"
}
template.Must(template.New("").Parse(dashboardContent)).Execute(indexfile, map[string]interface{}{
- "Network": network,
- "NetworkID": conf.Genesis.Config.ChainId,
- "NetworkTitle": strings.Title(network),
- "EthstatsPage": config.ethstats,
- "ExplorerPage": config.explorer,
- "WalletPage": config.wallet,
- "FaucetPage": config.faucet,
- "GethGenesis": network + ".json",
- "BootnodesFull": conf.bootFull,
- "BootnodesLight": conf.bootLight,
- "BootnodesFullFlat": strings.Join(conf.bootFull, ","),
- "BootnodesLightFlat": strings.Join(conf.bootLight, ","),
- "Ethstats": statsLogin,
- "Ethash": conf.Genesis.Config.Ethash != nil,
- "CppGenesis": network + "-cpp.json",
- "CppBootnodes": strings.Join(bootCpp, " "),
- "HarmonyGenesis": network + "-harmony.json",
- "HarmonyBootnodes": strings.Join(bootHarmony, " "),
- "ParityGenesis": network + "-parity.json",
- "PythonGenesis": network + "-python.json",
- "PythonBootnodes": strings.Join(bootPython, ","),
- "Homestead": conf.Genesis.Config.HomesteadBlock,
- "Tangerine": conf.Genesis.Config.EIP150Block,
- "Spurious": conf.Genesis.Config.EIP155Block,
- "Byzantium": conf.Genesis.Config.ByzantiumBlock,
+ "Network": network,
+ "NetworkID": conf.Genesis.Config.ChainId,
+ "NetworkTitle": strings.Title(network),
+ "EthstatsPage": config.ethstats,
+ "ExplorerPage": config.explorer,
+ "WalletPage": config.wallet,
+ "FaucetPage": config.faucet,
+ "GethGenesis": network + ".json",
+ "Bootnodes": conf.bootnodes,
+ "BootnodesFlat": strings.Join(conf.bootnodes, ","),
+ "Ethstats": statsLogin,
+ "Ethash": conf.Genesis.Config.Ethash != nil,
+ "CppGenesis": network + "-cpp.json",
+ "CppBootnodes": strings.Join(bootCpp, " "),
+ "HarmonyGenesis": network + "-harmony.json",
+ "HarmonyBootnodes": strings.Join(bootHarmony, " "),
+ "ParityGenesis": network + "-parity.json",
+ "PythonGenesis": network + "-python.json",
+ "PythonBootnodes": strings.Join(bootPython, ","),
+ "Homestead": conf.Genesis.Config.HomesteadBlock,
+ "Tangerine": conf.Genesis.Config.EIP150Block,
+ "Spurious": conf.Genesis.Config.EIP155Block,
+ "Byzantium": conf.Genesis.Config.ByzantiumBlock,
+ "Constantinople": conf.Genesis.Config.ConstantinopleBlock,
})
files[filepath.Join(workdir, "index.html")] = indexfile.Bytes()
@@ -651,7 +650,7 @@ func deployDashboard(client *sshClient, network string, conf *config, config *da
harmonySpecJSON, _ := conf.Genesis.MarshalJSON()
files[filepath.Join(workdir, network+"-harmony.json")] = harmonySpecJSON
- paritySpec, err := newParityChainSpec(network, conf.Genesis, conf.bootFull)
+ paritySpec, err := newParityChainSpec(network, conf.Genesis, conf.bootnodes)
if err != nil {
return nil, err
}
diff --git a/cmd/puppeth/module_faucet.go b/cmd/puppeth/module_faucet.go
index 92b4cb286..976bf04d0 100644
--- a/cmd/puppeth/module_faucet.go
+++ b/cmd/puppeth/module_faucet.go
@@ -93,7 +93,7 @@ func deployFaucet(client *sshClient, network string, bootnodes []string, config
"NetworkID": config.node.network,
"Bootnodes": strings.Join(bootnodes, ","),
"Ethstats": config.node.ethstats,
- "EthPort": config.node.portFull,
+ "EthPort": config.node.port,
"CaptchaToken": config.captchaToken,
"CaptchaSecret": config.captchaSecret,
"FaucetName": strings.Title(network),
@@ -110,7 +110,7 @@ func deployFaucet(client *sshClient, network string, bootnodes []string, config
"Datadir": config.node.datadir,
"VHost": config.host,
"ApiPort": config.port,
- "EthPort": config.node.portFull,
+ "EthPort": config.node.port,
"EthName": config.node.ethstats[:strings.Index(config.node.ethstats, ":")],
"CaptchaToken": config.captchaToken,
"CaptchaSecret": config.captchaSecret,
@@ -158,7 +158,7 @@ func (info *faucetInfos) Report() map[string]string {
report := map[string]string{
"Website address": info.host,
"Website listener port": strconv.Itoa(info.port),
- "Ethereum listener port": strconv.Itoa(info.node.portFull),
+ "Ethereum listener port": strconv.Itoa(info.node.port),
"Funding amount (base tier)": fmt.Sprintf("%d Ethers", info.amount),
"Funding cooldown (base tier)": fmt.Sprintf("%d mins", info.minutes),
"Funding tiers": strconv.Itoa(info.tiers),
@@ -228,7 +228,7 @@ func checkFaucet(client *sshClient, network string) (*faucetInfos, error) {
return &faucetInfos{
node: &nodeInfos{
datadir: infos.volumes["/root/.faucet"],
- portFull: infos.portmap[infos.envvars["ETH_PORT"]+"/tcp"],
+ port: infos.portmap[infos.envvars["ETH_PORT"]+"/tcp"],
ethstats: infos.envvars["ETH_NAME"],
keyJSON: keyJSON,
keyPass: keyPass,
diff --git a/cmd/puppeth/module_node.go b/cmd/puppeth/module_node.go
index 69cb19c34..2609fd976 100644
--- a/cmd/puppeth/module_node.go
+++ b/cmd/puppeth/module_node.go
@@ -42,7 +42,7 @@ ADD genesis.json /genesis.json
RUN \
echo 'geth --cache 512 init /genesis.json' > geth.sh && \{{if .Unlock}}
echo 'mkdir -p /root/.ethereum/keystore/ && cp /signer.json /root/.ethereum/keystore/' >> geth.sh && \{{end}}
- echo $'geth --networkid {{.NetworkID}} --cache 512 --port {{.Port}} --maxpeers {{.Peers}} {{.LightFlag}} --ethstats \'{{.Ethstats}}\' {{if .BootV4}}--bootnodesv4 {{.BootV4}}{{end}} {{if .BootV5}}--bootnodesv5 {{.BootV5}}{{end}} {{if .Etherbase}}--etherbase {{.Etherbase}} --mine --minerthreads 1{{end}} {{if .Unlock}}--unlock 0 --password /signer.pass --mine{{end}} --targetgaslimit {{.GasTarget}} --gasprice {{.GasPrice}}' >> geth.sh
+ echo $'geth --networkid {{.NetworkID}} --cache 512 --port {{.Port}} --maxpeers {{.Peers}} {{.LightFlag}} --ethstats \'{{.Ethstats}}\' {{if .Bootnodes}}--bootnodes {{.Bootnodes}}{{end}} {{if .Etherbase}}--etherbase {{.Etherbase}} --mine --minerthreads 1{{end}} {{if .Unlock}}--unlock 0 --password /signer.pass --mine{{end}} --targetgaslimit {{.GasTarget}} --gasprice {{.GasPrice}}' >> geth.sh
ENTRYPOINT ["/bin/sh", "geth.sh"]
`
@@ -56,15 +56,13 @@ services:
build: .
image: {{.Network}}/{{.Type}}
ports:
- - "{{.FullPort}}:{{.FullPort}}"
- - "{{.FullPort}}:{{.FullPort}}/udp"{{if .Light}}
- - "{{.LightPort}}:{{.LightPort}}/udp"{{end}}
+ - "{{.Port}}:{{.Port}}"
+ - "{{.Port}}:{{.Port}}/udp"
volumes:
- {{.Datadir}}:/root/.ethereum{{if .Ethashdir}}
- {{.Ethashdir}}:/root/.ethash{{end}}
environment:
- - FULL_PORT={{.FullPort}}/tcp
- - LIGHT_PORT={{.LightPort}}/udp
+ - PORT={{.Port}}/tcp
- TOTAL_PEERS={{.TotalPeers}}
- LIGHT_PEERS={{.LightPeers}}
- STATS_NAME={{.Ethstats}}
@@ -82,12 +80,11 @@ services:
// deployNode deploys a new Ethereum node container to a remote machine via SSH,
// docker and docker-compose. If an instance with the specified network name
// already exists there, it will be overwritten!
-func deployNode(client *sshClient, network string, bootv4, bootv5 []string, config *nodeInfos, nocache bool) ([]byte, error) {
+func deployNode(client *sshClient, network string, bootnodes []string, config *nodeInfos, nocache bool) ([]byte, error) {
kind := "sealnode"
if config.keyJSON == "" && config.etherbase == "" {
kind = "bootnode"
- bootv4 = make([]string, 0)
- bootv5 = make([]string, 0)
+ bootnodes = make([]string, 0)
}
// Generate the content to upload to the server
workdir := fmt.Sprintf("%d", rand.Int63())
@@ -100,11 +97,10 @@ func deployNode(client *sshClient, network string, bootv4, bootv5 []string, conf
dockerfile := new(bytes.Buffer)
template.Must(template.New("").Parse(nodeDockerfile)).Execute(dockerfile, map[string]interface{}{
"NetworkID": config.network,
- "Port": config.portFull,
+ "Port": config.port,
"Peers": config.peersTotal,
"LightFlag": lightFlag,
- "BootV4": strings.Join(bootv4, ","),
- "BootV5": strings.Join(bootv5, ","),
+ "Bootnodes": strings.Join(bootnodes, ","),
"Ethstats": config.ethstats,
"Etherbase": config.etherbase,
"GasTarget": uint64(1000000 * config.gasTarget),
@@ -119,10 +115,9 @@ func deployNode(client *sshClient, network string, bootv4, bootv5 []string, conf
"Datadir": config.datadir,
"Ethashdir": config.ethashdir,
"Network": network,
- "FullPort": config.portFull,
+ "Port": config.port,
"TotalPeers": config.peersTotal,
"Light": config.peersLight > 0,
- "LightPort": config.portFull + 1,
"LightPeers": config.peersLight,
"Ethstats": config.ethstats[:strings.Index(config.ethstats, ":")],
"Etherbase": config.etherbase,
@@ -157,10 +152,8 @@ type nodeInfos struct {
datadir string
ethashdir string
ethstats string
- portFull int
- portLight int
- enodeFull string
- enodeLight string
+ port int
+ enode string
peersTotal int
peersLight int
etherbase string
@@ -174,15 +167,11 @@ type nodeInfos struct {
// most - but not all - fields for reporting to the user.
func (info *nodeInfos) Report() map[string]string {
report := map[string]string{
- "Data directory": info.datadir,
- "Listener port (full nodes)": strconv.Itoa(info.portFull),
- "Peer count (all total)": strconv.Itoa(info.peersTotal),
- "Peer count (light nodes)": strconv.Itoa(info.peersLight),
- "Ethstats username": info.ethstats,
- }
- if info.peersLight > 0 {
- // Light server enabled
- report["Listener port (light nodes)"] = strconv.Itoa(info.portLight)
+ "Data directory": info.datadir,
+ "Listener port": strconv.Itoa(info.port),
+ "Peer count (all total)": strconv.Itoa(info.peersTotal),
+ "Peer count (light nodes)": strconv.Itoa(info.peersLight),
+ "Ethstats username": info.ethstats,
}
if info.gasTarget > 0 {
// Miner or signer node
@@ -250,7 +239,7 @@ func checkNode(client *sshClient, network string, boot bool) (*nodeInfos, error)
keyPass = string(bytes.TrimSpace(out))
}
// Run a sanity check to see if the devp2p is reachable
- port := infos.portmap[infos.envvars["FULL_PORT"]]
+ port := infos.portmap[infos.envvars["PORT"]]
if err = checkPort(client.server, port); err != nil {
log.Warn(fmt.Sprintf("%s devp2p port seems unreachable", strings.Title(kind)), "server", client.server, "port", port, "err", err)
}
@@ -259,8 +248,7 @@ func checkNode(client *sshClient, network string, boot bool) (*nodeInfos, error)
genesis: genesis,
datadir: infos.volumes["/root/.ethereum"],
ethashdir: infos.volumes["/root/.ethash"],
- portFull: infos.portmap[infos.envvars["FULL_PORT"]],
- portLight: infos.portmap[infos.envvars["LIGHT_PORT"]],
+ port: port,
peersTotal: totalPeers,
peersLight: lightPeers,
ethstats: infos.envvars["STATS_NAME"],
@@ -270,9 +258,7 @@ func checkNode(client *sshClient, network string, boot bool) (*nodeInfos, error)
gasTarget: gasTarget,
gasPrice: gasPrice,
}
- stats.enodeFull = fmt.Sprintf("enode://%s@%s:%d", id, client.address, stats.portFull)
- if stats.portLight != 0 {
- stats.enodeLight = fmt.Sprintf("enode://%s@%s:%d?discport=%d", id, client.address, stats.portFull, stats.portLight)
- }
+ stats.enode = fmt.Sprintf("enode://%s@%s:%d", id, client.address, stats.port)
+
return stats, nil
}
diff --git a/cmd/puppeth/module_wallet.go b/cmd/puppeth/module_wallet.go
index 67f47c70e..5e5032bed 100644
--- a/cmd/puppeth/module_wallet.go
+++ b/cmd/puppeth/module_wallet.go
@@ -37,7 +37,7 @@ ADD genesis.json /genesis.json
RUN \
echo 'node server.js &' > wallet.sh && \
echo 'geth --cache 512 init /genesis.json' >> wallet.sh && \
- echo $'geth --networkid {{.NetworkID}} --port {{.NodePort}} --bootnodes {{.Bootnodes}} --ethstats \'{{.Ethstats}}\' --cache=512 --rpc --rpcaddr=0.0.0.0 --rpccorsdomain "*"' >> wallet.sh
+ echo $'geth --networkid {{.NetworkID}} --port {{.NodePort}} --bootnodes {{.Bootnodes}} --ethstats \'{{.Ethstats}}\' --cache=512 --rpc --rpcaddr=0.0.0.0 --rpccorsdomain "*" --rpcvhosts "*"' >> wallet.sh
RUN \
sed -i 's/PuppethNetworkID/{{.NetworkID}}/g' dist/js/etherwallet-master.js && \
diff --git a/cmd/puppeth/wizard.go b/cmd/puppeth/wizard.go
index 2e2b4644c..b88a61de7 100644
--- a/cmd/puppeth/wizard.go
+++ b/cmd/puppeth/wizard.go
@@ -40,8 +40,7 @@ import (
// between sessions.
type config struct {
path string // File containing the configuration values
- bootFull []string // Bootnodes to always connect to by full nodes
- bootLight []string // Bootnodes to always connect to by light nodes
+ bootnodes []string // Bootnodes to always connect to by all nodes
ethstats string // Ethstats settings to cache for node deploys
Genesis *core.Genesis `json:"genesis,omitempty"` // Genesis block to cache for node deploys
diff --git a/cmd/puppeth/wizard_explorer.go b/cmd/puppeth/wizard_explorer.go
index 10ef72f78..413511c1c 100644
--- a/cmd/puppeth/wizard_explorer.go
+++ b/cmd/puppeth/wizard_explorer.go
@@ -55,7 +55,7 @@ func (w *wizard) deployExplorer() {
}
existed := err == nil
- chainspec, err := newParityChainSpec(w.network, w.conf.Genesis, w.conf.bootFull)
+ chainspec, err := newParityChainSpec(w.network, w.conf.Genesis, w.conf.bootnodes)
if err != nil {
log.Error("Failed to create chain spec for explorer", "err", err)
return
diff --git a/cmd/puppeth/wizard_faucet.go b/cmd/puppeth/wizard_faucet.go
index 191575b16..9a429bc96 100644
--- a/cmd/puppeth/wizard_faucet.go
+++ b/cmd/puppeth/wizard_faucet.go
@@ -38,7 +38,7 @@ func (w *wizard) deployFaucet() {
infos, err := checkFaucet(client, w.network)
if err != nil {
infos = &faucetInfos{
- node: &nodeInfos{portFull: 30303, peersTotal: 25},
+ node: &nodeInfos{port: 30303, peersTotal: 25},
port: 80,
host: client.server,
amount: 1,
@@ -113,8 +113,8 @@ func (w *wizard) deployFaucet() {
}
// Figure out which port to listen on
fmt.Println()
- fmt.Printf("Which TCP/UDP port should the light client listen on? (default = %d)\n", infos.node.portFull)
- infos.node.portFull = w.readDefaultInt(infos.node.portFull)
+ fmt.Printf("Which TCP/UDP port should the light client listen on? (default = %d)\n", infos.node.port)
+ infos.node.port = w.readDefaultInt(infos.node.port)
// Set a proper name to report on the stats page
fmt.Println()
@@ -168,7 +168,7 @@ func (w *wizard) deployFaucet() {
fmt.Printf("Should the faucet be built from scratch (y/n)? (default = no)\n")
nocache = w.readDefaultString("n") != "n"
}
- if out, err := deployFaucet(client, w.network, w.conf.bootLight, infos, nocache); err != nil {
+ if out, err := deployFaucet(client, w.network, w.conf.bootnodes, infos, nocache); err != nil {
log.Error("Failed to deploy faucet container", "err", err)
if len(out) > 0 {
fmt.Printf("%s\n", out)
diff --git a/cmd/puppeth/wizard_intro.go b/cmd/puppeth/wizard_intro.go
index 84998afc9..60aa0f7ff 100644
--- a/cmd/puppeth/wizard_intro.go
+++ b/cmd/puppeth/wizard_intro.go
@@ -59,15 +59,16 @@ func (w *wizard) run() {
fmt.Println()
// Make sure we have a good network name to work with fmt.Println()
+ // Docker accepts hyphens in image names, but doesn't like it for container names
if w.network == "" {
- fmt.Println("Please specify a network name to administer (no spaces, please)")
+ fmt.Println("Please specify a network name to administer (no spaces or hyphens, please)")
for {
w.network = w.readString()
- if !strings.Contains(w.network, " ") {
+ if !strings.Contains(w.network, " ") && !strings.Contains(w.network, "-") {
fmt.Printf("\nSweet, you can set this via --network=%s next time!\n\n", w.network)
break
}
- log.Error("I also like to live dangerously, still no spaces")
+ log.Error("I also like to live dangerously, still no spaces or hyphens")
}
}
log.Info("Administering Ethereum network", "name", w.network)
diff --git a/cmd/puppeth/wizard_netstats.go b/cmd/puppeth/wizard_netstats.go
index e19180bb1..90bf7ae3c 100644
--- a/cmd/puppeth/wizard_netstats.go
+++ b/cmd/puppeth/wizard_netstats.go
@@ -37,8 +37,7 @@ func (w *wizard) networkStats() {
}
// Clear out some previous configs to refill from current scan
w.conf.ethstats = ""
- w.conf.bootFull = w.conf.bootFull[:0]
- w.conf.bootLight = w.conf.bootLight[:0]
+ w.conf.bootnodes = w.conf.bootnodes[:0]
// Iterate over all the specified hosts and check their status
var pend sync.WaitGroup
@@ -76,8 +75,7 @@ func (w *wizard) gatherStats(server string, pubkey []byte, client *sshClient) *s
var (
genesis string
ethstats string
- bootFull []string
- bootLight []string
+ bootnodes []string
)
// Ensure a valid SSH connection to the remote server
logger := log.New("server", server)
@@ -123,10 +121,7 @@ func (w *wizard) gatherStats(server string, pubkey []byte, client *sshClient) *s
stat.services["bootnode"] = infos.Report()
genesis = string(infos.genesis)
- bootFull = append(bootFull, infos.enodeFull)
- if infos.enodeLight != "" {
- bootLight = append(bootLight, infos.enodeLight)
- }
+ bootnodes = append(bootnodes, infos.enode)
}
logger.Debug("Checking for sealnode availability")
if infos, err := checkNode(client, w.network, false); err != nil {
@@ -184,8 +179,7 @@ func (w *wizard) gatherStats(server string, pubkey []byte, client *sshClient) *s
if ethstats != "" {
w.conf.ethstats = ethstats
}
- w.conf.bootFull = append(w.conf.bootFull, bootFull...)
- w.conf.bootLight = append(w.conf.bootLight, bootLight...)
+ w.conf.bootnodes = append(w.conf.bootnodes, bootnodes...)
return stat
}
diff --git a/cmd/puppeth/wizard_node.go b/cmd/puppeth/wizard_node.go
index 097e2e41a..a60948bc6 100644
--- a/cmd/puppeth/wizard_node.go
+++ b/cmd/puppeth/wizard_node.go
@@ -48,9 +48,9 @@ func (w *wizard) deployNode(boot bool) {
infos, err := checkNode(client, w.network, boot)
if err != nil {
if boot {
- infos = &nodeInfos{portFull: 30303, peersTotal: 512, peersLight: 256}
+ infos = &nodeInfos{port: 30303, peersTotal: 512, peersLight: 256}
} else {
- infos = &nodeInfos{portFull: 30303, peersTotal: 50, peersLight: 0, gasTarget: 4.7, gasPrice: 18}
+ infos = &nodeInfos{port: 30303, peersTotal: 50, peersLight: 0, gasTarget: 4.7, gasPrice: 18}
}
}
existed := err == nil
@@ -79,8 +79,8 @@ func (w *wizard) deployNode(boot bool) {
}
// Figure out which port to listen on
fmt.Println()
- fmt.Printf("Which TCP/UDP port to listen on? (default = %d)\n", infos.portFull)
- infos.portFull = w.readDefaultInt(infos.portFull)
+ fmt.Printf("Which TCP/UDP port to listen on? (default = %d)\n", infos.port)
+ infos.port = w.readDefaultInt(infos.port)
// Figure out how many peers to allow (different based on node type)
fmt.Println()
@@ -163,7 +163,7 @@ func (w *wizard) deployNode(boot bool) {
fmt.Printf("Should the node be built from scratch (y/n)? (default = no)\n")
nocache = w.readDefaultString("n") != "n"
}
- if out, err := deployNode(client, w.network, w.conf.bootFull, w.conf.bootLight, infos, nocache); err != nil {
+ if out, err := deployNode(client, w.network, w.conf.bootnodes, infos, nocache); err != nil {
log.Error("Failed to deploy Ethereum node container", "err", err)
if len(out) > 0 {
fmt.Printf("%s\n", out)
diff --git a/cmd/puppeth/wizard_wallet.go b/cmd/puppeth/wizard_wallet.go
index 7c3896a17..933cd9ae5 100644
--- a/cmd/puppeth/wizard_wallet.go
+++ b/cmd/puppeth/wizard_wallet.go
@@ -98,7 +98,7 @@ func (w *wizard) deployWallet() {
fmt.Printf("Should the wallet be built from scratch (y/n)? (default = no)\n")
nocache = w.readDefaultString("n") != "n"
}
- if out, err := deployWallet(client, w.network, w.conf.bootFull, infos, nocache); err != nil {
+ if out, err := deployWallet(client, w.network, w.conf.bootnodes, infos, nocache); err != nil {
log.Error("Failed to deploy wallet container", "err", err)
if len(out) > 0 {
fmt.Printf("%s\n", out)
diff --git a/cmd/swarm/config.go b/cmd/swarm/config.go
index 29b5faefa..adac772ba 100644
--- a/cmd/swarm/config.go
+++ b/cmd/swarm/config.go
@@ -23,6 +23,7 @@ import (
"os"
"reflect"
"strconv"
+ "strings"
"unicode"
cli "gopkg.in/urfave/cli.v1"
@@ -97,10 +98,15 @@ func buildConfig(ctx *cli.Context) (config *bzzapi.Config, err error) {
config = bzzapi.NewDefaultConfig()
//first load settings from config file (if provided)
config, err = configFileOverride(config, ctx)
+ if err != nil {
+ return nil, err
+ }
//override settings provided by environment variables
config = envVarsOverride(config)
//override settings provided by command line
config = cmdLineOverride(config, ctx)
+ //validate configuration parameters
+ err = validateConfig(config)
return
}
@@ -194,12 +200,16 @@ func cmdLineOverride(currentConfig *bzzapi.Config, ctx *cli.Context) *bzzapi.Con
utils.Fatalf(SWARM_ERR_SWAP_SET_NO_API)
}
- //EnsApi can be set to "", so can't check for empty string, as it is allowed!
if ctx.GlobalIsSet(EnsAPIFlag.Name) {
- currentConfig.EnsApi = ctx.GlobalString(EnsAPIFlag.Name)
+ ensAPIs := ctx.GlobalStringSlice(EnsAPIFlag.Name)
+ // preserve backward compatibility to disable ENS with --ens-api=""
+ if len(ensAPIs) == 1 && ensAPIs[0] == "" {
+ ensAPIs = nil
+ }
+ currentConfig.EnsAPIs = ensAPIs
}
- if ensaddr := ctx.GlobalString(EnsAddrFlag.Name); ensaddr != "" {
+ if ensaddr := ctx.GlobalString(DeprecatedEnsAddrFlag.Name); ensaddr != "" {
currentConfig.EnsRoot = common.HexToAddress(ensaddr)
}
@@ -266,9 +276,8 @@ func envVarsOverride(currentConfig *bzzapi.Config) (config *bzzapi.Config) {
utils.Fatalf(SWARM_ERR_SWAP_SET_NO_API)
}
- //EnsApi can be set to "", so can't check for empty string, as it is allowed
- if ensapi, exists := os.LookupEnv(SWARM_ENV_ENS_API); exists {
- currentConfig.EnsApi = ensapi
+ if ensapi := os.Getenv(SWARM_ENV_ENS_API); ensapi != "" {
+ currentConfig.EnsAPIs = strings.Split(ensapi, ",")
}
if ensaddr := os.Getenv(SWARM_ENV_ENS_ADDR); ensaddr != "" {
@@ -309,6 +318,43 @@ func checkDeprecated(ctx *cli.Context) {
if ctx.GlobalString(DeprecatedEthAPIFlag.Name) != "" {
utils.Fatalf("--ethapi is no longer a valid command line flag, please use --ens-api and/or --swap-api.")
}
+	// warn if the deprecated --ens-addr flag is set
+ if ctx.GlobalString(DeprecatedEnsAddrFlag.Name) != "" {
+ log.Warn("--ens-addr is no longer a valid command line flag, please use --ens-api to specify contract address.")
+ }
+}
+
+//validate configuration parameters
+func validateConfig(cfg *bzzapi.Config) (err error) {
+ for _, ensAPI := range cfg.EnsAPIs {
+ if ensAPI != "" {
+ if err := validateEnsAPIs(ensAPI); err != nil {
+ return fmt.Errorf("invalid format [tld:][contract-addr@]url for ENS API endpoint configuration %q: %v", ensAPI, err)
+ }
+ }
+ }
+ return nil
+}
+
+//validate EnsAPIs configuration parameter
+func validateEnsAPIs(s string) (err error) {
+ // missing contract address
+ if strings.HasPrefix(s, "@") {
+ return errors.New("missing contract address")
+ }
+ // missing url
+ if strings.HasSuffix(s, "@") {
+ return errors.New("missing url")
+ }
+ // missing tld
+ if strings.HasPrefix(s, ":") {
+ return errors.New("missing tld")
+ }
+ // missing url
+ if strings.HasSuffix(s, ":") {
+ return errors.New("missing url")
+ }
+ return nil
}
//print a Config as string
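
For reference, a minimal sketch of how an endpoint in the [tld:][contract-addr@]url format accepted above could be split into its parts. The parseEnsAPI helper below is purely illustrative and is not part of this patch; the real parsing lives elsewhere in the swarm codebase and is not shown in this diff.

    package main

    import (
        "fmt"
        "strings"
    )

    // parseEnsAPI is a hypothetical helper, shown only to illustrate the
    // [tld:][contract-addr@]url format that validateEnsAPIs checks above.
    func parseEnsAPI(endpoint string) (tld, contract, url string) {
        // Optional "tld:" prefix, as long as the colon does not start a URL scheme.
        if i := strings.Index(endpoint, ":"); i > 0 && !strings.HasPrefix(endpoint[i:], "://") {
            tld, endpoint = endpoint[:i], endpoint[i+1:]
        }
        // Optional "contract-addr@" prefix before the URL or IPC path.
        if i := strings.Index(endpoint, "@"); i > 0 {
            contract, endpoint = endpoint[:i], endpoint[i+1:]
        }
        return tld, contract, endpoint
    }

    func main() {
        fmt.Println(parseEnsAPI("eth:314159265dD8dbb310642f98f50C066173C1259b@ws://127.0.0.1:1234"))
        // Output: eth 314159265dD8dbb310642f98f50C066173C1259b ws://127.0.0.1:1234
    }
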
diff --git a/cmd/swarm/config_test.go b/cmd/swarm/config_test.go
index 166980d14..9bf584f50 100644
--- a/cmd/swarm/config_test.go
+++ b/cmd/swarm/config_test.go
@@ -457,3 +457,98 @@ func TestCmdLineOverridesFile(t *testing.T) {
node.Shutdown()
}
+
+func TestValidateConfig(t *testing.T) {
+ for _, c := range []struct {
+ cfg *api.Config
+ err string
+ }{
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "/data/testnet/geth.ipc",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "http://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "ws://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "test:/data/testnet/geth.ipc",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "test:ws://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "314159265dD8dbb310642f98f50C066173C1259b@/data/testnet/geth.ipc",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "314159265dD8dbb310642f98f50C066173C1259b@http://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "314159265dD8dbb310642f98f50C066173C1259b@ws://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "test:314159265dD8dbb310642f98f50C066173C1259b@/data/testnet/geth.ipc",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "eth:314159265dD8dbb310642f98f50C066173C1259b@http://127.0.0.1:1234",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "eth:314159265dD8dbb310642f98f50C066173C1259b@ws://127.0.0.1:12344",
+ }},
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "eth:",
+ }},
+ err: "invalid format [tld:][contract-addr@]url for ENS API endpoint configuration \"eth:\": missing url",
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "314159265dD8dbb310642f98f50C066173C1259b@",
+ }},
+ err: "invalid format [tld:][contract-addr@]url for ENS API endpoint configuration \"314159265dD8dbb310642f98f50C066173C1259b@\": missing url",
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ ":314159265dD8dbb310642f98f50C066173C1259",
+ }},
+ err: "invalid format [tld:][contract-addr@]url for ENS API endpoint configuration \":314159265dD8dbb310642f98f50C066173C1259\": missing tld",
+ },
+ {
+ cfg: &api.Config{EnsAPIs: []string{
+ "@/data/testnet/geth.ipc",
+ }},
+ err: "invalid format [tld:][contract-addr@]url for ENS API endpoint configuration \"@/data/testnet/geth.ipc\": missing contract address",
+ },
+ } {
+ err := validateConfig(c.cfg)
+ if c.err != "" && err.Error() != c.err {
+ t.Errorf("expected error %q, got %q", c.err, err)
+ }
+ if c.err == "" && err != nil {
+ t.Errorf("unexpected error %q", err)
+ }
+ }
+}
diff --git a/cmd/swarm/main.go b/cmd/swarm/main.go
index 77315a426..360020b77 100644
--- a/cmd/swarm/main.go
+++ b/cmd/swarm/main.go
@@ -17,11 +17,9 @@
package main
import (
- "context"
"crypto/ecdsa"
"fmt"
"io/ioutil"
- "math/big"
"os"
"os/signal"
"runtime"
@@ -29,14 +27,12 @@ import (
"strconv"
"strings"
"syscall"
- "time"
"github.com/ethereum/go-ethereum/accounts"
"github.com/ethereum/go-ethereum/accounts/keystore"
"github.com/ethereum/go-ethereum/cmd/utils"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/console"
- "github.com/ethereum/go-ethereum/contracts/ens"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/internal/debug"
@@ -45,9 +41,9 @@ import (
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/p2p/discover"
"github.com/ethereum/go-ethereum/params"
- "github.com/ethereum/go-ethereum/rpc"
"github.com/ethereum/go-ethereum/swarm"
bzzapi "github.com/ethereum/go-ethereum/swarm/api"
+ swarmmetrics "github.com/ethereum/go-ethereum/swarm/metrics"
"gopkg.in/urfave/cli.v1"
)
@@ -110,16 +106,11 @@ var (
Usage: "Swarm Syncing enabled (default true)",
EnvVar: SWARM_ENV_SYNC_ENABLE,
}
- EnsAPIFlag = cli.StringFlag{
+ EnsAPIFlag = cli.StringSliceFlag{
Name: "ens-api",
- Usage: "URL of the Ethereum API provider to use for ENS record lookups",
+ Usage: "ENS API endpoint for a TLD and with contract address, can be repeated, format [tld:][contract-addr@]url",
EnvVar: SWARM_ENV_ENS_API,
}
- EnsAddrFlag = cli.StringFlag{
- Name: "ens-addr",
- Usage: "ENS contract address (default is detected as testnet or mainnet using --ens-api)",
- EnvVar: SWARM_ENV_ENS_ADDR,
- }
SwarmApiFlag = cli.StringFlag{
Name: "bzzapi",
Usage: "Swarm HTTP endpoint",
@@ -156,6 +147,10 @@ var (
Name: "ethapi",
Usage: "DEPRECATED: please use --ens-api and --swap-api",
}
+ DeprecatedEnsAddrFlag = cli.StringFlag{
+ Name: "ens-addr",
+ Usage: "DEPRECATED: ENS contract address, please use --ens-api with contract address according to its format",
+ }
)
//declare a few constant error messages, useful for later error check comparisons in test
@@ -343,7 +338,6 @@ DEPRECATED: use 'swarm db clean'.
// bzzd-specific flags
CorsStringFlag,
EnsAPIFlag,
- EnsAddrFlag,
SwarmTomlConfigPathFlag,
SwarmConfigPathFlag,
SwarmSwapEnabledFlag,
@@ -363,11 +357,17 @@ DEPRECATED: use 'swarm db clean'.
SwarmUploadMimeType,
//deprecated flags
DeprecatedEthAPIFlag,
+ DeprecatedEnsAddrFlag,
}
app.Flags = append(app.Flags, debug.Flags...)
+ app.Flags = append(app.Flags, swarmmetrics.Flags...)
app.Before = func(ctx *cli.Context) error {
runtime.GOMAXPROCS(runtime.NumCPU())
- return debug.Setup(ctx)
+ if err := debug.Setup(ctx); err != nil {
+ return err
+ }
+ swarmmetrics.Setup(ctx)
+ return nil
}
app.After = func(ctx *cli.Context) error {
debug.Exit()
@@ -448,38 +448,6 @@ func bzzd(ctx *cli.Context) error {
return nil
}
-// detectEnsAddr determines the ENS contract address by getting both the
-// version and genesis hash using the client and matching them to either
-// mainnet or testnet addresses
-func detectEnsAddr(client *rpc.Client) (common.Address, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- defer cancel()
-
- var version string
- if err := client.CallContext(ctx, &version, "net_version"); err != nil {
- return common.Address{}, err
- }
-
- block, err := ethclient.NewClient(client).BlockByNumber(ctx, big.NewInt(0))
- if err != nil {
- return common.Address{}, err
- }
-
- switch {
-
- case version == "1" && block.Hash() == params.MainnetGenesisHash:
- log.Info("using Mainnet ENS contract address", "addr", ens.MainNetAddress)
- return ens.MainNetAddress, nil
-
- case version == "3" && block.Hash() == params.TestnetGenesisHash:
- log.Info("using Testnet ENS contract address", "addr", ens.TestNetAddress)
- return ens.TestNetAddress, nil
-
- default:
- return common.Address{}, fmt.Errorf("unknown version and genesis hash: %s %s", version, block.Hash())
- }
-}
-
func registerBzzService(bzzconfig *bzzapi.Config, ctx *cli.Context, stack *node.Node) {
//define the swarm service boot function
@@ -494,27 +462,7 @@ func registerBzzService(bzzconfig *bzzapi.Config, ctx *cli.Context, stack *node.
}
}
- var ensClient *ethclient.Client
- if bzzconfig.EnsApi != "" {
- log.Info("connecting to ENS API", "url", bzzconfig.EnsApi)
- client, err := rpc.Dial(bzzconfig.EnsApi)
- if err != nil {
- return nil, fmt.Errorf("error connecting to ENS API %s: %s", bzzconfig.EnsApi, err)
- }
- ensClient = ethclient.NewClient(client)
-
- //no ENS root address set yet
- if bzzconfig.EnsRoot == (common.Address{}) {
- ensAddr, err := detectEnsAddr(client)
- if err == nil {
- bzzconfig.EnsRoot = ensAddr
- } else {
- log.Warn(fmt.Sprintf("could not determine ENS contract address, using default %s", bzzconfig.EnsRoot), "err", err)
- }
- }
- }
-
- return swarm.NewSwarm(ctx, swapClient, ensClient, bzzconfig, bzzconfig.SwapEnabled, bzzconfig.SyncEnabled, bzzconfig.Cors)
+ return swarm.NewSwarm(ctx, swapClient, bzzconfig)
}
//register within the ethereum node
if err := stack.Register(boot); err != nil {
diff --git a/cmd/swarm/manifest.go b/cmd/swarm/manifest.go
index aa276e0f9..41a69a5d0 100644
--- a/cmd/swarm/manifest.go
+++ b/cmd/swarm/manifest.go
@@ -35,7 +35,7 @@ const bzzManifestJSON = "application/bzz-manifest+json"
func add(ctx *cli.Context) {
args := ctx.Args()
if len(args) < 3 {
- utils.Fatalf("Need atleast three arguments <MHASH> <path> <HASH> [<content-type>]")
+ utils.Fatalf("Need at least three arguments <MHASH> <path> <HASH> [<content-type>]")
}
var (
@@ -69,7 +69,7 @@ func update(ctx *cli.Context) {
args := ctx.Args()
if len(args) < 3 {
- utils.Fatalf("Need atleast three arguments <MHASH> <path> <HASH>")
+ utils.Fatalf("Need at least three arguments <MHASH> <path> <HASH>")
}
var (
@@ -101,7 +101,7 @@ func update(ctx *cli.Context) {
func remove(ctx *cli.Context) {
args := ctx.Args()
if len(args) < 2 {
- utils.Fatalf("Need atleast two arguments <MHASH> <path>")
+ utils.Fatalf("Need at least two arguments <MHASH> <path>")
}
var (
diff --git a/cmd/swarm/run_test.go b/cmd/swarm/run_test.go
index ed1502868..594cfa55c 100644
--- a/cmd/swarm/run_test.go
+++ b/cmd/swarm/run_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of go-ethereum.
//
// go-ethereum is free software: you can redistribute it and/or modify
diff --git a/cmd/swarm/upload_test.go b/cmd/swarm/upload_test.go
index 5656186e1..df7fc216a 100644
--- a/cmd/swarm/upload_test.go
+++ b/cmd/swarm/upload_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of go-ethereum.
//
// go-ethereum is free software: you can redistribute it and/or modify
diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go
index 23b10c2d7..186d18d8f 100644
--- a/cmd/utils/cmd.go
+++ b/cmd/utils/cmd.go
@@ -25,6 +25,7 @@ import (
"os/signal"
"runtime"
"strings"
+ "syscall"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
@@ -64,7 +65,7 @@ func StartNode(stack *node.Node) {
}
go func() {
sigc := make(chan os.Signal, 1)
- signal.Notify(sigc, os.Interrupt)
+ signal.Notify(sigc, syscall.SIGINT, syscall.SIGTERM)
defer signal.Stop(sigc)
<-sigc
log.Info("Got interrupt, shutting down...")
@@ -85,7 +86,7 @@ func ImportChain(chain *core.BlockChain, fn string) error {
// If a signal is received, the import will stop at the next batch.
interrupt := make(chan os.Signal, 1)
stop := make(chan struct{})
- signal.Notify(interrupt, os.Interrupt)
+ signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
defer signal.Stop(interrupt)
defer close(interrupt)
go func() {
@@ -116,7 +117,6 @@ func ImportChain(chain *core.BlockChain, fn string) error {
return err
}
}
-
stream := rlp.NewStream(reader, 0)
 	// Run the actual import.
@@ -150,25 +150,34 @@ func ImportChain(chain *core.BlockChain, fn string) error {
if checkInterrupt() {
return fmt.Errorf("interrupted")
}
- if hasAllBlocks(chain, blocks[:i]) {
+ missing := missingBlocks(chain, blocks[:i])
+ if len(missing) == 0 {
log.Info("Skipping batch as all blocks present", "batch", batch, "first", blocks[0].Hash(), "last", blocks[i-1].Hash())
continue
}
-
- if _, err := chain.InsertChain(blocks[:i]); err != nil {
+ if _, err := chain.InsertChain(missing); err != nil {
return fmt.Errorf("invalid block %d: %v", n, err)
}
}
return nil
}
-func hasAllBlocks(chain *core.BlockChain, bs []*types.Block) bool {
- for _, b := range bs {
- if !chain.HasBlock(b.Hash(), b.NumberU64()) {
- return false
+func missingBlocks(chain *core.BlockChain, blocks []*types.Block) []*types.Block {
+ head := chain.CurrentBlock()
+ for i, block := range blocks {
+ // If we're behind the chain head, only check block, state is available at head
+ if head.NumberU64() > block.NumberU64() {
+ if !chain.HasBlock(block.Hash(), block.NumberU64()) {
+ return blocks[i:]
+ }
+ continue
+ }
+ // If we're above the chain head, state availability is a must
+ if !chain.HasBlockAndState(block.Hash(), block.NumberU64()) {
+ return blocks[i:]
}
}
- return true
+ return nil
}
func ExportChain(blockchain *core.BlockChain, fn string) error {
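
The new missingBlocks helper changes the import semantics: below the current head a block only needs to be present, while at or above the head its state must be available as well, and only the missing tail of each batch is re-inserted. A small illustration of that rule, using a made-up stand-in type rather than *core.BlockChain:

    package main

    import "fmt"

    // fakeChain is a made-up stand-in for *core.BlockChain, used only to
    // demonstrate the decision rule; it is not part of the patch.
    type fakeChain struct {
        head     uint64
        hasBlock map[uint64]bool
        hasState map[uint64]bool
    }

    // needsImport mirrors the per-block check inside missingBlocks.
    func (c fakeChain) needsImport(num uint64) bool {
        if c.head > num {
            return !c.hasBlock[num] // behind the head: presence is enough
        }
        return !(c.hasBlock[num] && c.hasState[num]) // at/above the head: state required too
    }

    func main() {
        c := fakeChain{
            head:     100,
            hasBlock: map[uint64]bool{50: true, 101: true},
            hasState: map[uint64]bool{},
        }
        fmt.Println(c.needsImport(50))  // false: old block already present
        fmt.Println(c.needsImport(101)) // true: above the head, but its state is missing
    }
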
diff --git a/cmd/utils/flags.go b/cmd/utils/flags.go
index 89dcd230c..ff78a0fcc 100644
--- a/cmd/utils/flags.go
+++ b/cmd/utils/flags.go
@@ -170,7 +170,11 @@ var (
Usage: `Blockchain sync mode ("fast", "full", or "light")`,
Value: &defaultSyncMode,
}
-
+ GCModeFlag = cli.StringFlag{
+ Name: "gcmode",
+ Usage: `Blockchain garbage collection mode ("full", "archive")`,
+ Value: "full",
+ }
LightServFlag = cli.IntFlag{
Name: "lightserv",
Usage: "Maximum percentage of time allowed for serving LES requests (0-90)",
@@ -179,7 +183,7 @@ var (
LightPeersFlag = cli.IntFlag{
Name: "lightpeers",
Usage: "Maximum number of LES client peers",
- Value: 20,
+ Value: eth.DefaultConfig.LightPeers,
}
LightKDFFlag = cli.BoolFlag{
Name: "lightkdf",
@@ -205,11 +209,6 @@ var (
Usage: "Dashboard metrics collection refresh rate",
Value: dashboard.DefaultConfig.Refresh,
}
- DashboardAssetsFlag = cli.StringFlag{
- Name: "dashboard.assets",
- Usage: "Developer flag to serve the dashboard from the local file system",
- Value: dashboard.DefaultConfig.Assets,
- }
// Ethash settings
EthashCacheDirFlag = DirectoryFlag{
Name: "ethash.cachedir",
@@ -293,8 +292,18 @@ var (
// Performance tuning settings
CacheFlag = cli.IntFlag{
Name: "cache",
- Usage: "Megabytes of memory allocated to internal caching (min 16MB / database forced)",
- Value: 128,
+ Usage: "Megabytes of memory allocated to internal caching",
+ Value: 1024,
+ }
+ CacheDatabaseFlag = cli.IntFlag{
+ Name: "cache.database",
+ Usage: "Percentage of cache memory allowance to use for database io",
+ Value: 75,
+ }
+ CacheGCFlag = cli.IntFlag{
+ Name: "cache.gc",
+ Usage: "Percentage of cache memory allowance to use for trie pruning",
+ Value: 25,
}
TrieCacheGenFlag = cli.IntFlag{
Name: "trie-cache-gens",
@@ -383,6 +392,11 @@ var (
Usage: "Comma separated list of domains from which to accept cross origin requests (browser enforced)",
Value: "",
}
+ RPCVirtualHostsFlag = cli.StringFlag{
+ Name: "rpcvhosts",
+ Usage: "Comma separated list of virtual hostnames from which to accept requests (server enforced). Accepts '*' wildcard.",
+ Value: strings.Join(node.DefaultConfig.HTTPVirtualHosts, ","),
+ }
RPCApiFlag = cli.StringFlag{
Name: "rpcapi",
Usage: "API's offered over the HTTP-RPC interface",
@@ -612,7 +626,7 @@ func setBootstrapNodesV5(ctx *cli.Context, cfg *p2p.Config) {
urls = strings.Split(ctx.GlobalString(BootnodesFlag.Name), ",")
}
case ctx.GlobalBool(RinkebyFlag.Name):
- urls = params.RinkebyV5Bootnodes
+ urls = params.RinkebyBootnodes
case cfg.BootstrapNodesV5 != nil:
return // already set, don't apply defaults.
}
@@ -676,6 +690,9 @@ func setHTTP(ctx *cli.Context, cfg *node.Config) {
if ctx.GlobalIsSet(RPCApiFlag.Name) {
cfg.HTTPModules = splitAndTrim(ctx.GlobalString(RPCApiFlag.Name))
}
+ if ctx.GlobalIsSet(RPCVirtualHostsFlag.Name) {
+ cfg.HTTPVirtualHosts = splitAndTrim(ctx.GlobalString(RPCVirtualHostsFlag.Name))
+ }
}
// setWS creates the WebSocket RPC listener interface string from the set
@@ -714,13 +731,15 @@ func setIPC(ctx *cli.Context, cfg *node.Config) {
// makeDatabaseHandles raises out the number of allowed file handles per process
// for Geth and returns half of the allowance to assign to the database.
func makeDatabaseHandles() int {
- if err := fdlimit.Raise(2048); err != nil {
- Fatalf("Failed to raise file descriptor allowance: %v", err)
- }
limit, err := fdlimit.Current()
if err != nil {
Fatalf("Failed to retrieve file descriptor allowance: %v", err)
}
+ if limit < 2048 {
+ if err := fdlimit.Raise(2048); err != nil {
+ Fatalf("Failed to raise file descriptor allowance: %v", err)
+ }
+ }
if limit > 2048 { // cap database file descriptors even if more is available
limit = 2048
}
@@ -789,20 +808,43 @@ func SetP2PConfig(ctx *cli.Context, cfg *p2p.Config) {
setBootstrapNodes(ctx, cfg)
setBootstrapNodesV5(ctx, cfg)
+ lightClient := ctx.GlobalBool(LightModeFlag.Name) || ctx.GlobalString(SyncModeFlag.Name) == "light"
+ lightServer := ctx.GlobalInt(LightServFlag.Name) != 0
+ lightPeers := ctx.GlobalInt(LightPeersFlag.Name)
+
if ctx.GlobalIsSet(MaxPeersFlag.Name) {
cfg.MaxPeers = ctx.GlobalInt(MaxPeersFlag.Name)
+ if lightServer && !ctx.GlobalIsSet(LightPeersFlag.Name) {
+ cfg.MaxPeers += lightPeers
+ }
+ } else {
+ if lightServer {
+ cfg.MaxPeers += lightPeers
+ }
+ if lightClient && ctx.GlobalIsSet(LightPeersFlag.Name) && cfg.MaxPeers < lightPeers {
+ cfg.MaxPeers = lightPeers
+ }
}
+ if !(lightClient || lightServer) {
+ lightPeers = 0
+ }
+ ethPeers := cfg.MaxPeers - lightPeers
+ if lightClient {
+ ethPeers = 0
+ }
+ log.Info("Maximum peer count", "ETH", ethPeers, "LES", lightPeers, "total", cfg.MaxPeers)
+
if ctx.GlobalIsSet(MaxPendingPeersFlag.Name) {
cfg.MaxPendingPeers = ctx.GlobalInt(MaxPendingPeersFlag.Name)
}
- if ctx.GlobalIsSet(NoDiscoverFlag.Name) || ctx.GlobalBool(LightModeFlag.Name) {
+ if ctx.GlobalIsSet(NoDiscoverFlag.Name) || lightClient {
cfg.NoDiscovery = true
}
// if we're running a light client or server, force enable the v5 peer discovery
 	// unless it is explicitly disabled with --nodiscover. Note that explicitly specifying
 	// --v5disc overrides --nodiscover, in which case the latter only disables v4 discovery
- forceV5Discovery := (ctx.GlobalBool(LightModeFlag.Name) || ctx.GlobalInt(LightServFlag.Name) > 0) && !ctx.GlobalBool(NoDiscoverFlag.Name)
+ forceV5Discovery := (lightClient || lightServer) && !ctx.GlobalBool(NoDiscoverFlag.Name)
if ctx.GlobalIsSet(DiscoveryV5Flag.Name) {
cfg.DiscoveryV5 = ctx.GlobalBool(DiscoveryV5Flag.Name)
} else if forceV5Discovery {
@@ -999,11 +1041,19 @@ func SetEthConfig(ctx *cli.Context, stack *node.Node, cfg *eth.Config) {
cfg.NetworkId = ctx.GlobalUint64(NetworkIdFlag.Name)
}
- if ctx.GlobalIsSet(CacheFlag.Name) {
- cfg.DatabaseCache = ctx.GlobalInt(CacheFlag.Name)
+ if ctx.GlobalIsSet(CacheFlag.Name) || ctx.GlobalIsSet(CacheDatabaseFlag.Name) {
+ cfg.DatabaseCache = ctx.GlobalInt(CacheFlag.Name) * ctx.GlobalInt(CacheDatabaseFlag.Name) / 100
}
cfg.DatabaseHandles = makeDatabaseHandles()
+ if gcmode := ctx.GlobalString(GCModeFlag.Name); gcmode != "full" && gcmode != "archive" {
+ Fatalf("--%s must be either 'full' or 'archive'", GCModeFlag.Name)
+ }
+ cfg.NoPruning = ctx.GlobalString(GCModeFlag.Name) == "archive"
+
+ if ctx.GlobalIsSet(CacheFlag.Name) || ctx.GlobalIsSet(CacheGCFlag.Name) {
+ cfg.TrieCache = ctx.GlobalInt(CacheFlag.Name) * ctx.GlobalInt(CacheGCFlag.Name) / 100
+ }
if ctx.GlobalIsSet(MinerThreadsFlag.Name) {
cfg.MinerThreads = ctx.GlobalInt(MinerThreadsFlag.Name)
}
@@ -1068,7 +1118,6 @@ func SetDashboardConfig(ctx *cli.Context, cfg *dashboard.Config) {
cfg.Host = ctx.GlobalString(DashboardAddrFlag.Name)
cfg.Port = ctx.GlobalInt(DashboardPortFlag.Name)
cfg.Refresh = ctx.GlobalDuration(DashboardRefreshFlag.Name)
- cfg.Assets = ctx.GlobalString(DashboardAssetsFlag.Name)
}
// RegisterEthService adds an Ethereum client to the stack.
@@ -1135,7 +1184,7 @@ func SetupNetwork(ctx *cli.Context) {
// MakeChainDatabase open an LevelDB using the flags passed to the client and will hard crash if it fails.
func MakeChainDatabase(ctx *cli.Context, stack *node.Node) ethdb.Database {
var (
- cache = ctx.GlobalInt(CacheFlag.Name)
+ cache = ctx.GlobalInt(CacheFlag.Name) * ctx.GlobalInt(CacheDatabaseFlag.Name) / 100
handles = makeDatabaseHandles()
)
name := "chaindata"
@@ -1187,8 +1236,19 @@ func MakeChain(ctx *cli.Context, stack *node.Node) (chain *core.BlockChain, chai
})
}
}
+ if gcmode := ctx.GlobalString(GCModeFlag.Name); gcmode != "full" && gcmode != "archive" {
+ Fatalf("--%s must be either 'full' or 'archive'", GCModeFlag.Name)
+ }
+ cache := &core.CacheConfig{
+ Disabled: ctx.GlobalString(GCModeFlag.Name) == "archive",
+ TrieNodeLimit: eth.DefaultConfig.TrieCache,
+ TrieTimeLimit: eth.DefaultConfig.TrieTimeout,
+ }
+ if ctx.GlobalIsSet(CacheFlag.Name) || ctx.GlobalIsSet(CacheGCFlag.Name) {
+ cache.TrieNodeLimit = ctx.GlobalInt(CacheFlag.Name) * ctx.GlobalInt(CacheGCFlag.Name) / 100
+ }
vmcfg := vm.Config{EnablePreimageRecording: ctx.GlobalBool(VMEnableDebugFlag.Name)}
- chain, err = core.NewBlockChain(chainDb, config, engine, vmcfg)
+ chain, err = core.NewBlockChain(chainDb, cache, config, engine, vmcfg)
if err != nil {
Fatalf("Can't create BlockChain: %v", err)
}
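
To make the new cache accounting concrete: with the defaults introduced above (--cache=1024, --cache.database=75, --cache.gc=25), the database cache works out to 768 MB and the trie cache to 256 MB. A minimal sketch of that arithmetic:

    package main

    import "fmt"

    func main() {
        // Defaults taken from the flag definitions above.
        cache, dbPct, gcPct := 1024, 75, 25
        fmt.Println("database cache (MB):", cache*dbPct/100) // 768
        fmt.Println("trie cache (MB):", cache*gcPct/100)     // 256
    }
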
diff --git a/cmd/wnode/main.go b/cmd/wnode/main.go
index 05e6b2908..988c50ce3 100644
--- a/cmd/wnode/main.go
+++ b/cmd/wnode/main.go
@@ -22,6 +22,7 @@ package main
import (
"bufio"
"crypto/ecdsa"
+ crand "crypto/rand"
"crypto/sha512"
"encoding/binary"
"encoding/hex"
@@ -43,11 +44,12 @@ import (
"github.com/ethereum/go-ethereum/p2p/discover"
"github.com/ethereum/go-ethereum/p2p/nat"
"github.com/ethereum/go-ethereum/whisper/mailserver"
- whisper "github.com/ethereum/go-ethereum/whisper/whisperv5"
+ whisper "github.com/ethereum/go-ethereum/whisper/whisperv6"
"golang.org/x/crypto/pbkdf2"
)
const quitCommand = "~Q"
+const entropySize = 32
// singletons
var (
@@ -55,33 +57,37 @@ var (
shh *whisper.Whisper
done chan struct{}
mailServer mailserver.WMailServer
+ entropy [entropySize]byte
input = bufio.NewReader(os.Stdin)
)
// encryption
var (
- symKey []byte
- pub *ecdsa.PublicKey
- asymKey *ecdsa.PrivateKey
- nodeid *ecdsa.PrivateKey
- topic whisper.TopicType
- asymKeyID string
- filterID string
- symPass string
- msPassword string
+ symKey []byte
+ pub *ecdsa.PublicKey
+ asymKey *ecdsa.PrivateKey
+ nodeid *ecdsa.PrivateKey
+ topic whisper.TopicType
+
+ asymKeyID string
+ asymFilterID string
+ symFilterID string
+ symPass string
+ msPassword string
)
// cmd arguments
var (
- bootstrapMode = flag.Bool("standalone", false, "boostrap node: don't actively connect to peers, wait for incoming connections")
- forwarderMode = flag.Bool("forwarder", false, "forwarder mode: only forward messages, neither send nor decrypt messages")
+	bootstrapMode = flag.Bool("standalone", false, "bootstrap node: don't initiate connection to peers, just wait for incoming connections")
+ forwarderMode = flag.Bool("forwarder", false, "forwarder mode: only forward messages, neither encrypt nor decrypt messages")
mailServerMode = flag.Bool("mailserver", false, "mail server mode: delivers expired messages on demand")
requestMail = flag.Bool("mailclient", false, "request expired messages from the bootstrap server")
asymmetricMode = flag.Bool("asym", false, "use asymmetric encryption")
generateKey = flag.Bool("generatekey", false, "generate and show the private key")
fileExMode = flag.Bool("fileexchange", false, "file exchange mode")
- testMode = flag.Bool("test", false, "use of predefined parameters for diagnostics")
+ fileReader = flag.Bool("filereader", false, "load and decrypt messages saved as files, display as plain text")
+ testMode = flag.Bool("test", false, "use of predefined parameters for diagnostics (password, etc.)")
echoMode = flag.Bool("echo", false, "echo mode: prints some arguments for diagnostics")
argVerbosity = flag.Int("verbosity", int(log.LvlError), "log verbosity level")
@@ -97,13 +103,14 @@ var (
argIDFile = flag.String("idfile", "", "file name with node id (private key)")
argEnode = flag.String("boot", "", "bootstrap node you want to connect to (e.g. enode://e454......08d50@52.176.211.200:16428)")
argTopic = flag.String("topic", "", "topic in hexadecimal format (e.g. 70a4beef)")
- argSaveDir = flag.String("savedir", "", "directory where incoming messages will be saved as files")
+ argSaveDir = flag.String("savedir", "", "directory where all incoming messages will be saved as files")
)
func main() {
processArgs()
initialize()
run()
+ shutdown()
}
func processArgs() {
@@ -190,6 +197,8 @@ func initialize() {
if len(*argIP) == 0 {
argIP = scanLineA("Please enter your IP and port (e.g. 127.0.0.1:30348): ")
}
+ } else if *fileReader {
+ *bootstrapMode = true
} else {
if len(*argEnode) == 0 {
argEnode = scanLineA("Please enter the peer's enode: ")
@@ -198,11 +207,6 @@ func initialize() {
peers = append(peers, peer)
}
- cfg := &whisper.Config{
- MaxMessageSize: uint32(*argMaxSize),
- MinimumAcceptedPOW: *argPoW,
- }
-
if *mailServerMode {
if len(msPassword) == 0 {
msPassword, err = console.Stdin.PromptPassword("Please enter the Mail Server password: ")
@@ -210,14 +214,15 @@ func initialize() {
utils.Fatalf("Failed to read Mail Server password: %s", err)
}
}
+ }
- shh = whisper.New(cfg)
- shh.RegisterServer(&mailServer)
- mailServer.Init(shh, *argDBPath, msPassword, *argServerPoW)
- } else {
- shh = whisper.New(cfg)
+ cfg := &whisper.Config{
+ MaxMessageSize: uint32(*argMaxSize),
+ MinimumAcceptedPOW: *argPoW,
}
+ shh = whisper.New(cfg)
+
if *argPoW != whisper.DefaultMinimumPoW {
err := shh.SetMinimumPoW(*argPoW)
if err != nil {
@@ -259,11 +264,21 @@ func initialize() {
maxPeers = 800
}
+ _, err = crand.Read(entropy[:])
+ if err != nil {
+ utils.Fatalf("crypto/rand failed: %s", err)
+ }
+
+ if *mailServerMode {
+ shh.RegisterServer(&mailServer)
+ mailServer.Init(shh, *argDBPath, msPassword, *argServerPoW)
+ }
+
server = &p2p.Server{
Config: p2p.Config{
PrivateKey: nodeid,
MaxPeers: maxPeers,
- Name: common.MakeName("wnode", "5.0"),
+ Name: common.MakeName("wnode", "6.0"),
Protocols: shh.Protocols(),
ListenAddr: *argIP,
NAT: nat.Any(),
@@ -274,10 +289,11 @@ func initialize() {
}
}
-func startServer() {
+func startServer() error {
err := server.Start()
if err != nil {
- utils.Fatalf("Failed to start Whisper peer: %s.", err)
+ fmt.Printf("Failed to start Whisper peer: %s.", err)
+ return err
}
fmt.Printf("my public key: %s \n", common.ToHex(crypto.FromECDSAPub(&asymKey.PublicKey)))
@@ -293,9 +309,14 @@ func startServer() {
configureNode()
}
- if !*forwarderMode {
+ if *fileExMode {
+ fmt.Printf("Please type the file name to be send. To quit type: '%s'\n", quitCommand)
+ } else if *fileReader {
+ fmt.Printf("Please type the file name to be decrypted. To quit type: '%s'\n", quitCommand)
+ } else if !*forwarderMode {
fmt.Printf("Please type the message. To quit type: '%s'\n", quitCommand)
}
+ return nil
}
func isKeyValid(k *ecdsa.PublicKey) bool {
@@ -363,13 +384,22 @@ func configureNode() {
}
}
- filter := whisper.Filter{
+ symFilter := whisper.Filter{
KeySym: symKey,
+ Topics: [][]byte{topic[:]},
+ AllowP2P: p2pAccept,
+ }
+ symFilterID, err = shh.Subscribe(&symFilter)
+ if err != nil {
+ utils.Fatalf("Failed to install filter: %s", err)
+ }
+
+ asymFilter := whisper.Filter{
KeyAsym: asymKey,
Topics: [][]byte{topic[:]},
AllowP2P: p2pAccept,
}
- filterID, err = shh.Subscribe(&filter)
+ asymFilterID, err = shh.Subscribe(&asymFilter)
if err != nil {
utils.Fatalf("Failed to install filter: %s", err)
}
@@ -400,8 +430,10 @@ func waitForConnection(timeout bool) {
}
func run() {
- defer mailServer.Close()
- startServer()
+ err := startServer()
+ if err != nil {
+ return
+ }
defer server.Stop()
shh.Start(nil)
defer shh.Stop()
@@ -414,21 +446,26 @@ func run() {
requestExpiredMessagesLoop()
} else if *fileExMode {
sendFilesLoop()
+ } else if *fileReader {
+ fileReaderLoop()
} else {
sendLoop()
}
}
+func shutdown() {
+ close(done)
+ mailServer.Close()
+}
+
func sendLoop() {
for {
s := scanLine("")
if s == quitCommand {
fmt.Println("Quit command received")
- close(done)
- break
+ return
}
sendMsg([]byte(s))
-
if *asymmetricMode {
// print your own message for convenience,
// because in asymmetric mode it is impossible to decrypt it
@@ -444,13 +481,11 @@ func sendFilesLoop() {
s := scanLine("")
if s == quitCommand {
fmt.Println("Quit command received")
- close(done)
- break
+ return
}
b, err := ioutil.ReadFile(s)
if err != nil {
fmt.Printf(">>> Error: %s \n", err)
- continue
} else {
h := sendMsg(b)
if (h == common.Hash{}) {
@@ -464,6 +499,38 @@ func sendFilesLoop() {
}
}
+func fileReaderLoop() {
+ watcher1 := shh.GetFilter(symFilterID)
+ watcher2 := shh.GetFilter(asymFilterID)
+ if watcher1 == nil && watcher2 == nil {
+ fmt.Println("Error: neither symmetric nor asymmetric filter is installed")
+ return
+ }
+
+ for {
+ s := scanLine("")
+ if s == quitCommand {
+ fmt.Println("Quit command received")
+ return
+ }
+ raw, err := ioutil.ReadFile(s)
+ if err != nil {
+ fmt.Printf(">>> Error: %s \n", err)
+ } else {
+ env := whisper.Envelope{Data: raw} // the topic is zero
+ msg := env.Open(watcher1) // force-open envelope regardless of the topic
+ if msg == nil {
+ msg = env.Open(watcher2)
+ }
+ if msg == nil {
+ fmt.Printf(">>> Error: failed to decrypt the message \n")
+ } else {
+ printMessageInfo(msg)
+ }
+ }
+ }
+}
+
func scanLine(prompt string) string {
if len(prompt) > 0 {
fmt.Print(prompt)
@@ -506,6 +573,7 @@ func sendMsg(payload []byte) common.Hash {
if err != nil {
utils.Fatalf("failed to create new message: %s", err)
}
+
envelope, err := msg.Wrap(&params)
if err != nil {
fmt.Printf("failed to seal message: %v \n", err)
@@ -522,9 +590,14 @@ func sendMsg(payload []byte) common.Hash {
}
func messageLoop() {
- f := shh.GetFilter(filterID)
- if f == nil {
- utils.Fatalf("filter is not installed")
+ sf := shh.GetFilter(symFilterID)
+ if sf == nil {
+ utils.Fatalf("symmetric filter is not installed")
+ }
+
+ af := shh.GetFilter(asymFilterID)
+ if af == nil {
+ utils.Fatalf("asymmetric filter is not installed")
}
ticker := time.NewTicker(time.Millisecond * 50)
@@ -532,12 +605,21 @@ func messageLoop() {
for {
select {
case <-ticker.C:
- messages := f.Retrieve()
+ m1 := sf.Retrieve()
+ m2 := af.Retrieve()
+ messages := append(m1, m2...)
for _, msg := range messages {
- if *fileExMode || len(msg.Payload) > 2048 {
- writeMessageToFile(*argSaveDir, msg)
- } else {
+ reportedOnce := false
+ if !*fileExMode && len(msg.Payload) <= 2048 {
printMessageInfo(msg)
+ reportedOnce = true
+ }
+
+ // All messages are saved upon specifying argSaveDir.
+ // fileExMode only specifies how messages are displayed on the console after they are saved.
+ // if fileExMode == true, only the hashes are displayed, since messages might be too big.
+ if len(*argSaveDir) > 0 {
+ writeMessageToFile(*argSaveDir, msg, !reportedOnce)
}
}
case <-done:
@@ -562,7 +644,11 @@ func printMessageInfo(msg *whisper.ReceivedMessage) {
}
}
-func writeMessageToFile(dir string, msg *whisper.ReceivedMessage) {
+func writeMessageToFile(dir string, msg *whisper.ReceivedMessage, show bool) {
+ if len(dir) == 0 {
+ return
+ }
+
timestamp := fmt.Sprintf("%d", msg.Sent)
name := fmt.Sprintf("%x", msg.EnvelopeHash)
@@ -571,27 +657,32 @@ func writeMessageToFile(dir string, msg *whisper.ReceivedMessage) {
address = crypto.PubkeyToAddress(*msg.Src)
}
- if whisper.IsPubKeyEqual(msg.Src, &asymKey.PublicKey) {
- // message from myself: don't save, only report
- fmt.Printf("\n%s <%x>: message received: '%s'\n", timestamp, address, name)
- } else if len(dir) > 0 {
- fullpath := filepath.Join(dir, name)
- err := ioutil.WriteFile(fullpath, msg.Payload, 0644)
- if err != nil {
- fmt.Printf("\n%s {%x}: message received but not saved: %s\n", timestamp, address, err)
- } else {
- fmt.Printf("\n%s {%x}: message received and saved as '%s' (%d bytes)\n", timestamp, address, name, len(msg.Payload))
- }
- } else {
- fmt.Printf("\n%s {%x}: big message received (%d bytes), but not saved: %s\n", timestamp, address, len(msg.Payload), name)
+ env := shh.GetEnvelope(msg.EnvelopeHash)
+ if env == nil {
+ fmt.Printf("\nUnexpected error: envelope not found: %x\n", msg.EnvelopeHash)
+ return
+ }
+
+	// this is sample code; uncomment it if you don't want to save your own messages.
+ //if whisper.IsPubKeyEqual(msg.Src, &asymKey.PublicKey) {
+ // fmt.Printf("\n%s <%x>: message from myself received, not saved: '%s'\n", timestamp, address, name)
+ // return
+ //}
+
+ fullpath := filepath.Join(dir, name)
+ err := ioutil.WriteFile(fullpath, env.Data, 0644)
+ if err != nil {
+ fmt.Printf("\n%s {%x}: message received but not saved: %s\n", timestamp, address, err)
+ } else if show {
+ fmt.Printf("\n%s {%x}: message received and saved as '%s' (%d bytes)\n", timestamp, address, name, len(env.Data))
}
}
func requestExpiredMessagesLoop() {
- var key, peerID []byte
+ var key, peerID, bloom []byte
var timeLow, timeUpp uint32
var t string
- var xt, empty whisper.TopicType
+ var xt whisper.TopicType
keyID, err := shh.AddSymKeyFromPassword(msPassword)
if err != nil {
@@ -601,37 +692,43 @@ func requestExpiredMessagesLoop() {
if err != nil {
utils.Fatalf("Failed to save symmetric key for mail request: %s", err)
}
- peerID = extractIdFromEnode(*argEnode)
+ peerID = extractIDFromEnode(*argEnode)
shh.AllowP2PMessagesFromPeer(peerID)
for {
timeLow = scanUint("Please enter the lower limit of the time range (unix timestamp): ")
timeUpp = scanUint("Please enter the upper limit of the time range (unix timestamp): ")
- t = scanLine("Please enter the topic (hexadecimal): ")
- if len(t) >= whisper.TopicLength*2 {
+ t = scanLine("Enter the topic (hex). Press enter to request all messages, regardless of the topic: ")
+ if len(t) == whisper.TopicLength*2 {
x, err := hex.DecodeString(t)
if err != nil {
- utils.Fatalf("Failed to parse the topic: %s", err)
+ fmt.Printf("Failed to parse the topic: %s \n", err)
+ continue
}
xt = whisper.BytesToTopic(x)
+ bloom = whisper.TopicToBloom(xt)
+ obfuscateBloom(bloom)
+ } else if len(t) == 0 {
+ bloom = whisper.MakeFullNodeBloom()
+ } else {
+ fmt.Println("Error: topic is invalid, request aborted")
+ continue
}
+
if timeUpp == 0 {
timeUpp = 0xFFFFFFFF
}
- data := make([]byte, 8+whisper.TopicLength)
+ data := make([]byte, 8, 8+whisper.BloomFilterSize)
binary.BigEndian.PutUint32(data, timeLow)
binary.BigEndian.PutUint32(data[4:], timeUpp)
- copy(data[8:], xt[:])
- if xt == empty {
- data = data[:8]
- }
+ data = append(data, bloom...)
var params whisper.MessageParams
params.PoW = *argServerPoW
params.Payload = data
params.KeySym = key
- params.Src = nodeid
+ params.Src = asymKey
params.WorkTime = 5
msg, err := whisper.NewSentMessage(&params)
@@ -652,10 +749,27 @@ func requestExpiredMessagesLoop() {
}
}
-func extractIdFromEnode(s string) []byte {
+func extractIDFromEnode(s string) []byte {
n, err := discover.ParseNode(s)
if err != nil {
utils.Fatalf("Failed to parse enode: %s", err)
}
return n.ID[:]
}
+
+// obfuscateBloom adds 16 random bits to the bloom
+// filter, in order to obfuscate the contained topics.
+// It does so deterministically within every session.
+// Despite the additional bits, it will match on average
+// 32000 times fewer messages than a full node's bloom filter.
+func obfuscateBloom(bloom []byte) {
+ const half = entropySize / 2
+ for i := 0; i < half; i++ {
+ x := int(entropy[i])
+ if entropy[half+i] < 128 {
+ x += 256
+ }
+
+ bloom[x/8] = 1 << uint(x%8) // set the bit number X
+ }
+}
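
For clarity, the bit addressing used by obfuscateBloom: each entropy byte yields a bit number x in [0, 512), which lands in byte x/8 under the mask 1<<(x%8). The sketch below uses the generic OR form to set a bit and assumes a 64-byte (512-bit) bloom filter; the filter size is an assumption here, not something stated in this hunk.

    package main

    import "fmt"

    func main() {
        bloom := make([]byte, 64) // assumed 512-bit whisper bloom filter
        x := 300                  // an example bit number in [0, 512)
        bloom[x/8] |= 1 << uint(x%8)
        fmt.Printf("bit %d lives in byte %d: %08b\n", x, x/8, bloom[x/8])
    }
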
diff --git a/common/big.go b/common/big.go
index b552608bc..65d4377bf 100644
--- a/common/big.go
+++ b/common/big.go
@@ -25,6 +25,6 @@ var (
Big3 = big.NewInt(3)
Big0 = big.NewInt(0)
Big32 = big.NewInt(32)
- Big256 = big.NewInt(0xff)
+ Big256 = big.NewInt(256)
Big257 = big.NewInt(257)
)
diff --git a/common/compiler/solidity.go b/common/compiler/solidity.go
index abb803989..234714a2b 100644
--- a/common/compiler/solidity.go
+++ b/common/compiler/solidity.go
@@ -65,7 +65,6 @@ type solcOutput struct {
func (s *Solidity) makeArgs() []string {
p := []string{
"--combined-json", "bin,abi,userdoc,devdoc",
- "--add-std", // include standard lib contracts
"--optimize", // code optimizer switched on
}
if s.Major > 0 || s.Minor > 4 || s.Patch > 6 {
diff --git a/common/fdlimit/fdlimit_freebsd.go b/common/fdlimit/fdlimit_freebsd.go
index 25caaafe2..c126b0c26 100644
--- a/common/fdlimit/fdlimit_freebsd.go
+++ b/common/fdlimit/fdlimit_freebsd.go
@@ -1,18 +1,18 @@
// Copyright 2016 The go-ethereum Authors
-// This file is part of go-ethereum.
+// This file is part of the go-ethereum library.
//
-// go-ethereum is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// go-ethereum is distributed in the hope that it will be useful,
+// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// GNU Lesser General Public License for more details.
//
-// You should have received a copy of the GNU General Public License
-// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build freebsd
diff --git a/common/fdlimit/fdlimit_test.go b/common/fdlimit/fdlimit_test.go
index 05e9f0b65..a9ee9ab36 100644
--- a/common/fdlimit/fdlimit_test.go
+++ b/common/fdlimit/fdlimit_test.go
@@ -1,18 +1,18 @@
// Copyright 2016 The go-ethereum Authors
-// This file is part of go-ethereum.
+// This file is part of the go-ethereum library.
//
-// go-ethereum is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// go-ethereum is distributed in the hope that it will be useful,
+// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// GNU Lesser General Public License for more details.
//
-// You should have received a copy of the GNU General Public License
-// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package fdlimit
diff --git a/common/fdlimit/fdlimit_unix.go b/common/fdlimit/fdlimit_unix.go
index 27c7e783f..a25813235 100644
--- a/common/fdlimit/fdlimit_unix.go
+++ b/common/fdlimit/fdlimit_unix.go
@@ -1,18 +1,18 @@
// Copyright 2016 The go-ethereum Authors
-// This file is part of go-ethereum.
+// This file is part of the go-ethereum library.
//
-// go-ethereum is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// go-ethereum is distributed in the hope that it will be useful,
+// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// GNU Lesser General Public License for more details.
//
-// You should have received a copy of the GNU General Public License
-// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build linux darwin netbsd openbsd solaris
diff --git a/common/fdlimit/fdlimit_windows.go b/common/fdlimit/fdlimit_windows.go
index efcd3220e..863c58bed 100644
--- a/common/fdlimit/fdlimit_windows.go
+++ b/common/fdlimit/fdlimit_windows.go
@@ -1,18 +1,18 @@
-// Copyright 2016 The go-ethereum Authors
-// This file is part of go-ethereum.
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
//
-// go-ethereum is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// go-ethereum is distributed in the hope that it will be useful,
+// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// GNU Lesser General Public License for more details.
//
-// You should have received a copy of the GNU General Public License
-// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package fdlimit
diff --git a/common/size.go b/common/size.go
index c5a0cb0f2..bd0fc85c7 100644
--- a/common/size.go
+++ b/common/size.go
@@ -20,18 +20,29 @@ import (
"fmt"
)
+// StorageSize is a wrapper around a float value that supports user friendly
+// formatting.
type StorageSize float64
-func (self StorageSize) String() string {
- if self > 1000000 {
- return fmt.Sprintf("%.2f mB", self/1000000)
- } else if self > 1000 {
- return fmt.Sprintf("%.2f kB", self/1000)
+// String implements the stringer interface.
+func (s StorageSize) String() string {
+ if s > 1000000 {
+ return fmt.Sprintf("%.2f mB", s/1000000)
+ } else if s > 1000 {
+ return fmt.Sprintf("%.2f kB", s/1000)
} else {
- return fmt.Sprintf("%.2f B", self)
+ return fmt.Sprintf("%.2f B", s)
}
}
-func (self StorageSize) Int64() int64 {
- return int64(self)
+// TerminalString implements log.TerminalStringer, formatting a string for console
+// output during logging.
+func (s StorageSize) TerminalString() string {
+ if s > 1000000 {
+ return fmt.Sprintf("%.2fmB", s/1000000)
+ } else if s > 1000 {
+ return fmt.Sprintf("%.2fkB", s/1000)
+ } else {
+ return fmt.Sprintf("%.2fB", s)
+ }
}
diff --git a/consensus/errors.go b/consensus/errors.go
index 3b136dbdd..a005c5f63 100644
--- a/consensus/errors.go
+++ b/consensus/errors.go
@@ -23,6 +23,10 @@ var (
// that is unknown.
ErrUnknownAncestor = errors.New("unknown ancestor")
+ // ErrPrunedAncestor is returned when validating a block requires an ancestor
+ // that is known, but the state of which is not available.
+ ErrPrunedAncestor = errors.New("pruned ancestor")
+
// ErrFutureBlock is returned when a block's timestamp is in the future according
// to the current node.
ErrFutureBlock = errors.New("block in the future")
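A minimal sketch (not from this patch) of how a caller might branch on the new sentinel error; classify is a hypothetical helper, and the interpretations in the strings simply follow the error documentation above:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/consensus"
)

// classify is a hypothetical helper showing how a block importer could
// distinguish the new ErrPrunedAncestor from the existing ErrUnknownAncestor.
func classify(err error) string {
	switch err {
	case consensus.ErrUnknownAncestor:
		return "parent header missing entirely: queue block for a future import"
	case consensus.ErrPrunedAncestor:
		return "parent known but its state is pruned: treat block as a side chain"
	default:
		return "other verification result"
	}
}

func main() {
	fmt.Println(classify(consensus.ErrPrunedAncestor))
}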
diff --git a/consensus/ethash/algorithm.go b/consensus/ethash/algorithm.go
index 10767bb31..905a7b1ea 100644
--- a/consensus/ethash/algorithm.go
+++ b/consensus/ethash/algorithm.go
@@ -19,6 +19,7 @@ package ethash
import (
"encoding/binary"
"hash"
+ "math/big"
"reflect"
"runtime"
"sync"
@@ -47,6 +48,48 @@ const (
loopAccesses = 64 // Number of accesses in hashimoto loop
)
+// cacheSize returns the size of the ethash verification cache that belongs to a certain
+// block number.
+func cacheSize(block uint64) uint64 {
+ epoch := int(block / epochLength)
+ if epoch < maxEpoch {
+ return cacheSizes[epoch]
+ }
+ return calcCacheSize(epoch)
+}
+
+// calcCacheSize calculates the cache size for epoch. The cache size grows linearly;
+// however, we always take the highest prime below the linearly growing threshold in order
+// to reduce the risk of accidental regularities leading to cyclic behavior.
+func calcCacheSize(epoch int) uint64 {
+ size := cacheInitBytes + cacheGrowthBytes*uint64(epoch) - hashBytes
+ for !new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
+ size -= 2 * hashBytes
+ }
+ return size
+}
+
+// datasetSize returns the size of the ethash mining dataset that belongs to a certain
+// block number.
+func datasetSize(block uint64) uint64 {
+ epoch := int(block / epochLength)
+ if epoch < maxEpoch {
+ return datasetSizes[epoch]
+ }
+ return calcDatasetSize(epoch)
+}
+
+// calcDatasetSize calculates the dataset size for epoch. The dataset size grows linearly;
+// however, we always take the highest prime below the linearly growing threshold in order
+// to reduce the risk of accidental regularities leading to cyclic behavior.
+func calcDatasetSize(epoch int) uint64 {
+ size := datasetInitBytes + datasetGrowthBytes*uint64(epoch) - mixBytes
+ for !new(big.Int).SetUint64(size / mixBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
+ size -= 2 * mixBytes
+ }
+ return size
+}
+
// hasher is a repetitive hasher allowing the same hash data structures to be
// reused between hash runs instead of requiring new ones to be created.
type hasher func(dest []byte, data []byte)
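Because the lookup tables and calc* helpers above are unexported, the following standalone sketch (not part of the patch) reproduces the same prime-capped growth; the constant values are copied here on the assumption that they match the ethash spec (epoch length 30000, hash 64 bytes, mix 128 bytes, cache/dataset init and growth of 2^24/2^17 and 2^30/2^23 bytes) rather than imported:

package main

import (
	"fmt"
	"math/big"
)

// Ethash size constants, assumed to match the spec and the unexported values
// used by the functions added above.
const (
	epochLength        = 30000
	hashBytes          = 64
	mixBytes           = 128
	cacheInitBytes     = 1 << 24
	cacheGrowthBytes   = 1 << 17
	datasetInitBytes   = 1 << 30
	datasetGrowthBytes = 1 << 23
)

// sizeForEpoch mirrors calcCacheSize/calcDatasetSize: grow linearly with the
// epoch, then step down until the item count below the threshold is prime.
func sizeForEpoch(init, growth, item uint64, epoch int) uint64 {
	size := init + growth*uint64(epoch) - item
	for !new(big.Int).SetUint64(size / item).ProbablyPrime(1) { // accurate for n < 2^64
		size -= 2 * item
	}
	return size
}

func main() {
	block := uint64(5000000)
	epoch := int(block / epochLength)
	fmt.Println("cache bytes:  ", sizeForEpoch(cacheInitBytes, cacheGrowthBytes, hashBytes, epoch))
	fmt.Println("dataset bytes:", sizeForEpoch(datasetInitBytes, datasetGrowthBytes, mixBytes, epoch))
}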
diff --git a/consensus/ethash/algorithm_go1.7.go b/consensus/ethash/algorithm_go1.7.go
deleted file mode 100644
index c7f7f48e4..000000000
--- a/consensus/ethash/algorithm_go1.7.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2017 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// +build !go1.8
-
-package ethash
-
-// cacheSize calculates and returns the size of the ethash verification cache that
-// belongs to a certain block number. The cache size grows linearly, however, we
-// always take the highest prime below the linearly growing threshold in order to
-// reduce the risk of accidental regularities leading to cyclic behavior.
-func cacheSize(block uint64) uint64 {
- // If we have a pre-generated value, use that
- epoch := int(block / epochLength)
- if epoch < maxEpoch {
- return cacheSizes[epoch]
- }
- // We don't have a way to verify primes fast before Go 1.8
- panic("fast prime testing unsupported in Go < 1.8")
-}
-
-// datasetSize calculates and returns the size of the ethash mining dataset that
-// belongs to a certain block number. The dataset size grows linearly, however, we
-// always take the highest prime below the linearly growing threshold in order to
-// reduce the risk of accidental regularities leading to cyclic behavior.
-func datasetSize(block uint64) uint64 {
- // If we have a pre-generated value, use that
- epoch := int(block / epochLength)
- if epoch < maxEpoch {
- return datasetSizes[epoch]
- }
- // We don't have a way to verify primes fast before Go 1.8
- panic("fast prime testing unsupported in Go < 1.8")
-}
diff --git a/consensus/ethash/algorithm_go1.8.go b/consensus/ethash/algorithm_go1.8.go
deleted file mode 100644
index 975fdffe5..000000000
--- a/consensus/ethash/algorithm_go1.8.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2017 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// +build go1.8
-
-package ethash
-
-import "math/big"
-
-// cacheSize returns the size of the ethash verification cache that belongs to a certain
-// block number.
-func cacheSize(block uint64) uint64 {
- epoch := int(block / epochLength)
- if epoch < maxEpoch {
- return cacheSizes[epoch]
- }
- return calcCacheSize(epoch)
-}
-
-// calcCacheSize calculates the cache size for epoch. The cache size grows linearly,
-// however, we always take the highest prime below the linearly growing threshold in order
-// to reduce the risk of accidental regularities leading to cyclic behavior.
-func calcCacheSize(epoch int) uint64 {
- size := cacheInitBytes + cacheGrowthBytes*uint64(epoch) - hashBytes
- for !new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
- size -= 2 * hashBytes
- }
- return size
-}
-
-// datasetSize returns the size of the ethash mining dataset that belongs to a certain
-// block number.
-func datasetSize(block uint64) uint64 {
- epoch := int(block / epochLength)
- if epoch < maxEpoch {
- return datasetSizes[epoch]
- }
- return calcDatasetSize(epoch)
-}
-
-// calcDatasetSize calculates the dataset size for epoch. The dataset size grows linearly,
-// however, we always take the highest prime below the linearly growing threshold in order
-// to reduce the risk of accidental regularities leading to cyclic behavior.
-func calcDatasetSize(epoch int) uint64 {
- size := datasetInitBytes + datasetGrowthBytes*uint64(epoch) - mixBytes
- for !new(big.Int).SetUint64(size / mixBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
- size -= 2 * mixBytes
- }
- return size
-}
diff --git a/consensus/ethash/algorithm_go1.8_test.go b/consensus/ethash/algorithm_go1.8_test.go
deleted file mode 100644
index 6648bd6a9..000000000
--- a/consensus/ethash/algorithm_go1.8_test.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2017 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// +build go1.8
-
-package ethash
-
-import "testing"
-
-// Tests whether the dataset size calculator works correctly by cross checking the
-// hard coded lookup table with the value generated by it.
-func TestSizeCalculations(t *testing.T) {
- // Verify all the cache and dataset sizes from the lookup table.
- for epoch, want := range cacheSizes {
- if size := calcCacheSize(epoch); size != want {
- t.Errorf("cache %d: cache size mismatch: have %d, want %d", epoch, size, want)
- }
- }
- for epoch, want := range datasetSizes {
- if size := calcDatasetSize(epoch); size != want {
- t.Errorf("dataset %d: dataset size mismatch: have %d, want %d", epoch, size, want)
- }
- }
-}
diff --git a/consensus/ethash/algorithm_test.go b/consensus/ethash/algorithm_test.go
index a54f3b582..841e39233 100644
--- a/consensus/ethash/algorithm_test.go
+++ b/consensus/ethash/algorithm_test.go
@@ -30,6 +30,22 @@ import (
"github.com/ethereum/go-ethereum/core/types"
)
+// Tests whether the dataset size calculator works correctly by cross checking the
+// hard coded lookup table with the value generated by it.
+func TestSizeCalculations(t *testing.T) {
+ // Verify all the cache and dataset sizes from the lookup table.
+ for epoch, want := range cacheSizes {
+ if size := calcCacheSize(epoch); size != want {
+ t.Errorf("cache %d: cache size mismatch: have %d, want %d", epoch, size, want)
+ }
+ }
+ for epoch, want := range datasetSizes {
+ if size := calcDatasetSize(epoch); size != want {
+ t.Errorf("dataset %d: dataset size mismatch: have %d, want %d", epoch, size, want)
+ }
+ }
+}
+
// Tests that verification caches can be correctly generated.
func TestCacheGeneration(t *testing.T) {
tests := []struct {
diff --git a/consensus/ethash/consensus.go b/consensus/ethash/consensus.go
index 92a23d4a4..99eec8221 100644
--- a/consensus/ethash/consensus.go
+++ b/consensus/ethash/consensus.go
@@ -53,7 +53,6 @@ var (
errDuplicateUncle = errors.New("duplicate uncle")
errUncleIsAncestor = errors.New("uncle is ancestor")
errDanglingUncle = errors.New("uncle's parent is not ancestor")
- errNonceOutOfRange = errors.New("nonce out of range")
errInvalidDifficulty = errors.New("non-positive difficulty")
errInvalidMixDigest = errors.New("invalid mix digest")
errInvalidPoW = errors.New("invalid proof-of-work")
@@ -356,7 +355,7 @@ func calcDifficultyByzantium(time uint64, parent *types.Header) *big.Int {
if x.Cmp(params.MinimumDifficulty) < 0 {
x.Set(params.MinimumDifficulty)
}
- // calculate a fake block numer for the ice-age delay:
+ // calculate a fake block number for the ice-age delay:
// https://github.com/ethereum/EIPs/pull/669
// fake_block_number = min(0, block.number - 3_000_000
fakeBlockNumber := new(big.Int)
@@ -474,18 +473,13 @@ func (ethash *Ethash) VerifySeal(chain consensus.ChainReader, header *types.Head
if ethash.shared != nil {
return ethash.shared.VerifySeal(chain, header)
}
- // Sanity check that the block number is below the lookup table size (60M blocks)
- number := header.Number.Uint64()
- if number/epochLength >= maxEpoch {
- // Go < 1.7 cannot calculate new cache/dataset sizes (no fast prime check)
- return errNonceOutOfRange
- }
// Ensure that we have a valid difficulty for the block
if header.Difficulty.Sign() <= 0 {
return errInvalidDifficulty
}
-
// Recompute the digest and PoW value and verify against the header
+ number := header.Number.Uint64()
+
cache := ethash.cache(number)
size := datasetSize(number)
if ethash.config.PowMode == ModeTest {
diff --git a/consensus/ethash/ethash.go b/consensus/ethash/ethash.go
index 91e20112a..1b3dcee30 100644
--- a/consensus/ethash/ethash.go
+++ b/consensus/ethash/ethash.go
@@ -35,9 +35,9 @@ import (
mmap "github.com/edsrzf/mmap-go"
"github.com/ethereum/go-ethereum/consensus"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/rpc"
"github.com/hashicorp/golang-lru/simplelru"
- metrics "github.com/rcrowley/go-metrics"
)
var ErrInvalidDumpMagic = errors.New("invalid dump magic")
diff --git a/console/console.go b/console/console.go
index 52fe1f542..b280d4e65 100644
--- a/console/console.go
+++ b/console/console.go
@@ -26,6 +26,7 @@ import (
"regexp"
"sort"
"strings"
+ "syscall"
"github.com/ethereum/go-ethereum/internal/jsre"
"github.com/ethereum/go-ethereum/internal/web3ext"
@@ -332,7 +333,7 @@ func (c *Console) Interactive() {
}()
// Monitor Ctrl-C too in case the input is empty and we need to bail
abort := make(chan os.Signal, 1)
- signal.Notify(abort, os.Interrupt)
+ signal.Notify(abort, syscall.SIGINT, syscall.SIGTERM)
// Start sending prompts to the user and reading back inputs
for {
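A standalone sketch (not part of the patch) of the same signal wiring the console now uses, listening for both SIGINT and SIGTERM instead of os.Interrupt alone:

package main

import (
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	// Same pattern as the console change: Ctrl-C (SIGINT) and a polite
	// shutdown request (SIGTERM) both trigger the abort channel.
	abort := make(chan os.Signal, 1)
	signal.Notify(abort, syscall.SIGINT, syscall.SIGTERM)

	fmt.Println("waiting for SIGINT or SIGTERM...")
	sig := <-abort
	fmt.Println("received", sig, "- shutting down")
}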
diff --git a/containers/docker/master-alpine/Dockerfile b/containers/docker/master-alpine/Dockerfile
index c7b71c726..8d4e7fe81 100644
--- a/containers/docker/master-alpine/Dockerfile
+++ b/containers/docker/master-alpine/Dockerfile
@@ -2,7 +2,7 @@ FROM alpine:3.7
RUN \
apk add --update go git make gcc musl-dev linux-headers ca-certificates && \
- git clone --depth 1 --branch release/1.7 https://github.com/ethereum/go-ethereum && \
+ git clone --depth 1 --branch release/1.8 https://github.com/ethereum/go-ethereum && \
(cd go-ethereum && make geth) && \
cp go-ethereum/build/bin/geth /geth && \
apk del go git make gcc musl-dev linux-headers && \
diff --git a/containers/docker/master-ubuntu/Dockerfile b/containers/docker/master-ubuntu/Dockerfile
index bba70abfd..4cfc4f58c 100644
--- a/containers/docker/master-ubuntu/Dockerfile
+++ b/containers/docker/master-ubuntu/Dockerfile
@@ -5,7 +5,7 @@ ENV PATH=/usr/lib/go-1.9/bin:$PATH
RUN \
apt-get update && apt-get upgrade -q -y && \
apt-get install -y --no-install-recommends golang-1.9 git make gcc libc-dev ca-certificates && \
- git clone --depth 1 --branch release/1.7 https://github.com/ethereum/go-ethereum && \
+ git clone --depth 1 --branch release/1.8 https://github.com/ethereum/go-ethereum && \
(cd go-ethereum && make geth) && \
cp go-ethereum/build/bin/geth /geth && \
apt-get remove -y golang-1.9 git make gcc libc-dev && apt autoremove -y && apt-get clean && \
diff --git a/containers/vagrant/.gitignore b/containers/vagrant/.gitignore
deleted file mode 100644
index 8000dd9db..000000000
--- a/containers/vagrant/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.vagrant
diff --git a/containers/vagrant/Vagrantfile b/containers/vagrant/Vagrantfile
deleted file mode 100644
index 72ec366e2..000000000
--- a/containers/vagrant/Vagrantfile
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-require 'yaml'
-
-VAGRANTFILE_API_VERSION = 2
-VM_RAM = 2048
-
-Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
-
- config.vm.define "ubuntu", :primary => true do |ubuntu|
- ubuntu.vm.box = "ubuntu/trusty64"
- ubuntu.vm.provision "shell", :path => "provisioners/shell/ubuntu.sh"
- end
-
- config.vm.define "debian", :primary => true do |debian|
- debian.vm.box = "debian/jessie64"
- debian.vm.provision "shell", :path => "provisioners/shell/debian.sh"
- end
-
- config.vm.define "centos", :autostart => false do |centos|
- centos.vm.box = "centos/7"
- centos.vm.provision "shell", :path => "provisioners/shell/centos.sh"
- end
-
- config.vm.provider "virtualbox" do |vb|
- vb.memory = VM_RAM
- end
-
- config.vm.provider "libvirt" do |lv|
- lv.memory = VM_RAM
-
- config.vm.synced_folder ".", "/home/vagrant/sync", :disabled => true
- end
-
- config.vm.synced_folder ".", "/vagrant", :disabled => true
- config.vm.synced_folder "../../", "/home/vagrant/go/src/github.com/ethereum/go-ethereum"
-end
diff --git a/containers/vagrant/provisioners/shell/centos.sh b/containers/vagrant/provisioners/shell/centos.sh
deleted file mode 100755
index 744da4bfd..000000000
--- a/containers/vagrant/provisioners/shell/centos.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-sudo yum install -y git wget
-sudo yum update -y
-
-wget --continue https://storage.googleapis.com/golang/go1.8.1.linux-amd64.tar.gz
-sudo tar -C /usr/local -xzf go1.8.1.linux-amd64.tar.gz
-
-GETH_PATH="~vagrant/go/src/github.com/ethereum/go-ethereum/build/bin/"
-
-echo "export PATH=$PATH:/usr/local/go/bin:$GETH_PATH" >> ~vagrant/.bashrc
diff --git a/containers/vagrant/provisioners/shell/debian.sh b/containers/vagrant/provisioners/shell/debian.sh
deleted file mode 100755
index 1c1793336..000000000
--- a/containers/vagrant/provisioners/shell/debian.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-sudo apt-get install -y build-essential git-all wget
-sudo apt-get update
-
-wget --continue https://storage.googleapis.com/golang/go1.8.1.linux-amd64.tar.gz
-sudo tar -C /usr/local -xzf go1.8.1.linux-amd64.tar.gz
-
-GETH_PATH="~vagrant/go/src/github.com/ethereum/go-ethereum/build/bin/"
-
-echo "export PATH=$PATH:/usr/local/go/bin:$GETH_PATH" >> ~vagrant/.bashrc
diff --git a/containers/vagrant/provisioners/shell/ubuntu.sh b/containers/vagrant/provisioners/shell/ubuntu.sh
deleted file mode 100755
index 1c1793336..000000000
--- a/containers/vagrant/provisioners/shell/ubuntu.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-sudo apt-get install -y build-essential git-all wget
-sudo apt-get update
-
-wget --continue https://storage.googleapis.com/golang/go1.8.1.linux-amd64.tar.gz
-sudo tar -C /usr/local -xzf go1.8.1.linux-amd64.tar.gz
-
-GETH_PATH="~vagrant/go/src/github.com/ethereum/go-ethereum/build/bin/"
-
-echo "export PATH=$PATH:/usr/local/go/bin:$GETH_PATH" >> ~vagrant/.bashrc
diff --git a/contracts/chequebook/cheque_test.go b/contracts/chequebook/cheque_test.go
index b7555d081..6b6b28e65 100644
--- a/contracts/chequebook/cheque_test.go
+++ b/contracts/chequebook/cheque_test.go
@@ -281,8 +281,8 @@ func TestDeposit(t *testing.T) {
t.Fatalf("expected balance %v, got %v", exp, chbook.Balance())
}
- // autodeposit every 30ms if new cheque issued
- interval := 30 * time.Millisecond
+ // autodeposit every 200ms if new cheque issued
+ interval := 200 * time.Millisecond
chbook.AutoDeposit(interval, common.Big1, balance)
_, err = chbook.Issue(addr1, amount)
if err != nil {
diff --git a/contracts/chequebook/contract/chequebook.go b/contracts/chequebook/contract/chequebook.go
index ce29b01f0..e275ac9b8 100644
--- a/contracts/chequebook/contract/chequebook.go
+++ b/contracts/chequebook/contract/chequebook.go
@@ -7,17 +7,19 @@ import (
"math/big"
"strings"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
+ "github.com/ethereum/go-ethereum/event"
)
// ChequebookABI is the input ABI used to generate the binding from.
const ChequebookABI = "[{\"constant\":false,\"inputs\":[],\"name\":\"kill\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"\",\"type\":\"address\"}],\"name\":\"sent\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"beneficiary\",\"type\":\"address\"},{\"name\":\"amount\",\"type\":\"uint256\"},{\"name\":\"sig_v\",\"type\":\"uint8\"},{\"name\":\"sig_r\",\"type\":\"bytes32\"},{\"name\":\"sig_s\",\"type\":\"bytes32\"}],\"name\":\"cash\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"fallback\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"deadbeat\",\"type\":\"address\"}],\"name\":\"Overdraft\",\"type\":\"event\"}]"
// ChequebookBin is the compiled bytecode used for deploying new contracts.
-const ChequebookBin = `0x606060405260008054600160a060020a033316600160a060020a03199091161790556102ec806100306000396000f3006060604052600436106100565763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166341c0e1b581146100585780637bf786f81461006b578063fbf788d61461009c575b005b341561006357600080fd5b6100566100ca565b341561007657600080fd5b61008a600160a060020a03600435166100f1565b60405190815260200160405180910390f35b34156100a757600080fd5b610056600160a060020a036004351660243560ff60443516606435608435610103565b60005433600160a060020a03908116911614156100ef57600054600160a060020a0316ff5b565b60016020526000908152604090205481565b600160a060020a0385166000908152600160205260408120548190861161012957600080fd5b3087876040516c01000000000000000000000000600160a060020a03948516810282529290931690910260148301526028820152604801604051809103902091506001828686866040516000815260200160405260006040516020015260405193845260ff90921660208085019190915260408085019290925260608401929092526080909201915160208103908084039060008661646e5a03f115156101cf57600080fd5b505060206040510351600054600160a060020a039081169116146101f257600080fd5b50600160a060020a03808716600090815260016020526040902054860390301631811161026257600160a060020a0387166000818152600160205260409081902088905582156108fc0290839051600060405180830381858888f19350505050151561025d57600080fd5b6102b7565b6000547f2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f97890600160a060020a0316604051600160a060020a03909116815260200160405180910390a186600160a060020a0316ff5b505050505050505600a165627a7a7230582014e927522ca5cd8f68529ac4d3b9cdf36d40e09d8a33b70008248d1abebf79680029`
+const ChequebookBin = `0x606060405260008054600160a060020a033316600160a060020a03199091161790556102ec806100306000396000f3006060604052600436106100565763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166341c0e1b581146100585780637bf786f81461006b578063fbf788d61461009c575b005b341561006357600080fd5b6100566100ca565b341561007657600080fd5b61008a600160a060020a03600435166100f1565b60405190815260200160405180910390f35b34156100a757600080fd5b610056600160a060020a036004351660243560ff60443516606435608435610103565b60005433600160a060020a03908116911614156100ef57600054600160a060020a0316ff5b565b60016020526000908152604090205481565b600160a060020a0385166000908152600160205260408120548190861161012957600080fd5b3087876040516c01000000000000000000000000600160a060020a03948516810282529290931690910260148301526028820152604801604051809103902091506001828686866040516000815260200160405260006040516020015260405193845260ff90921660208085019190915260408085019290925260608401929092526080909201915160208103908084039060008661646e5a03f115156101cf57600080fd5b505060206040510351600054600160a060020a039081169116146101f257600080fd5b50600160a060020a03808716600090815260016020526040902054860390301631811161026257600160a060020a0387166000818152600160205260409081902088905582156108fc0290839051600060405180830381858888f19350505050151561025d57600080fd5b6102b7565b6000547f2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f97890600160a060020a0316604051600160a060020a03909116815260200160405180910390a186600160a060020a0316ff5b505050505050505600a165627a7a72305820533e856fc37e3d64d1706bcc7dfb6b1d490c8d566ea498d9d01ec08965a896ca0029`
// DeployChequebook deploys a new Ethereum contract, binding an instance of Chequebook to it.
func DeployChequebook(auth *bind.TransactOpts, backend bind.ContractBackend) (common.Address, *types.Transaction, *Chequebook, error) {
@@ -29,13 +31,14 @@ func DeployChequebook(auth *bind.TransactOpts, backend bind.ContractBackend) (co
if err != nil {
return common.Address{}, nil, nil, err
}
- return address, tx, &Chequebook{ChequebookCaller: ChequebookCaller{contract: contract}, ChequebookTransactor: ChequebookTransactor{contract: contract}}, nil
+ return address, tx, &Chequebook{ChequebookCaller: ChequebookCaller{contract: contract}, ChequebookTransactor: ChequebookTransactor{contract: contract}, ChequebookFilterer: ChequebookFilterer{contract: contract}}, nil
}
// Chequebook is an auto generated Go binding around an Ethereum contract.
type Chequebook struct {
ChequebookCaller // Read-only binding to the contract
ChequebookTransactor // Write-only binding to the contract
+ ChequebookFilterer // Log filterer for contract events
}
// ChequebookCaller is an auto generated read-only Go binding around an Ethereum contract.
@@ -48,6 +51,11 @@ type ChequebookTransactor struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
+// ChequebookFilterer is an auto generated log filtering Go binding around Ethereum contract events.
+type ChequebookFilterer struct {
+ contract *bind.BoundContract // Generic contract wrapper for the low level calls
+}
+
// ChequebookSession is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type ChequebookSession struct {
@@ -87,16 +95,16 @@ type ChequebookTransactorRaw struct {
// NewChequebook creates a new instance of Chequebook, bound to a specific deployed contract.
func NewChequebook(address common.Address, backend bind.ContractBackend) (*Chequebook, error) {
- contract, err := bindChequebook(address, backend, backend)
+ contract, err := bindChequebook(address, backend, backend, backend)
if err != nil {
return nil, err
}
- return &Chequebook{ChequebookCaller: ChequebookCaller{contract: contract}, ChequebookTransactor: ChequebookTransactor{contract: contract}}, nil
+ return &Chequebook{ChequebookCaller: ChequebookCaller{contract: contract}, ChequebookTransactor: ChequebookTransactor{contract: contract}, ChequebookFilterer: ChequebookFilterer{contract: contract}}, nil
}
// NewChequebookCaller creates a new read-only instance of Chequebook, bound to a specific deployed contract.
func NewChequebookCaller(address common.Address, caller bind.ContractCaller) (*ChequebookCaller, error) {
- contract, err := bindChequebook(address, caller, nil)
+ contract, err := bindChequebook(address, caller, nil, nil)
if err != nil {
return nil, err
}
@@ -105,20 +113,29 @@ func NewChequebookCaller(address common.Address, caller bind.ContractCaller) (*C
// NewChequebookTransactor creates a new write-only instance of Chequebook, bound to a specific deployed contract.
func NewChequebookTransactor(address common.Address, transactor bind.ContractTransactor) (*ChequebookTransactor, error) {
- contract, err := bindChequebook(address, nil, transactor)
+ contract, err := bindChequebook(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &ChequebookTransactor{contract: contract}, nil
}
+// NewChequebookFilterer creates a new log filterer instance of Chequebook, bound to a specific deployed contract.
+func NewChequebookFilterer(address common.Address, filterer bind.ContractFilterer) (*ChequebookFilterer, error) {
+ contract, err := bindChequebook(address, nil, nil, filterer)
+ if err != nil {
+ return nil, err
+ }
+ return &ChequebookFilterer{contract: contract}, nil
+}
+
// bindChequebook binds a generic wrapper to an already deployed contract.
-func bindChequebook(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
+func bindChequebook(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := abi.JSON(strings.NewReader(ChequebookABI))
if err != nil {
return nil, err
}
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
+ return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
@@ -226,3 +243,125 @@ func (_Chequebook *ChequebookSession) Kill() (*types.Transaction, error) {
func (_Chequebook *ChequebookTransactorSession) Kill() (*types.Transaction, error) {
return _Chequebook.Contract.Kill(&_Chequebook.TransactOpts)
}
+
+// ChequebookOverdraftIterator is returned from FilterOverdraft and is used to iterate over the raw logs and unpacked data for Overdraft events raised by the Chequebook contract.
+type ChequebookOverdraftIterator struct {
+ Event *ChequebookOverdraft // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *ChequebookOverdraftIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(ChequebookOverdraft)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(ChequebookOverdraft)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *ChequebookOverdraftIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *ChequebookOverdraftIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// ChequebookOverdraft represents an Overdraft event raised by the Chequebook contract.
+type ChequebookOverdraft struct {
+ Deadbeat common.Address
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterOverdraft is a free log retrieval operation binding the contract event 0x2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f978.
+//
+// Solidity: event Overdraft(deadbeat address)
+func (_Chequebook *ChequebookFilterer) FilterOverdraft(opts *bind.FilterOpts) (*ChequebookOverdraftIterator, error) {
+
+ logs, sub, err := _Chequebook.contract.FilterLogs(opts, "Overdraft")
+ if err != nil {
+ return nil, err
+ }
+ return &ChequebookOverdraftIterator{contract: _Chequebook.contract, event: "Overdraft", logs: logs, sub: sub}, nil
+}
+
+// WatchOverdraft is a free log subscription operation binding the contract event 0x2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f978.
+//
+// Solidity: event Overdraft(deadbeat address)
+func (_Chequebook *ChequebookFilterer) WatchOverdraft(opts *bind.WatchOpts, sink chan<- *ChequebookOverdraft) (event.Subscription, error) {
+
+ logs, sub, err := _Chequebook.contract.WatchLogs(opts, "Overdraft")
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(ChequebookOverdraft)
+ if err := _Chequebook.contract.UnpackLog(event, "Overdraft", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
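A usage sketch (not part of the patch) of the newly generated ChequebookFilterer; the endpoint and contract address are placeholders, and it assumes an ethclient connection satisfies the new bind.ContractFilterer requirement, as the rest of this change implies:

package main

import (
	"log"

	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/contracts/chequebook/contract"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// Placeholder endpoint and address, for illustration only.
	client, err := ethclient.Dial("ws://127.0.0.1:8546")
	if err != nil {
		log.Fatal(err)
	}
	book, err := contract.NewChequebook(common.HexToAddress("0x0000000000000000000000000000000000000000"), client)
	if err != nil {
		log.Fatal(err)
	}
	// Stream Overdraft events through the generated filterer.
	sink := make(chan *contract.ChequebookOverdraft)
	sub, err := book.WatchOverdraft(&bind.WatchOpts{}, sink)
	if err != nil {
		log.Fatal(err)
	}
	defer sub.Unsubscribe()

	for ev := range sink {
		log.Printf("overdraft by %s (block %d)", ev.Deadbeat.Hex(), ev.Raw.BlockNumber)
	}
}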
diff --git a/contracts/chequebook/contract/code.go b/contracts/chequebook/contract/code.go
index 9d1fb169e..d837a9d60 100644
--- a/contracts/chequebook/contract/code.go
+++ b/contracts/chequebook/contract/code.go
@@ -2,4 +2,4 @@ package contract
// ContractDeployedCode is used to detect suicides. This constant needs to be
// updated when the contract code is changed.
-const ContractDeployedCode = "0x6060604052600436106100565763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166341c0e1b581146100585780637bf786f81461006b578063fbf788d61461009c575b005b341561006357600080fd5b6100566100ca565b341561007657600080fd5b61008a600160a060020a03600435166100f1565b60405190815260200160405180910390f35b34156100a757600080fd5b610056600160a060020a036004351660243560ff60443516606435608435610103565b60005433600160a060020a03908116911614156100ef57600054600160a060020a0316ff5b565b60016020526000908152604090205481565b600160a060020a0385166000908152600160205260408120548190861161012957600080fd5b3087876040516c01000000000000000000000000600160a060020a03948516810282529290931690910260148301526028820152604801604051809103902091506001828686866040516000815260200160405260006040516020015260405193845260ff90921660208085019190915260408085019290925260608401929092526080909201915160208103908084039060008661646e5a03f115156101cf57600080fd5b505060206040510351600054600160a060020a039081169116146101f257600080fd5b50600160a060020a03808716600090815260016020526040902054860390301631811161026257600160a060020a0387166000818152600160205260409081902088905582156108fc0290839051600060405180830381858888f19350505050151561025d57600080fd5b6102b7565b6000547f2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f97890600160a060020a0316604051600160a060020a03909116815260200160405180910390a186600160a060020a0316ff5b505050505050505600a165627a7a7230582014e927522ca5cd8f68529ac4d3b9cdf36d40e09d8a33b70008248d1abebf79680029"
+const ContractDeployedCode = "0x6060604052600436106100565763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166341c0e1b581146100585780637bf786f81461006b578063fbf788d61461009c575b005b341561006357600080fd5b6100566100ca565b341561007657600080fd5b61008a600160a060020a03600435166100f1565b60405190815260200160405180910390f35b34156100a757600080fd5b610056600160a060020a036004351660243560ff60443516606435608435610103565b60005433600160a060020a03908116911614156100ef57600054600160a060020a0316ff5b565b60016020526000908152604090205481565b600160a060020a0385166000908152600160205260408120548190861161012957600080fd5b3087876040516c01000000000000000000000000600160a060020a03948516810282529290931690910260148301526028820152604801604051809103902091506001828686866040516000815260200160405260006040516020015260405193845260ff90921660208085019190915260408085019290925260608401929092526080909201915160208103908084039060008661646e5a03f115156101cf57600080fd5b505060206040510351600054600160a060020a039081169116146101f257600080fd5b50600160a060020a03808716600090815260016020526040902054860390301631811161026257600160a060020a0387166000818152600160205260409081902088905582156108fc0290839051600060405180830381858888f19350505050151561025d57600080fd5b6102b7565b6000547f2250e2993c15843b32621c89447cc589ee7a9f049c026986e545d3c2c0c6f97890600160a060020a0316604051600160a060020a03909116815260200160405180910390a186600160a060020a0316ff5b505050505050505600a165627a7a72305820533e856fc37e3d64d1706bcc7dfb6b1d490c8d566ea498d9d01ec08965a896ca0029"
diff --git a/contracts/ens/contract/ens.go b/contracts/ens/contract/ens.go
index acb6a4e4c..cbf6cb05b 100644
--- a/contracts/ens/contract/ens.go
+++ b/contracts/ens/contract/ens.go
@@ -6,17 +6,19 @@ package contract
import (
"strings"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
+ "github.com/ethereum/go-ethereum/event"
)
// ENSABI is the input ABI used to generate the binding from.
const ENSABI = "[{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"resolver\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"owner\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"label\",\"type\":\"bytes32\"},{\"name\":\"owner\",\"type\":\"address\"}],\"name\":\"setSubnodeOwner\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"ttl\",\"type\":\"uint64\"}],\"name\":\"setTTL\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"ttl\",\"outputs\":[{\"name\":\"\",\"type\":\"uint64\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"resolver\",\"type\":\"address\"}],\"name\":\"setResolver\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"owner\",\"type\":\"address\"}],\"name\":\"setOwner\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":true,\"name\":\"label\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"owner\",\"type\":\"address\"}],\"name\":\"NewOwner\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"owner\",\"type\":\"address\"}],\"name\":\"Transfer\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"resolver\",\"type\":\"address\"}],\"name\":\"NewResolver\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"ttl\",\"type\":\"uint64\"}],\"name\":\"NewTTL\",\"type\":\"event\"}]"
// ENSBin is the compiled bytecode used for deploying new contracts.
-const ENSBin = `0x6060604052341561000f57600080fd5b60008080526020527fad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb58054600160a060020a033316600160a060020a0319909116179055610503806100626000396000f3006060604052600436106100825763ffffffff7c01000000000000000000000000000000000000000000000000000000006000350416630178b8bf811461008757806302571be3146100b957806306ab5923146100cf57806314ab9038146100f657806316a25cbd146101195780631896f70a1461014c5780635b0fc9c31461016e575b600080fd5b341561009257600080fd5b61009d600435610190565b604051600160a060020a03909116815260200160405180910390f35b34156100c457600080fd5b61009d6004356101ae565b34156100da57600080fd5b6100f4600435602435600160a060020a03604435166101c9565b005b341561010157600080fd5b6100f460043567ffffffffffffffff6024351661028b565b341561012457600080fd5b61012f600435610357565b60405167ffffffffffffffff909116815260200160405180910390f35b341561015757600080fd5b6100f4600435600160a060020a036024351661038e565b341561017957600080fd5b6100f4600435600160a060020a0360243516610434565b600090815260208190526040902060010154600160a060020a031690565b600090815260208190526040902054600160a060020a031690565b600083815260208190526040812054849033600160a060020a039081169116146101f257600080fd5b8484604051918252602082015260409081019051908190039020915083857fce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e8285604051600160a060020a03909116815260200160405180910390a3506000908152602081905260409020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555050565b600082815260208190526040902054829033600160a060020a039081169116146102b457600080fd5b827f1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa688360405167ffffffffffffffff909116815260200160405180910390a250600091825260208290526040909120600101805467ffffffffffffffff90921674010000000000000000000000000000000000000000027fffffffff0000000000000000ffffffffffffffffffffffffffffffffffffffff909216919091179055565b60009081526020819052604090206001015474010000000000000000000000000000000000000000900467ffffffffffffffff1690565b600082815260208190526040902054829033600160a060020a039081169116146103b757600080fd5b827f335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a083604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120600101805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03909216919091179055565b600082815260208190526040902054829033600160a060020a0390811691161461045d57600080fd5b827fd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d26683604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a039092169190911790555600a165627a7a7230582087c335a130f7bd19015451f7e1dc0e44cdeb5b64393f51a105ee00160711fcff0029`
+const ENSBin = `0x6060604052341561000f57600080fd5b60008080526020527fad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb58054600160a060020a033316600160a060020a0319909116179055610503806100626000396000f3006060604052600436106100825763ffffffff7c01000000000000000000000000000000000000000000000000000000006000350416630178b8bf811461008757806302571be3146100b957806306ab5923146100cf57806314ab9038146100f657806316a25cbd146101195780631896f70a1461014c5780635b0fc9c31461016e575b600080fd5b341561009257600080fd5b61009d600435610190565b604051600160a060020a03909116815260200160405180910390f35b34156100c457600080fd5b61009d6004356101ae565b34156100da57600080fd5b6100f4600435602435600160a060020a03604435166101c9565b005b341561010157600080fd5b6100f460043567ffffffffffffffff6024351661028b565b341561012457600080fd5b61012f600435610357565b60405167ffffffffffffffff909116815260200160405180910390f35b341561015757600080fd5b6100f4600435600160a060020a036024351661038e565b341561017957600080fd5b6100f4600435600160a060020a0360243516610434565b600090815260208190526040902060010154600160a060020a031690565b600090815260208190526040902054600160a060020a031690565b600083815260208190526040812054849033600160a060020a039081169116146101f257600080fd5b8484604051918252602082015260409081019051908190039020915083857fce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e8285604051600160a060020a03909116815260200160405180910390a3506000908152602081905260409020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555050565b600082815260208190526040902054829033600160a060020a039081169116146102b457600080fd5b827f1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa688360405167ffffffffffffffff909116815260200160405180910390a250600091825260208290526040909120600101805467ffffffffffffffff90921674010000000000000000000000000000000000000000027fffffffff0000000000000000ffffffffffffffffffffffffffffffffffffffff909216919091179055565b60009081526020819052604090206001015474010000000000000000000000000000000000000000900467ffffffffffffffff1690565b600082815260208190526040902054829033600160a060020a039081169116146103b757600080fd5b827f335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a083604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120600101805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03909216919091179055565b600082815260208190526040902054829033600160a060020a0390811691161461045d57600080fd5b827fd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d26683604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a039092169190911790555600a165627a7a72305820f4c798d4c84c9912f389f64631e85e8d16c3e6644f8c2e1579936015c7d5f6660029`
// DeployENS deploys a new Ethereum contract, binding an instance of ENS to it.
func DeployENS(auth *bind.TransactOpts, backend bind.ContractBackend) (common.Address, *types.Transaction, *ENS, error) {
@@ -28,13 +30,14 @@ func DeployENS(auth *bind.TransactOpts, backend bind.ContractBackend) (common.Ad
if err != nil {
return common.Address{}, nil, nil, err
}
- return address, tx, &ENS{ENSCaller: ENSCaller{contract: contract}, ENSTransactor: ENSTransactor{contract: contract}}, nil
+ return address, tx, &ENS{ENSCaller: ENSCaller{contract: contract}, ENSTransactor: ENSTransactor{contract: contract}, ENSFilterer: ENSFilterer{contract: contract}}, nil
}
// ENS is an auto generated Go binding around an Ethereum contract.
type ENS struct {
ENSCaller // Read-only binding to the contract
ENSTransactor // Write-only binding to the contract
+ ENSFilterer // Log filterer for contract events
}
// ENSCaller is an auto generated read-only Go binding around an Ethereum contract.
@@ -47,6 +50,11 @@ type ENSTransactor struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
+// ENSFilterer is an auto generated log filtering Go binding around Ethereum contract events.
+type ENSFilterer struct {
+ contract *bind.BoundContract // Generic contract wrapper for the low level calls
+}
+
// ENSSession is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type ENSSession struct {
@@ -86,16 +94,16 @@ type ENSTransactorRaw struct {
// NewENS creates a new instance of ENS, bound to a specific deployed contract.
func NewENS(address common.Address, backend bind.ContractBackend) (*ENS, error) {
- contract, err := bindENS(address, backend, backend)
+ contract, err := bindENS(address, backend, backend, backend)
if err != nil {
return nil, err
}
- return &ENS{ENSCaller: ENSCaller{contract: contract}, ENSTransactor: ENSTransactor{contract: contract}}, nil
+ return &ENS{ENSCaller: ENSCaller{contract: contract}, ENSTransactor: ENSTransactor{contract: contract}, ENSFilterer: ENSFilterer{contract: contract}}, nil
}
// NewENSCaller creates a new read-only instance of ENS, bound to a specific deployed contract.
func NewENSCaller(address common.Address, caller bind.ContractCaller) (*ENSCaller, error) {
- contract, err := bindENS(address, caller, nil)
+ contract, err := bindENS(address, caller, nil, nil)
if err != nil {
return nil, err
}
@@ -104,20 +112,29 @@ func NewENSCaller(address common.Address, caller bind.ContractCaller) (*ENSCalle
// NewENSTransactor creates a new write-only instance of ENS, bound to a specific deployed contract.
func NewENSTransactor(address common.Address, transactor bind.ContractTransactor) (*ENSTransactor, error) {
- contract, err := bindENS(address, nil, transactor)
+ contract, err := bindENS(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &ENSTransactor{contract: contract}, nil
}
+// NewENSFilterer creates a new log filterer instance of ENS, bound to a specific deployed contract.
+func NewENSFilterer(address common.Address, filterer bind.ContractFilterer) (*ENSFilterer, error) {
+ contract, err := bindENS(address, nil, nil, filterer)
+ if err != nil {
+ return nil, err
+ }
+ return &ENSFilterer{contract: contract}, nil
+}
+
// bindENS binds a generic wrapper to an already deployed contract.
-func bindENS(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
+func bindENS(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := abi.JSON(strings.NewReader(ENSABI))
if err != nil {
return nil, err
}
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
+ return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
@@ -319,3 +336,544 @@ func (_ENS *ENSSession) SetTTL(node [32]byte, ttl uint64) (*types.Transaction, e
func (_ENS *ENSTransactorSession) SetTTL(node [32]byte, ttl uint64) (*types.Transaction, error) {
return _ENS.Contract.SetTTL(&_ENS.TransactOpts, node, ttl)
}
+
+// ENSNewOwnerIterator is returned from FilterNewOwner and is used to iterate over the raw logs and unpacked data for NewOwner events raised by the ENS contract.
+type ENSNewOwnerIterator struct {
+ Event *ENSNewOwner // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *ENSNewOwnerIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewOwner)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewOwner)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *ENSNewOwnerIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *ENSNewOwnerIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// ENSNewOwner represents a NewOwner event raised by the ENS contract.
+type ENSNewOwner struct {
+ Node [32]byte
+ Label [32]byte
+ Owner common.Address
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterNewOwner is a free log retrieval operation binding the contract event 0xce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e82.
+//
+// Solidity: event NewOwner(node indexed bytes32, label indexed bytes32, owner address)
+func (_ENS *ENSFilterer) FilterNewOwner(opts *bind.FilterOpts, node [][32]byte, label [][32]byte) (*ENSNewOwnerIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var labelRule []interface{}
+ for _, labelItem := range label {
+ labelRule = append(labelRule, labelItem)
+ }
+
+ logs, sub, err := _ENS.contract.FilterLogs(opts, "NewOwner", nodeRule, labelRule)
+ if err != nil {
+ return nil, err
+ }
+ return &ENSNewOwnerIterator{contract: _ENS.contract, event: "NewOwner", logs: logs, sub: sub}, nil
+}
+
+// WatchNewOwner is a free log subscription operation binding the contract event 0xce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e82.
+//
+// Solidity: event NewOwner(node indexed bytes32, label indexed bytes32, owner address)
+func (_ENS *ENSFilterer) WatchNewOwner(opts *bind.WatchOpts, sink chan<- *ENSNewOwner, node [][32]byte, label [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var labelRule []interface{}
+ for _, labelItem := range label {
+ labelRule = append(labelRule, labelItem)
+ }
+
+ logs, sub, err := _ENS.contract.WatchLogs(opts, "NewOwner", nodeRule, labelRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(ENSNewOwner)
+ if err := _ENS.contract.UnpackLog(event, "NewOwner", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
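Similarly, a sketch (not part of the patch) of the iterator side of the generated bindings, replaying NewOwner events restricted to one indexed node; the endpoint and registry address are placeholders:

package main

import (
	"log"

	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/contracts/ens/contract"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// Placeholder endpoint and registry address, for illustration only.
	client, err := ethclient.Dial("ws://127.0.0.1:8546")
	if err != nil {
		log.Fatal(err)
	}
	ens, err := contract.NewENS(common.HexToAddress("0x0000000000000000000000000000000000000000"), client)
	if err != nil {
		log.Fatal(err)
	}
	// Replay historical NewOwner events, filtered on one indexed node hash
	// (the zero hash is the ENS root node).
	var node [32]byte
	it, err := ens.FilterNewOwner(&bind.FilterOpts{Start: 0}, [][32]byte{node}, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer it.Close()

	for it.Next() {
		log.Printf("node %x / label %x now owned by %s", it.Event.Node, it.Event.Label, it.Event.Owner.Hex())
	}
	if err := it.Error(); err != nil {
		log.Fatal(err)
	}
}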
+// ENSNewResolverIterator is returned from FilterNewResolver and is used to iterate over the raw logs and unpacked data for NewResolver events raised by the ENS contract.
+type ENSNewResolverIterator struct {
+ Event *ENSNewResolver // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *ENSNewResolverIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewResolver)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewResolver)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *ENSNewResolverIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *ENSNewResolverIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// ENSNewResolver represents a NewResolver event raised by the ENS contract.
+type ENSNewResolver struct {
+ Node [32]byte
+ Resolver common.Address
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterNewResolver is a free log retrieval operation binding the contract event 0x335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a0.
+//
+// Solidity: event NewResolver(node indexed bytes32, resolver address)
+func (_ENS *ENSFilterer) FilterNewResolver(opts *bind.FilterOpts, node [][32]byte) (*ENSNewResolverIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.FilterLogs(opts, "NewResolver", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &ENSNewResolverIterator{contract: _ENS.contract, event: "NewResolver", logs: logs, sub: sub}, nil
+}
+
+// WatchNewResolver is a free log subscription operation binding the contract event 0x335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a0.
+//
+// Solidity: event NewResolver(node indexed bytes32, resolver address)
+func (_ENS *ENSFilterer) WatchNewResolver(opts *bind.WatchOpts, sink chan<- *ENSNewResolver, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.WatchLogs(opts, "NewResolver", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(ENSNewResolver)
+ if err := _ENS.contract.UnpackLog(event, "NewResolver", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
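The one-shot Filter* variant pairs with the iterator types above. A small sketch of that pattern, reusing the bound ens value and imports from the previous example and an arbitrary starting block:

// printResolverChanges walks historical NewResolver logs from a given block.
func printResolverChanges(ens *contract.ENS, fromBlock uint64) error {
	// nil matches every node; pass [][32]byte{nodeHash} to restrict the indexed topic.
	it, err := ens.FilterNewResolver(&bind.FilterOpts{Start: fromBlock}, nil)
	if err != nil {
		return err
	}
	defer it.Close()

	for it.Next() {
		fmt.Printf("NewResolver: node=%x resolver=%s\n", it.Event.Node, it.Event.Resolver.Hex())
	}
	return it.Error()
}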
+// ENSNewTTLIterator is returned from FilterNewTTL and is used to iterate over the raw logs and unpacked data for NewTTL events raised by the ENS contract.
+type ENSNewTTLIterator struct {
+ Event *ENSNewTTL // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *ENSNewTTLIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewTTL)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSNewTTL)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *ENSNewTTLIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *ENSNewTTLIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// ENSNewTTL represents a NewTTL event raised by the ENS contract.
+type ENSNewTTL struct {
+ Node [32]byte
+ Ttl uint64
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterNewTTL is a free log retrieval operation binding the contract event 0x1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa68.
+//
+// Solidity: event NewTTL(node indexed bytes32, ttl uint64)
+func (_ENS *ENSFilterer) FilterNewTTL(opts *bind.FilterOpts, node [][32]byte) (*ENSNewTTLIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.FilterLogs(opts, "NewTTL", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &ENSNewTTLIterator{contract: _ENS.contract, event: "NewTTL", logs: logs, sub: sub}, nil
+}
+
+// WatchNewTTL is a free log subscription operation binding the contract event 0x1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa68.
+//
+// Solidity: event NewTTL(node indexed bytes32, ttl uint64)
+func (_ENS *ENSFilterer) WatchNewTTL(opts *bind.WatchOpts, sink chan<- *ENSNewTTL, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.WatchLogs(opts, "NewTTL", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(ENSNewTTL)
+ if err := _ENS.contract.UnpackLog(event, "NewTTL", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
+// ENSTransferIterator is returned from FilterTransfer and is used to iterate over the raw logs and unpacked data for Transfer events raised by the ENS contract.
+type ENSTransferIterator struct {
+ Event *ENSTransfer // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *ENSTransferIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSTransfer)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(ENSTransfer)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *ENSTransferIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *ENSTransferIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// ENSTransfer represents a Transfer event raised by the ENS contract.
+type ENSTransfer struct {
+ Node [32]byte
+ Owner common.Address
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterTransfer is a free log retrieval operation binding the contract event 0xd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d266.
+//
+// Solidity: event Transfer(node indexed bytes32, owner address)
+func (_ENS *ENSFilterer) FilterTransfer(opts *bind.FilterOpts, node [][32]byte) (*ENSTransferIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.FilterLogs(opts, "Transfer", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &ENSTransferIterator{contract: _ENS.contract, event: "Transfer", logs: logs, sub: sub}, nil
+}
+
+// WatchTransfer is a free log subscription operation binding the contract event 0xd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d266.
+//
+// Solidity: event Transfer(node indexed bytes32, owner address)
+func (_ENS *ENSFilterer) WatchTransfer(opts *bind.WatchOpts, sink chan<- *ENSTransfer, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _ENS.contract.WatchLogs(opts, "Transfer", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(ENSTransfer)
+ if err := _ENS.contract.UnpackLog(event, "Transfer", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
diff --git a/contracts/ens/contract/fifsregistrar.go b/contracts/ens/contract/fifsregistrar.go
index fdc9b9c1b..a08380adf 100644
--- a/contracts/ens/contract/fifsregistrar.go
+++ b/contracts/ens/contract/fifsregistrar.go
@@ -16,7 +16,7 @@ import (
const FIFSRegistrarABI = "[{\"constant\":false,\"inputs\":[{\"name\":\"subnode\",\"type\":\"bytes32\"},{\"name\":\"owner\",\"type\":\"address\"}],\"name\":\"register\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"name\":\"ensAddr\",\"type\":\"address\"},{\"name\":\"node\",\"type\":\"bytes32\"}],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"}]"
// FIFSRegistrarBin is the compiled bytecode used for deploying new contracts.
-const FIFSRegistrarBin = `0x6060604052341561000f57600080fd5b604051604080610224833981016040528080519190602001805160008054600160a060020a03909516600160a060020a03199095169490941790935550506001556101c58061005f6000396000f3006060604052600436106100275763ffffffff60e060020a600035041663d22057a9811461002c575b600080fd5b341561003757600080fd5b61004e600435600160a060020a0360243516610050565b005b816000806001548360405191825260208201526040908101905190819003902060008054919350600160a060020a03909116906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b15156100c857600080fd5b6102c65a03f115156100d957600080fd5b5050506040518051915050600160a060020a0381161580159061010e575033600160a060020a031681600160a060020a031614155b1561011857600080fd5b600054600154600160a060020a03909116906306ab592390878760405160e060020a63ffffffff861602815260048101939093526024830191909152600160a060020a03166044820152606401600060405180830381600087803b151561017e57600080fd5b6102c65a03f1151561018f57600080fd5b50505050505050505600a165627a7a723058209b0c0f4ed76e4fe49a71d4b838ab3d00d6bad29021172db7ced9f36abcafbf510029`
+const FIFSRegistrarBin = `0x6060604052341561000f57600080fd5b604051604080610224833981016040528080519190602001805160008054600160a060020a03909516600160a060020a03199095169490941790935550506001556101c58061005f6000396000f3006060604052600436106100275763ffffffff60e060020a600035041663d22057a9811461002c575b600080fd5b341561003757600080fd5b61004e600435600160a060020a0360243516610050565b005b816000806001548360405191825260208201526040908101905190819003902060008054919350600160a060020a03909116906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b15156100c857600080fd5b6102c65a03f115156100d957600080fd5b5050506040518051915050600160a060020a0381161580159061010e575033600160a060020a031681600160a060020a031614155b1561011857600080fd5b600054600154600160a060020a03909116906306ab592390878760405160e060020a63ffffffff861602815260048101939093526024830191909152600160a060020a03166044820152606401600060405180830381600087803b151561017e57600080fd5b6102c65a03f1151561018f57600080fd5b50505050505050505600a165627a7a723058206fb963cb168d5e3a51af12cd6bb23e324dbd32dd4954f43653ba27e66b68ea650029`
// DeployFIFSRegistrar deploys a new Ethereum contract, binding an instance of FIFSRegistrar to it.
func DeployFIFSRegistrar(auth *bind.TransactOpts, backend bind.ContractBackend, ensAddr common.Address, node [32]byte) (common.Address, *types.Transaction, *FIFSRegistrar, error) {
@@ -28,13 +28,14 @@ func DeployFIFSRegistrar(auth *bind.TransactOpts, backend bind.ContractBackend,
if err != nil {
return common.Address{}, nil, nil, err
}
- return address, tx, &FIFSRegistrar{FIFSRegistrarCaller: FIFSRegistrarCaller{contract: contract}, FIFSRegistrarTransactor: FIFSRegistrarTransactor{contract: contract}}, nil
+ return address, tx, &FIFSRegistrar{FIFSRegistrarCaller: FIFSRegistrarCaller{contract: contract}, FIFSRegistrarTransactor: FIFSRegistrarTransactor{contract: contract}, FIFSRegistrarFilterer: FIFSRegistrarFilterer{contract: contract}}, nil
}
// FIFSRegistrar is an auto generated Go binding around an Ethereum contract.
type FIFSRegistrar struct {
FIFSRegistrarCaller // Read-only binding to the contract
FIFSRegistrarTransactor // Write-only binding to the contract
+ FIFSRegistrarFilterer // Log filterer for contract events
}
// FIFSRegistrarCaller is an auto generated read-only Go binding around an Ethereum contract.
@@ -47,6 +48,11 @@ type FIFSRegistrarTransactor struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
+// FIFSRegistrarFilterer is an auto generated log filtering Go binding around Ethereum contract events.
+type FIFSRegistrarFilterer struct {
+ contract *bind.BoundContract // Generic contract wrapper for the low level calls
+}
+
// FIFSRegistrarSession is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type FIFSRegistrarSession struct {
@@ -86,16 +92,16 @@ type FIFSRegistrarTransactorRaw struct {
// NewFIFSRegistrar creates a new instance of FIFSRegistrar, bound to a specific deployed contract.
func NewFIFSRegistrar(address common.Address, backend bind.ContractBackend) (*FIFSRegistrar, error) {
- contract, err := bindFIFSRegistrar(address, backend, backend)
+ contract, err := bindFIFSRegistrar(address, backend, backend, backend)
if err != nil {
return nil, err
}
- return &FIFSRegistrar{FIFSRegistrarCaller: FIFSRegistrarCaller{contract: contract}, FIFSRegistrarTransactor: FIFSRegistrarTransactor{contract: contract}}, nil
+ return &FIFSRegistrar{FIFSRegistrarCaller: FIFSRegistrarCaller{contract: contract}, FIFSRegistrarTransactor: FIFSRegistrarTransactor{contract: contract}, FIFSRegistrarFilterer: FIFSRegistrarFilterer{contract: contract}}, nil
}
// NewFIFSRegistrarCaller creates a new read-only instance of FIFSRegistrar, bound to a specific deployed contract.
func NewFIFSRegistrarCaller(address common.Address, caller bind.ContractCaller) (*FIFSRegistrarCaller, error) {
- contract, err := bindFIFSRegistrar(address, caller, nil)
+ contract, err := bindFIFSRegistrar(address, caller, nil, nil)
if err != nil {
return nil, err
}
@@ -104,20 +110,29 @@ func NewFIFSRegistrarCaller(address common.Address, caller bind.ContractCaller)
// NewFIFSRegistrarTransactor creates a new write-only instance of FIFSRegistrar, bound to a specific deployed contract.
func NewFIFSRegistrarTransactor(address common.Address, transactor bind.ContractTransactor) (*FIFSRegistrarTransactor, error) {
- contract, err := bindFIFSRegistrar(address, nil, transactor)
+ contract, err := bindFIFSRegistrar(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &FIFSRegistrarTransactor{contract: contract}, nil
}
+// NewFIFSRegistrarFilterer creates a new log filterer instance of FIFSRegistrar, bound to a specific deployed contract.
+func NewFIFSRegistrarFilterer(address common.Address, filterer bind.ContractFilterer) (*FIFSRegistrarFilterer, error) {
+ contract, err := bindFIFSRegistrar(address, nil, nil, filterer)
+ if err != nil {
+ return nil, err
+ }
+ return &FIFSRegistrarFilterer{contract: contract}, nil
+}
+
// bindFIFSRegistrar binds a generic wrapper to an already deployed contract.
-func bindFIFSRegistrar(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
+func bindFIFSRegistrar(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := abi.JSON(strings.NewReader(FIFSRegistrarABI))
if err != nil {
return nil, err
}
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
+ return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
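Since bindFIFSRegistrar and bind.NewBoundContract now take a fourth, filterer argument, a backend also has to satisfy bind.ContractFilterer. A sketch of the wiring, with placeholder endpoint and address, on the assumption that an *ethclient.Client fills the caller, transactor and filterer roles at once:

package main

import (
	"log"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/contracts/ens/contract"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	client, err := ethclient.Dial("ws://127.0.0.1:8546") // placeholder endpoint
	if err != nil {
		log.Fatal(err)
	}
	registrarAddr := common.HexToAddress("0x0000000000000000000000000000000000000000") // placeholder

	// Combined binding: the same backend is passed as caller, transactor and filterer.
	if _, err := contract.NewFIFSRegistrar(registrarAddr, client); err != nil {
		log.Fatal(err)
	}
	// Filterer-only binding: the caller and transactor slots stay nil inside bindFIFSRegistrar.
	if _, err := contract.NewFIFSRegistrarFilterer(registrarAddr, client); err != nil {
		log.Fatal(err)
	}
}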
diff --git a/contracts/ens/contract/publicresolver.go b/contracts/ens/contract/publicresolver.go
index 72e5c5582..c567d5884 100644
--- a/contracts/ens/contract/publicresolver.go
+++ b/contracts/ens/contract/publicresolver.go
@@ -7,17 +7,19 @@ import (
"math/big"
"strings"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
+ "github.com/ethereum/go-ethereum/event"
)
// PublicResolverABI is the input ABI used to generate the binding from.
const PublicResolverABI = "[{\"constant\":true,\"inputs\":[{\"name\":\"interfaceID\",\"type\":\"bytes4\"}],\"name\":\"supportsInterface\",\"outputs\":[{\"name\":\"\",\"type\":\"bool\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"key\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}],\"name\":\"setText\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"contentTypes\",\"type\":\"uint256\"}],\"name\":\"ABI\",\"outputs\":[{\"name\":\"contentType\",\"type\":\"uint256\"},{\"name\":\"data\",\"type\":\"bytes\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"x\",\"type\":\"bytes32\"},{\"name\":\"y\",\"type\":\"bytes32\"}],\"name\":\"setPubkey\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"content\",\"outputs\":[{\"name\":\"ret\",\"type\":\"bytes32\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"addr\",\"outputs\":[{\"name\":\"ret\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"key\",\"type\":\"string\"}],\"name\":\"text\",\"outputs\":[{\"name\":\"ret\",\"type\":\"string\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"contentType\",\"type\":\"uint256\"},{\"name\":\"data\",\"type\":\"bytes\"}],\"name\":\"setABI\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"name\",\"outputs\":[{\"name\":\"ret\",\"type\":\"string\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"name\",\"type\":\"string\"}],\"name\":\"setName\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"hash\",\"type\":\"bytes32\"}],\"name\":\"setContent\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"}],\"name\":\"pubkey\",\"outputs\":[{\"name\":\"x\",\"type\":\"bytes32\"},{\"name\":\"y\",\"type\":\"bytes32\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"node\",\"type\":\"bytes32\"},{\"name\":\"addr\",\"type\":\"address\"}],\"name\":\"setAddr\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"name\":\"ensAddr\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"a\",\"type\":\"address\"}],\"name\":\"AddrChanged\",\"type\":\"event\"},{\"anonymous\":false,
\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"hash\",\"type\":\"bytes32\"}],\"name\":\"ContentChanged\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"name\",\"type\":\"string\"}],\"name\":\"NameChanged\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":true,\"name\":\"contentType\",\"type\":\"uint256\"}],\"name\":\"ABIChanged\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"x\",\"type\":\"bytes32\"},{\"indexed\":false,\"name\":\"y\",\"type\":\"bytes32\"}],\"name\":\"PubkeyChanged\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"name\":\"node\",\"type\":\"bytes32\"},{\"indexed\":true,\"name\":\"indexedKey\",\"type\":\"string\"},{\"indexed\":false,\"name\":\"key\",\"type\":\"string\"}],\"name\":\"TextChanged\",\"type\":\"event\"}]"
// PublicResolverBin is the compiled bytecode used for deploying new contracts.
-const PublicResolverBin = `0x6060604052341561000f57600080fd5b6040516020806111b28339810160405280805160008054600160a060020a03909216600160a060020a0319909216919091179055505061115e806100546000396000f3006060604052600436106100ab5763ffffffff60e060020a60003504166301ffc9a781146100b057806310f13a8c146100e45780632203ab561461017e57806329cd62ea146102155780632dff6941146102315780633b3b57de1461025957806359d1d43c1461028b578063623195b014610358578063691f3431146103b457806377372213146103ca578063c3d014d614610420578063c869023314610439578063d5fa2b0014610467575b600080fd5b34156100bb57600080fd5b6100d0600160e060020a031960043516610489565b604051901515815260200160405180910390f35b34156100ef57600080fd5b61017c600480359060446024803590810190830135806020601f8201819004810201604051908101604052818152929190602084018383808284378201915050505050509190803590602001908201803590602001908080601f0160208091040260200160405190810160405281815292919060208401838380828437509496506105f695505050505050565b005b341561018957600080fd5b610197600435602435610807565b60405182815260406020820181815290820183818151815260200191508051906020019080838360005b838110156101d95780820151838201526020016101c1565b50505050905090810190601f1680156102065780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b341561022057600080fd5b61017c600435602435604435610931565b341561023c57600080fd5b610247600435610a30565b60405190815260200160405180910390f35b341561026457600080fd5b61026f600435610a46565b604051600160a060020a03909116815260200160405180910390f35b341561029657600080fd5b6102e1600480359060446024803590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610a6195505050505050565b60405160208082528190810183818151815260200191508051906020019080838360005b8381101561031d578082015183820152602001610305565b50505050905090810190601f16801561034a5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b341561036357600080fd5b61017c600480359060248035919060649060443590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610b8095505050505050565b34156103bf57600080fd5b6102e1600435610c7c565b34156103d557600080fd5b61017c600480359060446024803590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610d4295505050505050565b341561042b57600080fd5b61017c600435602435610e8c565b341561044457600080fd5b61044f600435610f65565b60405191825260208201526040908101905180910390f35b341561047257600080fd5b61017c600435600160a060020a0360243516610f82565b6000600160e060020a031982167f3b3b57de0000000000000000000000000000000000000000000000000000000014806104ec5750600160e060020a031982167fd8389dc500000000000000000000000000000000000000000000000000000000145b806105205750600160e060020a031982167f691f343100000000000000000000000000000000000000000000000000000000145b806105545750600160e060020a031982167f2203ab5600000000000000000000000000000000000000000000000000000000145b806105885750600160e060020a031982167fc869023300000000000000000000000000000000000000000000000000000000145b806105bc5750600160e060020a031982167f59d1d43c00000000000000000000000000000000000000000000000000000000145b806105f05750600160e060020a031982167f01ffc9a700000000000000000000000000000000000000000000000000000000145b92915050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b151561064f57600080fd5b6102c65a03f1151561066057600080fd5b50505060405180519050600160a060020a031614151561067f57600080fd5b60008481526001602052604090819
02083916005909101908590518082805190602001908083835b602083106106c65780518252601f1990920191602091820191016106a7565b6001836020036101000a038019825116818451168082178552505050505050905001915050908152602001604051809103902090805161070a929160200190611085565b50826040518082805190602001908083835b6020831061073b5780518252601f19909201916020918201910161071c565b6001836020036101000a0380198251168184511617909252505050919091019250604091505051908190039020847fd8c9334b1a9c2f9da342a0a2b32629c1a229b6445dad78947f674b44444a75508560405160208082528190810183818151815260200191508051906020019080838360005b838110156107c75780820151838201526020016107af565b50505050905090810190601f1680156107f45780820380516001836020036101000a031916815260200191505b509250505060405180910390a350505050565b6000610811611103565b60008481526001602081905260409091209092505b838311610924578284161580159061085f5750600083815260068201602052604081205460026000196101006001841615020190911604115b15610919578060060160008481526020019081526020016000208054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561090d5780601f106108e25761010080835404028352916020019161090d565b820191906000526020600020905b8154815290600101906020018083116108f057829003601f168201915b50505050509150610929565b600290920291610826565b600092505b509250929050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b151561098a57600080fd5b6102c65a03f1151561099b57600080fd5b50505060405180519050600160a060020a03161415156109ba57600080fd5b6040805190810160409081528482526020808301859052600087815260019091522060030181518155602082015160019091015550837f1d6f5e03d3f63eb58751986629a5439baee5079ff04f345becb66e23eb154e46848460405191825260208201526040908101905180910390a250505050565b6000908152600160208190526040909120015490565b600090815260016020526040902054600160a060020a031690565b610a69611103565b60008381526001602052604090819020600501908390518082805190602001908083835b60208310610aac5780518252601f199092019160209182019101610a8d565b6001836020036101000a03801982511681845116808217855250505050505090500191505090815260200160405180910390208054600181600116156101000203166002900480601f016020809104026020016040519081016040528092919081815260200182805460018160011615610100020316600290048015610b735780601f10610b4857610100808354040283529160200191610b73565b820191906000526020600020905b815481529060010190602001808311610b5657829003601f168201915b5050505050905092915050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610bd957600080fd5b6102c65a03f11515610bea57600080fd5b50505060405180519050600160a060020a0316141515610c0957600080fd5b6000198301831615610c1a57600080fd5b60008481526001602090815260408083208684526006019091529020828051610c47929160200190611085565b5082847faa121bbeef5f32f5961a2a28966e769023910fc9479059ee3495d4c1a696efe360405160405180910390a350505050565b610c84611103565b6001600083600019166000191681526020019081526020016000206002018054600181600116156101000203166002900480601f016020809104026020016040519081016040528092919081815260200182805460018160011615610100020316600290048015610d365780601f10610d0b57610100808354040283529160200191610d36565b820191906000526020600020905b815481529060010190602001808311610d1957829003601f168201915b50505050509050919050565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152
602401602060405180830381600087803b1515610d9b57600080fd5b6102c65a03f11515610dac57600080fd5b50505060405180519050600160a060020a0316141515610dcb57600080fd5b6000838152600160205260409020600201828051610ded929160200190611085565b50827fb7d29e911041e8d9b843369e890bcb72c9388692ba48b65ac54e7214c4c348f78360405160208082528190810183818151815260200191508051906020019080838360005b83811015610e4d578082015183820152602001610e35565b50505050905090810190601f168015610e7a5780820380516001836020036101000a031916815260200191505b509250505060405180910390a2505050565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610ee557600080fd5b6102c65a03f11515610ef657600080fd5b50505060405180519050600160a060020a0316141515610f1557600080fd5b6000838152600160208190526040918290200183905583907f0424b6fe0d9c3bdbece0e7879dc241bb0c22e900be8b6c168b4ee08bd9bf83bc9084905190815260200160405180910390a2505050565b600090815260016020526040902060038101546004909101549091565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610fdb57600080fd5b6102c65a03f11515610fec57600080fd5b50505060405180519050600160a060020a031614151561100b57600080fd5b60008381526001602052604090819020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03851617905583907f52d7d861f09ab3d26239d492e8968629f95e9e318cf0b73bfddc441522a15fd290849051600160a060020a03909116815260200160405180910390a2505050565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f106110c657805160ff19168380011785556110f3565b828001600101855582156110f3579182015b828111156110f35782518255916020019190600101906110d8565b506110ff929150611115565b5090565b60206040519081016040526000815290565b61112f91905b808211156110ff576000815560010161111b565b905600a165627a7a72305820691c9aa3f737ab0ca25e23bc35cc10d4b93067d8a1fc5c9266b66365e32ed85a0029`
+const PublicResolverBin = `0x6060604052341561000f57600080fd5b6040516020806111b28339810160405280805160008054600160a060020a03909216600160a060020a0319909216919091179055505061115e806100546000396000f3006060604052600436106100ab5763ffffffff60e060020a60003504166301ffc9a781146100b057806310f13a8c146100e45780632203ab561461017e57806329cd62ea146102155780632dff6941146102315780633b3b57de1461025957806359d1d43c1461028b578063623195b014610358578063691f3431146103b457806377372213146103ca578063c3d014d614610420578063c869023314610439578063d5fa2b0014610467575b600080fd5b34156100bb57600080fd5b6100d0600160e060020a031960043516610489565b604051901515815260200160405180910390f35b34156100ef57600080fd5b61017c600480359060446024803590810190830135806020601f8201819004810201604051908101604052818152929190602084018383808284378201915050505050509190803590602001908201803590602001908080601f0160208091040260200160405190810160405281815292919060208401838380828437509496506105f695505050505050565b005b341561018957600080fd5b610197600435602435610807565b60405182815260406020820181815290820183818151815260200191508051906020019080838360005b838110156101d95780820151838201526020016101c1565b50505050905090810190601f1680156102065780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b341561022057600080fd5b61017c600435602435604435610931565b341561023c57600080fd5b610247600435610a30565b60405190815260200160405180910390f35b341561026457600080fd5b61026f600435610a46565b604051600160a060020a03909116815260200160405180910390f35b341561029657600080fd5b6102e1600480359060446024803590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610a6195505050505050565b60405160208082528190810183818151815260200191508051906020019080838360005b8381101561031d578082015183820152602001610305565b50505050905090810190601f16801561034a5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b341561036357600080fd5b61017c600480359060248035919060649060443590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610b8095505050505050565b34156103bf57600080fd5b6102e1600435610c7c565b34156103d557600080fd5b61017c600480359060446024803590810190830135806020601f82018190048102016040519081016040528181529291906020840183838082843750949650610d4295505050505050565b341561042b57600080fd5b61017c600435602435610e8c565b341561044457600080fd5b61044f600435610f65565b60405191825260208201526040908101905180910390f35b341561047257600080fd5b61017c600435600160a060020a0360243516610f82565b6000600160e060020a031982167f3b3b57de0000000000000000000000000000000000000000000000000000000014806104ec5750600160e060020a031982167fd8389dc500000000000000000000000000000000000000000000000000000000145b806105205750600160e060020a031982167f691f343100000000000000000000000000000000000000000000000000000000145b806105545750600160e060020a031982167f2203ab5600000000000000000000000000000000000000000000000000000000145b806105885750600160e060020a031982167fc869023300000000000000000000000000000000000000000000000000000000145b806105bc5750600160e060020a031982167f59d1d43c00000000000000000000000000000000000000000000000000000000145b806105f05750600160e060020a031982167f01ffc9a700000000000000000000000000000000000000000000000000000000145b92915050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b151561064f57600080fd5b6102c65a03f1151561066057600080fd5b50505060405180519050600160a060020a031614151561067f57600080fd5b60008481526001602052604090819
02083916005909101908590518082805190602001908083835b602083106106c65780518252601f1990920191602091820191016106a7565b6001836020036101000a038019825116818451168082178552505050505050905001915050908152602001604051809103902090805161070a929160200190611085565b50826040518082805190602001908083835b6020831061073b5780518252601f19909201916020918201910161071c565b6001836020036101000a0380198251168184511617909252505050919091019250604091505051908190039020847fd8c9334b1a9c2f9da342a0a2b32629c1a229b6445dad78947f674b44444a75508560405160208082528190810183818151815260200191508051906020019080838360005b838110156107c75780820151838201526020016107af565b50505050905090810190601f1680156107f45780820380516001836020036101000a031916815260200191505b509250505060405180910390a350505050565b6000610811611103565b60008481526001602081905260409091209092505b838311610924578284161580159061085f5750600083815260068201602052604081205460026000196101006001841615020190911604115b15610919578060060160008481526020019081526020016000208054600181600116156101000203166002900480601f01602080910402602001604051908101604052809291908181526020018280546001816001161561010002031660029004801561090d5780601f106108e25761010080835404028352916020019161090d565b820191906000526020600020905b8154815290600101906020018083116108f057829003601f168201915b50505050509150610929565b600290920291610826565b600092505b509250929050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b151561098a57600080fd5b6102c65a03f1151561099b57600080fd5b50505060405180519050600160a060020a03161415156109ba57600080fd5b6040805190810160409081528482526020808301859052600087815260019091522060030181518155602082015160019091015550837f1d6f5e03d3f63eb58751986629a5439baee5079ff04f345becb66e23eb154e46848460405191825260208201526040908101905180910390a250505050565b6000908152600160208190526040909120015490565b600090815260016020526040902054600160a060020a031690565b610a69611103565b60008381526001602052604090819020600501908390518082805190602001908083835b60208310610aac5780518252601f199092019160209182019101610a8d565b6001836020036101000a03801982511681845116808217855250505050505090500191505090815260200160405180910390208054600181600116156101000203166002900480601f016020809104026020016040519081016040528092919081815260200182805460018160011615610100020316600290048015610b735780601f10610b4857610100808354040283529160200191610b73565b820191906000526020600020905b815481529060010190602001808311610b5657829003601f168201915b5050505050905092915050565b600080548491600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610bd957600080fd5b6102c65a03f11515610bea57600080fd5b50505060405180519050600160a060020a0316141515610c0957600080fd5b6000198301831615610c1a57600080fd5b60008481526001602090815260408083208684526006019091529020828051610c47929160200190611085565b5082847faa121bbeef5f32f5961a2a28966e769023910fc9479059ee3495d4c1a696efe360405160405180910390a350505050565b610c84611103565b6001600083600019166000191681526020019081526020016000206002018054600181600116156101000203166002900480601f016020809104026020016040519081016040528092919081815260200182805460018160011615610100020316600290048015610d365780601f10610d0b57610100808354040283529160200191610d36565b820191906000526020600020905b815481529060010190602001808311610d1957829003601f168201915b50505050509050919050565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152
602401602060405180830381600087803b1515610d9b57600080fd5b6102c65a03f11515610dac57600080fd5b50505060405180519050600160a060020a0316141515610dcb57600080fd5b6000838152600160205260409020600201828051610ded929160200190611085565b50827fb7d29e911041e8d9b843369e890bcb72c9388692ba48b65ac54e7214c4c348f78360405160208082528190810183818151815260200191508051906020019080838360005b83811015610e4d578082015183820152602001610e35565b50505050905090810190601f168015610e7a5780820380516001836020036101000a031916815260200191505b509250505060405180910390a2505050565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610ee557600080fd5b6102c65a03f11515610ef657600080fd5b50505060405180519050600160a060020a0316141515610f1557600080fd5b6000838152600160208190526040918290200183905583907f0424b6fe0d9c3bdbece0e7879dc241bb0c22e900be8b6c168b4ee08bd9bf83bc9084905190815260200160405180910390a2505050565b600090815260016020526040902060038101546004909101549091565b600080548391600160a060020a033381169216906302571be39084906040516020015260405160e060020a63ffffffff84160281526004810191909152602401602060405180830381600087803b1515610fdb57600080fd5b6102c65a03f11515610fec57600080fd5b50505060405180519050600160a060020a031614151561100b57600080fd5b60008381526001602052604090819020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03851617905583907f52d7d861f09ab3d26239d492e8968629f95e9e318cf0b73bfddc441522a15fd290849051600160a060020a03909116815260200160405180910390a2505050565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f106110c657805160ff19168380011785556110f3565b828001600101855582156110f3579182015b828111156110f35782518255916020019190600101906110d8565b506110ff929150611115565b5090565b60206040519081016040526000815290565b61112f91905b808211156110ff576000815560010161111b565b905600a165627a7a723058201ecacbc445b9fbcd91b0ab164389f69d7283b856883bc7437eeed1008345a4920029`
// DeployPublicResolver deploys a new Ethereum contract, binding an instance of PublicResolver to it.
func DeployPublicResolver(auth *bind.TransactOpts, backend bind.ContractBackend, ensAddr common.Address) (common.Address, *types.Transaction, *PublicResolver, error) {
@@ -29,13 +31,14 @@ func DeployPublicResolver(auth *bind.TransactOpts, backend bind.ContractBackend,
if err != nil {
return common.Address{}, nil, nil, err
}
- return address, tx, &PublicResolver{PublicResolverCaller: PublicResolverCaller{contract: contract}, PublicResolverTransactor: PublicResolverTransactor{contract: contract}}, nil
+ return address, tx, &PublicResolver{PublicResolverCaller: PublicResolverCaller{contract: contract}, PublicResolverTransactor: PublicResolverTransactor{contract: contract}, PublicResolverFilterer: PublicResolverFilterer{contract: contract}}, nil
}
// PublicResolver is an auto generated Go binding around an Ethereum contract.
type PublicResolver struct {
PublicResolverCaller // Read-only binding to the contract
PublicResolverTransactor // Write-only binding to the contract
+ PublicResolverFilterer // Log filterer for contract events
}
// PublicResolverCaller is an auto generated read-only Go binding around an Ethereum contract.
@@ -48,6 +51,11 @@ type PublicResolverTransactor struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
+// PublicResolverFilterer is an auto generated log filtering Go binding around Ethereum contract events.
+type PublicResolverFilterer struct {
+ contract *bind.BoundContract // Generic contract wrapper for the low level calls
+}
+
// PublicResolverSession is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type PublicResolverSession struct {
@@ -87,16 +95,16 @@ type PublicResolverTransactorRaw struct {
// NewPublicResolver creates a new instance of PublicResolver, bound to a specific deployed contract.
func NewPublicResolver(address common.Address, backend bind.ContractBackend) (*PublicResolver, error) {
- contract, err := bindPublicResolver(address, backend, backend)
+ contract, err := bindPublicResolver(address, backend, backend, backend)
if err != nil {
return nil, err
}
- return &PublicResolver{PublicResolverCaller: PublicResolverCaller{contract: contract}, PublicResolverTransactor: PublicResolverTransactor{contract: contract}}, nil
+ return &PublicResolver{PublicResolverCaller: PublicResolverCaller{contract: contract}, PublicResolverTransactor: PublicResolverTransactor{contract: contract}, PublicResolverFilterer: PublicResolverFilterer{contract: contract}}, nil
}
// NewPublicResolverCaller creates a new read-only instance of PublicResolver, bound to a specific deployed contract.
func NewPublicResolverCaller(address common.Address, caller bind.ContractCaller) (*PublicResolverCaller, error) {
- contract, err := bindPublicResolver(address, caller, nil)
+ contract, err := bindPublicResolver(address, caller, nil, nil)
if err != nil {
return nil, err
}
@@ -105,20 +113,29 @@ func NewPublicResolverCaller(address common.Address, caller bind.ContractCaller)
// NewPublicResolverTransactor creates a new write-only instance of PublicResolver, bound to a specific deployed contract.
func NewPublicResolverTransactor(address common.Address, transactor bind.ContractTransactor) (*PublicResolverTransactor, error) {
- contract, err := bindPublicResolver(address, nil, transactor)
+ contract, err := bindPublicResolver(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &PublicResolverTransactor{contract: contract}, nil
}
+// NewPublicResolverFilterer creates a new log filterer instance of PublicResolver, bound to a specific deployed contract.
+func NewPublicResolverFilterer(address common.Address, filterer bind.ContractFilterer) (*PublicResolverFilterer, error) {
+ contract, err := bindPublicResolver(address, nil, nil, filterer)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverFilterer{contract: contract}, nil
+}
+
// bindPublicResolver binds a generic wrapper to an already deployed contract.
-func bindPublicResolver(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
+func bindPublicResolver(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := abi.JSON(strings.NewReader(PublicResolverABI))
if err != nil {
return nil, err
}
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
+ return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
@@ -486,3 +503,819 @@ func (_PublicResolver *PublicResolverSession) SetText(node [32]byte, key string,
func (_PublicResolver *PublicResolverTransactorSession) SetText(node [32]byte, key string, value string) (*types.Transaction, error) {
return _PublicResolver.Contract.SetText(&_PublicResolver.TransactOpts, node, key, value)
}
+
+// PublicResolverABIChangedIterator is returned from FilterABIChanged and is used to iterate over the raw logs and unpacked data for ABIChanged events raised by the PublicResolver contract.
+type PublicResolverABIChangedIterator struct {
+ Event *PublicResolverABIChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverABIChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverABIChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverABIChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverABIChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverABIChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverABIChanged represents an ABIChanged event raised by the PublicResolver contract.
+type PublicResolverABIChanged struct {
+ Node [32]byte
+ ContentType *big.Int
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterABIChanged is a free log retrieval operation binding the contract event 0xaa121bbeef5f32f5961a2a28966e769023910fc9479059ee3495d4c1a696efe3.
+//
+// Solidity: event ABIChanged(node indexed bytes32, contentType indexed uint256)
+func (_PublicResolver *PublicResolverFilterer) FilterABIChanged(opts *bind.FilterOpts, node [][32]byte, contentType []*big.Int) (*PublicResolverABIChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var contentTypeRule []interface{}
+ for _, contentTypeItem := range contentType {
+ contentTypeRule = append(contentTypeRule, contentTypeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "ABIChanged", nodeRule, contentTypeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverABIChangedIterator{contract: _PublicResolver.contract, event: "ABIChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchABIChanged is a free log subscription operation binding the contract event 0xaa121bbeef5f32f5961a2a28966e769023910fc9479059ee3495d4c1a696efe3.
+//
+// Solidity: event ABIChanged(node indexed bytes32, contentType indexed uint256)
+func (_PublicResolver *PublicResolverFilterer) WatchABIChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverABIChanged, node [][32]byte, contentType []*big.Int) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var contentTypeRule []interface{}
+ for _, contentTypeItem := range contentType {
+ contentTypeRule = append(contentTypeRule, contentTypeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "ABIChanged", nodeRule, contentTypeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverABIChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "ABIChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
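ABIChanged has two indexed arguments, so both topic positions can be narrowed. A sketch that filters one node for a single content type, assuming an already-bound *contract.PublicResolver and the bind, fmt and math/big imports:

// abiChangesFor lists ABIChanged logs for one node and one content type.
func abiChangesFor(resolver *contract.PublicResolver, node [32]byte, fromBlock uint64) error {
	it, err := resolver.FilterABIChanged(
		&bind.FilterOpts{Start: fromBlock},
		[][32]byte{node},          // restrict the node topic
		[]*big.Int{big.NewInt(1)}, // restrict the contentType topic, e.g. type 1
	)
	if err != nil {
		return err
	}
	defer it.Close()

	for it.Next() {
		fmt.Printf("ABIChanged: node=%x contentType=%s\n", it.Event.Node, it.Event.ContentType)
	}
	return it.Error()
}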
+// PublicResolverAddrChangedIterator is returned from FilterAddrChanged and is used to iterate over the raw logs and unpacked data for AddrChanged events raised by the PublicResolver contract.
+type PublicResolverAddrChangedIterator struct {
+ Event *PublicResolverAddrChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverAddrChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverAddrChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverAddrChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverAddrChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverAddrChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverAddrChanged represents an AddrChanged event raised by the PublicResolver contract.
+type PublicResolverAddrChanged struct {
+ Node [32]byte
+ A common.Address
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterAddrChanged is a free log retrieval operation binding the contract event 0x52d7d861f09ab3d26239d492e8968629f95e9e318cf0b73bfddc441522a15fd2.
+//
+// Solidity: event AddrChanged(node indexed bytes32, a address)
+func (_PublicResolver *PublicResolverFilterer) FilterAddrChanged(opts *bind.FilterOpts, node [][32]byte) (*PublicResolverAddrChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "AddrChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverAddrChangedIterator{contract: _PublicResolver.contract, event: "AddrChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchAddrChanged is a free log subscription operation binding the contract event 0x52d7d861f09ab3d26239d492e8968629f95e9e318cf0b73bfddc441522a15fd2.
+//
+// Solidity: event AddrChanged(node indexed bytes32, a address)
+func (_PublicResolver *PublicResolverFilterer) WatchAddrChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverAddrChanged, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "AddrChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverAddrChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "AddrChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
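On the subscription side, calling Unsubscribe is what closes the quit channel in the event.NewSubscription forwarding loop above. A sketch that watches AddrChanged until told to stop, again reusing a bound resolver from the earlier sketches:

// watchAddrUntil forwards AddrChanged events until stop is closed or the backend errors.
func watchAddrUntil(resolver *contract.PublicResolver, stop <-chan struct{}) error {
	sink := make(chan *contract.PublicResolverAddrChanged, 16)
	sub, err := resolver.WatchAddrChanged(&bind.WatchOpts{}, sink, nil) // nil: all nodes
	if err != nil {
		return err
	}
	defer sub.Unsubscribe() // signals quit to the generated forwarding loop

	for {
		select {
		case ev := <-sink:
			fmt.Printf("AddrChanged: node=%x -> %s\n", ev.Node, ev.A.Hex())
		case err := <-sub.Err():
			return err
		case <-stop:
			return nil
		}
	}
}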
+// PublicResolverContentChangedIterator is returned from FilterContentChanged and is used to iterate over the raw logs and unpacked data for ContentChanged events raised by the PublicResolver contract.
+type PublicResolverContentChangedIterator struct {
+ Event *PublicResolverContentChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverContentChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverContentChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverContentChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverContentChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverContentChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverContentChanged represents a ContentChanged event raised by the PublicResolver contract.
+type PublicResolverContentChanged struct {
+ Node [32]byte
+ Hash [32]byte
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterContentChanged is a free log retrieval operation binding the contract event 0x0424b6fe0d9c3bdbece0e7879dc241bb0c22e900be8b6c168b4ee08bd9bf83bc.
+//
+// Solidity: event ContentChanged(node indexed bytes32, hash bytes32)
+func (_PublicResolver *PublicResolverFilterer) FilterContentChanged(opts *bind.FilterOpts, node [][32]byte) (*PublicResolverContentChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "ContentChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverContentChangedIterator{contract: _PublicResolver.contract, event: "ContentChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchContentChanged is a free log subscription operation binding the contract event 0x0424b6fe0d9c3bdbece0e7879dc241bb0c22e900be8b6c168b4ee08bd9bf83bc.
+//
+// Solidity: event ContentChanged(node indexed bytes32, hash bytes32)
+func (_PublicResolver *PublicResolverFilterer) WatchContentChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverContentChanged, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "ContentChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverContentChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "ContentChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
+// PublicResolverNameChangedIterator is returned from FilterNameChanged and is used to iterate over the raw logs and unpacked data for NameChanged events raised by the PublicResolver contract.
+type PublicResolverNameChangedIterator struct {
+ Event *PublicResolverNameChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverNameChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverNameChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverNameChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverNameChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverNameChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverNameChanged represents a NameChanged event raised by the PublicResolver contract.
+type PublicResolverNameChanged struct {
+ Node [32]byte
+ Name string
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterNameChanged is a free log retrieval operation binding the contract event 0xb7d29e911041e8d9b843369e890bcb72c9388692ba48b65ac54e7214c4c348f7.
+//
+// Solidity: event NameChanged(node indexed bytes32, name string)
+func (_PublicResolver *PublicResolverFilterer) FilterNameChanged(opts *bind.FilterOpts, node [][32]byte) (*PublicResolverNameChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "NameChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverNameChangedIterator{contract: _PublicResolver.contract, event: "NameChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchNameChanged is a free log subscription operation binding the contract event 0xb7d29e911041e8d9b843369e890bcb72c9388692ba48b65ac54e7214c4c348f7.
+//
+// Solidity: event NameChanged(node indexed bytes32, name string)
+func (_PublicResolver *PublicResolverFilterer) WatchNameChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverNameChanged, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "NameChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverNameChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "NameChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
+// PublicResolverPubkeyChangedIterator is returned from FilterPubkeyChanged and is used to iterate over the raw logs and unpacked data for PubkeyChanged events raised by the PublicResolver contract.
+type PublicResolverPubkeyChangedIterator struct {
+ Event *PublicResolverPubkeyChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverPubkeyChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverPubkeyChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverPubkeyChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverPubkeyChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverPubkeyChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverPubkeyChanged represents a PubkeyChanged event raised by the PublicResolver contract.
+type PublicResolverPubkeyChanged struct {
+ Node [32]byte
+ X [32]byte
+ Y [32]byte
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterPubkeyChanged is a free log retrieval operation binding the contract event 0x1d6f5e03d3f63eb58751986629a5439baee5079ff04f345becb66e23eb154e46.
+//
+// Solidity: event PubkeyChanged(node indexed bytes32, x bytes32, y bytes32)
+func (_PublicResolver *PublicResolverFilterer) FilterPubkeyChanged(opts *bind.FilterOpts, node [][32]byte) (*PublicResolverPubkeyChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "PubkeyChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverPubkeyChangedIterator{contract: _PublicResolver.contract, event: "PubkeyChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchPubkeyChanged is a free log subscription operation binding the contract event 0x1d6f5e03d3f63eb58751986629a5439baee5079ff04f345becb66e23eb154e46.
+//
+// Solidity: event PubkeyChanged(node indexed bytes32, x bytes32, y bytes32)
+func (_PublicResolver *PublicResolverFilterer) WatchPubkeyChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverPubkeyChanged, node [][32]byte) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "PubkeyChanged", nodeRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverPubkeyChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "PubkeyChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
+
+// PublicResolverTextChangedIterator is returned from FilterTextChanged and is used to iterate over the raw logs and unpacked data for TextChanged events raised by the PublicResolver contract.
+type PublicResolverTextChangedIterator struct {
+ Event *PublicResolverTextChanged // Event containing the contract specifics and raw log
+
+ contract *bind.BoundContract // Generic contract to use for unpacking event data
+ event string // Event name to use for unpacking event data
+
+ logs chan types.Log // Log channel receiving the found contract events
+ sub ethereum.Subscription // Subscription for errors, completion and termination
+ done bool // Whether the subscription completed delivering logs
+ fail error // Occurred error to stop iteration
+}
+
+// Next advances the iterator to the subsequent event, returning whether there
+// are any more events found. In case of a retrieval or parsing error, false is
+// returned and Error() can be queried for the exact failure.
+func (it *PublicResolverTextChangedIterator) Next() bool {
+ // If the iterator failed, stop iterating
+ if it.fail != nil {
+ return false
+ }
+ // If the iterator completed, deliver directly whatever's available
+ if it.done {
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverTextChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ default:
+ return false
+ }
+ }
+ // Iterator still in progress, wait for either a data or an error event
+ select {
+ case log := <-it.logs:
+ it.Event = new(PublicResolverTextChanged)
+ if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil {
+ it.fail = err
+ return false
+ }
+ it.Event.Raw = log
+ return true
+
+ case err := <-it.sub.Err():
+ it.done = true
+ it.fail = err
+ return it.Next()
+ }
+}
+
+// Error returns any retrieval or parsing error that occurred during filtering.
+func (it *PublicResolverTextChangedIterator) Error() error {
+ return it.fail
+}
+
+// Close terminates the iteration process, releasing any pending underlying
+// resources.
+func (it *PublicResolverTextChangedIterator) Close() error {
+ it.sub.Unsubscribe()
+ return nil
+}
+
+// PublicResolverTextChanged represents a TextChanged event raised by the PublicResolver contract.
+type PublicResolverTextChanged struct {
+ Node [32]byte
+ IndexedKey common.Hash
+ Key string
+ Raw types.Log // Blockchain specific contextual infos
+}
+
+// FilterTextChanged is a free log retrieval operation binding the contract event 0xd8c9334b1a9c2f9da342a0a2b32629c1a229b6445dad78947f674b44444a7550.
+//
+// Solidity: event TextChanged(node indexed bytes32, indexedKey indexed string, key string)
+func (_PublicResolver *PublicResolverFilterer) FilterTextChanged(opts *bind.FilterOpts, node [][32]byte, indexedKey []string) (*PublicResolverTextChangedIterator, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var indexedKeyRule []interface{}
+ for _, indexedKeyItem := range indexedKey {
+ indexedKeyRule = append(indexedKeyRule, indexedKeyItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.FilterLogs(opts, "TextChanged", nodeRule, indexedKeyRule)
+ if err != nil {
+ return nil, err
+ }
+ return &PublicResolverTextChangedIterator{contract: _PublicResolver.contract, event: "TextChanged", logs: logs, sub: sub}, nil
+}
+
+// WatchTextChanged is a free log subscription operation binding the contract event 0xd8c9334b1a9c2f9da342a0a2b32629c1a229b6445dad78947f674b44444a7550.
+//
+// Solidity: event TextChanged(node indexed bytes32, indexedKey indexed string, key string)
+func (_PublicResolver *PublicResolverFilterer) WatchTextChanged(opts *bind.WatchOpts, sink chan<- *PublicResolverTextChanged, node [][32]byte, indexedKey []string) (event.Subscription, error) {
+
+ var nodeRule []interface{}
+ for _, nodeItem := range node {
+ nodeRule = append(nodeRule, nodeItem)
+ }
+ var indexedKeyRule []interface{}
+ for _, indexedKeyItem := range indexedKey {
+ indexedKeyRule = append(indexedKeyRule, indexedKeyItem)
+ }
+
+ logs, sub, err := _PublicResolver.contract.WatchLogs(opts, "TextChanged", nodeRule, indexedKeyRule)
+ if err != nil {
+ return nil, err
+ }
+ return event.NewSubscription(func(quit <-chan struct{}) error {
+ defer sub.Unsubscribe()
+ for {
+ select {
+ case log := <-logs:
+ // New log arrived, parse the event and forward to the user
+ event := new(PublicResolverTextChanged)
+ if err := _PublicResolver.contract.UnpackLog(event, "TextChanged", log); err != nil {
+ return err
+ }
+ event.Raw = log
+
+ select {
+ case sink <- event:
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ case err := <-sub.Err():
+ return err
+ case <-quit:
+ return nil
+ }
+ }
+ }), nil
+}
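To complement the filter example earlier, here is a hedged sketch of the live-subscription pattern the Watch* accessors above enable, again assuming an existing *PublicResolverFilterer named filterer and a node namehash; the channel buffer size and the watchContentChanges name are arbitrary choices, not part of this change.

func watchContentChanges(filterer *PublicResolverFilterer, node [32]byte) error {
    sink := make(chan *PublicResolverContentChanged, 16)
    // Subscribe from the current head onwards; the generated code unpacks each log before delivery.
    sub, err := filterer.WatchContentChanged(&bind.WatchOpts{}, sink, [][32]byte{node})
    if err != nil {
        return err
    }
    defer sub.Unsubscribe()
    for {
        select {
        case ev := <-sink:
            fmt.Printf("content of node %x changed to hash %x\n", ev.Node, ev.Hash)
        case err := <-sub.Err():
            return err
        }
    }
}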
diff --git a/contracts/release/contract.go b/contracts/release/contract.go
deleted file mode 100644
index 03d7f8875..000000000
--- a/contracts/release/contract.go
+++ /dev/null
@@ -1,432 +0,0 @@
-// Code generated - DO NOT EDIT.
-// This file is a generated binding and any manual changes will be lost.
-
-package release
-
-import (
- "math/big"
- "strings"
-
- "github.com/ethereum/go-ethereum/accounts/abi"
- "github.com/ethereum/go-ethereum/accounts/abi/bind"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/core/types"
-)
-
-// ReleaseOracleABI is the input ABI used to generate the binding from.
-const ReleaseOracleABI = "[{\"constant\":true,\"inputs\":[],\"name\":\"proposedVersion\",\"outputs\":[{\"name\":\"major\",\"type\":\"uint32\"},{\"name\":\"minor\",\"type\":\"uint32\"},{\"name\":\"patch\",\"type\":\"uint32\"},{\"name\":\"commit\",\"type\":\"bytes20\"},{\"name\":\"pass\",\"type\":\"address[]\"},{\"name\":\"fail\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"signers\",\"outputs\":[{\"name\":\"\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"user\",\"type\":\"address\"}],\"name\":\"demote\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"user\",\"type\":\"address\"}],\"name\":\"authVotes\",\"outputs\":[{\"name\":\"promote\",\"type\":\"address[]\"},{\"name\":\"demote\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"currentVersion\",\"outputs\":[{\"name\":\"major\",\"type\":\"uint32\"},{\"name\":\"minor\",\"type\":\"uint32\"},{\"name\":\"patch\",\"type\":\"uint32\"},{\"name\":\"commit\",\"type\":\"bytes20\"},{\"name\":\"time\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[],\"name\":\"nuke\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"authProposals\",\"outputs\":[{\"name\":\"\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"user\",\"type\":\"address\"}],\"name\":\"promote\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"major\",\"type\":\"uint32\"},{\"name\":\"minor\",\"type\":\"uint32\"},{\"name\":\"patch\",\"type\":\"uint32\"},{\"name\":\"commit\",\"type\":\"bytes20\"}],\"name\":\"release\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"name\":\"signers\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"}]"
-
-// ReleaseOracleBin is the compiled bytecode used for deploying new contracts.
-const ReleaseOracleBin = `0x606060405234156200001057600080fd5b60405162001395380380620013958339810160405280805190910190506000815115156200009a57600160a060020a0333166000908152602081905260409020805460ff1916600190811790915580548082016200006f838262000157565b5060009182526020909120018054600160a060020a03191633600160a060020a03161790556200014f565b5060005b81518110156200014f576001600080848481518110620000ba57fe5b90602001906020020151600160a060020a031681526020810191909152604001600020805460ff191691151591909117905560018054808201620000ff838262000157565b916000526020600020900160008484815181106200011957fe5b906020019060200201518254600160a060020a039182166101009390930a9283029190920219909116179055506001016200009e565b5050620001a7565b8154818355818115116200017e576000838152602090206200017e91810190830162000183565b505050565b620001a491905b80821115620001a057600081556001016200018a565b5090565b90565b6111de80620001b76000396000f3006060604052600436106100985763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326db7648811461009d57806346f0975a1461017e5780635c3d005d146101e457806364ed31fe146102055780639d888e86146102bd578063bc8fbbf814610318578063bf8ecf9c1461032b578063d0e0813a1461033e578063d67cbec91461035d575b600080fd5b34156100a857600080fd5b6100b0610397565b60405163ffffffff80881682528681166020830152851660408201526bffffffffffffffffffffffff198416606082015260c0608082018181529060a0830190830185818151815260200191508051906020019060200280838360005b8381101561012557808201518382015260200161010d565b50505050905001838103825284818151815260200191508051906020019060200280838360005b8381101561016457808201518382015260200161014c565b505050509050019850505050505050505060405180910390f35b341561018957600080fd5b6101916104bc565b60405160208082528190810183818151815260200191508051906020019060200280838360005b838110156101d05780820151838201526020016101b8565b505050509050019250505060405180910390f35b34156101ef57600080fd5b610203600160a060020a0360043516610525565b005b341561021057600080fd5b610224600160a060020a0360043516610533565b604051808060200180602001838103835285818151815260200191508051906020019060200280838360005b83811015610268578082015183820152602001610250565b50505050905001838103825284818151815260200191508051906020019060200280838360005b838110156102a757808201518382015260200161028f565b5050505090500194505050505060405180910390f35b34156102c857600080fd5b6102d0610627565b60405163ffffffff95861681529385166020850152919093166040808401919091526bffffffffffffffffffffffff199093166060830152608082015260a001905180910390f35b341561032357600080fd5b6102036106cf565b341561033657600080fd5b6101916106df565b341561034957600080fd5b610203600160a060020a0360043516610745565b341561036857600080fd5b61020363ffffffff600435811690602435811690604435166bffffffffffffffffffffffff1960643516610750565b6000806000806103a5611051565b6103ad611051565b6004546006805463ffffffff808416936401000000008104821693680100000000000000008204909216926c01000000000000000000000000918290049091029190600790829060208082020160405190810160405280929190818152602001828054801561044557602002820191906000526020600020905b8154600160a060020a03168152600190910190602001808311610427575b50505050509150808054806020026020016040519081016040528092919081815260200182805480156104a157602002820191906000526020600020905b8154600160a060020a03168152600190910190602001808311610483575b50505050509050955095509550955095509550909192939495565b6104c4611051565b600180548060200260200160405190810160405280929190818152602001828054801561051a57602002820191906000526020600020905b8154600160a060020a031681526001909101906020018083116104fc575b505050505090505b90565b610530816000
610764565b50565b61053b611051565b610543611051565b600160a060020a03831660009081526002602090815260409182902080549092600184019284929182820290910190519081016040528092919081815260200182805480156105bb57602002820191906000526020600020905b8154600160a060020a0316815260019091019060200180831161059d575b505050505091508080548060200260200160405190810160405280929190818152602001828054801561061757602002820191906000526020600020905b8154600160a060020a031681526001909101906020018083116105f9575b5050505050905091509150915091565b6000806000806000806008805490506000141561065357600095508594508493508392508291506106c7565b60088054600019810190811061066557fe5b600091825260209091206004909102018054600182015463ffffffff80831699506401000000008304811698506801000000000000000083041696506c0100000000000000000000000091829004909102945067ffffffffffffffff16925090505b509091929394565b6106dd600080808080610c01565b565b6106e7611051565b600380548060200260200160405190810160405280929190818152602001828054801561051a57602002820191906000526020600020908154600160a060020a031681526001909101906020018083116104fc575050505050905090565b610530816001610764565b61075e848484846001610c01565b50505050565b600160a060020a033316600090815260208190526040812054819060ff161561075e575050600160a060020a0382166000908152600260205260408120905b81548110156107ed578154600160a060020a033316908390839081106107c557fe5b600091825260209091200154600160a060020a031614156107e55761075e565b6001016107a3565b5060005b60018201548110156108405733600160a060020a0316826001018281548110151561081857fe5b600091825260209091200154600160a060020a031614156108385761075e565b6001016107f1565b815415801561085157506001820154155b1561088e5760038054600181016108688382611063565b5060009182526020909120018054600160a060020a031916600160a060020a0386161790555b82156108e65781548290600181016108a68382611063565b5060009182526020909120018054600160a060020a03191633600160a060020a0316179055600154600290835491900490116108e15761075e565b61093a565b8160010180548060010182816108fc9190611063565b5060009182526020909120018054600160a060020a03191633600160a060020a03161790556001546002906001840154919004901161093a5761075e565b8280156109605750600160a060020a03841660009081526020819052604090205460ff16155b156109c457600160a060020a0384166000908152602081905260409020805460ff19166001908117909155805480820161099a8382611063565b5060009182526020909120018054600160a060020a031916600160a060020a038616179055610b09565b821580156109ea5750600160a060020a03841660009081526020819052604090205460ff165b15610b095750600160a060020a0383166000908152602081905260408120805460ff191690555b600154811015610b095783600160a060020a0316600182815481101515610a3457fe5b600091825260209091200154600160a060020a03161415610b0157600180546000198101908110610a6157fe5b60009182526020909120015460018054600160a060020a039092169183908110610a8757fe5b60009182526020909120018054600160a060020a031916600160a060020a03929092169190911790556001805490610ac3906000198301611063565b50600060048181556005805467ffffffffffffffff1916905590600681610aea828261108c565b610af860018301600061108c565b50505050610b09565b600101610a11565b600160a060020a038416600090815260026020526040812090610b2c828261108c565b610b3a60018301600061108c565b5050600090505b60035481101561075e5783600160a060020a0316600382815481101515610b6457fe5b600091825260209091200154600160a060020a03161415610bf957600380546000198101908110610b9157fe5b60009182526020909120015460038054600160a060020a039092169183908110610bb757fe5b60009182526020909120018054600160a060020a031916600160a060020a03929092169190911790556003805490610bf3906000198301611063565b5061075e565b600101610b41565b600160a060020a03331660009081526020819052604081205481906
0ff16156110485782158015610c325750600654155b15610c3c57611048565b6006541515610cb7576004805463ffffffff191663ffffffff8981169190911767ffffffff00000000191664010000000089831602176bffffffff000000000000000019166801000000000000000091881691909102176bffffffffffffffffffffffff166c01000000000000000000000000808704021790555b828015610d41575060045463ffffffff8881169116141580610cec575060045463ffffffff8781166401000000009092041614155b80610d0e575060045463ffffffff868116680100000000000000009092041614155b80610d4157506004546c0100000000000000000000000090819004026bffffffffffffffffffffffff1990811690851614155b15610d4b57611048565b506006905060005b8154811015610d9d578154600160a060020a03331690839083908110610d7557fe5b600091825260209091200154600160a060020a03161415610d9557611048565b600101610d53565b5060005b6001820154811015610df05733600160a060020a03168260010182815481101515610dc857fe5b600091825260209091200154600160a060020a03161415610de857611048565b600101610da1565b8215610e48578154829060018101610e088382611063565b5060009182526020909120018054600160a060020a03191633600160a060020a031617905560015460029083549190049011610e4357611048565b610e9c565b816001018054806001018281610e5e9190611063565b5060009182526020909120018054600160a060020a03191633600160a060020a031617905560015460029060018401549190049011610e9c57611048565b821561100f576005805467ffffffffffffffff19164267ffffffffffffffff161790556008805460018101610ed183826110aa565b6000928352602090922060048054928102909101805463ffffffff191663ffffffff9384161780825582546401000000009081900485160267ffffffff000000001990911617808255825468010000000000000000908190049094169093026bffffffff0000000000000000199093169290921780835581546c01000000000000000000000000908190048102819004026bffffffffffffffffffffffff90911617825560055460018301805467ffffffffffffffff191667ffffffffffffffff909216919091179055600680549192916002830190610fb490829084906110d6565b5060018281018054610fc992840191906110d6565b5050600060048181556005805467ffffffffffffffff191690559450925060069150829050610ff8828261108c565b61100660018301600061108c565b50505050611048565b600060048181556005805467ffffffffffffffff1916905590600681611035828261108c565b61104360018301600061108c565b505050505b50505050505050565b60206040519081016040526000815290565b81548183558181151161108757600083815260209020611087918101908301611126565b505050565b50805460008255906000526020600020908101906105309190611126565b815481835581811511611087576004028160040283600052602060002091820191016110879190611140565b8280548282559060005260206000209081019282156111165760005260206000209182015b828111156111165782548255916001019190600101906110fb565b5061112292915061118e565b5090565b61052291905b80821115611122576000815560010161112c565b61052291905b8082111561112257600080825560018201805467ffffffffffffffff191690556002820181611175828261108c565b61118360018301600061108c565b505050600401611146565b61052291905b80821115611122578054600160a060020a03191681556001016111945600a165627a7a72305820aa9c89b9c569e44fa08285a00ab47bcdf0a01ebbc54e3cd864450622b5e559a40029`
-
-// DeployReleaseOracle deploys a new Ethereum contract, binding an instance of ReleaseOracle to it.
-func DeployReleaseOracle(auth *bind.TransactOpts, backend bind.ContractBackend, signers []common.Address) (common.Address, *types.Transaction, *ReleaseOracle, error) {
- parsed, err := abi.JSON(strings.NewReader(ReleaseOracleABI))
- if err != nil {
- return common.Address{}, nil, nil, err
- }
- address, tx, contract, err := bind.DeployContract(auth, parsed, common.FromHex(ReleaseOracleBin), backend, signers)
- if err != nil {
- return common.Address{}, nil, nil, err
- }
- return address, tx, &ReleaseOracle{ReleaseOracleCaller: ReleaseOracleCaller{contract: contract}, ReleaseOracleTransactor: ReleaseOracleTransactor{contract: contract}}, nil
-}
-
-// ReleaseOracle is an auto generated Go binding around an Ethereum contract.
-type ReleaseOracle struct {
- ReleaseOracleCaller // Read-only binding to the contract
- ReleaseOracleTransactor // Write-only binding to the contract
-}
-
-// ReleaseOracleCaller is an auto generated read-only Go binding around an Ethereum contract.
-type ReleaseOracleCaller struct {
- contract *bind.BoundContract // Generic contract wrapper for the low level calls
-}
-
-// ReleaseOracleTransactor is an auto generated write-only Go binding around an Ethereum contract.
-type ReleaseOracleTransactor struct {
- contract *bind.BoundContract // Generic contract wrapper for the low level calls
-}
-
-// ReleaseOracleSession is an auto generated Go binding around an Ethereum contract,
-// with pre-set call and transact options.
-type ReleaseOracleSession struct {
- Contract *ReleaseOracle // Generic contract binding to set the session for
- CallOpts bind.CallOpts // Call options to use throughout this session
- TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session
-}
-
-// ReleaseOracleCallerSession is an auto generated read-only Go binding around an Ethereum contract,
-// with pre-set call options.
-type ReleaseOracleCallerSession struct {
- Contract *ReleaseOracleCaller // Generic contract caller binding to set the session for
- CallOpts bind.CallOpts // Call options to use throughout this session
-}
-
-// ReleaseOracleTransactorSession is an auto generated write-only Go binding around an Ethereum contract,
-// with pre-set transact options.
-type ReleaseOracleTransactorSession struct {
- Contract *ReleaseOracleTransactor // Generic contract transactor binding to set the session for
- TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session
-}
-
-// ReleaseOracleRaw is an auto generated low-level Go binding around an Ethereum contract.
-type ReleaseOracleRaw struct {
- Contract *ReleaseOracle // Generic contract binding to access the raw methods on
-}
-
-// ReleaseOracleCallerRaw is an auto generated low-level read-only Go binding around an Ethereum contract.
-type ReleaseOracleCallerRaw struct {
- Contract *ReleaseOracleCaller // Generic read-only contract binding to access the raw methods on
-}
-
-// ReleaseOracleTransactorRaw is an auto generated low-level write-only Go binding around an Ethereum contract.
-type ReleaseOracleTransactorRaw struct {
- Contract *ReleaseOracleTransactor // Generic write-only contract binding to access the raw methods on
-}
-
-// NewReleaseOracle creates a new instance of ReleaseOracle, bound to a specific deployed contract.
-func NewReleaseOracle(address common.Address, backend bind.ContractBackend) (*ReleaseOracle, error) {
- contract, err := bindReleaseOracle(address, backend, backend)
- if err != nil {
- return nil, err
- }
- return &ReleaseOracle{ReleaseOracleCaller: ReleaseOracleCaller{contract: contract}, ReleaseOracleTransactor: ReleaseOracleTransactor{contract: contract}}, nil
-}
-
-// NewReleaseOracleCaller creates a new read-only instance of ReleaseOracle, bound to a specific deployed contract.
-func NewReleaseOracleCaller(address common.Address, caller bind.ContractCaller) (*ReleaseOracleCaller, error) {
- contract, err := bindReleaseOracle(address, caller, nil)
- if err != nil {
- return nil, err
- }
- return &ReleaseOracleCaller{contract: contract}, nil
-}
-
-// NewReleaseOracleTransactor creates a new write-only instance of ReleaseOracle, bound to a specific deployed contract.
-func NewReleaseOracleTransactor(address common.Address, transactor bind.ContractTransactor) (*ReleaseOracleTransactor, error) {
- contract, err := bindReleaseOracle(address, nil, transactor)
- if err != nil {
- return nil, err
- }
- return &ReleaseOracleTransactor{contract: contract}, nil
-}
-
-// bindReleaseOracle binds a generic wrapper to an already deployed contract.
-func bindReleaseOracle(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor) (*bind.BoundContract, error) {
- parsed, err := abi.JSON(strings.NewReader(ReleaseOracleABI))
- if err != nil {
- return nil, err
- }
- return bind.NewBoundContract(address, parsed, caller, transactor), nil
-}
-
-// Call invokes the (constant) contract method with params as input values and
-// sets the output to result. The result type might be a single field for simple
-// returns, a slice of interfaces for anonymous returns and a struct for named
-// returns.
-func (_ReleaseOracle *ReleaseOracleRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
- return _ReleaseOracle.Contract.ReleaseOracleCaller.contract.Call(opts, result, method, params...)
-}
-
-// Transfer initiates a plain transaction to move funds to the contract, calling
-// its default method if one is available.
-func (_ReleaseOracle *ReleaseOracleRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.ReleaseOracleTransactor.contract.Transfer(opts)
-}
-
-// Transact invokes the (paid) contract method with params as input values.
-func (_ReleaseOracle *ReleaseOracleRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.ReleaseOracleTransactor.contract.Transact(opts, method, params...)
-}
-
-// Call invokes the (constant) contract method with params as input values and
-// sets the output to result. The result type might be a single field for simple
-// returns, a slice of interfaces for anonymous returns and a struct for named
-// returns.
-func (_ReleaseOracle *ReleaseOracleCallerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
- return _ReleaseOracle.Contract.contract.Call(opts, result, method, params...)
-}
-
-// Transfer initiates a plain transaction to move funds to the contract, calling
-// its default method if one is available.
-func (_ReleaseOracle *ReleaseOracleTransactorRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.contract.Transfer(opts)
-}
-
-// Transact invokes the (paid) contract method with params as input values.
-func (_ReleaseOracle *ReleaseOracleTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.contract.Transact(opts, method, params...)
-}
-
-// AuthProposals is a free data retrieval call binding the contract method 0xbf8ecf9c.
-//
-// Solidity: function authProposals() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleCaller) AuthProposals(opts *bind.CallOpts) ([]common.Address, error) {
- var (
- ret0 = new([]common.Address)
- )
- out := ret0
- err := _ReleaseOracle.contract.Call(opts, out, "authProposals")
- return *ret0, err
-}
-
-// AuthProposals is a free data retrieval call binding the contract method 0xbf8ecf9c.
-//
-// Solidity: function authProposals() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleSession) AuthProposals() ([]common.Address, error) {
- return _ReleaseOracle.Contract.AuthProposals(&_ReleaseOracle.CallOpts)
-}
-
-// AuthProposals is a free data retrieval call binding the contract method 0xbf8ecf9c.
-//
-// Solidity: function authProposals() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleCallerSession) AuthProposals() ([]common.Address, error) {
- return _ReleaseOracle.Contract.AuthProposals(&_ReleaseOracle.CallOpts)
-}
-
-// AuthVotes is a free data retrieval call binding the contract method 0x64ed31fe.
-//
-// Solidity: function authVotes(user address) constant returns(promote address[], demote address[])
-func (_ReleaseOracle *ReleaseOracleCaller) AuthVotes(opts *bind.CallOpts, user common.Address) (struct {
- Promote []common.Address
- Demote []common.Address
-}, error) {
- ret := new(struct {
- Promote []common.Address
- Demote []common.Address
- })
- out := ret
- err := _ReleaseOracle.contract.Call(opts, out, "authVotes", user)
- return *ret, err
-}
-
-// AuthVotes is a free data retrieval call binding the contract method 0x64ed31fe.
-//
-// Solidity: function authVotes(user address) constant returns(promote address[], demote address[])
-func (_ReleaseOracle *ReleaseOracleSession) AuthVotes(user common.Address) (struct {
- Promote []common.Address
- Demote []common.Address
-}, error) {
- return _ReleaseOracle.Contract.AuthVotes(&_ReleaseOracle.CallOpts, user)
-}
-
-// AuthVotes is a free data retrieval call binding the contract method 0x64ed31fe.
-//
-// Solidity: function authVotes(user address) constant returns(promote address[], demote address[])
-func (_ReleaseOracle *ReleaseOracleCallerSession) AuthVotes(user common.Address) (struct {
- Promote []common.Address
- Demote []common.Address
-}, error) {
- return _ReleaseOracle.Contract.AuthVotes(&_ReleaseOracle.CallOpts, user)
-}
-
-// CurrentVersion is a free data retrieval call binding the contract method 0x9d888e86.
-//
-// Solidity: function currentVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, time uint256)
-func (_ReleaseOracle *ReleaseOracleCaller) CurrentVersion(opts *bind.CallOpts) (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Time *big.Int
-}, error) {
- ret := new(struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Time *big.Int
- })
- out := ret
- err := _ReleaseOracle.contract.Call(opts, out, "currentVersion")
- return *ret, err
-}
-
-// CurrentVersion is a free data retrieval call binding the contract method 0x9d888e86.
-//
-// Solidity: function currentVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, time uint256)
-func (_ReleaseOracle *ReleaseOracleSession) CurrentVersion() (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Time *big.Int
-}, error) {
- return _ReleaseOracle.Contract.CurrentVersion(&_ReleaseOracle.CallOpts)
-}
-
-// CurrentVersion is a free data retrieval call binding the contract method 0x9d888e86.
-//
-// Solidity: function currentVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, time uint256)
-func (_ReleaseOracle *ReleaseOracleCallerSession) CurrentVersion() (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Time *big.Int
-}, error) {
- return _ReleaseOracle.Contract.CurrentVersion(&_ReleaseOracle.CallOpts)
-}
-
-// ProposedVersion is a free data retrieval call binding the contract method 0x26db7648.
-//
-// Solidity: function proposedVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, pass address[], fail address[])
-func (_ReleaseOracle *ReleaseOracleCaller) ProposedVersion(opts *bind.CallOpts) (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Pass []common.Address
- Fail []common.Address
-}, error) {
- ret := new(struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Pass []common.Address
- Fail []common.Address
- })
- out := ret
- err := _ReleaseOracle.contract.Call(opts, out, "proposedVersion")
- return *ret, err
-}
-
-// ProposedVersion is a free data retrieval call binding the contract method 0x26db7648.
-//
-// Solidity: function proposedVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, pass address[], fail address[])
-func (_ReleaseOracle *ReleaseOracleSession) ProposedVersion() (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Pass []common.Address
- Fail []common.Address
-}, error) {
- return _ReleaseOracle.Contract.ProposedVersion(&_ReleaseOracle.CallOpts)
-}
-
-// ProposedVersion is a free data retrieval call binding the contract method 0x26db7648.
-//
-// Solidity: function proposedVersion() constant returns(major uint32, minor uint32, patch uint32, commit bytes20, pass address[], fail address[])
-func (_ReleaseOracle *ReleaseOracleCallerSession) ProposedVersion() (struct {
- Major uint32
- Minor uint32
- Patch uint32
- Commit [20]byte
- Pass []common.Address
- Fail []common.Address
-}, error) {
- return _ReleaseOracle.Contract.ProposedVersion(&_ReleaseOracle.CallOpts)
-}
-
-// Signers is a free data retrieval call binding the contract method 0x46f0975a.
-//
-// Solidity: function signers() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleCaller) Signers(opts *bind.CallOpts) ([]common.Address, error) {
- var (
- ret0 = new([]common.Address)
- )
- out := ret0
- err := _ReleaseOracle.contract.Call(opts, out, "signers")
- return *ret0, err
-}
-
-// Signers is a free data retrieval call binding the contract method 0x46f0975a.
-//
-// Solidity: function signers() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleSession) Signers() ([]common.Address, error) {
- return _ReleaseOracle.Contract.Signers(&_ReleaseOracle.CallOpts)
-}
-
-// Signers is a free data retrieval call binding the contract method 0x46f0975a.
-//
-// Solidity: function signers() constant returns(address[])
-func (_ReleaseOracle *ReleaseOracleCallerSession) Signers() ([]common.Address, error) {
- return _ReleaseOracle.Contract.Signers(&_ReleaseOracle.CallOpts)
-}
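For context on the session types being removed here, a minimal sketch of how they were typically wired together: oracle is an assumed *ReleaseOracle instance and auth a *bind.TransactOpts (e.g. from bind.NewKeyedTransactor, as in the deleted test below); the variable names are illustrative only.

session := &ReleaseOracleSession{
    Contract:     oracle,
    CallOpts:     bind.CallOpts{Pending: false},
    TransactOpts: *auth,
}
// Read-only call routed through the pre-set CallOpts.
signers, err := session.Signers()
if err != nil {
    log.Fatalf("Signers: %v", err)
}
fmt.Printf("authorised signers: %v\n", signers)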
-
-// Demote is a paid mutator transaction binding the contract method 0x5c3d005d.
-//
-// Solidity: function demote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleTransactor) Demote(opts *bind.TransactOpts, user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.contract.Transact(opts, "demote", user)
-}
-
-// Demote is a paid mutator transaction binding the contract method 0x5c3d005d.
-//
-// Solidity: function demote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleSession) Demote(user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Demote(&_ReleaseOracle.TransactOpts, user)
-}
-
-// Demote is a paid mutator transaction binding the contract method 0x5c3d005d.
-//
-// Solidity: function demote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleTransactorSession) Demote(user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Demote(&_ReleaseOracle.TransactOpts, user)
-}
-
-// Nuke is a paid mutator transaction binding the contract method 0xbc8fbbf8.
-//
-// Solidity: function nuke() returns()
-func (_ReleaseOracle *ReleaseOracleTransactor) Nuke(opts *bind.TransactOpts) (*types.Transaction, error) {
- return _ReleaseOracle.contract.Transact(opts, "nuke")
-}
-
-// Nuke is a paid mutator transaction binding the contract method 0xbc8fbbf8.
-//
-// Solidity: function nuke() returns()
-func (_ReleaseOracle *ReleaseOracleSession) Nuke() (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Nuke(&_ReleaseOracle.TransactOpts)
-}
-
-// Nuke is a paid mutator transaction binding the contract method 0xbc8fbbf8.
-//
-// Solidity: function nuke() returns()
-func (_ReleaseOracle *ReleaseOracleTransactorSession) Nuke() (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Nuke(&_ReleaseOracle.TransactOpts)
-}
-
-// Promote is a paid mutator transaction binding the contract method 0xd0e0813a.
-//
-// Solidity: function promote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleTransactor) Promote(opts *bind.TransactOpts, user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.contract.Transact(opts, "promote", user)
-}
-
-// Promote is a paid mutator transaction binding the contract method 0xd0e0813a.
-//
-// Solidity: function promote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleSession) Promote(user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Promote(&_ReleaseOracle.TransactOpts, user)
-}
-
-// Promote is a paid mutator transaction binding the contract method 0xd0e0813a.
-//
-// Solidity: function promote(user address) returns()
-func (_ReleaseOracle *ReleaseOracleTransactorSession) Promote(user common.Address) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Promote(&_ReleaseOracle.TransactOpts, user)
-}
-
-// Release is a paid mutator transaction binding the contract method 0xd67cbec9.
-//
-// Solidity: function release(major uint32, minor uint32, patch uint32, commit bytes20) returns()
-func (_ReleaseOracle *ReleaseOracleTransactor) Release(opts *bind.TransactOpts, major uint32, minor uint32, patch uint32, commit [20]byte) (*types.Transaction, error) {
- return _ReleaseOracle.contract.Transact(opts, "release", major, minor, patch, commit)
-}
-
-// Release is a paid mutator transaction binding the contract method 0xd67cbec9.
-//
-// Solidity: function release(major uint32, minor uint32, patch uint32, commit bytes20) returns()
-func (_ReleaseOracle *ReleaseOracleSession) Release(major uint32, minor uint32, patch uint32, commit [20]byte) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Release(&_ReleaseOracle.TransactOpts, major, minor, patch, commit)
-}
-
-// Release is a paid mutator transaction binding the contract method 0xd67cbec9.
-//
-// Solidity: function release(major uint32, minor uint32, patch uint32, commit bytes20) returns()
-func (_ReleaseOracle *ReleaseOracleTransactorSession) Release(major uint32, minor uint32, patch uint32, commit [20]byte) (*types.Transaction, error) {
- return _ReleaseOracle.Contract.Release(&_ReleaseOracle.TransactOpts, major, minor, patch, commit)
-}
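To round off the removed oracle bindings, a hedged sketch of the release-proposal round trip they supported: session is the ReleaseOracleSession from the sketch above, sim a simulated backend as in the deleted test, and the version numbers and commit hash are made up for illustration.

var commit [20]byte
copy(commit[:], []byte("0123456789abcdefghij")) // placeholder commit hash
if _, err := session.Release(1, 8, 3, commit); err != nil {
    log.Fatalf("Release: %v", err)
}
sim.Commit() // mine the vote on the simulated backend
version, err := session.CurrentVersion()
if err != nil {
    log.Fatalf("CurrentVersion: %v", err)
}
fmt.Printf("current release: v%d.%d.%d (%x)\n", version.Major, version.Minor, version.Patch, version.Commit)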
diff --git a/contracts/release/contract.sol b/contracts/release/contract.sol
deleted file mode 100644
index 2a28c5894..000000000
--- a/contracts/release/contract.sol
+++ /dev/null
@@ -1,251 +0,0 @@
-// Copyright 2016 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-pragma solidity ^0.4.18;
-
-// ReleaseOracle is an Ethereum contract to store the current and previous
-// versions of the go-ethereum implementation. Its goal is to allow Geth to
-// check for new releases automatically without the need to consult a central
-// repository.
-//
-// The contract takes a vote based approach on both assigning authorised signers
-// as well as signing off on new Geth releases.
-//
-// Note, when a signer is demoted, the currently pending release is auto-nuked.
-// The reason is to prevent surprises where a demotion actually tilts the votes
-// in favor of one voter party and pushes out a new release as a consequence of
-// a simple demotion.
-contract ReleaseOracle {
- // Votes is an internal data structure to count votes on a specific proposal
- struct Votes {
- address[] pass; // List of signers voting to pass a proposal
- address[] fail; // List of signers voting to fail a proposal
- }
-
- // Version is the version details of a particular Geth release
- struct Version {
- uint32 major; // Major version component of the release
- uint32 minor; // Minor version component of the release
- uint32 patch; // Patch version component of the release
- bytes20 commit; // Git SHA1 commit hash of the release
-
- uint64 time; // Timestamp of the release approval
- Votes votes; // Votes that passed this release
- }
-
- // Oracle authorization details
- mapping(address => bool) authorised; // Set of accounts allowed to vote on updating the contract
- address[] voters; // List of addresses currently accepted as signers
-
- // Various proposals being voted on
- mapping(address => Votes) authProps; // Currently running user authorization proposals
- address[] authPend; // List of addresses being voted on (map indexes)
-
- Version verProp; // Currently proposed release being voted on
- Version[] releases; // All the positively voted releases
-
- // isSigner is a modifier to authorize contract transactions.
- modifier isSigner() {
- if (authorised[msg.sender]) {
- _;
- }
- }
-
- // Constructor to assign the initial set of signers.
- function ReleaseOracle(address[] signers) {
- // If no signers were specified, assign the creator as the sole signer
- if (signers.length == 0) {
- authorised[msg.sender] = true;
- voters.push(msg.sender);
- return;
- }
- // Otherwise assign the individual signers one by one
- for (uint i = 0; i < signers.length; i++) {
- authorised[signers[i]] = true;
- voters.push(signers[i]);
- }
- }
-
- // signers is an accessor method to retrieve all the signers (public accessor
- // generates an indexed one, not a retrieve-all version).
- function signers() constant returns(address[]) {
- return voters;
- }
-
- // authProposals retrieves the list of addresses that authorization proposals
- // are currently being voted on.
- function authProposals() constant returns(address[]) {
- return authPend;
- }
-
- // authVotes retrieves the current authorization votes for a particular user
- // to promote him into the list of signers, or demote him from there.
- function authVotes(address user) constant returns(address[] promote, address[] demote) {
- return (authProps[user].pass, authProps[user].fail);
- }
-
- // currentVersion retrieves the semantic version, commit hash and release time
-// of the currently voted active release.
- function currentVersion() constant returns (uint32 major, uint32 minor, uint32 patch, bytes20 commit, uint time) {
- if (releases.length == 0) {
- return (0, 0, 0, 0, 0);
- }
- var release = releases[releases.length - 1];
-
- return (release.major, release.minor, release.patch, release.commit, release.time);
- }
-
- // proposedVersion retrieves the semantic version, commit hash and the current
- // votes for the next proposed release.
- function proposedVersion() constant returns (uint32 major, uint32 minor, uint32 patch, bytes20 commit, address[] pass, address[] fail) {
- return (verProp.major, verProp.minor, verProp.patch, verProp.commit, verProp.votes.pass, verProp.votes.fail);
- }
-
- // promote pitches in on a voting campaign to promote a new user to a signer
- // position.
- function promote(address user) {
- updateSigner(user, true);
- }
-
- // demote pitches in on a voting campaign to demote an authorised user from
- // its signer position.
- function demote(address user) {
- updateSigner(user, false);
- }
-
- // release votes for a particular version to be included as the next release.
- function release(uint32 major, uint32 minor, uint32 patch, bytes20 commit) {
- updateRelease(major, minor, patch, commit, true);
- }
-
- // nuke votes for the currently proposed version to not be included as the next
- // release. Nuking doesn't require a specific version number for simplicity.
- function nuke() {
- updateRelease(0, 0, 0, 0, false);
- }
-
- // updateSigner marks a vote for changing the status of an Ethereum user, either
- // for or against the user being an authorised signer.
- function updateSigner(address user, bool authorize) internal isSigner {
- // Gather the current votes and ensure we don't double vote
- Votes votes = authProps[user];
- for (uint i = 0; i < votes.pass.length; i++) {
- if (votes.pass[i] == msg.sender) {
- return;
- }
- }
- for (i = 0; i < votes.fail.length; i++) {
- if (votes.fail[i] == msg.sender) {
- return;
- }
- }
- // If no authorization proposal is open, add the user to the index for later lookups
- if (votes.pass.length == 0 && votes.fail.length == 0) {
- authPend.push(user);
- }
- // Cast the vote and return if the proposal cannot be resolved yet
- if (authorize) {
- votes.pass.push(msg.sender);
- if (votes.pass.length <= voters.length / 2) {
- return;
- }
- } else {
- votes.fail.push(msg.sender);
- if (votes.fail.length <= voters.length / 2) {
- return;
- }
- }
- // Proposal resolved in our favor, execute whatever we voted on
- if (authorize && !authorised[user]) {
- authorised[user] = true;
- voters.push(user);
- } else if (!authorize && authorised[user]) {
- authorised[user] = false;
-
- for (i = 0; i < voters.length; i++) {
- if (voters[i] == user) {
- voters[i] = voters[voters.length - 1];
- voters.length--;
-
- delete verProp; // Nuke any version proposal (no surprise releases!)
- break;
- }
- }
- }
- // Finally delete the resolved proposal, index and garbage collect
- delete authProps[user];
-
- for (i = 0; i < authPend.length; i++) {
- if (authPend[i] == user) {
- authPend[i] = authPend[authPend.length - 1];
- authPend.length--;
- break;
- }
- }
- }
-
- // updateRelease votes for a particular version to be included as the next release,
- // or for the currently proposed release to be nuked out.
- function updateRelease(uint32 major, uint32 minor, uint32 patch, bytes20 commit, bool release) internal isSigner {
- // Skip nuke votes if no proposal is pending
- if (!release && verProp.votes.pass.length == 0) {
- return;
- }
- // Mark a new release if no proposal is pending
- if (verProp.votes.pass.length == 0) {
- verProp.major = major;
- verProp.minor = minor;
- verProp.patch = patch;
- verProp.commit = commit;
- }
- // Make sure positive votes match the current proposal
- if (release && (verProp.major != major || verProp.minor != minor || verProp.patch != patch || verProp.commit != commit)) {
- return;
- }
- // Gather the current votes and ensure we don't double vote
- Votes votes = verProp.votes;
- for (uint i = 0; i < votes.pass.length; i++) {
- if (votes.pass[i] == msg.sender) {
- return;
- }
- }
- for (i = 0; i < votes.fail.length; i++) {
- if (votes.fail[i] == msg.sender) {
- return;
- }
- }
- // Cast the vote and return if the proposal cannot be resolved yet
- if (release) {
- votes.pass.push(msg.sender);
- if (votes.pass.length <= voters.length / 2) {
- return;
- }
- } else {
- votes.fail.push(msg.sender);
- if (votes.fail.length <= voters.length / 2) {
- return;
- }
- }
- // Proposal resolved in our favor, execute whatever we voted on
- if (release) {
- verProp.time = uint64(now);
- releases.push(verProp);
- delete verProp;
- } else {
- delete verProp;
- }
- }
-}
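
Both updateSigner and updateRelease in the deleted contract resolve a proposal with the same strict-majority rule: a vote only counts as decided once more than half of the authorised signers back it. A minimal standalone Go sketch of that threshold (not part of the patch; the helper name needed is illustrative):

package main

import "fmt"

// needed returns the number of matching votes a proposal required under the
// removed oracle: strictly more than half of the authorised signers, i.e. the
// "half plus one" threshold the deleted tests exercise.
func needed(signers int) int {
	return signers/2 + 1
}

func main() {
	for _, signers := range []int{1, 2, 3, 4, 5, 6} {
		fmt.Printf("signers=%d votes needed=%d\n", signers, needed(signers))
	}
}
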
diff --git a/contracts/release/contract_test.go b/contracts/release/contract_test.go
deleted file mode 100644
index 0b2b2f048..000000000
--- a/contracts/release/contract_test.go
+++ /dev/null
@@ -1,374 +0,0 @@
-// Copyright 2016 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package release
-
-import (
- "crypto/ecdsa"
- "math/big"
- "testing"
-
- "github.com/ethereum/go-ethereum/accounts/abi/bind"
- "github.com/ethereum/go-ethereum/accounts/abi/bind/backends"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/core"
- "github.com/ethereum/go-ethereum/crypto"
-)
-
-// setupReleaseTest creates a blockchain simulator and deploys a version oracle
-// contract for testing.
-func setupReleaseTest(t *testing.T, prefund ...*ecdsa.PrivateKey) (*ecdsa.PrivateKey, *ReleaseOracle, *backends.SimulatedBackend) {
- // Generate a new random account and a funded simulator
- key, _ := crypto.GenerateKey()
- auth := bind.NewKeyedTransactor(key)
-
- alloc := core.GenesisAlloc{auth.From: {Balance: big.NewInt(10000000000)}}
- for _, key := range prefund {
- alloc[crypto.PubkeyToAddress(key.PublicKey)] = core.GenesisAccount{Balance: big.NewInt(10000000000)}
- }
- sim := backends.NewSimulatedBackend(alloc)
-
- // Deploy a version oracle contract, commit and return
- _, _, oracle, err := DeployReleaseOracle(auth, sim, []common.Address{auth.From})
- if err != nil {
- t.Fatalf("Failed to deploy version contract: %v", err)
- }
- sim.Commit()
-
- return key, oracle, sim
-}
-
-// Tests that the version contract can be deployed and the creator is assigned
-// the sole authorized signer.
-func TestContractCreation(t *testing.T) {
- key, oracle, _ := setupReleaseTest(t)
-
- owner := crypto.PubkeyToAddress(key.PublicKey)
- signers, err := oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Failed to retrieve list of signers: %v", err)
- }
- if len(signers) != 1 || signers[0] != owner {
- t.Fatalf("Initial signer mismatch: have %v, want %v", signers, owner)
- }
-}
-
-// Tests that subsequent signers can be promoted, each requiring half plus one
-// votes for it to pass through.
-func TestSignerPromotion(t *testing.T) {
- // Prefund a few accounts to authorize with and create the oracle
- keys := make([]*ecdsa.PrivateKey, 5)
- for i := 0; i < len(keys); i++ {
- keys[i], _ = crypto.GenerateKey()
- }
- key, oracle, sim := setupReleaseTest(t, keys...)
-
- // Gradually promote the keys, until all are authorized
- keys = append([]*ecdsa.PrivateKey{key}, keys...)
- for i := 1; i < len(keys); i++ {
- // Check that no votes are accepted from the not yet authorized user
- if _, err := oracle.Promote(bind.NewKeyedTransactor(keys[i]), common.Address{}); err != nil {
- t.Fatalf("Iter #%d: failed invalid promotion attempt: %v", i, err)
- }
- sim.Commit()
-
- pend, err := oracle.AuthProposals(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve active proposals: %v", i, err)
- }
- if len(pend) != 0 {
- t.Fatalf("Iter #%d: proposal count mismatch: have %d, want 0", i, len(pend))
- }
- // Promote with half - 1 voters and check that the user's not yet authorized
- for j := 0; j < i/2; j++ {
- if _, err = oracle.Promote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- signers, err := oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve list of signers: %v", i, err)
- }
- if len(signers) != i {
- t.Fatalf("Iter #%d: signer count mismatch: have %v, want %v", i, len(signers), i)
- }
- // Promote with the last one needed to pass the promotion
- if _, err = oracle.Promote(bind.NewKeyedTransactor(keys[i/2]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion completion attempt: %v", i, err)
- }
- sim.Commit()
-
- signers, err = oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve list of signers: %v", i, err)
- }
- if len(signers) != i+1 {
- t.Fatalf("Iter #%d: signer count mismatch: have %v, want %v", i, len(signers), i+1)
- }
- }
-}
-
-// Tests that subsequent signers can be demoted, each requiring half plus one
-// votes for it to pass through.
-func TestSignerDemotion(t *testing.T) {
- // Prefund a few accounts to authorize with and create the oracle
- keys := make([]*ecdsa.PrivateKey, 5)
- for i := 0; i < len(keys); i++ {
- keys[i], _ = crypto.GenerateKey()
- }
- key, oracle, sim := setupReleaseTest(t, keys...)
-
- // Authorize all the keys as valid signers and verify cardinality
- keys = append([]*ecdsa.PrivateKey{key}, keys...)
- for i := 1; i < len(keys); i++ {
- for j := 0; j <= i/2; j++ {
- if _, err := oracle.Promote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion attempt: %v", i, err)
- }
- }
- sim.Commit()
- }
- signers, err := oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Failed to retrieve list of signers: %v", err)
- }
- if len(signers) != len(keys) {
- t.Fatalf("Signer count mismatch: have %v, want %v", len(signers), len(keys))
- }
- // Gradually demote users until we run out of signers
- for i := len(keys) - 1; i >= 0; i-- {
- // Demote with half - 1 voters and check that the user's not yet dropped
- for j := 0; j < (i+1)/2; j++ {
- if _, err = oracle.Demote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid demotion attempt: %v", len(keys)-i, err)
- }
- }
- sim.Commit()
-
- signers, err := oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve list of signers: %v", len(keys)-i, err)
- }
- if len(signers) != i+1 {
- t.Fatalf("Iter #%d: signer count mismatch: have %v, want %v", len(keys)-i, len(signers), i+1)
- }
- // Demote with the last one needed to pass the demotion
- if _, err = oracle.Demote(bind.NewKeyedTransactor(keys[(i+1)/2]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid demotion completion attempt: %v", i, err)
- }
- sim.Commit()
-
- signers, err = oracle.Signers(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve list of signers: %v", len(keys)-i, err)
- }
- if len(signers) != i {
- t.Fatalf("Iter #%d: signer count mismatch: have %v, want %v", len(keys)-i, len(signers), i)
- }
- // Check that no votes are accepted from the already demoted users
- if _, err = oracle.Promote(bind.NewKeyedTransactor(keys[i]), common.Address{}); err != nil {
- t.Fatalf("Iter #%d: failed invalid promotion attempt: %v", i, err)
- }
- sim.Commit()
-
- pend, err := oracle.AuthProposals(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve active proposals: %v", i, err)
- }
- if len(pend) != 0 {
- t.Fatalf("Iter #%d: proposal count mismatch: have %d, want 0", i, len(pend))
- }
- }
-}
-
-// Tests that new versions can be released, honouring both voting rights as well
-// as the minimum required vote count.
-func TestVersionRelease(t *testing.T) {
- // Prefund a few accounts to authorize with and create the oracle
- keys := make([]*ecdsa.PrivateKey, 5)
- for i := 0; i < len(keys); i++ {
- keys[i], _ = crypto.GenerateKey()
- }
- key, oracle, sim := setupReleaseTest(t, keys...)
-
- // Track the "current release"
- var (
- verMajor = uint32(0)
- verMinor = uint32(0)
- verPatch = uint32(0)
- verCommit = [20]byte{}
- )
- // Gradually push releases, always requiring more signers than previously
- keys = append([]*ecdsa.PrivateKey{key}, keys...)
- for i := 1; i < len(keys); i++ {
- // Check that no votes are accepted from the not yet authorized user
- if _, err := oracle.Release(bind.NewKeyedTransactor(keys[i]), 0, 0, 0, [20]byte{0}); err != nil {
- t.Fatalf("Iter #%d: failed invalid release attempt: %v", i, err)
- }
- sim.Commit()
-
- prop, err := oracle.ProposedVersion(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve active proposal: %v", i, err)
- }
- if len(prop.Pass) != 0 {
- t.Fatalf("Iter #%d: proposal vote count mismatch: have %d, want 0", i, len(prop.Pass))
- }
- // Authorize the user to make releases
- for j := 0; j <= i/2; j++ {
- if _, err = oracle.Promote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- // Propose release with half voters and check that the release does not yet go through
- for j := 0; j < (i+1)/2; j++ {
- if _, err = oracle.Release(bind.NewKeyedTransactor(keys[j]), uint32(i), uint32(i+1), uint32(i+2), [20]byte{byte(i + 3)}); err != nil {
- t.Fatalf("Iter #%d: failed valid release attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- ver, err := oracle.CurrentVersion(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve current version: %v", i, err)
- }
- if ver.Major != verMajor || ver.Minor != verMinor || ver.Patch != verPatch || ver.Commit != verCommit {
- t.Fatalf("Iter #%d: version mismatch: have %d.%d.%d-%x, want %d.%d.%d-%x", i, ver.Major, ver.Minor, ver.Patch, ver.Commit, verMajor, verMinor, verPatch, verCommit)
- }
-
- // Pass the release and check that it became the next version
- verMajor, verMinor, verPatch, verCommit = uint32(i), uint32(i+1), uint32(i+2), [20]byte{byte(i + 3)}
- if _, err = oracle.Release(bind.NewKeyedTransactor(keys[(i+1)/2]), uint32(i), uint32(i+1), uint32(i+2), [20]byte{byte(i + 3)}); err != nil {
- t.Fatalf("Iter #%d: failed valid release completion attempt: %v", i, err)
- }
- sim.Commit()
-
- ver, err = oracle.CurrentVersion(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve current version: %v", i, err)
- }
- if ver.Major != verMajor || ver.Minor != verMinor || ver.Patch != verPatch || ver.Commit != verCommit {
- t.Fatalf("Iter #%d: version mismatch: have %d.%d.%d-%x, want %d.%d.%d-%x", i, ver.Major, ver.Minor, ver.Patch, ver.Commit, verMajor, verMinor, verPatch, verCommit)
- }
- }
-}
-
-// Tests that proposed versions can be nuked out of existence.
-func TestVersionNuking(t *testing.T) {
- // Prefund a few accounts to authorize with and create the oracle
- keys := make([]*ecdsa.PrivateKey, 9)
- for i := 0; i < len(keys); i++ {
- keys[i], _ = crypto.GenerateKey()
- }
- key, oracle, sim := setupReleaseTest(t, keys...)
-
- // Authorize all the keys as valid signers
- keys = append([]*ecdsa.PrivateKey{key}, keys...)
- for i := 1; i < len(keys); i++ {
- for j := 0; j <= i/2; j++ {
- if _, err := oracle.Promote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion attempt: %v", i, err)
- }
- }
- sim.Commit()
- }
- // Propose releases with more and more keys, always retaining enough users to nuke the proposals
- for i := 1; i < (len(keys)+1)/2; i++ {
- // Propose release with an initial set of signers
- for j := 0; j < i; j++ {
- if _, err := oracle.Release(bind.NewKeyedTransactor(keys[j]), uint32(i), uint32(i+1), uint32(i+2), [20]byte{byte(i + 3)}); err != nil {
- t.Fatalf("Iter #%d: failed valid proposal attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- prop, err := oracle.ProposedVersion(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve active proposal: %v", i, err)
- }
- if len(prop.Pass) != i {
- t.Fatalf("Iter #%d: proposal vote count mismatch: have %d, want %d", i, len(prop.Pass), i)
- }
- // Nuke the release with half+1 voters
- for j := i; j <= i+(len(keys)+1)/2; j++ {
- if _, err := oracle.Nuke(bind.NewKeyedTransactor(keys[j])); err != nil {
- t.Fatalf("Iter #%d: failed valid nuke attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- prop, err = oracle.ProposedVersion(nil)
- if err != nil {
- t.Fatalf("Iter #%d: failed to retrieve active proposal: %v", i, err)
- }
- if len(prop.Pass) != 0 || len(prop.Fail) != 0 {
- t.Fatalf("Iter #%d: proposal vote count mismatch: have %d/%d pass/fail, want 0/0", i, len(prop.Pass), len(prop.Fail))
- }
- }
-}
-
-// Tests that demoting a signer will auto-nuke the currently pending release.
-func TestVersionAutoNuke(t *testing.T) {
- // Prefund a few accounts to authorize with and create the oracle
- keys := make([]*ecdsa.PrivateKey, 5)
- for i := 0; i < len(keys); i++ {
- keys[i], _ = crypto.GenerateKey()
- }
- key, oracle, sim := setupReleaseTest(t, keys...)
-
- // Authorize all the keys as valid signers
- keys = append([]*ecdsa.PrivateKey{key}, keys...)
- for i := 1; i < len(keys); i++ {
- for j := 0; j <= i/2; j++ {
- if _, err := oracle.Promote(bind.NewKeyedTransactor(keys[j]), crypto.PubkeyToAddress(keys[i].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid promotion attempt: %v", i, err)
- }
- }
- sim.Commit()
- }
- // Make a release proposal and check its existence
- if _, err := oracle.Release(bind.NewKeyedTransactor(keys[0]), 1, 2, 3, [20]byte{4}); err != nil {
- t.Fatalf("Failed valid proposal attempt: %v", err)
- }
- sim.Commit()
-
- prop, err := oracle.ProposedVersion(nil)
- if err != nil {
- t.Fatalf("Failed to retrieve active proposal: %v", err)
- }
- if len(prop.Pass) != 1 {
- t.Fatalf("Proposal vote count mismatch: have %d, want 1", len(prop.Pass))
- }
- // Demote a signer and check release proposal deletion
- for i := 0; i <= len(keys)/2; i++ {
- if _, err := oracle.Demote(bind.NewKeyedTransactor(keys[i]), crypto.PubkeyToAddress(keys[len(keys)-1].PublicKey)); err != nil {
- t.Fatalf("Iter #%d: failed valid demotion attempt: %v", i, err)
- }
- }
- sim.Commit()
-
- prop, err = oracle.ProposedVersion(nil)
- if err != nil {
- t.Fatalf("Failed to retrieve active proposal: %v", err)
- }
- if len(prop.Pass) != 0 {
- t.Fatalf("Proposal vote count mismatch: have %d, want 0", len(prop.Pass))
- }
-}
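
All of the deleted tests share the same simulated-backend workflow: fund a fresh key in the genesis allocation, send transactions through a keyed transactor, and call Commit so the pending block becomes the latest state. A condensed sketch of that setup, using only the calls the removed setupReleaseTest itself used:

package main

import (
	"math/big"

	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/accounts/abi/bind/backends"
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	// Fund a fresh key in the genesis allocation of an in-memory simulator.
	key, _ := crypto.GenerateKey()
	auth := bind.NewKeyedTransactor(key)

	sim := backends.NewSimulatedBackend(core.GenesisAlloc{
		auth.From: {Balance: big.NewInt(10000000000)},
	})
	// Deployments and contract calls made through generated bindings stay
	// pending until Commit mines them into a block.
	sim.Commit()
}
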
diff --git a/contracts/release/release.go b/contracts/release/release.go
deleted file mode 100644
index 4442ce3e0..000000000
--- a/contracts/release/release.go
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Package release contains the node service that tracks client releases.
-package release
-
-//go:generate abigen --sol ./contract.sol --pkg release --out ./contract.go
-
-import (
- "context"
- "fmt"
- "strings"
- "time"
-
- "github.com/ethereum/go-ethereum/accounts/abi/bind"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/eth"
- "github.com/ethereum/go-ethereum/internal/ethapi"
- "github.com/ethereum/go-ethereum/les"
- "github.com/ethereum/go-ethereum/log"
- "github.com/ethereum/go-ethereum/node"
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/rpc"
-)
-
-// Interval to check for new releases
-const releaseRecheckInterval = time.Hour
-
-// Config contains the configurations of the release service.
-type Config struct {
- Oracle common.Address // Ethereum address of the release oracle
- Major uint32 // Major version component of the release
- Minor uint32 // Minor version component of the release
- Patch uint32 // Patch version component of the release
- Commit [20]byte // Git SHA1 commit hash of the release
-}
-
-// ReleaseService is a node service that periodically checks the blockchain for
-// newly released versions of the client being run and issues a warning to the
-// user about it.
-type ReleaseService struct {
- config Config // Current version to check releases against
- oracle *ReleaseOracle // Native binding to the release oracle contract
- quit chan chan error // Quit channel to terminate the version checker
-}
-
-// NewReleaseService creates a new service to periodically check for new client
-// releases and notify the user of such.
-func NewReleaseService(ctx *node.ServiceContext, config Config) (node.Service, error) {
- // Retrieve the Ethereum service dependency to access the blockchain
- var apiBackend ethapi.Backend
- var ethereum *eth.Ethereum
- if err := ctx.Service(&ethereum); err == nil {
- apiBackend = ethereum.ApiBackend
- } else {
- var ethereum *les.LightEthereum
- if err := ctx.Service(&ethereum); err == nil {
- apiBackend = ethereum.ApiBackend
- } else {
- return nil, err
- }
- }
- // Construct the release service
- contract, err := NewReleaseOracle(config.Oracle, eth.NewContractBackend(apiBackend))
- if err != nil {
- return nil, err
- }
- return &ReleaseService{
- config: config,
- oracle: contract,
- quit: make(chan chan error),
- }, nil
-}
-
-// Protocols returns an empty list of P2P protocols as the release service does
-// not have a networking component.
-func (r *ReleaseService) Protocols() []p2p.Protocol { return nil }
-
-// APIs returns an empty list of RPC descriptors as the release service does not
-// expose any functionality to the outside world.
-func (r *ReleaseService) APIs() []rpc.API { return nil }
-
-// Start spawns the periodic version checker goroutine
-func (r *ReleaseService) Start(server *p2p.Server) error {
- go r.checker()
- return nil
-}
-
-// Stop terminates all goroutines belonging to the service, blocking until they
-// are all terminated.
-func (r *ReleaseService) Stop() error {
- errc := make(chan error)
- r.quit <- errc
- return <-errc
-}
-
-// checker runs indefinitely in the background, periodically checking for new
-// client releases.
-func (r *ReleaseService) checker() {
- // Set up the timers to periodically check for releases
- timer := time.NewTimer(0) // Immediately fire a version check
- defer timer.Stop()
-
- for {
- select {
- case <-timer.C:
- // Reschedule the timer before continuing
- timer.Reset(releaseRecheckInterval)
- r.checkVersion()
- case errc := <-r.quit:
- errc <- nil
- return
- }
- }
-}
-
-func (r *ReleaseService) checkVersion() {
- // Retrieve the current version, and handle missing contracts gracefully
- ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
- opts := &bind.CallOpts{Context: ctx}
- defer cancel()
-
- version, err := r.oracle.CurrentVersion(opts)
- if err != nil {
- if err == bind.ErrNoCode {
- log.Debug("Release oracle not found", "contract", r.config.Oracle)
- } else if err != les.ErrNoPeers {
- log.Error("Failed to retrieve current release", "err", err)
- }
- return
- }
- // Version was successfully retrieved, notify if newer than ours
- if version.Major > r.config.Major ||
- (version.Major == r.config.Major && version.Minor > r.config.Minor) ||
- (version.Major == r.config.Major && version.Minor == r.config.Minor && version.Patch > r.config.Patch) {
-
- warning := fmt.Sprintf("Client v%d.%d.%d-%x seems older than the latest upstream release v%d.%d.%d-%x",
- r.config.Major, r.config.Minor, r.config.Patch, r.config.Commit[:4], version.Major, version.Minor, version.Patch, version.Commit[:4])
- howtofix := fmt.Sprintf("Please check https://github.com/ethereum/go-ethereum/releases for new releases")
- separator := strings.Repeat("-", len(warning))
-
- log.Warn(separator)
- log.Warn(warning)
- log.Warn(howtofix)
- log.Warn(separator)
- } else {
- log.Debug("Client seems up to date with upstream",
- "local", fmt.Sprintf("v%d.%d.%d-%x", r.config.Major, r.config.Minor, r.config.Patch, r.config.Commit[:4]),
- "upstream", fmt.Sprintf("v%d.%d.%d-%x", version.Major, version.Minor, version.Patch, version.Commit[:4]))
- }
-}
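
The deleted checkVersion compares the on-chain release against the locally built version component by component before logging its upgrade warning. A standalone sketch of that ordering (the function name newer is illustrative):

package main

import "fmt"

// newer reports whether the on-chain release (major2.minor2.patch2) is more
// recent than the local client version, mirroring the comparison the removed
// checkVersion performed.
func newer(major1, minor1, patch1, major2, minor2, patch2 uint32) bool {
	if major2 != major1 {
		return major2 > major1
	}
	if minor2 != minor1 {
		return minor2 > minor1
	}
	return patch2 > patch1
}

func main() {
	fmt.Println(newer(1, 8, 0, 1, 8, 1)) // true: patch bump upstream
	fmt.Println(newer(1, 8, 1, 1, 8, 1)) // false: versions identical
}
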
diff --git a/core/asm/compiler.go b/core/asm/compiler.go
index 318c4e4d8..1b9025a54 100644
--- a/core/asm/compiler.go
+++ b/core/asm/compiler.go
@@ -122,7 +122,7 @@ func (c *Compiler) next() token {
}
// compile line compiles a single line instruction e.g.
-// "push 1", "jump @labal".
+// "push 1", "jump @label".
func (c *Compiler) compileLine() error {
n := c.next()
if n.typ != lineStart {
diff --git a/core/asm/lexer.go b/core/asm/lexer.go
index a34b2cbd8..4d62159e5 100644
--- a/core/asm/lexer.go
+++ b/core/asm/lexer.go
@@ -48,7 +48,7 @@ const (
lineEnd // emitted when a line ends
invalidStatement // any invalid statement
element // any element during element parsing
- label // label is emitted when a labal is found
+ label // label is emitted when a label is found
labelDef // label definition is emitted when a new label is found
number // number is emitted when a number is found
stringValue // stringValue is emitted when a string has been found
@@ -206,7 +206,7 @@ func lexLine(l *lexer) stateFn {
return lexComment
case isSpace(r):
l.ignore()
- case isAlphaNumeric(r) || r == '_':
+ case isLetter(r) || r == '_':
return lexElement
case isNumber(r):
return lexNumber
@@ -278,7 +278,7 @@ func lexElement(l *lexer) stateFn {
return lexLine
}
-func isAlphaNumeric(t rune) bool {
+func isLetter(t rune) bool {
return unicode.IsLetter(t)
}
diff --git a/core/bench_test.go b/core/bench_test.go
index f976331d1..e23f0d19d 100644
--- a/core/bench_test.go
+++ b/core/bench_test.go
@@ -173,7 +173,7 @@ func benchInsertChain(b *testing.B, disk bool, gen func(int, *BlockGen)) {
// Time the insertion of the new chain.
// State and blocks are stored in the same DB.
- chainman, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ chainman, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer chainman.Stop()
b.ReportAllocs()
b.ResetTimer()
@@ -283,7 +283,7 @@ func benchReadChain(b *testing.B, full bool, count uint64) {
if err != nil {
b.Fatalf("error opening database at %v: %v", dir, err)
}
- chain, err := NewBlockChain(db, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
+ chain, err := NewBlockChain(db, nil, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
if err != nil {
b.Fatalf("error creating chain: %v", err)
}
diff --git a/core/block_validator.go b/core/block_validator.go
index 143728bb8..98958809b 100644
--- a/core/block_validator.go
+++ b/core/block_validator.go
@@ -50,11 +50,14 @@ func NewBlockValidator(config *params.ChainConfig, blockchain *BlockChain, engin
// validated at this point.
func (v *BlockValidator) ValidateBody(block *types.Block) error {
// Check whether the block's known, and if not, that it's linkable
- if v.bc.HasBlockAndState(block.Hash()) {
+ if v.bc.HasBlockAndState(block.Hash(), block.NumberU64()) {
return ErrKnownBlock
}
- if !v.bc.HasBlockAndState(block.ParentHash()) {
- return consensus.ErrUnknownAncestor
+ if !v.bc.HasBlockAndState(block.ParentHash(), block.NumberU64()-1) {
+ if !v.bc.HasBlock(block.ParentHash(), block.NumberU64()-1) {
+ return consensus.ErrUnknownAncestor
+ }
+ return consensus.ErrPrunedAncestor
}
// Header validity is known at this point, check the uncles and transactions
header := block.Header()
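
The reworked body validation distinguishes two failure modes for the parent: missing entirely versus present but with its state pruned. A minimal sketch of that classification, assuming the consensus package's ErrUnknownAncestor and the newly referenced ErrPrunedAncestor (the helper name classifyParent is hypothetical):

package example

import "github.com/ethereum/go-ethereum/consensus"

// classifyParent mirrors the decision above: a parent with both body and
// state is linkable, a parent with a body but pruned state is recoverable by
// reprocessing, and a parent with neither is unknown.
func classifyParent(hasBlock, hasState bool) error {
	switch {
	case hasBlock && hasState:
		return nil
	case hasBlock:
		return consensus.ErrPrunedAncestor
	default:
		return consensus.ErrUnknownAncestor
	}
}
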
diff --git a/core/block_validator_test.go b/core/block_validator_test.go
index e668601f3..e334b3c3c 100644
--- a/core/block_validator_test.go
+++ b/core/block_validator_test.go
@@ -42,7 +42,7 @@ func TestHeaderVerification(t *testing.T) {
headers[i] = block.Header()
}
// Run the header checker for blocks one-by-one, checking for both valid and invalid nonces
- chain, _ := NewBlockChain(testdb, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
+ chain, _ := NewBlockChain(testdb, nil, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
defer chain.Stop()
for i := 0; i < len(blocks); i++ {
@@ -106,11 +106,11 @@ func testHeaderConcurrentVerification(t *testing.T, threads int) {
var results <-chan error
if valid {
- chain, _ := NewBlockChain(testdb, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
+ chain, _ := NewBlockChain(testdb, nil, params.TestChainConfig, ethash.NewFaker(), vm.Config{})
_, results = chain.engine.VerifyHeaders(chain, headers, seals)
chain.Stop()
} else {
- chain, _ := NewBlockChain(testdb, params.TestChainConfig, ethash.NewFakeFailer(uint64(len(headers)-1)), vm.Config{})
+ chain, _ := NewBlockChain(testdb, nil, params.TestChainConfig, ethash.NewFakeFailer(uint64(len(headers)-1)), vm.Config{})
_, results = chain.engine.VerifyHeaders(chain, headers, seals)
chain.Stop()
}
@@ -173,7 +173,7 @@ func testHeaderConcurrentAbortion(t *testing.T, threads int) {
defer runtime.GOMAXPROCS(old)
// Start the verifications and immediately abort
- chain, _ := NewBlockChain(testdb, params.TestChainConfig, ethash.NewFakeDelayer(time.Millisecond), vm.Config{})
+ chain, _ := NewBlockChain(testdb, nil, params.TestChainConfig, ethash.NewFakeDelayer(time.Millisecond), vm.Config{})
defer chain.Stop()
abort, results := chain.engine.VerifyHeaders(chain, headers, seals)
diff --git a/core/blockchain.go b/core/blockchain.go
index f886ffe4e..b33eb85a4 100644
--- a/core/blockchain.go
+++ b/core/blockchain.go
@@ -42,10 +42,11 @@ import (
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/trie"
"github.com/hashicorp/golang-lru"
+ "gopkg.in/karalabe/cookiejar.v2/collections/prque"
)
var (
- blockInsertTimer = metrics.NewTimer("chain/inserts")
+ blockInsertTimer = metrics.NewRegisteredTimer("chain/inserts", nil)
ErrNoGenesis = errors.New("Genesis not found in chain")
)
@@ -56,11 +57,20 @@ const (
maxFutureBlocks = 256
maxTimeFutureBlocks = 30
badBlockLimit = 10
+ triesInMemory = 128
// BlockChainVersion ensures that an incompatible database forces a resync from scratch.
BlockChainVersion = 3
)
+// CacheConfig contains the configuration values for the trie caching/pruning
+// that's resident in a blockchain.
+type CacheConfig struct {
+ Disabled bool // Whether to disable trie write caching (archive node)
+ TrieNodeLimit int // Memory limit (MB) at which to flush the current in-memory trie to disk
+ TrieTimeLimit time.Duration // Time limit after which to flush the current in-memory trie to disk
+}
+
// BlockChain represents the canonical chain given a database with a genesis
// block. The Blockchain manages chain imports, reverts, chain reorganisations.
//
@@ -76,10 +86,14 @@ const (
// included in the canonical one whereas GetBlockByNumber always represents the
// canonical chain.
type BlockChain struct {
- config *params.ChainConfig // chain & network configuration
+ chainConfig *params.ChainConfig // Chain & network configuration
+ cacheConfig *CacheConfig // Cache configuration for pruning
+
+ db ethdb.Database // Low level persistent database to store final content in
+ triegc *prque.Prque // Priority queue mapping block numbers to tries to gc
+ gcproc time.Duration // Accumulates canonical block processing for trie dumping
hc *HeaderChain
- chainDb ethdb.Database
rmLogsFeed event.Feed
chainFeed event.Feed
chainSideFeed event.Feed
@@ -93,8 +107,8 @@ type BlockChain struct {
procmu sync.RWMutex // block processor lock
checkpoint int // checkpoint counts towards the new checkpoint
- currentBlock *types.Block // Current head of the block chain
- currentFastBlock *types.Block // Current head of the fast-sync chain (may be above the block chain!)
+ currentBlock atomic.Value // Current head of the block chain
+ currentFastBlock atomic.Value // Current head of the fast-sync chain (may be above the block chain!)
stateCache state.Database // State database to reuse between imports (contains state cache)
bodyCache *lru.Cache // Cache for the most recent block bodies
@@ -119,7 +133,13 @@ type BlockChain struct {
// NewBlockChain returns a fully initialised block chain using information
// available in the database. It initialises the default Ethereum Validator and
// Processor.
-func NewBlockChain(chainDb ethdb.Database, config *params.ChainConfig, engine consensus.Engine, vmConfig vm.Config) (*BlockChain, error) {
+func NewBlockChain(db ethdb.Database, cacheConfig *CacheConfig, chainConfig *params.ChainConfig, engine consensus.Engine, vmConfig vm.Config) (*BlockChain, error) {
+ if cacheConfig == nil {
+ cacheConfig = &CacheConfig{
+ TrieNodeLimit: 256 * 1024 * 1024,
+ TrieTimeLimit: 5 * time.Minute,
+ }
+ }
bodyCache, _ := lru.New(bodyCacheLimit)
bodyRLPCache, _ := lru.New(bodyCacheLimit)
blockCache, _ := lru.New(blockCacheLimit)
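
For orientation, a hedged sketch of how a caller can drive the new cacheConfig parameter (the wrapper newChains is hypothetical): an explicit Disabled configuration keeps the old archive behaviour, while nil selects the pruning defaults filled in above.

package example

import (
	"github.com/ethereum/go-ethereum/consensus"
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/core/vm"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/params"
)

// newChains shows the two cache modes of the reworked constructor.
func newChains(db ethdb.Database, config *params.ChainConfig, engine consensus.Engine) error {
	// Archive node: disable trie caching so every block's state is flushed,
	// matching the behaviour before this change.
	if _, err := core.NewBlockChain(db, &core.CacheConfig{Disabled: true}, config, engine, vm.Config{}); err != nil {
		return err
	}
	// Full node: a nil CacheConfig picks the defaults above, keeping recent
	// tries in memory and flushing on a size or processing-time threshold.
	_, err := core.NewBlockChain(db, nil, config, engine, vm.Config{})
	return err
}
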
@@ -127,9 +147,11 @@ func NewBlockChain(chainDb ethdb.Database, config *params.ChainConfig, engine co
badBlocks, _ := lru.New(badBlockLimit)
bc := &BlockChain{
- config: config,
- chainDb: chainDb,
- stateCache: state.NewDatabase(chainDb),
+ chainConfig: chainConfig,
+ cacheConfig: cacheConfig,
+ db: db,
+ triegc: prque.New(),
+ stateCache: state.NewDatabase(db),
quit: make(chan struct{}),
bodyCache: bodyCache,
bodyRLPCache: bodyRLPCache,
@@ -139,11 +161,11 @@ func NewBlockChain(chainDb ethdb.Database, config *params.ChainConfig, engine co
vmConfig: vmConfig,
badBlocks: badBlocks,
}
- bc.SetValidator(NewBlockValidator(config, bc, engine))
- bc.SetProcessor(NewStateProcessor(config, bc, engine))
+ bc.SetValidator(NewBlockValidator(chainConfig, bc, engine))
+ bc.SetProcessor(NewStateProcessor(chainConfig, bc, engine))
var err error
- bc.hc, err = NewHeaderChain(chainDb, config, engine, bc.getProcInterrupt)
+ bc.hc, err = NewHeaderChain(db, chainConfig, engine, bc.getProcInterrupt)
if err != nil {
return nil, err
}
@@ -180,7 +202,7 @@ func (bc *BlockChain) getProcInterrupt() bool {
// assumes that the chain manager mutex is held.
func (bc *BlockChain) loadLastState() error {
// Restore the last known head block
- head := GetHeadBlockHash(bc.chainDb)
+ head := GetHeadBlockHash(bc.db)
if head == (common.Hash{}) {
// Corrupt or empty database, init from scratch
log.Warn("Empty database, resetting chain")
@@ -196,15 +218,17 @@ func (bc *BlockChain) loadLastState() error {
// Make sure the state associated with the block is available
if _, err := state.New(currentBlock.Root(), bc.stateCache); err != nil {
// Dangling block without a state associated, init from scratch
- log.Warn("Head state missing, resetting chain", "number", currentBlock.Number(), "hash", currentBlock.Hash())
- return bc.Reset()
+ log.Warn("Head state missing, repairing chain", "number", currentBlock.Number(), "hash", currentBlock.Hash())
+ if err := bc.repair(&currentBlock); err != nil {
+ return err
+ }
}
// Everything seems to be fine, set as the head block
- bc.currentBlock = currentBlock
+ bc.currentBlock.Store(currentBlock)
// Restore the last known head header
- currentHeader := bc.currentBlock.Header()
- if head := GetHeadHeaderHash(bc.chainDb); head != (common.Hash{}) {
+ currentHeader := currentBlock.Header()
+ if head := GetHeadHeaderHash(bc.db); head != (common.Hash{}) {
if header := bc.GetHeaderByHash(head); header != nil {
currentHeader = header
}
@@ -212,21 +236,23 @@ func (bc *BlockChain) loadLastState() error {
bc.hc.SetCurrentHeader(currentHeader)
// Restore the last known head fast block
- bc.currentFastBlock = bc.currentBlock
- if head := GetHeadFastBlockHash(bc.chainDb); head != (common.Hash{}) {
+ bc.currentFastBlock.Store(currentBlock)
+ if head := GetHeadFastBlockHash(bc.db); head != (common.Hash{}) {
if block := bc.GetBlockByHash(head); block != nil {
- bc.currentFastBlock = block
+ bc.currentFastBlock.Store(block)
}
}
// Issue a status log for the user
+ currentFastBlock := bc.CurrentFastBlock()
+
headerTd := bc.GetTd(currentHeader.Hash(), currentHeader.Number.Uint64())
- blockTd := bc.GetTd(bc.currentBlock.Hash(), bc.currentBlock.NumberU64())
- fastTd := bc.GetTd(bc.currentFastBlock.Hash(), bc.currentFastBlock.NumberU64())
+ blockTd := bc.GetTd(currentBlock.Hash(), currentBlock.NumberU64())
+ fastTd := bc.GetTd(currentFastBlock.Hash(), currentFastBlock.NumberU64())
log.Info("Loaded most recent local header", "number", currentHeader.Number, "hash", currentHeader.Hash(), "td", headerTd)
- log.Info("Loaded most recent local full block", "number", bc.currentBlock.Number(), "hash", bc.currentBlock.Hash(), "td", blockTd)
- log.Info("Loaded most recent local fast block", "number", bc.currentFastBlock.Number(), "hash", bc.currentFastBlock.Hash(), "td", fastTd)
+ log.Info("Loaded most recent local full block", "number", currentBlock.Number(), "hash", currentBlock.Hash(), "td", blockTd)
+ log.Info("Loaded most recent local fast block", "number", currentFastBlock.Number(), "hash", currentFastBlock.Hash(), "td", fastTd)
return nil
}
@@ -243,7 +269,7 @@ func (bc *BlockChain) SetHead(head uint64) error {
// Rewind the header chain, deleting all block bodies until then
delFn := func(hash common.Hash, num uint64) {
- DeleteBody(bc.chainDb, hash, num)
+ DeleteBody(bc.db, hash, num)
}
bc.hc.SetHead(head, delFn)
currentHeader := bc.hc.CurrentHeader()
@@ -255,30 +281,32 @@ func (bc *BlockChain) SetHead(head uint64) error {
bc.futureBlocks.Purge()
// Rewind the block chain, ensuring we don't end up with a stateless head block
- if bc.currentBlock != nil && currentHeader.Number.Uint64() < bc.currentBlock.NumberU64() {
- bc.currentBlock = bc.GetBlock(currentHeader.Hash(), currentHeader.Number.Uint64())
+ if currentBlock := bc.CurrentBlock(); currentBlock != nil && currentHeader.Number.Uint64() < currentBlock.NumberU64() {
+ bc.currentBlock.Store(bc.GetBlock(currentHeader.Hash(), currentHeader.Number.Uint64()))
}
- if bc.currentBlock != nil {
- if _, err := state.New(bc.currentBlock.Root(), bc.stateCache); err != nil {
+ if currentBlock := bc.CurrentBlock(); currentBlock != nil {
+ if _, err := state.New(currentBlock.Root(), bc.stateCache); err != nil {
// Rewound state missing, rolled back to before pivot, reset to genesis
- bc.currentBlock = nil
+ bc.currentBlock.Store(bc.genesisBlock)
}
}
// Rewind the fast block in a simpleton way to the target head
- if bc.currentFastBlock != nil && currentHeader.Number.Uint64() < bc.currentFastBlock.NumberU64() {
- bc.currentFastBlock = bc.GetBlock(currentHeader.Hash(), currentHeader.Number.Uint64())
+ if currentFastBlock := bc.CurrentFastBlock(); currentFastBlock != nil && currentHeader.Number.Uint64() < currentFastBlock.NumberU64() {
+ bc.currentFastBlock.Store(bc.GetBlock(currentHeader.Hash(), currentHeader.Number.Uint64()))
}
// If either blocks reached nil, reset to the genesis state
- if bc.currentBlock == nil {
- bc.currentBlock = bc.genesisBlock
+ if currentBlock := bc.CurrentBlock(); currentBlock == nil {
+ bc.currentBlock.Store(bc.genesisBlock)
}
- if bc.currentFastBlock == nil {
- bc.currentFastBlock = bc.genesisBlock
+ if currentFastBlock := bc.CurrentFastBlock(); currentFastBlock == nil {
+ bc.currentFastBlock.Store(bc.genesisBlock)
}
- if err := WriteHeadBlockHash(bc.chainDb, bc.currentBlock.Hash()); err != nil {
+ currentBlock := bc.CurrentBlock()
+ currentFastBlock := bc.CurrentFastBlock()
+ if err := WriteHeadBlockHash(bc.db, currentBlock.Hash()); err != nil {
log.Crit("Failed to reset head full block", "err", err)
}
- if err := WriteHeadFastBlockHash(bc.chainDb, bc.currentFastBlock.Hash()); err != nil {
+ if err := WriteHeadFastBlockHash(bc.db, currentFastBlock.Hash()); err != nil {
log.Crit("Failed to reset head fast block", "err", err)
}
return bc.loadLastState()
@@ -292,12 +320,12 @@ func (bc *BlockChain) FastSyncCommitHead(hash common.Hash) error {
if block == nil {
return fmt.Errorf("non existent block [%x…]", hash[:4])
}
- if _, err := trie.NewSecure(block.Root(), bc.chainDb, 0); err != nil {
+ if _, err := trie.NewSecure(block.Root(), bc.stateCache.TrieDB(), 0); err != nil {
return err
}
// If all checks out, manually set the head block
bc.mu.Lock()
- bc.currentBlock = block
+ bc.currentBlock.Store(block)
bc.mu.Unlock()
log.Info("Committed new head block", "number", block.Number(), "hash", hash)
@@ -306,45 +334,19 @@ func (bc *BlockChain) FastSyncCommitHead(hash common.Hash) error {
// GasLimit returns the gas limit of the current HEAD block.
func (bc *BlockChain) GasLimit() uint64 {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
- return bc.currentBlock.GasLimit()
-}
-
-// LastBlockHash return the hash of the HEAD block.
-func (bc *BlockChain) LastBlockHash() common.Hash {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
- return bc.currentBlock.Hash()
+ return bc.CurrentBlock().GasLimit()
}
// CurrentBlock retrieves the current head block of the canonical chain. The
// block is retrieved from the blockchain's internal cache.
func (bc *BlockChain) CurrentBlock() *types.Block {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
- return bc.currentBlock
+ return bc.currentBlock.Load().(*types.Block)
}
// CurrentFastBlock retrieves the current fast-sync head block of the canonical
// chain. The block is retrieved from the blockchain's internal cache.
func (bc *BlockChain) CurrentFastBlock() *types.Block {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
- return bc.currentFastBlock
-}
-
-// Status returns status information about the current chain such as the HEAD Td,
-// the HEAD hash and the hash of the genesis block.
-func (bc *BlockChain) Status() (td *big.Int, currentBlock common.Hash, genesisBlock common.Hash) {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
- return bc.GetTd(bc.currentBlock.Hash(), bc.currentBlock.NumberU64()), bc.currentBlock.Hash(), bc.genesisBlock.Hash()
+ return bc.currentFastBlock.Load().(*types.Block)
}
// SetProcessor sets the processor required for making state modifications.
@@ -404,22 +406,40 @@ func (bc *BlockChain) ResetWithGenesisBlock(genesis *types.Block) error {
if err := bc.hc.WriteTd(genesis.Hash(), genesis.NumberU64(), genesis.Difficulty()); err != nil {
log.Crit("Failed to write genesis block TD", "err", err)
}
- if err := WriteBlock(bc.chainDb, genesis); err != nil {
+ if err := WriteBlock(bc.db, genesis); err != nil {
log.Crit("Failed to write genesis block", "err", err)
}
bc.genesisBlock = genesis
bc.insert(bc.genesisBlock)
- bc.currentBlock = bc.genesisBlock
+ bc.currentBlock.Store(bc.genesisBlock)
bc.hc.SetGenesis(bc.genesisBlock.Header())
bc.hc.SetCurrentHeader(bc.genesisBlock.Header())
- bc.currentFastBlock = bc.genesisBlock
+ bc.currentFastBlock.Store(bc.genesisBlock)
return nil
}
+// repair tries to repair the current blockchain by rolling back the current block
+// until one with associated state is found. This is needed to fix incomplete db
+// writes caused either by crashes/power outages, or simply non-committed tries.
+//
+// This method only rolls back the current block. The current header and current
+// fast block are left intact.
+func (bc *BlockChain) repair(head **types.Block) error {
+ for {
+ // Abort if we've rewound to a head block that does have associated state
+ if _, err := state.New((*head).Root(), bc.stateCache); err == nil {
+ log.Info("Rewound blockchain to past state", "number", (*head).Number(), "hash", (*head).Hash())
+ return nil
+ }
+ // Otherwise rewind one block and recheck state availability there
+ (*head) = bc.GetBlock((*head).ParentHash(), (*head).NumberU64()-1)
+ }
+}
+
// Export writes the active chain to the given writer.
func (bc *BlockChain) Export(w io.Writer) error {
- return bc.ExportN(w, uint64(0), bc.currentBlock.NumberU64())
+ return bc.ExportN(w, uint64(0), bc.CurrentBlock().NumberU64())
}
// ExportN writes a subset of the active chain to the given writer.
@@ -454,25 +474,25 @@ func (bc *BlockChain) ExportN(w io.Writer, first uint64, last uint64) error {
// Note, this function assumes that the `mu` mutex is held!
func (bc *BlockChain) insert(block *types.Block) {
// If the block is on a side chain or an unknown one, force other heads onto it too
- updateHeads := GetCanonicalHash(bc.chainDb, block.NumberU64()) != block.Hash()
+ updateHeads := GetCanonicalHash(bc.db, block.NumberU64()) != block.Hash()
// Add the block to the canonical chain number scheme and mark as the head
- if err := WriteCanonicalHash(bc.chainDb, block.Hash(), block.NumberU64()); err != nil {
+ if err := WriteCanonicalHash(bc.db, block.Hash(), block.NumberU64()); err != nil {
log.Crit("Failed to insert block number", "err", err)
}
- if err := WriteHeadBlockHash(bc.chainDb, block.Hash()); err != nil {
+ if err := WriteHeadBlockHash(bc.db, block.Hash()); err != nil {
log.Crit("Failed to insert head block hash", "err", err)
}
- bc.currentBlock = block
+ bc.currentBlock.Store(block)
// If the block is better than our head or is on a different chain, force update heads
if updateHeads {
bc.hc.SetCurrentHeader(block.Header())
- if err := WriteHeadFastBlockHash(bc.chainDb, block.Hash()); err != nil {
+ if err := WriteHeadFastBlockHash(bc.db, block.Hash()); err != nil {
log.Crit("Failed to insert head fast block hash", "err", err)
}
- bc.currentFastBlock = block
+ bc.currentFastBlock.Store(block)
}
}
@@ -489,7 +509,7 @@ func (bc *BlockChain) GetBody(hash common.Hash) *types.Body {
body := cached.(*types.Body)
return body
}
- body := GetBody(bc.chainDb, hash, bc.hc.GetBlockNumber(hash))
+ body := GetBody(bc.db, hash, bc.hc.GetBlockNumber(hash))
if body == nil {
return nil
}
@@ -505,7 +525,7 @@ func (bc *BlockChain) GetBodyRLP(hash common.Hash) rlp.RawValue {
if cached, ok := bc.bodyRLPCache.Get(hash); ok {
return cached.(rlp.RawValue)
}
- body := GetBodyRLP(bc.chainDb, hash, bc.hc.GetBlockNumber(hash))
+ body := GetBodyRLP(bc.db, hash, bc.hc.GetBlockNumber(hash))
if len(body) == 0 {
return nil
}
@@ -519,21 +539,25 @@ func (bc *BlockChain) HasBlock(hash common.Hash, number uint64) bool {
if bc.blockCache.Contains(hash) {
return true
}
- ok, _ := bc.chainDb.Has(blockBodyKey(hash, number))
+ ok, _ := bc.db.Has(blockBodyKey(hash, number))
return ok
}
+// HasState checks if state trie is fully present in the database or not.
+func (bc *BlockChain) HasState(hash common.Hash) bool {
+ _, err := bc.stateCache.OpenTrie(hash)
+ return err == nil
+}
+
// HasBlockAndState checks if a block and associated state trie is fully present
// in the database or not, caching it if present.
-func (bc *BlockChain) HasBlockAndState(hash common.Hash) bool {
+func (bc *BlockChain) HasBlockAndState(hash common.Hash, number uint64) bool {
// Check first that the block itself is known
- block := bc.GetBlockByHash(hash)
+ block := bc.GetBlock(hash, number)
if block == nil {
return false
}
- // Ensure the associated state is also present
- _, err := bc.stateCache.OpenTrie(block.Root())
- return err == nil
+ return bc.HasState(block.Root())
}
// GetBlock retrieves a block from the database by hash and number,
@@ -543,7 +567,7 @@ func (bc *BlockChain) GetBlock(hash common.Hash, number uint64) *types.Block {
if block, ok := bc.blockCache.Get(hash); ok {
return block.(*types.Block)
}
- block := GetBlock(bc.chainDb, hash, number)
+ block := GetBlock(bc.db, hash, number)
if block == nil {
return nil
}
@@ -560,13 +584,18 @@ func (bc *BlockChain) GetBlockByHash(hash common.Hash) *types.Block {
// GetBlockByNumber retrieves a block from the database by number, caching it
// (associated with its hash) if found.
func (bc *BlockChain) GetBlockByNumber(number uint64) *types.Block {
- hash := GetCanonicalHash(bc.chainDb, number)
+ hash := GetCanonicalHash(bc.db, number)
if hash == (common.Hash{}) {
return nil
}
return bc.GetBlock(hash, number)
}
+// GetReceiptsByHash retrieves the receipts for all transactions in a given block.
+func (bc *BlockChain) GetReceiptsByHash(hash common.Hash) types.Receipts {
+ return GetBlockReceipts(bc.db, hash, GetBlockNumber(bc.db, hash))
+}
+
// GetBlocksFromHash returns the block corresponding to hash and up to n-1 ancestors.
// [deprecated by eth/62]
func (bc *BlockChain) GetBlocksFromHash(hash common.Hash, n int) (blocks []*types.Block) {
@@ -594,6 +623,12 @@ func (bc *BlockChain) GetUnclesInChain(block *types.Block, length int) []*types.
return uncles
}
+// TrieNode retrieves a blob of data associated with a trie node (or code hash)
+// either from ephemeral in-memory cache, or from persistent storage.
+func (bc *BlockChain) TrieNode(hash common.Hash) ([]byte, error) {
+ return bc.stateCache.TrieDB().Node(hash)
+}
+
// Stop stops the blockchain service. If any imports are currently in progress
// it will abort them using the procInterrupt.
func (bc *BlockChain) Stop() {
@@ -606,6 +641,32 @@ func (bc *BlockChain) Stop() {
atomic.StoreInt32(&bc.procInterrupt, 1)
bc.wg.Wait()
+
+ // Ensure the state of a recent block is also stored to disk before exiting.
+ // We're writing three different states to catch different restart scenarios:
+ // - HEAD: So we don't need to reprocess any blocks in the general case
+ // - HEAD-1: So we don't do large reorgs if our HEAD becomes an uncle
+ // - HEAD-127: So we have a hard limit on the number of blocks reexecuted
+ if !bc.cacheConfig.Disabled {
+ triedb := bc.stateCache.TrieDB()
+
+ for _, offset := range []uint64{0, 1, triesInMemory - 1} {
+ if number := bc.CurrentBlock().NumberU64(); number > offset {
+ recent := bc.GetBlockByNumber(number - offset)
+
+ log.Info("Writing cached state to disk", "block", recent.Number(), "hash", recent.Hash(), "root", recent.Root())
+ if err := triedb.Commit(recent.Root(), true); err != nil {
+ log.Error("Failed to commit recent state trie", "err", err)
+ }
+ }
+ }
+ for !bc.triegc.Empty() {
+ triedb.Dereference(bc.triegc.PopItem().(common.Hash), common.Hash{})
+ }
+ if size := triedb.Size(); size != 0 {
+ log.Error("Dangling trie nodes after full cleanup")
+ }
+ }
log.Info("Blockchain manager stopped")
}
@@ -648,22 +709,27 @@ func (bc *BlockChain) Rollback(chain []common.Hash) {
if currentHeader.Hash() == hash {
bc.hc.SetCurrentHeader(bc.GetHeader(currentHeader.ParentHash, currentHeader.Number.Uint64()-1))
}
- if bc.currentFastBlock.Hash() == hash {
- bc.currentFastBlock = bc.GetBlock(bc.currentFastBlock.ParentHash(), bc.currentFastBlock.NumberU64()-1)
- WriteHeadFastBlockHash(bc.chainDb, bc.currentFastBlock.Hash())
+ if currentFastBlock := bc.CurrentFastBlock(); currentFastBlock.Hash() == hash {
+ newFastBlock := bc.GetBlock(currentFastBlock.ParentHash(), currentFastBlock.NumberU64()-1)
+ bc.currentFastBlock.Store(newFastBlock)
+ WriteHeadFastBlockHash(bc.db, newFastBlock.Hash())
}
- if bc.currentBlock.Hash() == hash {
- bc.currentBlock = bc.GetBlock(bc.currentBlock.ParentHash(), bc.currentBlock.NumberU64()-1)
- WriteHeadBlockHash(bc.chainDb, bc.currentBlock.Hash())
+ if currentBlock := bc.CurrentBlock(); currentBlock.Hash() == hash {
+ newBlock := bc.GetBlock(currentBlock.ParentHash(), currentBlock.NumberU64()-1)
+ bc.currentBlock.Store(newBlock)
+ WriteHeadBlockHash(bc.db, newBlock.Hash())
}
}
}
// SetReceiptsData computes all the non-consensus fields of the receipts
-func SetReceiptsData(config *params.ChainConfig, block *types.Block, receipts types.Receipts) {
+func SetReceiptsData(config *params.ChainConfig, block *types.Block, receipts types.Receipts) error {
signer := types.MakeSigner(config, block.Number())
transactions, logIndex := block.Transactions(), uint(0)
+ if len(transactions) != len(receipts) {
+ return errors.New("transaction and receipt count mismatch")
+ }
for j := 0; j < len(receipts); j++ {
// The transaction hash can be retrieved from the transaction itself
@@ -691,6 +757,7 @@ func SetReceiptsData(config *params.ChainConfig, block *types.Block, receipts ty
logIndex++
}
}
+ return nil
}
// InsertReceiptChain attempts to complete an already existing header chain with
@@ -713,7 +780,7 @@ func (bc *BlockChain) InsertReceiptChain(blockChain types.Blocks, receiptChain [
stats = struct{ processed, ignored int32 }{}
start = time.Now()
bytes = 0
- batch = bc.chainDb.NewBatch()
+ batch = bc.db.NewBatch()
)
for i, block := range blockChain {
receipts := receiptChain[i]
@@ -731,7 +798,9 @@ func (bc *BlockChain) InsertReceiptChain(blockChain types.Blocks, receiptChain [
continue
}
// Compute all the non-consensus fields of the receipts
- SetReceiptsData(bc.config, block, receipts)
+ if err := SetReceiptsData(bc.chainConfig, block, receipts); err != nil {
+ return i, fmt.Errorf("failed to set receipts data: %v", err)
+ }
// Write all the data out into the database
if err := WriteBody(batch, block.Hash(), block.NumberU64(), block.Body()); err != nil {
return i, fmt.Errorf("failed to write block body: %v", err)
@@ -749,7 +818,7 @@ func (bc *BlockChain) InsertReceiptChain(blockChain types.Blocks, receiptChain [
return 0, err
}
bytes += batch.ValueSize()
- batch = bc.chainDb.NewBatch()
+ batch.Reset()
}
}
if batch.ValueSize() > 0 {
@@ -763,11 +832,12 @@ func (bc *BlockChain) InsertReceiptChain(blockChain types.Blocks, receiptChain [
bc.mu.Lock()
head := blockChain[len(blockChain)-1]
if td := bc.GetTd(head.Hash(), head.NumberU64()); td != nil { // Rewind may have occurred, skip in that case
- if bc.GetTd(bc.currentFastBlock.Hash(), bc.currentFastBlock.NumberU64()).Cmp(td) < 0 {
- if err := WriteHeadFastBlockHash(bc.chainDb, head.Hash()); err != nil {
+ currentFastBlock := bc.CurrentFastBlock()
+ if bc.GetTd(currentFastBlock.Hash(), currentFastBlock.NumberU64()).Cmp(td) < 0 {
+ if err := WriteHeadFastBlockHash(bc.db, head.Hash()); err != nil {
log.Crit("Failed to update head fast block hash", "err", err)
}
- bc.currentFastBlock = head
+ bc.currentFastBlock.Store(head)
}
}
bc.mu.Unlock()
@@ -775,15 +845,33 @@ func (bc *BlockChain) InsertReceiptChain(blockChain types.Blocks, receiptChain [
log.Info("Imported new block receipts",
"count", stats.processed,
"elapsed", common.PrettyDuration(time.Since(start)),
- "bytes", bytes,
"number", head.Number(),
"hash", head.Hash(),
+ "size", common.StorageSize(bytes),
"ignored", stats.ignored)
return 0, nil
}
-// WriteBlock writes the block to the chain.
-func (bc *BlockChain) WriteBlockAndState(block *types.Block, receipts []*types.Receipt, state *state.StateDB) (status WriteStatus, err error) {
+var lastWrite uint64
+
+// WriteBlockWithoutState writes only the block and its metadata to the database,
+// but does not write any state. This is used to construct competing side forks
+// up to the point where they exceed the canonical total difficulty.
+func (bc *BlockChain) WriteBlockWithoutState(block *types.Block, td *big.Int) (err error) {
+ bc.wg.Add(1)
+ defer bc.wg.Done()
+
+ if err := bc.hc.WriteTd(block.Hash(), block.NumberU64(), td); err != nil {
+ return err
+ }
+ if err := WriteBlock(bc.db, block); err != nil {
+ return err
+ }
+ return nil
+}
+
+// WriteBlockWithState writes the block and all associated state to the database.
+func (bc *BlockChain) WriteBlockWithState(block *types.Block, receipts []*types.Receipt, state *state.StateDB) (status WriteStatus, err error) {
bc.wg.Add(1)
defer bc.wg.Done()
@@ -796,7 +884,8 @@ func (bc *BlockChain) WriteBlockAndState(block *types.Block, receipts []*types.R
bc.mu.Lock()
defer bc.mu.Unlock()
- localTd := bc.GetTd(bc.currentBlock.Hash(), bc.currentBlock.NumberU64())
+ currentBlock := bc.CurrentBlock()
+ localTd := bc.GetTd(currentBlock.Hash(), currentBlock.NumberU64())
externTd := new(big.Int).Add(block.Difficulty(), ptd)
// Irrelevant of the canonical status, write the block itself to the database
@@ -804,29 +893,82 @@ func (bc *BlockChain) WriteBlockAndState(block *types.Block, receipts []*types.R
return NonStatTy, err
}
// Write other block data using a batch.
- batch := bc.chainDb.NewBatch()
+ batch := bc.db.NewBatch()
if err := WriteBlock(batch, block); err != nil {
return NonStatTy, err
}
- if _, err := state.CommitTo(batch, bc.config.IsEIP158(block.Number())); err != nil {
+ root, err := state.Commit(bc.chainConfig.IsEIP158(block.Number()))
+ if err != nil {
return NonStatTy, err
}
+ triedb := bc.stateCache.TrieDB()
+
+ // If we're running an archive node, always flush
+ if bc.cacheConfig.Disabled {
+ if err := triedb.Commit(root, false); err != nil {
+ return NonStatTy, err
+ }
+ } else {
+ // Full but not archive node, do proper garbage collection
+ triedb.Reference(root, common.Hash{}) // metadata reference to keep trie alive
+ bc.triegc.Push(root, -float32(block.NumberU64()))
+
+ if current := block.NumberU64(); current > triesInMemory {
+ // Find the next state trie we need to commit
+ header := bc.GetHeaderByNumber(current - triesInMemory)
+ chosen := header.Number.Uint64()
+
+ // Only write to disk if we exceeded our memory allowance *and* also have at
+ // least a given number of tries gapped.
+ var (
+ size = triedb.Size()
+ limit = common.StorageSize(bc.cacheConfig.TrieNodeLimit) * 1024 * 1024
+ )
+ if size > limit || bc.gcproc > bc.cacheConfig.TrieTimeLimit {
+ // If we're exceeding limits but haven't reached a large enough memory gap,
+ // warn the user that the system is becoming unstable.
+ if chosen < lastWrite+triesInMemory {
+ switch {
+ case size >= 2*limit:
+ log.Warn("State memory usage too high, committing", "size", size, "limit", limit, "optimum", float64(chosen-lastWrite)/triesInMemory)
+ case bc.gcproc >= 2*bc.cacheConfig.TrieTimeLimit:
+ log.Info("State in memory for too long, committing", "time", bc.gcproc, "allowance", bc.cacheConfig.TrieTimeLimit, "optimum", float64(chosen-lastWrite)/triesInMemory)
+ }
+ }
+ // If optimum or critical limits reached, write to disk
+ if chosen >= lastWrite+triesInMemory || size >= 2*limit || bc.gcproc >= 2*bc.cacheConfig.TrieTimeLimit {
+ triedb.Commit(header.Root, true)
+ lastWrite = chosen
+ bc.gcproc = 0
+ }
+ }
+ // Garbage collect anything below our required write retention
+ for !bc.triegc.Empty() {
+ root, number := bc.triegc.Pop()
+ if uint64(-number) > chosen {
+ bc.triegc.Push(root, number)
+ break
+ }
+ triedb.Dereference(root.(common.Hash), common.Hash{})
+ }
+ }
+ }
if err := WriteBlockReceipts(batch, block.Hash(), block.NumberU64(), receipts); err != nil {
return NonStatTy, err
}
-
// If the total difficulty is higher than our known, add it to the canonical chain
// Second clause in the if statement reduces the vulnerability to selfish mining.
// Please refer to http://www.cs.cornell.edu/~ie53/publications/btcProcFC.pdf
reorg := externTd.Cmp(localTd) > 0
+ currentBlock = bc.CurrentBlock()
if !reorg && externTd.Cmp(localTd) == 0 {
// Split same-difficulty blocks by number, then at random
- reorg = block.NumberU64() < bc.currentBlock.NumberU64() || (block.NumberU64() == bc.currentBlock.NumberU64() && mrand.Float64() < 0.5)
+ reorg = block.NumberU64() < currentBlock.NumberU64() || (block.NumberU64() == currentBlock.NumberU64() && mrand.Float64() < 0.5)
}
if reorg {
// Reorganise the chain if the parent is not the head block
- if block.ParentHash() != bc.currentBlock.Hash() {
- if err := bc.reorg(bc.currentBlock, block); err != nil {
+ if block.ParentHash() != currentBlock.Hash() {
+ if err := bc.reorg(currentBlock, block); err != nil {
return NonStatTy, err
}
}
@@ -835,7 +977,7 @@ func (bc *BlockChain) WriteBlockAndState(block *types.Block, receipts []*types.R
return NonStatTy, err
}
// Write hash preimages
- if err := WritePreimages(bc.chainDb, block.NumberU64(), state.Preimages()); err != nil {
+ if err := WritePreimages(bc.db, block.NumberU64(), state.Preimages()); err != nil {
return NonStatTy, err
}
status = CanonStatTy
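
The garbage-collection bookkeeping above pushes each trie root into the priority queue with the negated block number as its priority, so the queue always pops the oldest retained trie first and everything at or below the chosen retention point can be dereferenced before newer tries. A standalone sketch of that ordering, using the same prque package the patch imports:

package main

import (
	"fmt"

	"gopkg.in/karalabe/cookiejar.v2/collections/prque"
)

func main() {
	q := prque.New()
	// Push roots with the negated block number as priority, as the chain does.
	for _, number := range []uint64{100, 101, 102} {
		q.Push(fmt.Sprintf("root-%d", number), -float32(number))
	}
	// Popping yields the oldest block first: root-100, root-101, root-102.
	for !q.Empty() {
		root, priority := q.Pop()
		fmt.Println(root, "block", uint64(-priority))
	}
}
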
@@ -927,31 +1069,65 @@ func (bc *BlockChain) insertChain(chain types.Blocks) (int, []interface{}, []*ty
if err == nil {
err = bc.Validator().ValidateBody(block)
}
- if err != nil {
- if err == ErrKnownBlock {
+ switch {
+ case err == ErrKnownBlock:
+ // Block and state both already known. However, if the current block is below
+ // this number we did a rollback and we should reimport it nonetheless.
+ if bc.CurrentBlock().NumberU64() >= block.NumberU64() {
stats.ignored++
continue
}
- if err == consensus.ErrFutureBlock {
- // Allow up to MaxFuture second in the future blocks. If this limit
- // is exceeded the chain is discarded and processed at a later time
- // if given.
- max := big.NewInt(time.Now().Unix() + maxTimeFutureBlocks)
- if block.Time().Cmp(max) > 0 {
- return i, events, coalescedLogs, fmt.Errorf("future block: %v > %v", block.Time(), max)
+ case err == consensus.ErrFutureBlock:
+ // Allow up to MaxFuture second in the future blocks. If this limit is exceeded
+ // the chain is discarded and processed at a later time if given.
+ max := big.NewInt(time.Now().Unix() + maxTimeFutureBlocks)
+ if block.Time().Cmp(max) > 0 {
+ return i, events, coalescedLogs, fmt.Errorf("future block: %v > %v", block.Time(), max)
+ }
+ bc.futureBlocks.Add(block.Hash(), block)
+ stats.queued++
+ continue
+
+ case err == consensus.ErrUnknownAncestor && bc.futureBlocks.Contains(block.ParentHash()):
+ bc.futureBlocks.Add(block.Hash(), block)
+ stats.queued++
+ continue
+
+ case err == consensus.ErrPrunedAncestor:
+ // Block competing with the canonical chain, store in the db, but don't process
+ // until the competitor TD goes above the canonical TD
+ currentBlock := bc.CurrentBlock()
+ localTd := bc.GetTd(currentBlock.Hash(), currentBlock.NumberU64())
+ externTd := new(big.Int).Add(bc.GetTd(block.ParentHash(), block.NumberU64()-1), block.Difficulty())
+ if localTd.Cmp(externTd) > 0 {
+ if err = bc.WriteBlockWithoutState(block, externTd); err != nil {
+ return i, events, coalescedLogs, err
}
- bc.futureBlocks.Add(block.Hash(), block)
- stats.queued++
continue
}
+ // Competitor chain beat canonical, gather all blocks from the common ancestor
+ var winner []*types.Block
- if err == consensus.ErrUnknownAncestor && bc.futureBlocks.Contains(block.ParentHash()) {
- bc.futureBlocks.Add(block.Hash(), block)
- stats.queued++
- continue
+ parent := bc.GetBlock(block.ParentHash(), block.NumberU64()-1)
+ for !bc.HasState(parent.Root()) {
+ winner = append(winner, parent)
+ parent = bc.GetBlock(parent.ParentHash(), parent.NumberU64()-1)
+ }
+ for j := 0; j < len(winner)/2; j++ {
+ winner[j], winner[len(winner)-1-j] = winner[len(winner)-1-j], winner[j]
+ }
+ // Import all the pruned blocks to make the state available
+ bc.chainmu.Unlock()
+ _, evs, logs, err := bc.insertChain(winner)
+ bc.chainmu.Lock()
+ events, coalescedLogs = evs, logs
+
+ if err != nil {
+ return i, events, coalescedLogs, err
}
+ case err != nil:
bc.reportBlock(block, nil, err)
return i, events, coalescedLogs, err
}
@@ -979,8 +1155,10 @@ func (bc *BlockChain) insertChain(chain types.Blocks) (int, []interface{}, []*ty
bc.reportBlock(block, receipts, err)
return i, events, coalescedLogs, err
}
+ proctime := time.Since(bstart)
+
// Write the block to the chain and get the status.
- status, err := bc.WriteBlockAndState(block, receipts, state)
+ status, err := bc.WriteBlockWithState(block, receipts, state)
if err != nil {
return i, events, coalescedLogs, err
}
@@ -994,6 +1172,9 @@ func (bc *BlockChain) insertChain(chain types.Blocks) (int, []interface{}, []*ty
events = append(events, ChainEvent{block, block.Hash(), logs})
lastCanon = block
+ // Only count canonical blocks for GC processing time
+ bc.gcproc += proctime
+
case SideStatTy:
log.Debug("Inserted forked block", "number", block.Number(), "hash", block.Hash(), "diff", block.Difficulty(), "elapsed",
common.PrettyDuration(time.Since(bstart)), "txs", len(block.Transactions()), "gas", block.GasUsed(), "uncles", len(block.Uncles()))
@@ -1003,10 +1184,10 @@ func (bc *BlockChain) insertChain(chain types.Blocks) (int, []interface{}, []*ty
}
stats.processed++
stats.usedGas += usedGas
- stats.report(chain, i)
+ stats.report(chain, i, bc.stateCache.TrieDB().Size())
}
// Append a single chain head event if we've progressed the chain
- if lastCanon != nil && bc.LastBlockHash() == lastCanon.Hash() {
+ if lastCanon != nil && bc.CurrentBlock().Hash() == lastCanon.Hash() {
events = append(events, ChainHeadEvent{lastCanon})
}
return 0, events, coalescedLogs, nil
@@ -1026,7 +1207,7 @@ const statsReportLimit = 8 * time.Second
// report prints statistics if some number of blocks have been processed
// or more than a few seconds have passed since the last message.
-func (st *insertStats) report(chain []*types.Block, index int) {
+func (st *insertStats) report(chain []*types.Block, index int, cache common.StorageSize) {
// Fetch the timings for the batch
var (
now = mclock.Now()
@@ -1041,7 +1222,7 @@ func (st *insertStats) report(chain []*types.Block, index int) {
context := []interface{}{
"blocks", st.processed, "txs", txs, "mgas", float64(st.usedGas) / 1000000,
"elapsed", common.PrettyDuration(elapsed), "mgasps", float64(st.usedGas) * 1000 / float64(elapsed),
- "number", end.Number(), "hash", end.Hash(),
+ "number", end.Number(), "hash", end.Hash(), "cache", cache,
}
if st.queued > 0 {
context = append(context, []interface{}{"queued", st.queued}...)
@@ -1077,7 +1258,7 @@ func (bc *BlockChain) reorg(oldBlock, newBlock *types.Block) error {
// These logs are later announced as deleted.
collectLogs = func(h common.Hash) {
// Coalesce logs and set 'Removed'.
- receipts := GetBlockReceipts(bc.chainDb, h, bc.hc.GetBlockNumber(h))
+ receipts := GetBlockReceipts(bc.db, h, bc.hc.GetBlockNumber(h))
for _, receipt := range receipts {
for _, log := range receipt.Logs {
del := *log
@@ -1146,7 +1327,7 @@ func (bc *BlockChain) reorg(oldBlock, newBlock *types.Block) error {
// insert the block in the canonical way, re-writing history
bc.insert(newChain[i])
// write lookup entries for hash based transaction/receipt searches
- if err := WriteTxLookupEntries(bc.chainDb, newChain[i]); err != nil {
+ if err := WriteTxLookupEntries(bc.db, newChain[i]); err != nil {
return err
}
addedTxs = append(addedTxs, newChain[i].Transactions()...)
@@ -1156,7 +1337,7 @@ func (bc *BlockChain) reorg(oldBlock, newBlock *types.Block) error {
// When transactions get deleted from the database that means the
// receipts that were created in the fork must also be deleted
for _, tx := range diff {
- DeleteTxLookupEntry(bc.chainDb, tx.Hash())
+ DeleteTxLookupEntry(bc.db, tx.Hash())
}
if len(deletedLogs) > 0 {
go bc.rmLogsFeed.Send(RemovedLogsEvent{deletedLogs})
@@ -1248,7 +1429,7 @@ Hash: 0x%x
Error: %v
##############################
-`, bc.config, block.Number(), block.Hash(), receiptString, err))
+`, bc.chainConfig, block.Number(), block.Hash(), receiptString, err))
}
// InsertHeaderChain attempts to insert the given header chain in to the local
@@ -1306,9 +1487,6 @@ func (bc *BlockChain) writeHeader(header *types.Header) error {
// CurrentHeader retrieves the current head header of the canonical chain. The
// header is retrieved from the HeaderChain's internal cache.
func (bc *BlockChain) CurrentHeader() *types.Header {
- bc.mu.RLock()
- defer bc.mu.RUnlock()
-
return bc.hc.CurrentHeader()
}
@@ -1355,7 +1533,7 @@ func (bc *BlockChain) GetHeaderByNumber(number uint64) *types.Header {
}
// Config retrieves the blockchain's chain configuration.
-func (bc *BlockChain) Config() *params.ChainConfig { return bc.config }
+func (bc *BlockChain) Config() *params.ChainConfig { return bc.chainConfig }
// Engine retrieves the blockchain's consensus engine.
func (bc *BlockChain) Engine() consensus.Engine { return bc.engine }
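
The retention logic added to WriteBlockWithState above leans on one trick: each committed trie root is pushed into a max-priority queue keyed by the negative block number (bc.triegc.Push(root, -float32(number))), so popping always yields the oldest retained root first and everything at or below the retention threshold can be dereferenced in a single pass. Below is a minimal standalone sketch of that window using container/heap; gcItem and gcQueue are hypothetical stand-ins for the prque type the real code uses, and triesInMemory is shrunk from 128 to 4 so the output stays short. It is an illustration, not part of the patch.

package main

import (
	"container/heap"
	"fmt"
)

type gcItem struct {
	root     string  // stand-in for common.Hash
	priority float32 // negative block number, mirroring bc.triegc.Push(root, -float32(number))
}

// gcQueue is a max-priority queue: the largest priority (i.e. the smallest
// block number, since priorities are negated) pops first.
type gcQueue []gcItem

func (q gcQueue) Len() int            { return len(q) }
func (q gcQueue) Less(i, j int) bool  { return q[i].priority > q[j].priority }
func (q gcQueue) Swap(i, j int)       { q[i], q[j] = q[j], q[i] }
func (q *gcQueue) Push(x interface{}) { *q = append(*q, x.(gcItem)) }
func (q *gcQueue) Pop() interface{} {
	old := *q
	item := old[len(old)-1]
	*q = old[:len(old)-1]
	return item
}

func main() {
	const triesInMemory = 4 // geth uses 128; shrunk for the example

	q := &gcQueue{}
	for number := uint64(1); number <= 10; number++ {
		heap.Push(q, gcItem{root: fmt.Sprintf("root-%d", number), priority: -float32(number)})
	}
	// Pretend block 10 was just imported: keep the last triesInMemory tries referenced.
	chosen := uint64(10) - triesInMemory

	for q.Len() > 0 {
		item := heap.Pop(q).(gcItem)
		if uint64(-item.priority) > chosen {
			heap.Push(q, item) // still inside the retention window, keep it
			break
		}
		fmt.Println("dereference", item.root) // triedb.Dereference(root, common.Hash{}) in the real code
	}
	fmt.Println("retained tries:", q.Len()) // the triesInMemory most recent roots remain referenced
}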
diff --git a/core/blockchain_test.go b/core/blockchain_test.go
index cbde3bcd2..748cdc5c7 100644
--- a/core/blockchain_test.go
+++ b/core/blockchain_test.go
@@ -34,26 +34,6 @@ import (
"github.com/ethereum/go-ethereum/params"
)
-// newTestBlockChain creates a blockchain without validation.
-func newTestBlockChain(fake bool) *BlockChain {
- db, _ := ethdb.NewMemDatabase()
- gspec := &Genesis{
- Config: params.TestChainConfig,
- Difficulty: big.NewInt(1),
- }
- gspec.MustCommit(db)
- engine := ethash.NewFullFaker()
- if !fake {
- engine = ethash.NewTester()
- }
- blockchain, err := NewBlockChain(db, gspec.Config, engine, vm.Config{})
- if err != nil {
- panic(err)
- }
- blockchain.SetValidator(bproc{})
- return blockchain
-}
-
// Test fork of length N starting from block i
func testFork(t *testing.T, blockchain *BlockChain, i, n int, full bool, comparator func(td1, td2 *big.Int)) {
// Copy old chain up to #i into a new db
@@ -148,9 +128,9 @@ func testBlockChainImport(chain types.Blocks, blockchain *BlockChain) error {
return err
}
blockchain.mu.Lock()
- WriteTd(blockchain.chainDb, block.Hash(), block.NumberU64(), new(big.Int).Add(block.Difficulty(), blockchain.GetTdByHash(block.ParentHash())))
- WriteBlock(blockchain.chainDb, block)
- statedb.CommitTo(blockchain.chainDb, false)
+ WriteTd(blockchain.db, block.Hash(), block.NumberU64(), new(big.Int).Add(block.Difficulty(), blockchain.GetTdByHash(block.ParentHash())))
+ WriteBlock(blockchain.db, block)
+ statedb.Commit(false)
blockchain.mu.Unlock()
}
return nil
@@ -166,8 +146,8 @@ func testHeaderChainImport(chain []*types.Header, blockchain *BlockChain) error
}
// Manually insert the header into the database, but don't reorganise (allows subsequent testing)
blockchain.mu.Lock()
- WriteTd(blockchain.chainDb, header.Hash(), header.Number.Uint64(), new(big.Int).Add(header.Difficulty, blockchain.GetTdByHash(header.ParentHash)))
- WriteHeader(blockchain.chainDb, header)
+ WriteTd(blockchain.db, header.Hash(), header.Number.Uint64(), new(big.Int).Add(header.Difficulty, blockchain.GetTdByHash(header.ParentHash)))
+ WriteHeader(blockchain.db, header)
blockchain.mu.Unlock()
}
return nil
@@ -183,13 +163,18 @@ func insertChain(done chan bool, blockchain *BlockChain, chain types.Blocks, t *
}
func TestLastBlock(t *testing.T) {
- bchain := newTestBlockChain(false)
- defer bchain.Stop()
+ _, blockchain, err := newCanonical(ethash.NewFaker(), 0, true)
+ if err != nil {
+ t.Fatalf("failed to create pristine chain: %v", err)
+ }
+ defer blockchain.Stop()
- block := makeBlockChain(bchain.CurrentBlock(), 1, ethash.NewFaker(), bchain.chainDb, 0)[0]
- bchain.insert(block)
- if block.Hash() != GetHeadBlockHash(bchain.chainDb) {
- t.Errorf("Write/Get HeadBlockHash failed")
+ blocks := makeBlockChain(blockchain.CurrentBlock(), 1, ethash.NewFullFaker(), blockchain.db, 0)
+ if _, err := blockchain.InsertChain(blocks); err != nil {
+ t.Fatalf("Failed to insert block: %v", err)
+ }
+ if blocks[len(blocks)-1].Hash() != GetHeadBlockHash(blockchain.db) {
+ t.Fatalf("Write/Get HeadBlockHash failed")
}
}
@@ -337,55 +322,13 @@ func testBrokenChain(t *testing.T, full bool) {
}
}
-type bproc struct{}
-
-func (bproc) ValidateBody(*types.Block) error { return nil }
-func (bproc) ValidateState(block, parent *types.Block, state *state.StateDB, receipts types.Receipts, usedGas uint64) error {
- return nil
-}
-func (bproc) Process(block *types.Block, statedb *state.StateDB, cfg vm.Config) (types.Receipts, []*types.Log, uint64, error) {
- return nil, nil, 0, nil
-}
-
-func makeHeaderChainWithDiff(genesis *types.Block, d []int, seed byte) []*types.Header {
- blocks := makeBlockChainWithDiff(genesis, d, seed)
- headers := make([]*types.Header, len(blocks))
- for i, block := range blocks {
- headers[i] = block.Header()
- }
- return headers
-}
-
-func makeBlockChainWithDiff(genesis *types.Block, d []int, seed byte) []*types.Block {
- var chain []*types.Block
- for i, difficulty := range d {
- header := &types.Header{
- Coinbase: common.Address{seed},
- Number: big.NewInt(int64(i + 1)),
- Difficulty: big.NewInt(int64(difficulty)),
- UncleHash: types.EmptyUncleHash,
- TxHash: types.EmptyRootHash,
- ReceiptHash: types.EmptyRootHash,
- Time: big.NewInt(int64(i) + 1),
- }
- if i == 0 {
- header.ParentHash = genesis.Hash()
- } else {
- header.ParentHash = chain[i-1].Hash()
- }
- block := types.NewBlockWithHeader(header)
- chain = append(chain, block)
- }
- return chain
-}
-
// Tests that reorganising a long difficult chain after a short easy one
// overwrites the canonical numbers and links in the database.
func TestReorgLongHeaders(t *testing.T) { testReorgLong(t, false) }
func TestReorgLongBlocks(t *testing.T) { testReorgLong(t, true) }
func testReorgLong(t *testing.T, full bool) {
- testReorg(t, []int{1, 2, 4}, []int{1, 2, 3, 4}, 10, full)
+ testReorg(t, []int64{0, 0, -9}, []int64{0, 0, 0, -9}, 393280, full)
}
// Tests that reorganising a short difficult chain after a long easy one
@@ -394,45 +337,82 @@ func TestReorgShortHeaders(t *testing.T) { testReorgShort(t, false) }
func TestReorgShortBlocks(t *testing.T) { testReorgShort(t, true) }
func testReorgShort(t *testing.T, full bool) {
- testReorg(t, []int{1, 2, 3, 4}, []int{1, 10}, 11, full)
+ // Create a long easy chain vs. a short heavy one. Due to difficulty adjustment
+ // we need a fairly long chain of blocks with different difficulties for a short
+ // one to become heavier than a long one. The 96 is an empirical value.
+ easy := make([]int64, 96)
+ for i := 0; i < len(easy); i++ {
+ easy[i] = 60
+ }
+ diff := make([]int64, len(easy)-1)
+ for i := 0; i < len(diff); i++ {
+ diff[i] = -9
+ }
+ testReorg(t, easy, diff, 12615120, full)
}
-func testReorg(t *testing.T, first, second []int, td int64, full bool) {
- bc := newTestBlockChain(true)
- defer bc.Stop()
+func testReorg(t *testing.T, first, second []int64, td int64, full bool) {
+ // Create a pristine chain and database
+ db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
+ if err != nil {
+ t.Fatalf("failed to create pristine chain: %v", err)
+ }
+ defer blockchain.Stop()
// Insert an easy and a difficult chain afterwards
+ easyBlocks, _ := GenerateChain(params.TestChainConfig, blockchain.CurrentBlock(), ethash.NewFaker(), db, len(first), func(i int, b *BlockGen) {
+ b.OffsetTime(first[i])
+ })
+ diffBlocks, _ := GenerateChain(params.TestChainConfig, blockchain.CurrentBlock(), ethash.NewFaker(), db, len(second), func(i int, b *BlockGen) {
+ b.OffsetTime(second[i])
+ })
if full {
- bc.InsertChain(makeBlockChainWithDiff(bc.genesisBlock, first, 11))
- bc.InsertChain(makeBlockChainWithDiff(bc.genesisBlock, second, 22))
+ if _, err := blockchain.InsertChain(easyBlocks); err != nil {
+ t.Fatalf("failed to insert easy chain: %v", err)
+ }
+ if _, err := blockchain.InsertChain(diffBlocks); err != nil {
+ t.Fatalf("failed to insert difficult chain: %v", err)
+ }
} else {
- bc.InsertHeaderChain(makeHeaderChainWithDiff(bc.genesisBlock, first, 11), 1)
- bc.InsertHeaderChain(makeHeaderChainWithDiff(bc.genesisBlock, second, 22), 1)
+ easyHeaders := make([]*types.Header, len(easyBlocks))
+ for i, block := range easyBlocks {
+ easyHeaders[i] = block.Header()
+ }
+ diffHeaders := make([]*types.Header, len(diffBlocks))
+ for i, block := range diffBlocks {
+ diffHeaders[i] = block.Header()
+ }
+ if _, err := blockchain.InsertHeaderChain(easyHeaders, 1); err != nil {
+ t.Fatalf("failed to insert easy chain: %v", err)
+ }
+ if _, err := blockchain.InsertHeaderChain(diffHeaders, 1); err != nil {
+ t.Fatalf("failed to insert difficult chain: %v", err)
+ }
}
// Check that the chain is valid number and link wise
if full {
- prev := bc.CurrentBlock()
- for block := bc.GetBlockByNumber(bc.CurrentBlock().NumberU64() - 1); block.NumberU64() != 0; prev, block = block, bc.GetBlockByNumber(block.NumberU64()-1) {
+ prev := blockchain.CurrentBlock()
+ for block := blockchain.GetBlockByNumber(blockchain.CurrentBlock().NumberU64() - 1); block.NumberU64() != 0; prev, block = block, blockchain.GetBlockByNumber(block.NumberU64()-1) {
if prev.ParentHash() != block.Hash() {
t.Errorf("parent block hash mismatch: have %x, want %x", prev.ParentHash(), block.Hash())
}
}
} else {
- prev := bc.CurrentHeader()
- for header := bc.GetHeaderByNumber(bc.CurrentHeader().Number.Uint64() - 1); header.Number.Uint64() != 0; prev, header = header, bc.GetHeaderByNumber(header.Number.Uint64()-1) {
+ prev := blockchain.CurrentHeader()
+ for header := blockchain.GetHeaderByNumber(blockchain.CurrentHeader().Number.Uint64() - 1); header.Number.Uint64() != 0; prev, header = header, blockchain.GetHeaderByNumber(header.Number.Uint64()-1) {
if prev.ParentHash != header.Hash() {
t.Errorf("parent header hash mismatch: have %x, want %x", prev.ParentHash, header.Hash())
}
}
}
// Make sure the chain total difficulty is the correct one
- want := new(big.Int).Add(bc.genesisBlock.Difficulty(), big.NewInt(td))
+ want := new(big.Int).Add(blockchain.genesisBlock.Difficulty(), big.NewInt(td))
if full {
- if have := bc.GetTdByHash(bc.CurrentBlock().Hash()); have.Cmp(want) != 0 {
+ if have := blockchain.GetTdByHash(blockchain.CurrentBlock().Hash()); have.Cmp(want) != 0 {
t.Errorf("total difficulty mismatch: have %v, want %v", have, want)
}
} else {
- if have := bc.GetTdByHash(bc.CurrentHeader().Hash()); have.Cmp(want) != 0 {
+ if have := blockchain.GetTdByHash(blockchain.CurrentHeader().Hash()); have.Cmp(want) != 0 {
t.Errorf("total difficulty mismatch: have %v, want %v", have, want)
}
}
@@ -443,19 +423,28 @@ func TestBadHeaderHashes(t *testing.T) { testBadHashes(t, false) }
func TestBadBlockHashes(t *testing.T) { testBadHashes(t, true) }
func testBadHashes(t *testing.T, full bool) {
- bc := newTestBlockChain(true)
- defer bc.Stop()
+ // Create a pristine chain and database
+ db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
+ if err != nil {
+ t.Fatalf("failed to create pristine chain: %v", err)
+ }
+ defer blockchain.Stop()
// Create a chain, ban a hash and try to import
- var err error
if full {
- blocks := makeBlockChainWithDiff(bc.genesisBlock, []int{1, 2, 4}, 10)
+ blocks := makeBlockChain(blockchain.CurrentBlock(), 3, ethash.NewFaker(), db, 10)
+
BadHashes[blocks[2].Header().Hash()] = true
- _, err = bc.InsertChain(blocks)
+ defer func() { delete(BadHashes, blocks[2].Header().Hash()) }()
+
+ _, err = blockchain.InsertChain(blocks)
} else {
- headers := makeHeaderChainWithDiff(bc.genesisBlock, []int{1, 2, 4}, 10)
+ headers := makeHeaderChain(blockchain.CurrentHeader(), 3, ethash.NewFaker(), db, 10)
+
BadHashes[headers[2].Hash()] = true
- _, err = bc.InsertHeaderChain(headers, 1)
+ defer func() { delete(BadHashes, headers[2].Hash()) }()
+
+ _, err = blockchain.InsertHeaderChain(headers, 1)
}
if err != ErrBlacklistedHash {
t.Errorf("error mismatch: have: %v, want: %v", err, ErrBlacklistedHash)
@@ -468,40 +457,41 @@ func TestReorgBadHeaderHashes(t *testing.T) { testReorgBadHashes(t, false) }
func TestReorgBadBlockHashes(t *testing.T) { testReorgBadHashes(t, true) }
func testReorgBadHashes(t *testing.T, full bool) {
- bc := newTestBlockChain(true)
- defer bc.Stop()
-
+ // Create a pristine chain and database
+ db, blockchain, err := newCanonical(ethash.NewFaker(), 0, full)
+ if err != nil {
+ t.Fatalf("failed to create pristine chain: %v", err)
+ }
// Create a chain, import and ban afterwards
- headers := makeHeaderChainWithDiff(bc.genesisBlock, []int{1, 2, 3, 4}, 10)
- blocks := makeBlockChainWithDiff(bc.genesisBlock, []int{1, 2, 3, 4}, 10)
+ headers := makeHeaderChain(blockchain.CurrentHeader(), 4, ethash.NewFaker(), db, 10)
+ blocks := makeBlockChain(blockchain.CurrentBlock(), 4, ethash.NewFaker(), db, 10)
if full {
- if _, err := bc.InsertChain(blocks); err != nil {
- t.Fatalf("failed to import blocks: %v", err)
+ if _, err = blockchain.InsertChain(blocks); err != nil {
+ t.Errorf("failed to import blocks: %v", err)
}
- if bc.CurrentBlock().Hash() != blocks[3].Hash() {
- t.Errorf("last block hash mismatch: have: %x, want %x", bc.CurrentBlock().Hash(), blocks[3].Header().Hash())
+ if blockchain.CurrentBlock().Hash() != blocks[3].Hash() {
+ t.Errorf("last block hash mismatch: have: %x, want %x", blockchain.CurrentBlock().Hash(), blocks[3].Header().Hash())
}
BadHashes[blocks[3].Header().Hash()] = true
defer func() { delete(BadHashes, blocks[3].Header().Hash()) }()
} else {
- if _, err := bc.InsertHeaderChain(headers, 1); err != nil {
- t.Fatalf("failed to import headers: %v", err)
+ if _, err = blockchain.InsertHeaderChain(headers, 1); err != nil {
+ t.Errorf("failed to import headers: %v", err)
}
- if bc.CurrentHeader().Hash() != headers[3].Hash() {
- t.Errorf("last header hash mismatch: have: %x, want %x", bc.CurrentHeader().Hash(), headers[3].Hash())
+ if blockchain.CurrentHeader().Hash() != headers[3].Hash() {
+ t.Errorf("last header hash mismatch: have: %x, want %x", blockchain.CurrentHeader().Hash(), headers[3].Hash())
}
BadHashes[headers[3].Hash()] = true
defer func() { delete(BadHashes, headers[3].Hash()) }()
}
+ blockchain.Stop()
// Create a new BlockChain and check that it rolled back the state.
- ncm, err := NewBlockChain(bc.chainDb, bc.config, ethash.NewFaker(), vm.Config{})
+ ncm, err := NewBlockChain(blockchain.db, nil, blockchain.chainConfig, ethash.NewFaker(), vm.Config{})
if err != nil {
t.Fatalf("failed to create new chain manager: %v", err)
}
- defer ncm.Stop()
-
if full {
if ncm.CurrentBlock().Hash() != blocks[2].Header().Hash() {
t.Errorf("last block hash mismatch: have: %x, want %x", ncm.CurrentBlock().Hash(), blocks[2].Header().Hash())
@@ -514,6 +504,7 @@ func testReorgBadHashes(t *testing.T, full bool) {
t.Errorf("last header hash mismatch: have: %x, want %x", ncm.CurrentHeader().Hash(), headers[2].Hash())
}
}
+ ncm.Stop()
}
// Tests chain insertions in the face of one entity containing an invalid nonce.
@@ -609,7 +600,7 @@ func TestFastVsFullChains(t *testing.T) {
// Import the chain as an archive node for the comparison baseline
archiveDb, _ := ethdb.NewMemDatabase()
gspec.MustCommit(archiveDb)
- archive, _ := NewBlockChain(archiveDb, gspec.Config, ethash.NewFaker(), vm.Config{})
+ archive, _ := NewBlockChain(archiveDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer archive.Stop()
if n, err := archive.InsertChain(blocks); err != nil {
@@ -618,7 +609,7 @@ func TestFastVsFullChains(t *testing.T) {
// Fast import the chain as a non-archive node to test
fastDb, _ := ethdb.NewMemDatabase()
gspec.MustCommit(fastDb)
- fast, _ := NewBlockChain(fastDb, gspec.Config, ethash.NewFaker(), vm.Config{})
+ fast, _ := NewBlockChain(fastDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer fast.Stop()
headers := make([]*types.Header, len(blocks))
@@ -696,7 +687,7 @@ func TestLightVsFastVsFullChainHeads(t *testing.T) {
archiveDb, _ := ethdb.NewMemDatabase()
gspec.MustCommit(archiveDb)
- archive, _ := NewBlockChain(archiveDb, gspec.Config, ethash.NewFaker(), vm.Config{})
+ archive, _ := NewBlockChain(archiveDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
if n, err := archive.InsertChain(blocks); err != nil {
t.Fatalf("failed to process block %d: %v", n, err)
}
@@ -709,7 +700,7 @@ func TestLightVsFastVsFullChainHeads(t *testing.T) {
// Import the chain as a non-archive node and ensure all pointers are updated
fastDb, _ := ethdb.NewMemDatabase()
gspec.MustCommit(fastDb)
- fast, _ := NewBlockChain(fastDb, gspec.Config, ethash.NewFaker(), vm.Config{})
+ fast, _ := NewBlockChain(fastDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer fast.Stop()
headers := make([]*types.Header, len(blocks))
@@ -730,7 +721,7 @@ func TestLightVsFastVsFullChainHeads(t *testing.T) {
lightDb, _ := ethdb.NewMemDatabase()
gspec.MustCommit(lightDb)
- light, _ := NewBlockChain(lightDb, gspec.Config, ethash.NewFaker(), vm.Config{})
+ light, _ := NewBlockChain(lightDb, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
if n, err := light.InsertHeaderChain(headers, 1); err != nil {
t.Fatalf("failed to insert header %d: %v", n, err)
}
@@ -799,7 +790,7 @@ func TestChainTxReorgs(t *testing.T) {
}
})
// Import the chain. This runs all block validation rules.
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
if i, err := blockchain.InsertChain(chain); err != nil {
t.Fatalf("failed to insert original chain[%d]: %v", i, err)
}
@@ -870,7 +861,7 @@ func TestLogReorgs(t *testing.T) {
signer = types.NewEIP155Signer(gspec.Config.ChainId)
)
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer blockchain.Stop()
rmLogsCh := make(chan RemovedLogsEvent)
@@ -917,7 +908,7 @@ func TestReorgSideEvent(t *testing.T) {
signer = types.NewEIP155Signer(gspec.Config.ChainId)
)
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer blockchain.Stop()
chain, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 3, func(i int, gen *BlockGen) {})
@@ -989,10 +980,13 @@ done:
// Tests if the canonical block can be fetched from the database during chain insertion.
func TestCanonicalBlockRetrieval(t *testing.T) {
- bc := newTestBlockChain(true)
- defer bc.Stop()
+ _, blockchain, err := newCanonical(ethash.NewFaker(), 0, true)
+ if err != nil {
+ t.Fatalf("failed to create pristine chain: %v", err)
+ }
+ defer blockchain.Stop()
- chain, _ := GenerateChain(bc.config, bc.genesisBlock, ethash.NewFaker(), bc.chainDb, 10, func(i int, gen *BlockGen) {})
+ chain, _ := GenerateChain(blockchain.chainConfig, blockchain.genesisBlock, ethash.NewFaker(), blockchain.db, 10, func(i int, gen *BlockGen) {})
var pend sync.WaitGroup
pend.Add(len(chain))
@@ -1003,14 +997,14 @@ func TestCanonicalBlockRetrieval(t *testing.T) {
// try to retrieve a block by its canonical hash and see if the block data can be retrieved.
for {
- ch := GetCanonicalHash(bc.chainDb, block.NumberU64())
+ ch := GetCanonicalHash(blockchain.db, block.NumberU64())
if ch == (common.Hash{}) {
continue // busy wait for canonical hash to be written
}
if ch != block.Hash() {
t.Fatalf("unknown canonical hash, want %s, got %s", block.Hash().Hex(), ch.Hex())
}
- fb := GetBlock(bc.chainDb, ch, block.NumberU64())
+ fb := GetBlock(blockchain.db, ch, block.NumberU64())
if fb == nil {
t.Fatalf("unable to retrieve block %d for canonical hash: %s", block.NumberU64(), ch.Hex())
}
@@ -1021,7 +1015,7 @@ func TestCanonicalBlockRetrieval(t *testing.T) {
}
}(chain[i])
- if _, err := bc.InsertChain(types.Blocks{chain[i]}); err != nil {
+ if _, err := blockchain.InsertChain(types.Blocks{chain[i]}); err != nil {
t.Fatalf("failed to insert block %d: %v", i, err)
}
}
@@ -1043,7 +1037,7 @@ func TestEIP155Transition(t *testing.T) {
genesis = gspec.MustCommit(db)
)
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer blockchain.Stop()
blocks, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 4, func(i int, block *BlockGen) {
@@ -1151,7 +1145,7 @@ func TestEIP161AccountRemoval(t *testing.T) {
}
genesis = gspec.MustCommit(db)
)
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer blockchain.Stop()
blocks, _ := GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, 3, func(i int, block *BlockGen) {
@@ -1226,7 +1220,7 @@ func TestBlockchainHeaderchainReorgConsistency(t *testing.T) {
diskdb, _ := ethdb.NewMemDatabase()
new(Genesis).MustCommit(diskdb)
- chain, err := NewBlockChain(diskdb, params.TestChainConfig, engine, vm.Config{})
+ chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{})
if err != nil {
t.Fatalf("failed to create tester chain: %v", err)
}
@@ -1245,3 +1239,102 @@ func TestBlockchainHeaderchainReorgConsistency(t *testing.T) {
}
}
}
+
+// Tests that importing small side forks doesn't leave junk in the trie database
+// cache (which would eventually cause memory issues).
+func TestTrieForkGC(t *testing.T) {
+ // Generate a canonical chain to act as the main dataset
+ engine := ethash.NewFaker()
+
+ db, _ := ethdb.NewMemDatabase()
+ genesis := new(Genesis).MustCommit(db)
+ blocks, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, 2*triesInMemory, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{1}) })
+
+ // Generate a bunch of fork blocks, each side forking from the canonical chain
+ forks := make([]*types.Block, len(blocks))
+ for i := 0; i < len(forks); i++ {
+ parent := genesis
+ if i > 0 {
+ parent = blocks[i-1]
+ }
+ fork, _ := GenerateChain(params.TestChainConfig, parent, engine, db, 1, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{2}) })
+ forks[i] = fork[0]
+ }
+ // Import the canonical and fork chain side by side, forcing the trie cache to cache both
+ diskdb, _ := ethdb.NewMemDatabase()
+ new(Genesis).MustCommit(diskdb)
+
+ chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{})
+ if err != nil {
+ t.Fatalf("failed to create tester chain: %v", err)
+ }
+ for i := 0; i < len(blocks); i++ {
+ if _, err := chain.InsertChain(blocks[i : i+1]); err != nil {
+ t.Fatalf("block %d: failed to insert into chain: %v", i, err)
+ }
+ if _, err := chain.InsertChain(forks[i : i+1]); err != nil {
+ t.Fatalf("fork %d: failed to insert into chain: %v", i, err)
+ }
+ }
+ // Dereference all the recent tries and ensure no past trie is left in memory
+ for i := 0; i < triesInMemory; i++ {
+ chain.stateCache.TrieDB().Dereference(blocks[len(blocks)-1-i].Root(), common.Hash{})
+ chain.stateCache.TrieDB().Dereference(forks[len(blocks)-1-i].Root(), common.Hash{})
+ }
+ if len(chain.stateCache.TrieDB().Nodes()) > 0 {
+ t.Fatalf("stale tries still alive after garbage collection")
+ }
+}
+
+// Tests that doing large reorgs works even if the state associated with the
+// forking point is not available any more.
+func TestLargeReorgTrieGC(t *testing.T) {
+ // Generate the original common chain segment and the two competing forks
+ engine := ethash.NewFaker()
+
+ db, _ := ethdb.NewMemDatabase()
+ genesis := new(Genesis).MustCommit(db)
+
+ shared, _ := GenerateChain(params.TestChainConfig, genesis, engine, db, 64, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{1}) })
+ original, _ := GenerateChain(params.TestChainConfig, shared[len(shared)-1], engine, db, 2*triesInMemory, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{2}) })
+ competitor, _ := GenerateChain(params.TestChainConfig, shared[len(shared)-1], engine, db, 2*triesInMemory+1, func(i int, b *BlockGen) { b.SetCoinbase(common.Address{3}) })
+
+ // Import the shared chain and the original canonical one
+ diskdb, _ := ethdb.NewMemDatabase()
+ new(Genesis).MustCommit(diskdb)
+
+ chain, err := NewBlockChain(diskdb, nil, params.TestChainConfig, engine, vm.Config{})
+ if err != nil {
+ t.Fatalf("failed to create tester chain: %v", err)
+ }
+ if _, err := chain.InsertChain(shared); err != nil {
+ t.Fatalf("failed to insert shared chain: %v", err)
+ }
+ if _, err := chain.InsertChain(original); err != nil {
+ t.Fatalf("failed to insert shared chain: %v", err)
+ }
+ // Ensure that the state associated with the forking point is pruned away
+ if node, _ := chain.stateCache.TrieDB().Node(shared[len(shared)-1].Root()); node != nil {
+ t.Fatalf("common-but-old ancestor still cached")
+ }
+ // Import the competitor chain without exceeding the canonical's TD and ensure
+ // we have not processed any of the blocks (protection against malicious blocks)
+ if _, err := chain.InsertChain(competitor[:len(competitor)-2]); err != nil {
+ t.Fatalf("failed to insert competitor chain: %v", err)
+ }
+ for i, block := range competitor[:len(competitor)-2] {
+ if node, _ := chain.stateCache.TrieDB().Node(block.Root()); node != nil {
+ t.Fatalf("competitor %d: low TD chain became processed", i)
+ }
+ }
+ // Import the head of the competitor chain, triggering the reorg, and ensure that
+ // all the stashed away blocks are successfully reprocessed.
+ if _, err := chain.InsertChain(competitor[len(competitor)-2:]); err != nil {
+ t.Fatalf("failed to finalize competitor chain: %v", err)
+ }
+ for i, block := range competitor[:len(competitor)-triesInMemory] {
+ if node, _ := chain.stateCache.TrieDB().Node(block.Root()); node != nil {
+ t.Fatalf("competitor %d: competing chain state not pruned", i)
+ }
+ }
+}
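
The rewritten reorg tests above no longer hand-craft headers with fixed difficulties; they steer total difficulty through BlockGen.OffsetTime, since the difficulty adjustment makes early-timestamp blocks heavier and late-timestamp blocks lighter (hence the 96-block empirical threshold in testReorgShort). A small sketch of that technique, using only the APIs exercised by these tests; it is an illustration rather than part of the patch.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/consensus/ethash"
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/params"
)

func main() {
	db, _ := ethdb.NewMemDatabase()
	genesis := new(core.Genesis).MustCommit(db)

	// "Easy" chain: every block pretends to arrive 60s late, so difficulty drops.
	easy, _ := core.GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), db, 3,
		func(i int, b *core.BlockGen) { b.OffsetTime(60) })

	// "Heavy" chain: blocks arrive 9s early, so difficulty rises.
	heavy, _ := core.GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), db, 3,
		func(i int, b *core.BlockGen) { b.OffsetTime(-9) })

	// Per-block difficulties diverge immediately; over enough blocks the short
	// early-timestamp chain can overtake a longer late-timestamp one.
	fmt.Println("easy  block 1 difficulty:", easy[0].Difficulty())
	fmt.Println("heavy block 1 difficulty:", heavy[0].Difficulty())
}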
diff --git a/core/chain_indexer.go b/core/chain_indexer.go
index 7fb184aaa..158ed8324 100644
--- a/core/chain_indexer.go
+++ b/core/chain_indexer.go
@@ -203,6 +203,9 @@ func (c *ChainIndexer) eventLoop(currentHeader *types.Header, events chan ChainE
if header.ParentHash != prevHash {
// Reorg to the common ancestor (might not exist in light sync mode, skip reorg then)
// TODO(karalabe, zsfelfoldi): This seems a bit brittle, can we detect this case explicitly?
+
+ // TODO(karalabe): This operation is expensive and might block, causing the event system to
+ // potentially also lock up. We need to do this on a different thread somehow.
if h := FindCommonAncestor(c.chainDb, prevHeader, header); h != nil {
c.newHead(h.Number.Uint64(), true)
}
diff --git a/core/chain_makers.go b/core/chain_makers.go
index 9bd3b2aee..31c9e3fb7 100644
--- a/core/chain_makers.go
+++ b/core/chain_makers.go
@@ -178,7 +178,7 @@ func GenerateChain(config *params.ChainConfig, parent *types.Block, engine conse
genblock := func(i int, parent *types.Block, statedb *state.StateDB) (*types.Block, types.Receipts) {
// TODO(karalabe): This is needed for clique, which depends on multiple blocks.
// It's nonetheless ugly to spin up a blockchain here. Get rid of this somehow.
- blockchain, _ := NewBlockChain(db, config, engine, vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, config, engine, vm.Config{})
defer blockchain.Stop()
b := &BlockGen{i: i, parent: parent, chain: blocks, chainReader: blockchain, statedb: statedb, config: config, engine: engine}
@@ -204,10 +204,13 @@ func GenerateChain(config *params.ChainConfig, parent *types.Block, engine conse
if b.engine != nil {
block, _ := b.engine.Finalize(b.chainReader, b.header, statedb, b.txs, b.uncles, b.receipts)
// Write state changes to db
- _, err := statedb.CommitTo(db, config.IsEIP158(b.header.Number))
+ root, err := statedb.Commit(config.IsEIP158(b.header.Number))
if err != nil {
panic(fmt.Sprintf("state write error: %v", err))
}
+ if err := statedb.Database().TrieDB().Commit(root, false); err != nil {
+ panic(fmt.Sprintf("trie write error: %v", err))
+ }
return block, b.receipts
}
return nil, nil
@@ -258,7 +261,7 @@ func newCanonical(engine consensus.Engine, n int, full bool) (ethdb.Database, *B
db, _ := ethdb.NewMemDatabase()
genesis := gspec.MustCommit(db)
- blockchain, _ := NewBlockChain(db, params.AllEthashProtocolChanges, engine, vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, params.AllEthashProtocolChanges, engine, vm.Config{})
// Create and inject the requested chain
if n == 0 {
return db, blockchain, nil
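
The genblock change above is the new write path in a nutshell: StateDB.Commit only folds dirty nodes into the intermediate in-memory trie database, and nothing touches disk until TrieDB().Commit is called with the root to persist. A minimal sketch of this two-phase commit, assuming the API exactly as introduced in this patch:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/state"
	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	diskdb, _ := ethdb.NewMemDatabase()
	sdb := state.NewDatabase(diskdb) // wraps diskdb in an intermediate trie.Database

	statedb, _ := state.New(common.Hash{}, sdb)
	statedb.AddBalance(common.Address{1}, big.NewInt(1000))

	// Phase 1: commit the state into the in-memory trie database only.
	root, err := statedb.Commit(false)
	if err != nil {
		panic(err)
	}
	// Phase 2: flush the trie rooted at root (and everything it references)
	// from the memory layer down to the disk database.
	if err := sdb.TrieDB().Commit(root, false); err != nil {
		panic(err)
	}
	fmt.Println("persisted state root:", root.Hex())
}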
diff --git a/core/chain_makers_test.go b/core/chain_makers_test.go
index a3b80da29..93be43ddc 100644
--- a/core/chain_makers_test.go
+++ b/core/chain_makers_test.go
@@ -79,7 +79,7 @@ func ExampleGenerateChain() {
})
// Import the chain. This runs all block validation rules.
- blockchain, _ := NewBlockChain(db, gspec.Config, ethash.NewFaker(), vm.Config{})
+ blockchain, _ := NewBlockChain(db, nil, gspec.Config, ethash.NewFaker(), vm.Config{})
defer blockchain.Stop()
if i, err := blockchain.InsertChain(chain); err != nil {
diff --git a/core/dao_test.go b/core/dao_test.go
index 43e2982a5..e0a3e3ff3 100644
--- a/core/dao_test.go
+++ b/core/dao_test.go
@@ -45,7 +45,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
proConf.DAOForkBlock = forkBlock
proConf.DAOForkSupport = true
- proBc, _ := NewBlockChain(proDb, &proConf, ethash.NewFaker(), vm.Config{})
+ proBc, _ := NewBlockChain(proDb, nil, &proConf, ethash.NewFaker(), vm.Config{})
defer proBc.Stop()
conDb, _ := ethdb.NewMemDatabase()
@@ -55,7 +55,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
conConf.DAOForkBlock = forkBlock
conConf.DAOForkSupport = false
- conBc, _ := NewBlockChain(conDb, &conConf, ethash.NewFaker(), vm.Config{})
+ conBc, _ := NewBlockChain(conDb, nil, &conConf, ethash.NewFaker(), vm.Config{})
defer conBc.Stop()
if _, err := proBc.InsertChain(prefix); err != nil {
@@ -69,7 +69,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
// Create a pro-fork block, and try to feed into the no-fork chain
db, _ = ethdb.NewMemDatabase()
gspec.MustCommit(db)
- bc, _ := NewBlockChain(db, &conConf, ethash.NewFaker(), vm.Config{})
+ bc, _ := NewBlockChain(db, nil, &conConf, ethash.NewFaker(), vm.Config{})
defer bc.Stop()
blocks := conBc.GetBlocksFromHash(conBc.CurrentBlock().Hash(), int(conBc.CurrentBlock().NumberU64()))
@@ -79,6 +79,9 @@ func TestDAOForkRangeExtradata(t *testing.T) {
if _, err := bc.InsertChain(blocks); err != nil {
t.Fatalf("failed to import contra-fork chain for expansion: %v", err)
}
+ if err := bc.stateCache.TrieDB().Commit(bc.CurrentHeader().Root, true); err != nil {
+ t.Fatalf("failed to commit contra-fork head for expansion: %v", err)
+ }
blocks, _ = GenerateChain(&proConf, conBc.CurrentBlock(), ethash.NewFaker(), db, 1, func(i int, gen *BlockGen) {})
if _, err := conBc.InsertChain(blocks); err == nil {
t.Fatalf("contra-fork chain accepted pro-fork block: %v", blocks[0])
@@ -91,7 +94,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
// Create a no-fork block, and try to feed into the pro-fork chain
db, _ = ethdb.NewMemDatabase()
gspec.MustCommit(db)
- bc, _ = NewBlockChain(db, &proConf, ethash.NewFaker(), vm.Config{})
+ bc, _ = NewBlockChain(db, nil, &proConf, ethash.NewFaker(), vm.Config{})
defer bc.Stop()
blocks = proBc.GetBlocksFromHash(proBc.CurrentBlock().Hash(), int(proBc.CurrentBlock().NumberU64()))
@@ -101,6 +104,9 @@ func TestDAOForkRangeExtradata(t *testing.T) {
if _, err := bc.InsertChain(blocks); err != nil {
t.Fatalf("failed to import pro-fork chain for expansion: %v", err)
}
+ if err := bc.stateCache.TrieDB().Commit(bc.CurrentHeader().Root, true); err != nil {
+ t.Fatalf("failed to commit pro-fork head for expansion: %v", err)
+ }
blocks, _ = GenerateChain(&conConf, proBc.CurrentBlock(), ethash.NewFaker(), db, 1, func(i int, gen *BlockGen) {})
if _, err := proBc.InsertChain(blocks); err == nil {
t.Fatalf("pro-fork chain accepted contra-fork block: %v", blocks[0])
@@ -114,7 +120,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
// Verify that contra-forkers accept pro-fork extra-datas after forking finishes
db, _ = ethdb.NewMemDatabase()
gspec.MustCommit(db)
- bc, _ := NewBlockChain(db, &conConf, ethash.NewFaker(), vm.Config{})
+ bc, _ := NewBlockChain(db, nil, &conConf, ethash.NewFaker(), vm.Config{})
defer bc.Stop()
blocks := conBc.GetBlocksFromHash(conBc.CurrentBlock().Hash(), int(conBc.CurrentBlock().NumberU64()))
@@ -124,6 +130,9 @@ func TestDAOForkRangeExtradata(t *testing.T) {
if _, err := bc.InsertChain(blocks); err != nil {
t.Fatalf("failed to import contra-fork chain for expansion: %v", err)
}
+ if err := bc.stateCache.TrieDB().Commit(bc.CurrentHeader().Root, true); err != nil {
+ t.Fatalf("failed to commit contra-fork head for expansion: %v", err)
+ }
blocks, _ = GenerateChain(&proConf, conBc.CurrentBlock(), ethash.NewFaker(), db, 1, func(i int, gen *BlockGen) {})
if _, err := conBc.InsertChain(blocks); err != nil {
t.Fatalf("contra-fork chain didn't accept pro-fork block post-fork: %v", err)
@@ -131,7 +140,7 @@ func TestDAOForkRangeExtradata(t *testing.T) {
// Verify that pro-forkers accept contra-fork extra-datas after forking finishes
db, _ = ethdb.NewMemDatabase()
gspec.MustCommit(db)
- bc, _ = NewBlockChain(db, &proConf, ethash.NewFaker(), vm.Config{})
+ bc, _ = NewBlockChain(db, nil, &proConf, ethash.NewFaker(), vm.Config{})
defer bc.Stop()
blocks = proBc.GetBlocksFromHash(proBc.CurrentBlock().Hash(), int(proBc.CurrentBlock().NumberU64()))
@@ -141,6 +150,9 @@ func TestDAOForkRangeExtradata(t *testing.T) {
if _, err := bc.InsertChain(blocks); err != nil {
t.Fatalf("failed to import pro-fork chain for expansion: %v", err)
}
+ if err := bc.stateCache.TrieDB().Commit(bc.CurrentHeader().Root, true); err != nil {
+ t.Fatalf("failed to commit pro-fork head for expansion: %v", err)
+ }
blocks, _ = GenerateChain(&conConf, proBc.CurrentBlock(), ethash.NewFaker(), db, 1, func(i int, gen *BlockGen) {})
if _, err := proBc.InsertChain(blocks); err != nil {
t.Fatalf("pro-fork chain didn't accept contra-fork block post-fork: %v", err)
diff --git a/core/database_util.go b/core/database_util.go
index c6b125dae..8c4698985 100644
--- a/core/database_util.go
+++ b/core/database_util.go
@@ -47,6 +47,7 @@ var (
headHeaderKey = []byte("LastHeader")
headBlockKey = []byte("LastBlock")
headFastKey = []byte("LastFast")
+ trieSyncKey = []byte("TrieSync")
// Data item prefixes (use single byte to avoid mixing data types, avoid `i`).
headerPrefix = []byte("h") // headerPrefix + num (uint64 big endian) + hash -> header
@@ -70,8 +71,8 @@ var (
ErrChainConfigNotFound = errors.New("ChainConfig not found") // general config not found error
- preimageCounter = metrics.NewCounter("db/preimage/total")
- preimageHitCounter = metrics.NewCounter("db/preimage/hits")
+ preimageCounter = metrics.NewRegisteredCounter("db/preimage/total", nil)
+ preimageHitCounter = metrics.NewRegisteredCounter("db/preimage/hits", nil)
)
// TxLookupEntry is a positional metadata to help looking up the data content of
@@ -146,6 +147,16 @@ func GetHeadFastBlockHash(db DatabaseReader) common.Hash {
return common.BytesToHash(data)
}
+// GetTrieSyncProgress retrieves the number of trie nodes fast synced to allow
+// reporting correct numbers across restarts.
+func GetTrieSyncProgress(db DatabaseReader) uint64 {
+ data, _ := db.Get(trieSyncKey)
+ if len(data) == 0 {
+ return 0
+ }
+ return new(big.Int).SetBytes(data).Uint64()
+}
+
// GetHeaderRLP retrieves a block header in its raw RLP database encoding, or nil
// if the header's not found.
func GetHeaderRLP(db DatabaseReader, hash common.Hash, number uint64) rlp.RawValue {
@@ -374,6 +385,15 @@ func WriteHeadFastBlockHash(db ethdb.Putter, hash common.Hash) error {
return nil
}
+// WriteTrieSyncProgress stores the fast sync trie progress counter to support
+// retrieving it across restarts.
+func WriteTrieSyncProgress(db ethdb.Putter, count uint64) error {
+ if err := db.Put(trieSyncKey, new(big.Int).SetUint64(count).Bytes()); err != nil {
+ log.Crit("Failed to store fast sync trie progress", "err", err)
+ }
+ return nil
+}
+
// WriteHeader serializes a block header into the database.
func WriteHeader(db ethdb.Putter, header *types.Header) error {
data, err := rlp.EncodeToBytes(header)
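
A short usage sketch for the trie sync progress accessors added above; it assumes they are called with any ethdb-backed database, as the downloader would, so the reported node count can resume across restarts instead of starting from zero. Illustration only, not part of the patch.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	db, _ := ethdb.NewMemDatabase()

	// A fresh database reports zero fast-synced trie nodes.
	fmt.Println("before:", core.GetTrieSyncProgress(db))

	// Persist the counter after a batch of trie nodes has been fast synced.
	if err := core.WriteTrieSyncProgress(db, 1000000); err != nil {
		panic(err)
	}
	// After a restart the stored value is picked up again.
	fmt.Println("after:", core.GetTrieSyncProgress(db))
}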
diff --git a/core/fees.go b/core/fees.go
deleted file mode 100644
index 83275ea36..000000000
--- a/core/fees.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package core
-
-import (
- "math/big"
-)
-
-var BlockReward = big.NewInt(5e+18)
diff --git a/core/genesis.go b/core/genesis.go
index e22985b80..b6ead2250 100644
--- a/core/genesis.go
+++ b/core/genesis.go
@@ -169,10 +169,9 @@ func SetupGenesisBlock(db ethdb.Database, genesis *Genesis) (*params.ChainConfig
// Check whether the genesis block is already written.
if genesis != nil {
- block, _ := genesis.ToBlock()
- hash := block.Hash()
+ hash := genesis.ToBlock(nil).Hash()
if hash != stored {
- return genesis.Config, block.Hash(), &GenesisMismatchError{stored, hash}
+ return genesis.Config, hash, &GenesisMismatchError{stored, hash}
}
}
@@ -220,9 +219,12 @@ func (g *Genesis) configOrDefault(ghash common.Hash) *params.ChainConfig {
}
}
-// ToBlock creates the block and state of a genesis specification.
-func (g *Genesis) ToBlock() (*types.Block, *state.StateDB) {
- db, _ := ethdb.NewMemDatabase()
+// ToBlock creates the genesis block and writes the state of a genesis specification
+// to the given database (or discards it if nil).
+func (g *Genesis) ToBlock(db ethdb.Database) *types.Block {
+ if db == nil {
+ db, _ = ethdb.NewMemDatabase()
+ }
statedb, _ := state.New(common.Hash{}, state.NewDatabase(db))
for addr, account := range g.Alloc {
statedb.AddBalance(addr, account.Balance)
@@ -252,19 +254,19 @@ func (g *Genesis) ToBlock() (*types.Block, *state.StateDB) {
if g.Difficulty == nil {
head.Difficulty = params.GenesisDifficulty
}
- return types.NewBlock(head, nil, nil, nil), statedb
+ statedb.Commit(false)
+ statedb.Database().TrieDB().Commit(root, true)
+
+ return types.NewBlock(head, nil, nil, nil)
}
// Commit writes the block and state of a genesis specification to the database.
// The block is committed as the canonical head block.
func (g *Genesis) Commit(db ethdb.Database) (*types.Block, error) {
- block, statedb := g.ToBlock()
+ block := g.ToBlock(db)
if block.Number().Sign() != 0 {
return nil, fmt.Errorf("can't commit genesis block with number > 0")
}
- if _, err := statedb.CommitTo(db, false); err != nil {
- return nil, fmt.Errorf("cannot write state: %v", err)
- }
if err := WriteTd(db, block.Hash(), block.NumberU64(), g.Difficulty); err != nil {
return nil, err
}
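
A sketch of how the reworked ToBlock is meant to be used, based only on the signatures in this patch: passing nil computes the block (and thus the genesis hash) against a throwaway in-memory database, while Commit/MustCommit persist both the block and its state trie to a real one.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	// Hash-only use: no database needed, matching genesis.ToBlock(nil).Hash()
	// in SetupGenesisBlock above.
	hash := core.DefaultGenesisBlock().ToBlock(nil).Hash()
	fmt.Println("mainnet genesis hash:", hash.Hex())

	// Full use: write the genesis block plus its state trie into a database.
	db, _ := ethdb.NewMemDatabase()
	block, err := core.DefaultGenesisBlock().Commit(db)
	if err != nil {
		panic(err)
	}
	fmt.Println("committed genesis:", block.Hash().Hex())
}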
diff --git a/core/genesis_test.go b/core/genesis_test.go
index 2fe931b24..052ded699 100644
--- a/core/genesis_test.go
+++ b/core/genesis_test.go
@@ -30,11 +30,11 @@ import (
)
func TestDefaultGenesisBlock(t *testing.T) {
- block, _ := DefaultGenesisBlock().ToBlock()
+ block := DefaultGenesisBlock().ToBlock(nil)
if block.Hash() != params.MainnetGenesisHash {
t.Errorf("wrong mainnet genesis hash, got %v, want %v", block.Hash(), params.MainnetGenesisHash)
}
- block, _ = DefaultTestnetGenesisBlock().ToBlock()
+ block = DefaultTestnetGenesisBlock().ToBlock(nil)
if block.Hash() != params.TestnetGenesisHash {
t.Errorf("wrong testnet genesis hash, got %v, want %v", block.Hash(), params.TestnetGenesisHash)
}
@@ -118,10 +118,12 @@ func TestSetupGenesis(t *testing.T) {
// Commit the 'old' genesis block with Homestead transition at #2.
// Advance to block #4, past the homestead transition block of customg.
genesis := oldcustomg.MustCommit(db)
- bc, _ := NewBlockChain(db, oldcustomg.Config, ethash.NewFullFaker(), vm.Config{})
+
+ bc, _ := NewBlockChain(db, nil, oldcustomg.Config, ethash.NewFullFaker(), vm.Config{})
defer bc.Stop()
- bc.SetValidator(bproc{})
- bc.InsertChain(makeBlockChainWithDiff(genesis, []int{2, 3, 4, 5}, 0))
+
+ blocks, _ := GenerateChain(oldcustomg.Config, genesis, ethash.NewFaker(), db, 4, nil)
+ bc.InsertChain(blocks)
bc.CurrentBlock()
// This should return a compatibility error.
return SetupGenesisBlock(db, &customg)
diff --git a/core/headerchain.go b/core/headerchain.go
index 0e5215293..73cd5d2c4 100644
--- a/core/headerchain.go
+++ b/core/headerchain.go
@@ -32,6 +32,7 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/params"
"github.com/hashicorp/golang-lru"
+ "sync/atomic"
)
const (
@@ -51,8 +52,8 @@ type HeaderChain struct {
chainDb ethdb.Database
genesisHeader *types.Header
- currentHeader *types.Header // Current head of the header chain (may be above the block chain!)
- currentHeaderHash common.Hash // Hash of the current head of the header chain (prevent recomputing all the time)
+ currentHeader atomic.Value // Current head of the header chain (may be above the block chain!)
+ currentHeaderHash common.Hash // Hash of the current head of the header chain (prevent recomputing all the time)
headerCache *lru.Cache // Cache for the most recent block headers
tdCache *lru.Cache // Cache for the most recent block total difficulties
@@ -95,13 +96,13 @@ func NewHeaderChain(chainDb ethdb.Database, config *params.ChainConfig, engine c
return nil, ErrNoGenesis
}
- hc.currentHeader = hc.genesisHeader
+ hc.currentHeader.Store(hc.genesisHeader)
if head := GetHeadBlockHash(chainDb); head != (common.Hash{}) {
if chead := hc.GetHeaderByHash(head); chead != nil {
- hc.currentHeader = chead
+ hc.currentHeader.Store(chead)
}
}
- hc.currentHeaderHash = hc.currentHeader.Hash()
+ hc.currentHeaderHash = hc.CurrentHeader().Hash()
return hc, nil
}
@@ -139,7 +140,7 @@ func (hc *HeaderChain) WriteHeader(header *types.Header) (status WriteStatus, er
if ptd == nil {
return NonStatTy, consensus.ErrUnknownAncestor
}
- localTd := hc.GetTd(hc.currentHeaderHash, hc.currentHeader.Number.Uint64())
+ localTd := hc.GetTd(hc.currentHeaderHash, hc.CurrentHeader().Number.Uint64())
externTd := new(big.Int).Add(header.Difficulty, ptd)
// Irrelevant of the canonical status, write the td and header to the database
@@ -181,7 +182,8 @@ func (hc *HeaderChain) WriteHeader(header *types.Header) (status WriteStatus, er
if err := WriteHeadHeaderHash(hc.chainDb, hash); err != nil {
log.Crit("Failed to insert head header hash", "err", err)
}
- hc.currentHeaderHash, hc.currentHeader = hash, types.CopyHeader(header)
+ hc.currentHeaderHash = hash
+ hc.currentHeader.Store(types.CopyHeader(header))
status = CanonStatTy
} else {
@@ -383,7 +385,7 @@ func (hc *HeaderChain) GetHeaderByNumber(number uint64) *types.Header {
// CurrentHeader retrieves the current head header of the canonical chain. The
// header is retrieved from the HeaderChain's internal cache.
func (hc *HeaderChain) CurrentHeader() *types.Header {
- return hc.currentHeader
+ return hc.currentHeader.Load().(*types.Header)
}
// SetCurrentHeader sets the current head header of the canonical chain.
@@ -391,7 +393,7 @@ func (hc *HeaderChain) SetCurrentHeader(head *types.Header) {
if err := WriteHeadHeaderHash(hc.chainDb, head.Hash()); err != nil {
log.Crit("Failed to insert head header hash", "err", err)
}
- hc.currentHeader = head
+ hc.currentHeader.Store(head)
hc.currentHeaderHash = head.Hash()
}
@@ -403,19 +405,20 @@ type DeleteCallback func(common.Hash, uint64)
// will be deleted and the new one set.
func (hc *HeaderChain) SetHead(head uint64, delFn DeleteCallback) {
height := uint64(0)
- if hc.currentHeader != nil {
- height = hc.currentHeader.Number.Uint64()
+
+ if hdr := hc.CurrentHeader(); hdr != nil {
+ height = hdr.Number.Uint64()
}
- for hc.currentHeader != nil && hc.currentHeader.Number.Uint64() > head {
- hash := hc.currentHeader.Hash()
- num := hc.currentHeader.Number.Uint64()
+ for hdr := hc.CurrentHeader(); hdr != nil && hdr.Number.Uint64() > head; hdr = hc.CurrentHeader() {
+ hash := hdr.Hash()
+ num := hdr.Number.Uint64()
if delFn != nil {
delFn(hash, num)
}
DeleteHeader(hc.chainDb, hash, num)
DeleteTd(hc.chainDb, hash, num)
- hc.currentHeader = hc.GetHeader(hc.currentHeader.ParentHash, hc.currentHeader.Number.Uint64()-1)
+ hc.currentHeader.Store(hc.GetHeader(hdr.ParentHash, hdr.Number.Uint64()-1))
}
// Roll back the canonical chain numbering
for i := height; i > head; i-- {
@@ -426,10 +429,10 @@ func (hc *HeaderChain) SetHead(head uint64, delFn DeleteCallback) {
hc.tdCache.Purge()
hc.numberCache.Purge()
- if hc.currentHeader == nil {
- hc.currentHeader = hc.genesisHeader
+ if hc.CurrentHeader() == nil {
+ hc.currentHeader.Store(hc.genesisHeader)
}
- hc.currentHeaderHash = hc.currentHeader.Hash()
+ hc.currentHeaderHash = hc.CurrentHeader().Hash()
if err := WriteHeadHeaderHash(hc.chainDb, hc.currentHeaderHash); err != nil {
log.Crit("Failed to reset head header hash", "err", err)
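
The switch to atomic.Value above is what allows BlockChain.CurrentHeader to drop its read lock earlier in this diff: readers load the head without taking the chain mutex, and the writer stores a fresh header copy on every update. The standalone sketch below shows the pattern; miniHeaderChain and the tiny header struct are hypothetical stand-ins, not types from this patch.

package main

import (
	"fmt"
	"sync/atomic"
)

type header struct {
	Number uint64
	Hash   string
}

type miniHeaderChain struct {
	currentHeader atomic.Value // always holds a *header, never mutated in place
}

// CurrentHeader is safe to call from any goroutine without locking.
func (hc *miniHeaderChain) CurrentHeader() *header {
	return hc.currentHeader.Load().(*header)
}

// SetCurrentHeader stores a new head; callers must not mutate it afterwards.
func (hc *miniHeaderChain) SetCurrentHeader(h *header) {
	hc.currentHeader.Store(h)
}

func main() {
	hc := new(miniHeaderChain)
	hc.SetCurrentHeader(&header{Number: 0, Hash: "genesis"})

	done := make(chan struct{})
	go func() { // concurrent reader, like RPC handlers calling CurrentHeader
		for i := 0; i < 1000; i++ {
			_ = hc.CurrentHeader().Number
		}
		close(done)
	}()
	for i := uint64(1); i <= 1000; i++ { // writer, like WriteHeader/SetHead
		hc.SetCurrentHeader(&header{Number: i, Hash: fmt.Sprintf("block-%d", i)})
	}
	<-done
	fmt.Println("head:", hc.CurrentHeader().Number)
}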
diff --git a/core/state/database.go b/core/state/database.go
index 946625e76..36926ec69 100644
--- a/core/state/database.go
+++ b/core/state/database.go
@@ -40,16 +40,23 @@ const (
// Database wraps access to tries and contract code.
type Database interface {
- // Accessing tries:
// OpenTrie opens the main account trie.
- // OpenStorageTrie opens the storage trie of an account.
OpenTrie(root common.Hash) (Trie, error)
+
+ // OpenStorageTrie opens the storage trie of an account.
OpenStorageTrie(addrHash, root common.Hash) (Trie, error)
- // Accessing contract code:
- ContractCode(addrHash, codeHash common.Hash) ([]byte, error)
- ContractCodeSize(addrHash, codeHash common.Hash) (int, error)
+
// CopyTrie returns an independent copy of the given trie.
CopyTrie(Trie) Trie
+
+ // ContractCode retrieves a particular contract's code.
+ ContractCode(addrHash, codeHash common.Hash) ([]byte, error)
+
+ // ContractCodeSize retrieves a particular contract's code size.
+ ContractCodeSize(addrHash, codeHash common.Hash) (int, error)
+
+ // TrieDB retrieves the low level trie database used for data storage.
+ TrieDB() *trie.Database
}
// Trie is a Ethereum Merkle Trie.
@@ -57,26 +64,33 @@ type Trie interface {
TryGet(key []byte) ([]byte, error)
TryUpdate(key, value []byte) error
TryDelete(key []byte) error
- CommitTo(trie.DatabaseWriter) (common.Hash, error)
+ Commit(onleaf trie.LeafCallback) (common.Hash, error)
Hash() common.Hash
NodeIterator(startKey []byte) trie.NodeIterator
GetKey([]byte) []byte // TODO(fjl): remove this when SecureTrie is removed
+ Prove(key []byte, fromLevel uint, proofDb ethdb.Putter) error
}
// NewDatabase creates a backing store for state. The returned database is safe for
-// concurrent use and retains cached trie nodes in memory.
+// concurrent use and retains cached trie nodes in memory. It also creates an
+// intermediate in-memory trie-node database between the low level storage layer
+// and the high level trie abstraction.
func NewDatabase(db ethdb.Database) Database {
csc, _ := lru.New(codeSizeCacheSize)
- return &cachingDB{db: db, codeSizeCache: csc}
+ return &cachingDB{
+ db: trie.NewDatabase(db),
+ codeSizeCache: csc,
+ }
}
type cachingDB struct {
- db ethdb.Database
+ db *trie.Database
mu sync.Mutex
pastTries []*trie.SecureTrie
codeSizeCache *lru.Cache
}
+// OpenTrie opens the main account trie.
func (db *cachingDB) OpenTrie(root common.Hash) (Trie, error) {
db.mu.Lock()
defer db.mu.Unlock()
@@ -105,10 +119,12 @@ func (db *cachingDB) pushTrie(t *trie.SecureTrie) {
}
}
+// OpenStorageTrie opens the storage trie of an account.
func (db *cachingDB) OpenStorageTrie(addrHash, root common.Hash) (Trie, error) {
return trie.NewSecure(root, db.db, 0)
}
+// CopyTrie returns an independent copy of the given trie.
func (db *cachingDB) CopyTrie(t Trie) Trie {
switch t := t.(type) {
case cachedTrie:
@@ -120,14 +136,16 @@ func (db *cachingDB) CopyTrie(t Trie) Trie {
}
}
+// ContractCode retrieves a particular contract's code.
func (db *cachingDB) ContractCode(addrHash, codeHash common.Hash) ([]byte, error) {
- code, err := db.db.Get(codeHash[:])
+ code, err := db.db.Node(codeHash)
if err == nil {
db.codeSizeCache.Add(codeHash, len(code))
}
return code, err
}
+// ContractCodeSize retrieves a particular contract's code size.
func (db *cachingDB) ContractCodeSize(addrHash, codeHash common.Hash) (int, error) {
if cached, ok := db.codeSizeCache.Get(codeHash); ok {
return cached.(int), nil
@@ -139,16 +157,25 @@ func (db *cachingDB) ContractCodeSize(addrHash, codeHash common.Hash) (int, erro
return len(code), err
}
+// TrieDB retrieves any intermediate trie-node caching layer.
+func (db *cachingDB) TrieDB() *trie.Database {
+ return db.db
+}
+
// cachedTrie inserts its trie into a cachingDB on commit.
type cachedTrie struct {
*trie.SecureTrie
db *cachingDB
}
-func (m cachedTrie) CommitTo(dbw trie.DatabaseWriter) (common.Hash, error) {
- root, err := m.SecureTrie.CommitTo(dbw)
+func (m cachedTrie) Commit(onleaf trie.LeafCallback) (common.Hash, error) {
+ root, err := m.SecureTrie.Commit(onleaf)
if err == nil {
m.db.pushTrie(m.SecureTrie)
}
return root, err
}
+
+func (m cachedTrie) Prove(key []byte, fromLevel uint, proofDb ethdb.Putter) error {
+ return m.SecureTrie.Prove(key, fromLevel, proofDb)
+}
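
What the new TrieDB accessor exposes, sketched against the interface as defined above: a committed state lives only in the intermediate trie.Database, where its nodes can be enumerated and resolved, and it stays out of the disk layer until someone flushes the root (as in the genesis and chain-maker changes earlier in this diff). This is the same distinction the garbage-collection tests in core/blockchain_test.go probe; the snippet is an illustration, not part of the patch.

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/state"
	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	diskdb, _ := ethdb.NewMemDatabase()
	sdb := state.NewDatabase(diskdb) // cachingDB wrapping a trie.Database

	statedb, _ := state.New(common.Hash{}, sdb)
	statedb.AddBalance(common.Address{0xaa}, big.NewInt(1))

	if _, err := statedb.Commit(false); err != nil { // nodes land in the memory layer only
		panic(err)
	}
	// Every node reported by the memory layer can be resolved from it...
	for _, hash := range sdb.TrieDB().Nodes() {
		if _, err := sdb.TrieDB().Node(hash); err != nil {
			panic(err)
		}
	}
	// ...while the disk layer underneath is still untouched.
	fmt.Println("cached nodes:", len(sdb.TrieDB().Nodes()))
	fmt.Println("disk keys:   ", len(diskdb.Keys()))
}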
diff --git a/core/state/iterator_test.go b/core/state/iterator_test.go
index ff66ba7a9..9e46c851c 100644
--- a/core/state/iterator_test.go
+++ b/core/state/iterator_test.go
@@ -21,12 +21,13 @@ import (
"testing"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/ethdb"
)
// Tests that the node iterator indeed walks over the entire database contents.
func TestNodeIteratorCoverage(t *testing.T) {
// Create some arbitrary test state to iterate
- db, mem, root, _ := makeTestState()
+ db, root, _ := makeTestState()
state, err := New(root, db)
if err != nil {
@@ -39,14 +40,18 @@ func TestNodeIteratorCoverage(t *testing.T) {
hashes[it.Hash] = struct{}{}
}
}
-
- // Cross check the hashes and the database itself
+ // Cross check the iterated hashes and the database/nodepool content
for hash := range hashes {
- if _, err := mem.Get(hash.Bytes()); err != nil {
- t.Errorf("failed to retrieve reported node %x: %v", hash, err)
+ if _, err := db.TrieDB().Node(hash); err != nil {
+ t.Errorf("failed to retrieve reported node %x", hash)
+ }
+ }
+ for _, hash := range db.TrieDB().Nodes() {
+ if _, ok := hashes[hash]; !ok {
+ t.Errorf("state entry not reported %x", hash)
}
}
- for _, key := range mem.Keys() {
+ for _, key := range db.TrieDB().DiskDB().(*ethdb.MemDatabase).Keys() {
if bytes.HasPrefix(key, []byte("secure-key-")) {
continue
}
diff --git a/core/state/state_object.go b/core/state/state_object.go
index b2378c69c..b2112bfae 100644
--- a/core/state/state_object.go
+++ b/core/state/state_object.go
@@ -25,7 +25,6 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/rlp"
- "github.com/ethereum/go-ethereum/trie"
)
var emptyCodeHash = crypto.Keccak256(nil)
@@ -238,12 +237,12 @@ func (self *stateObject) updateRoot(db Database) {
// CommitTrie the storage trie of the object to dwb.
// This updates the trie root.
-func (self *stateObject) CommitTrie(db Database, dbw trie.DatabaseWriter) error {
+func (self *stateObject) CommitTrie(db Database) error {
self.updateTrie(db)
if self.dbErr != nil {
return self.dbErr
}
- root, err := self.trie.CommitTo(dbw)
+ root, err := self.trie.Commit(nil)
if err == nil {
self.data.Root = root
}
diff --git a/core/state/state_test.go b/core/state/state_test.go
index bbae3685b..6d42d63d8 100644
--- a/core/state/state_test.go
+++ b/core/state/state_test.go
@@ -48,7 +48,7 @@ func (s *StateSuite) TestDump(c *checker.C) {
// write some of them to the trie
s.state.updateStateObject(obj1)
s.state.updateStateObject(obj2)
- s.state.CommitTo(s.db, false)
+ s.state.Commit(false)
// check that dump contains the state objects that are in trie
got := string(s.state.Dump())
@@ -97,7 +97,7 @@ func (s *StateSuite) TestNull(c *checker.C) {
//value := common.FromHex("0x823140710bf13990e4500136726d8b55")
var value common.Hash
s.state.SetState(address, common.Hash{}, value)
- s.state.CommitTo(s.db, false)
+ s.state.Commit(false)
value = s.state.GetState(address, common.Hash{})
if !common.EmptyHash(value) {
c.Errorf("expected empty hash. got %x", value)
@@ -155,7 +155,7 @@ func TestSnapshot2(t *testing.T) {
so0.deleted = false
state.setStateObject(so0)
- root, _ := state.CommitTo(db, false)
+ root, _ := state.Commit(false)
state.Reset(root)
// and one with deleted == true
diff --git a/core/state/statedb.go b/core/state/statedb.go
index 8e29104d5..776693e24 100644
--- a/core/state/statedb.go
+++ b/core/state/statedb.go
@@ -36,6 +36,14 @@ type revision struct {
journalIndex int
}
+var (
+ // emptyState is the known hash of an empty state trie entry.
+ emptyState = crypto.Keccak256Hash(nil)
+
+ // emptyCode is the known hash of the empty EVM bytecode.
+ emptyCode = crypto.Keccak256Hash(nil)
+)
+
// StateDBs within the ethereum protocol are used to store anything
// within the merkle trie. StateDBs take care of caching and storing
// nested states. It's the general query interface to retrieve:
@@ -235,6 +243,11 @@ func (self *StateDB) GetState(a common.Address, b common.Hash) common.Hash {
return common.Hash{}
}
+// Database retrieves the low level database supporting the lower level trie ops.
+func (self *StateDB) Database() Database {
+ return self.db
+}
+
// StorageTrie returns the storage trie of an account.
// The return value is a copy and is nil for non-existent accounts.
func (self *StateDB) StorageTrie(a common.Address) Trie {
@@ -568,8 +581,8 @@ func (s *StateDB) clearJournalAndRefund() {
s.refund = 0
}
-// CommitTo writes the state to the given database.
-func (s *StateDB) CommitTo(dbw trie.DatabaseWriter, deleteEmptyObjects bool) (root common.Hash, err error) {
+// Commit writes the state to the underlying in-memory trie database.
+func (s *StateDB) Commit(deleteEmptyObjects bool) (root common.Hash, err error) {
defer s.clearJournalAndRefund()
// Commit objects to the trie.
@@ -583,13 +596,11 @@ func (s *StateDB) CommitTo(dbw trie.DatabaseWriter, deleteEmptyObjects bool) (ro
case isDirty:
// Write any contract code associated with the state object
if stateObject.code != nil && stateObject.dirtyCode {
- if err := dbw.Put(stateObject.CodeHash(), stateObject.code); err != nil {
- return common.Hash{}, err
- }
+ s.db.TrieDB().Insert(common.BytesToHash(stateObject.CodeHash()), stateObject.code)
stateObject.dirtyCode = false
}
// Write any storage changes in the state object to its storage trie.
- if err := stateObject.CommitTrie(s.db, dbw); err != nil {
+ if err := stateObject.CommitTrie(s.db); err != nil {
return common.Hash{}, err
}
// Update the object in the main account trie.
@@ -598,7 +609,20 @@ func (s *StateDB) CommitTo(dbw trie.DatabaseWriter, deleteEmptyObjects bool) (ro
delete(s.stateObjectsDirty, addr)
}
// Write trie changes.
- root, err = s.trie.CommitTo(dbw)
+ root, err = s.trie.Commit(func(leaf []byte, parent common.Hash) error {
+ var account Account
+ if err := rlp.DecodeBytes(leaf, &account); err != nil {
+ return nil
+ }
+ if account.Root != emptyState {
+ s.db.TrieDB().Reference(account.Root, parent)
+ }
+ code := common.BytesToHash(account.CodeHash)
+ if code != emptyCode {
+ s.db.TrieDB().Reference(code, parent)
+ }
+ return nil
+ })
log.Debug("Trie cache stats after commit", "misses", trie.CacheMisses(), "unloads", trie.CacheUnloads())
return root, err
}
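For reference, the onleaf callback is what ties the reference-counted trie database together: every account leaf pins its storage trie root and its code hash to the account-trie node containing it. A hedged sketch of that wiring in isolation, where triedb and stateTrie are assumed to be a *trie.Database and the account trie being committed, and the four-field account layout mirrors the one decoded above:

    // Hedged sketch of the commit-time leaf callback used by StateDB.Commit.
    onleaf := func(leaf []byte, parent common.Hash) error {
        var account struct {
            Nonce    uint64
            Balance  *big.Int
            Root     common.Hash
            CodeHash []byte
        }
        if err := rlp.DecodeBytes(leaf, &account); err != nil {
            return nil // not an account leaf, nothing to reference
        }
        if account.Root != emptyState {
            triedb.Reference(account.Root, parent) // keep the storage trie reachable
        }
        if code := common.BytesToHash(account.CodeHash); code != emptyCode {
            triedb.Reference(code, parent) // keep the contract code reachable
        }
        return nil
    }
    root, err := stateTrie.Commit(onleaf)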
diff --git a/core/state/statedb_test.go b/core/state/statedb_test.go
index 5c80e3aa5..d9e3d9b79 100644
--- a/core/state/statedb_test.go
+++ b/core/state/statedb_test.go
@@ -97,10 +97,10 @@ func TestIntermediateLeaks(t *testing.T) {
}
// Commit and cross check the databases.
- if _, err := transState.CommitTo(transDb, false); err != nil {
+ if _, err := transState.Commit(false); err != nil {
t.Fatalf("failed to commit transition state: %v", err)
}
- if _, err := finalState.CommitTo(finalDb, false); err != nil {
+ if _, err := finalState.Commit(false); err != nil {
t.Fatalf("failed to commit final state: %v", err)
}
for _, key := range finalDb.Keys() {
@@ -122,8 +122,8 @@ func TestIntermediateLeaks(t *testing.T) {
// https://github.com/ethereum/go-ethereum/pull/15549.
func TestCopy(t *testing.T) {
// Create a random state test to copy and modify "independently"
- mem, _ := ethdb.NewMemDatabase()
- orig, _ := New(common.Hash{}, NewDatabase(mem))
+ db, _ := ethdb.NewMemDatabase()
+ orig, _ := New(common.Hash{}, NewDatabase(db))
for i := byte(0); i < 255; i++ {
obj := orig.GetOrNewStateObject(common.BytesToAddress([]byte{i}))
@@ -346,11 +346,10 @@ func (test *snapshotTest) run() bool {
}
action.fn(action, state)
}
-
// Revert all snapshots in reverse order. Each revert must yield a state
// that is equivalent to fresh state with all actions up the snapshot applied.
for sindex--; sindex >= 0; sindex-- {
- checkstate, _ := New(common.Hash{}, NewDatabase(db))
+ checkstate, _ := New(common.Hash{}, state.Database())
for _, action := range test.actions[:test.snapshots[sindex]] {
action.fn(action, checkstate)
}
@@ -409,7 +408,7 @@ func (test *snapshotTest) checkEqual(state, checkstate *StateDB) error {
func (s *StateSuite) TestTouchDelete(c *check.C) {
s.state.GetOrNewStateObject(common.Address{})
- root, _ := s.state.CommitTo(s.db, false)
+ root, _ := s.state.Commit(false)
s.state.Reset(root)
snapshot := s.state.Snapshot()
@@ -417,7 +416,6 @@ func (s *StateSuite) TestTouchDelete(c *check.C) {
if len(s.state.stateObjectsDirty) != 1 {
c.Fatal("expected one dirty state object")
}
-
s.state.RevertToSnapshot(snapshot)
if len(s.state.stateObjectsDirty) != 0 {
c.Fatal("expected no dirty state object")
diff --git a/core/state/sync_test.go b/core/state/sync_test.go
index 06c572ea6..8f14a44e7 100644
--- a/core/state/sync_test.go
+++ b/core/state/sync_test.go
@@ -36,10 +36,10 @@ type testAccount struct {
}
// makeTestState creates a sample test state to test node-wise reconstruction.
-func makeTestState() (Database, *ethdb.MemDatabase, common.Hash, []*testAccount) {
+func makeTestState() (Database, common.Hash, []*testAccount) {
// Create an empty state
- mem, _ := ethdb.NewMemDatabase()
- db := NewDatabase(mem)
+ diskdb, _ := ethdb.NewMemDatabase()
+ db := NewDatabase(diskdb)
state, _ := New(common.Hash{}, db)
// Fill it with some arbitrary data
@@ -61,10 +61,10 @@ func makeTestState() (Database, *ethdb.MemDatabase, common.Hash, []*testAccount)
state.updateStateObject(obj)
accounts = append(accounts, acc)
}
- root, _ := state.CommitTo(mem, false)
+ root, _ := state.Commit(false)
// Return the generated state
- return db, mem, root, accounts
+ return db, root, accounts
}
// checkStateAccounts cross references a reconstructed state with an expected
@@ -96,7 +96,7 @@ func checkTrieConsistency(db ethdb.Database, root common.Hash) error {
if v, _ := db.Get(root[:]); v == nil {
return nil // Consider a non existent state consistent.
}
- trie, err := trie.New(root, db)
+ trie, err := trie.New(root, trie.NewDatabase(db))
if err != nil {
return err
}
@@ -138,7 +138,7 @@ func TestIterativeStateSyncBatched(t *testing.T) { testIterativeStateSync(t,
func testIterativeStateSync(t *testing.T, batch int) {
// Create a random state to copy
- _, srcMem, srcRoot, srcAccounts := makeTestState()
+ srcDb, srcRoot, srcAccounts := makeTestState()
// Create a destination state and sync with the scheduler
dstDb, _ := ethdb.NewMemDatabase()
@@ -148,9 +148,9 @@ func testIterativeStateSync(t *testing.T, batch int) {
for len(queue) > 0 {
results := make([]trie.SyncResult, len(queue))
for i, hash := range queue {
- data, err := srcMem.Get(hash.Bytes())
+ data, err := srcDb.TrieDB().Node(hash)
if err != nil {
- t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
+ t.Fatalf("failed to retrieve node data for %x", hash)
}
results[i] = trie.SyncResult{Hash: hash, Data: data}
}
@@ -170,7 +170,7 @@ func testIterativeStateSync(t *testing.T, batch int) {
// partial results are returned, and the others sent only later.
func TestIterativeDelayedStateSync(t *testing.T) {
// Create a random state to copy
- _, srcMem, srcRoot, srcAccounts := makeTestState()
+ srcDb, srcRoot, srcAccounts := makeTestState()
// Create a destination state and sync with the scheduler
dstDb, _ := ethdb.NewMemDatabase()
@@ -181,9 +181,9 @@ func TestIterativeDelayedStateSync(t *testing.T) {
// Sync only half of the scheduled nodes
results := make([]trie.SyncResult, len(queue)/2+1)
for i, hash := range queue[:len(results)] {
- data, err := srcMem.Get(hash.Bytes())
+ data, err := srcDb.TrieDB().Node(hash)
if err != nil {
- t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
+ t.Fatalf("failed to retrieve node data for %x", hash)
}
results[i] = trie.SyncResult{Hash: hash, Data: data}
}
@@ -207,7 +207,7 @@ func TestIterativeRandomStateSyncBatched(t *testing.T) { testIterativeRandomS
func testIterativeRandomStateSync(t *testing.T, batch int) {
// Create a random state to copy
- _, srcMem, srcRoot, srcAccounts := makeTestState()
+ srcDb, srcRoot, srcAccounts := makeTestState()
// Create a destination state and sync with the scheduler
dstDb, _ := ethdb.NewMemDatabase()
@@ -221,9 +221,9 @@ func testIterativeRandomStateSync(t *testing.T, batch int) {
// Fetch all the queued nodes in a random order
results := make([]trie.SyncResult, 0, len(queue))
for hash := range queue {
- data, err := srcMem.Get(hash.Bytes())
+ data, err := srcDb.TrieDB().Node(hash)
if err != nil {
- t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
+ t.Fatalf("failed to retrieve node data for %x", hash)
}
results = append(results, trie.SyncResult{Hash: hash, Data: data})
}
@@ -247,7 +247,7 @@ func testIterativeRandomStateSync(t *testing.T, batch int) {
// partial results are returned (even those randomly), others sent only later.
func TestIterativeRandomDelayedStateSync(t *testing.T) {
// Create a random state to copy
- _, srcMem, srcRoot, srcAccounts := makeTestState()
+ srcDb, srcRoot, srcAccounts := makeTestState()
// Create a destination state and sync with the scheduler
dstDb, _ := ethdb.NewMemDatabase()
@@ -263,9 +263,9 @@ func TestIterativeRandomDelayedStateSync(t *testing.T) {
for hash := range queue {
delete(queue, hash)
- data, err := srcMem.Get(hash.Bytes())
+ data, err := srcDb.TrieDB().Node(hash)
if err != nil {
- t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
+ t.Fatalf("failed to retrieve node data for %x", hash)
}
results = append(results, trie.SyncResult{Hash: hash, Data: data})
@@ -292,9 +292,9 @@ func TestIterativeRandomDelayedStateSync(t *testing.T) {
// the database.
func TestIncompleteStateSync(t *testing.T) {
// Create a random state to copy
- _, srcMem, srcRoot, srcAccounts := makeTestState()
+ srcDb, srcRoot, srcAccounts := makeTestState()
- checkTrieConsistency(srcMem, srcRoot)
+ checkTrieConsistency(srcDb.TrieDB().DiskDB().(ethdb.Database), srcRoot)
// Create a destination state and sync with the scheduler
dstDb, _ := ethdb.NewMemDatabase()
@@ -306,9 +306,9 @@ func TestIncompleteStateSync(t *testing.T) {
// Fetch a batch of state nodes
results := make([]trie.SyncResult, len(queue))
for i, hash := range queue {
- data, err := srcMem.Get(hash.Bytes())
+ data, err := srcDb.TrieDB().Node(hash)
if err != nil {
- t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
+ t.Fatalf("failed to retrieve node data for %x", hash)
}
results[i] = trie.SyncResult{Hash: hash, Data: data}
}
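checkTrieConsistency above shows the other side of the refactor: read-only trie access now also goes through a trie.Database wrapper instead of the raw ethdb store. A hedged sketch of walking a trie that way, assuming the node-iterator API of the trie package:

    // Hedged sketch: open a trie over a wrapped disk DB and walk its nodes.
    func walkTrie(diskdb ethdb.Database, root common.Hash) error {
        t, err := trie.New(root, trie.NewDatabase(diskdb))
        if err != nil {
            return err
        }
        it := t.NodeIterator(nil)
        for it.Next(true) {
            _ = it.Hash() // hash of the trie node just visited
        }
        return it.Error()
    }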
diff --git a/core/state_transition.go b/core/state_transition.go
index 390473fff..b19bc12e4 100644
--- a/core/state_transition.go
+++ b/core/state_transition.go
@@ -215,6 +215,9 @@ func (st *StateTransition) TransitionDb() (ret []byte, usedGas uint64, failed bo
// Pay intrinsic gas
gas, err := IntrinsicGas(st.data, contractCreation, homestead)
+ if err != nil {
+ return nil, 0, false, err
+ }
if err = st.useGas(gas); err != nil {
return nil, 0, false, err
}
diff --git a/core/tx_pool.go b/core/tx_pool.go
index dc3ddc423..089bd215a 100644
--- a/core/tx_pool.go
+++ b/core/tx_pool.go
@@ -87,20 +87,20 @@ var (
var (
// Metrics for the pending pool
- pendingDiscardCounter = metrics.NewCounter("txpool/pending/discard")
- pendingReplaceCounter = metrics.NewCounter("txpool/pending/replace")
- pendingRateLimitCounter = metrics.NewCounter("txpool/pending/ratelimit") // Dropped due to rate limiting
- pendingNofundsCounter = metrics.NewCounter("txpool/pending/nofunds") // Dropped due to out-of-funds
+ pendingDiscardCounter = metrics.NewRegisteredCounter("txpool/pending/discard", nil)
+ pendingReplaceCounter = metrics.NewRegisteredCounter("txpool/pending/replace", nil)
+ pendingRateLimitCounter = metrics.NewRegisteredCounter("txpool/pending/ratelimit", nil) // Dropped due to rate limiting
+ pendingNofundsCounter = metrics.NewRegisteredCounter("txpool/pending/nofunds", nil) // Dropped due to out-of-funds
// Metrics for the queued pool
- queuedDiscardCounter = metrics.NewCounter("txpool/queued/discard")
- queuedReplaceCounter = metrics.NewCounter("txpool/queued/replace")
- queuedRateLimitCounter = metrics.NewCounter("txpool/queued/ratelimit") // Dropped due to rate limiting
- queuedNofundsCounter = metrics.NewCounter("txpool/queued/nofunds") // Dropped due to out-of-funds
+ queuedDiscardCounter = metrics.NewRegisteredCounter("txpool/queued/discard", nil)
+ queuedReplaceCounter = metrics.NewRegisteredCounter("txpool/queued/replace", nil)
+ queuedRateLimitCounter = metrics.NewRegisteredCounter("txpool/queued/ratelimit", nil) // Dropped due to rate limiting
+ queuedNofundsCounter = metrics.NewRegisteredCounter("txpool/queued/nofunds", nil) // Dropped due to out-of-funds
// General tx metrics
- invalidTxCounter = metrics.NewCounter("txpool/invalid")
- underpricedTxCounter = metrics.NewCounter("txpool/underpriced")
+ invalidTxCounter = metrics.NewRegisteredCounter("txpool/invalid", nil)
+ underpricedTxCounter = metrics.NewRegisteredCounter("txpool/underpriced", nil)
)
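The metrics constructors switched from NewCounter(name) to NewRegisteredCounter(name, registry), where a nil registry selects the default one. A minimal sketch of the new call shape, assuming the go-metrics style API used above and the usual fmt import; the metric name below is made up for illustration, not one of the pool's:

    // Hedged sketch: register a counter on the default registry and bump it.
    c := metrics.NewRegisteredCounter("example/illustrative/counter", nil) // nil = default registry
    c.Inc(1)
    fmt.Println(c.Count()) // 1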
// TxStatus is the current status of a transaction as seen by the pool.
@@ -877,15 +877,14 @@ func (pool *TxPool) removeTx(hash common.Hash) {
// Remove the transaction from the pending lists and reset the account nonce
if pending := pool.pending[addr]; pending != nil {
if removed, invalids := pending.Remove(tx); removed {
- // If no more transactions are left, remove the list
+ // If no more pending transactions are left, remove the list
if pending.Empty() {
delete(pool.pending, addr)
delete(pool.beats, addr)
- } else {
- // Otherwise postpone any invalidated transactions
- for _, tx := range invalids {
- pool.enqueueTx(tx.Hash(), tx)
- }
+ }
+ // Postpone any invalidated transactions
+ for _, tx := range invalids {
+ pool.enqueueTx(tx.Hash(), tx)
}
// Update the account nonce if needed
if nonce := tx.Nonce(); pool.pendingState.GetNonce(addr) > nonce {
diff --git a/core/tx_pool_test.go b/core/tx_pool_test.go
index cd11f2ba2..1cf533aa6 100644
--- a/core/tx_pool_test.go
+++ b/core/tx_pool_test.go
@@ -78,8 +78,8 @@ func pricedTransaction(nonce uint64, gaslimit uint64, gasprice *big.Int, key *ec
}
func setupTxPool() (*TxPool, *ecdsa.PrivateKey) {
- db, _ := ethdb.NewMemDatabase()
- statedb, _ := state.New(common.Hash{}, state.NewDatabase(db))
+ diskdb, _ := ethdb.NewMemDatabase()
+ statedb, _ := state.New(common.Hash{}, state.NewDatabase(diskdb))
blockchain := &testBlockChain{statedb, 1000000, new(event.Feed)}
key, _ := crypto.GenerateKey()
@@ -557,74 +557,112 @@ func TestTransactionDropping(t *testing.T) {
func TestTransactionPostponing(t *testing.T) {
t.Parallel()
- // Create a test account and fund it
- pool, key := setupTxPool()
+ // Create the pool to test the postponing with
+ db, _ := ethdb.NewMemDatabase()
+ statedb, _ := state.New(common.Hash{}, state.NewDatabase(db))
+ blockchain := &testBlockChain{statedb, 1000000, new(event.Feed)}
+
+ pool := NewTxPool(testTxPoolConfig, params.TestChainConfig, blockchain)
defer pool.Stop()
- account, _ := deriveSender(transaction(0, 0, key))
- pool.currentState.AddBalance(account, big.NewInt(1000))
+ // Create two test accounts to produce different gap profiles with
+ keys := make([]*ecdsa.PrivateKey, 2)
+ accs := make([]common.Address, len(keys))
+ for i := 0; i < len(keys); i++ {
+ keys[i], _ = crypto.GenerateKey()
+ accs[i] = crypto.PubkeyToAddress(keys[i].PublicKey)
+
+ pool.currentState.AddBalance(crypto.PubkeyToAddress(keys[i].PublicKey), big.NewInt(50100))
+ }
// Add a batch of consecutive pending transactions for validation
- txns := []*types.Transaction{}
- for i := 0; i < 100; i++ {
- var tx *types.Transaction
- if i%2 == 0 {
- tx = transaction(uint64(i), 100, key)
- } else {
- tx = transaction(uint64(i), 500, key)
+ txs := []*types.Transaction{}
+ for i, key := range keys {
+
+ for j := 0; j < 100; j++ {
+ var tx *types.Transaction
+ if (i+j)%2 == 0 {
+ tx = transaction(uint64(j), 25000, key)
+ } else {
+ tx = transaction(uint64(j), 50000, key)
+ }
+ txs = append(txs, tx)
+ }
+ }
+ for i, err := range pool.AddRemotes(txs) {
+ if err != nil {
+ t.Fatalf("tx %d: failed to add transactions: %v", i, err)
}
- pool.promoteTx(account, tx.Hash(), tx)
- txns = append(txns, tx)
}
// Check that pre and post validations leave the pool as is
- if pool.pending[account].Len() != len(txns) {
- t.Errorf("pending transaction mismatch: have %d, want %d", pool.pending[account].Len(), len(txns))
+ if pending := pool.pending[accs[0]].Len() + pool.pending[accs[1]].Len(); pending != len(txs) {
+ t.Errorf("pending transaction mismatch: have %d, want %d", pending, len(txs))
}
if len(pool.queue) != 0 {
- t.Errorf("queued transaction mismatch: have %d, want %d", pool.queue[account].Len(), 0)
+ t.Errorf("queued accounts mismatch: have %d, want %d", len(pool.queue), 0)
}
- if len(pool.all) != len(txns) {
- t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txns))
+ if len(pool.all) != len(txs) {
+ t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txs))
}
pool.lockedReset(nil, nil)
- if pool.pending[account].Len() != len(txns) {
- t.Errorf("pending transaction mismatch: have %d, want %d", pool.pending[account].Len(), len(txns))
+ if pending := pool.pending[accs[0]].Len() + pool.pending[accs[1]].Len(); pending != len(txs) {
+ t.Errorf("pending transaction mismatch: have %d, want %d", pending, len(txs))
}
if len(pool.queue) != 0 {
- t.Errorf("queued transaction mismatch: have %d, want %d", pool.queue[account].Len(), 0)
+ t.Errorf("queued accounts mismatch: have %d, want %d", len(pool.queue), 0)
}
- if len(pool.all) != len(txns) {
- t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txns))
+ if len(pool.all) != len(txs) {
+ t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txs))
}
// Reduce the balance of the account, and check that transactions are reorganised
- pool.currentState.AddBalance(account, big.NewInt(-750))
+ for _, addr := range accs {
+ pool.currentState.AddBalance(addr, big.NewInt(-1))
+ }
pool.lockedReset(nil, nil)
- if _, ok := pool.pending[account].txs.items[txns[0].Nonce()]; !ok {
- t.Errorf("tx %d: valid and funded transaction missing from pending pool: %v", 0, txns[0])
+ // The first account's first transaction remains valid, check that subsequent
+ // ones are either filtered out, or queued up for later.
+ if _, ok := pool.pending[accs[0]].txs.items[txs[0].Nonce()]; !ok {
+ t.Errorf("tx %d: valid and funded transaction missing from pending pool: %v", 0, txs[0])
}
- if _, ok := pool.queue[account].txs.items[txns[0].Nonce()]; ok {
- t.Errorf("tx %d: valid and funded transaction present in future queue: %v", 0, txns[0])
+ if _, ok := pool.queue[accs[0]].txs.items[txs[0].Nonce()]; ok {
+ t.Errorf("tx %d: valid and funded transaction present in future queue: %v", 0, txs[0])
}
- for i, tx := range txns[1:] {
+ for i, tx := range txs[1:100] {
if i%2 == 1 {
- if _, ok := pool.pending[account].txs.items[tx.Nonce()]; ok {
+ if _, ok := pool.pending[accs[0]].txs.items[tx.Nonce()]; ok {
t.Errorf("tx %d: valid but future transaction present in pending pool: %v", i+1, tx)
}
- if _, ok := pool.queue[account].txs.items[tx.Nonce()]; !ok {
+ if _, ok := pool.queue[accs[0]].txs.items[tx.Nonce()]; !ok {
t.Errorf("tx %d: valid but future transaction missing from future queue: %v", i+1, tx)
}
} else {
- if _, ok := pool.pending[account].txs.items[tx.Nonce()]; ok {
+ if _, ok := pool.pending[accs[0]].txs.items[tx.Nonce()]; ok {
t.Errorf("tx %d: out-of-fund transaction present in pending pool: %v", i+1, tx)
}
- if _, ok := pool.queue[account].txs.items[tx.Nonce()]; ok {
+ if _, ok := pool.queue[accs[0]].txs.items[tx.Nonce()]; ok {
t.Errorf("tx %d: out-of-fund transaction present in future queue: %v", i+1, tx)
}
}
}
- if len(pool.all) != len(txns)/2 {
- t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txns)/2)
+ // The second account's first transaction became invalid, check that all transactions
+ // are either filtered out, or queued up for later.
+ if pool.pending[accs[1]] != nil {
+ t.Errorf("invalidated account still has pending transactions")
+ }
+ for i, tx := range txs[100:] {
+ if i%2 == 1 {
+ if _, ok := pool.queue[accs[1]].txs.items[tx.Nonce()]; !ok {
+ t.Errorf("tx %d: valid but future transaction missing from future queue: %v", 100+i, tx)
+ }
+ } else {
+ if _, ok := pool.queue[accs[1]].txs.items[tx.Nonce()]; ok {
+ t.Errorf("tx %d: out-of-fund transaction present in future queue: %v", 100+i, tx)
+ }
+ }
+ }
+ if len(pool.all) != len(txs)/2 {
+ t.Errorf("total transaction mismatch: have %d, want %d", len(pool.all), len(txs)/2)
}
}
@@ -949,11 +987,11 @@ func testTransactionLimitingEquivalency(t *testing.T, origin uint64) {
account2, _ := deriveSender(transaction(0, 0, key2))
pool2.currentState.AddBalance(account2, big.NewInt(1000000))
- txns := []*types.Transaction{}
+ txs := []*types.Transaction{}
for i := uint64(0); i < testTxPoolConfig.AccountQueue+5; i++ {
- txns = append(txns, transaction(origin+i, 100000, key2))
+ txs = append(txs, transaction(origin+i, 100000, key2))
}
- pool2.AddRemotes(txns)
+ pool2.AddRemotes(txs)
// Ensure the batch optimization honors the same pool mechanics
if len(pool1.pending) != len(pool2.pending) {
@@ -1124,7 +1162,7 @@ func TestTransactionPoolRepricing(t *testing.T) {
defer sub.Unsubscribe()
// Create a number of test accounts and fund them
- keys := make([]*ecdsa.PrivateKey, 3)
+ keys := make([]*ecdsa.PrivateKey, 4)
for i := 0; i < len(keys); i++ {
keys[i], _ = crypto.GenerateKey()
pool.currentState.AddBalance(crypto.PubkeyToAddress(keys[i].PublicKey), big.NewInt(1000000))
@@ -1136,24 +1174,28 @@ func TestTransactionPoolRepricing(t *testing.T) {
txs = append(txs, pricedTransaction(1, 100000, big.NewInt(1), keys[0]))
txs = append(txs, pricedTransaction(2, 100000, big.NewInt(2), keys[0]))
+ txs = append(txs, pricedTransaction(0, 100000, big.NewInt(1), keys[1]))
txs = append(txs, pricedTransaction(1, 100000, big.NewInt(2), keys[1]))
- txs = append(txs, pricedTransaction(2, 100000, big.NewInt(1), keys[1]))
- txs = append(txs, pricedTransaction(3, 100000, big.NewInt(2), keys[1]))
+ txs = append(txs, pricedTransaction(2, 100000, big.NewInt(2), keys[1]))
- ltx := pricedTransaction(0, 100000, big.NewInt(1), keys[2])
+ txs = append(txs, pricedTransaction(1, 100000, big.NewInt(2), keys[2]))
+ txs = append(txs, pricedTransaction(2, 100000, big.NewInt(1), keys[2]))
+ txs = append(txs, pricedTransaction(3, 100000, big.NewInt(2), keys[2]))
+
+ ltx := pricedTransaction(0, 100000, big.NewInt(1), keys[3])
// Import the batch and verify that both pending and queued transactions match up
pool.AddRemotes(txs)
pool.AddLocal(ltx)
pending, queued := pool.Stats()
- if pending != 4 {
- t.Fatalf("pending transactions mismatched: have %d, want %d", pending, 4)
+ if pending != 7 {
+ t.Fatalf("pending transactions mismatched: have %d, want %d", pending, 7)
}
if queued != 3 {
t.Fatalf("queued transactions mismatched: have %d, want %d", queued, 3)
}
- if err := validateEvents(events, 4); err != nil {
+ if err := validateEvents(events, 7); err != nil {
t.Fatalf("original event firing failed: %v", err)
}
if err := validateTxPoolInternals(pool); err != nil {
@@ -1166,8 +1208,8 @@ func TestTransactionPoolRepricing(t *testing.T) {
if pending != 2 {
t.Fatalf("pending transactions mismatched: have %d, want %d", pending, 2)
}
- if queued != 3 {
- t.Fatalf("queued transactions mismatched: have %d, want %d", queued, 3)
+ if queued != 5 {
+ t.Fatalf("queued transactions mismatched: have %d, want %d", queued, 5)
}
if err := validateEvents(events, 0); err != nil {
t.Fatalf("reprice event firing failed: %v", err)
@@ -1179,7 +1221,10 @@ func TestTransactionPoolRepricing(t *testing.T) {
if err := pool.AddRemote(pricedTransaction(1, 100000, big.NewInt(1), keys[0])); err != ErrUnderpriced {
t.Fatalf("adding underpriced pending transaction error mismatch: have %v, want %v", err, ErrUnderpriced)
}
- if err := pool.AddRemote(pricedTransaction(2, 100000, big.NewInt(1), keys[1])); err != ErrUnderpriced {
+ if err := pool.AddRemote(pricedTransaction(0, 100000, big.NewInt(1), keys[1])); err != ErrUnderpriced {
+ t.Fatalf("adding underpriced pending transaction error mismatch: have %v, want %v", err, ErrUnderpriced)
+ }
+ if err := pool.AddRemote(pricedTransaction(2, 100000, big.NewInt(1), keys[2])); err != ErrUnderpriced {
t.Fatalf("adding underpriced queued transaction error mismatch: have %v, want %v", err, ErrUnderpriced)
}
if err := validateEvents(events, 0); err != nil {
@@ -1189,7 +1234,7 @@ func TestTransactionPoolRepricing(t *testing.T) {
t.Fatalf("pool internal state corrupted: %v", err)
}
// However we can add local underpriced transactions
- tx := pricedTransaction(1, 100000, big.NewInt(1), keys[2])
+ tx := pricedTransaction(1, 100000, big.NewInt(1), keys[3])
if err := pool.AddLocal(tx); err != nil {
t.Fatalf("failed to add underpriced local transaction: %v", err)
}
@@ -1202,6 +1247,22 @@ func TestTransactionPoolRepricing(t *testing.T) {
if err := validateTxPoolInternals(pool); err != nil {
t.Fatalf("pool internal state corrupted: %v", err)
}
+ // And we can fill gaps with properly priced transactions
+ if err := pool.AddRemote(pricedTransaction(1, 100000, big.NewInt(2), keys[0])); err != nil {
+ t.Fatalf("failed to add pending transaction: %v", err)
+ }
+ if err := pool.AddRemote(pricedTransaction(0, 100000, big.NewInt(2), keys[1])); err != nil {
+ t.Fatalf("failed to add pending transaction: %v", err)
+ }
+ if err := pool.AddRemote(pricedTransaction(2, 100000, big.NewInt(2), keys[2])); err != nil {
+ t.Fatalf("failed to add queued transaction: %v", err)
+ }
+ if err := validateEvents(events, 5); err != nil {
+ t.Fatalf("post-reprice event firing failed: %v", err)
+ }
+ if err := validateTxPoolInternals(pool); err != nil {
+ t.Fatalf("pool internal state corrupted: %v", err)
+ }
}
// Tests that setting the transaction pool gas price to a higher value does not
diff --git a/core/types/block.go b/core/types/block.go
index ffe317342..92b868d9d 100644
--- a/core/types/block.go
+++ b/core/types/block.go
@@ -25,6 +25,7 @@ import (
"sort"
"sync/atomic"
"time"
+ "unsafe"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
@@ -121,6 +122,12 @@ func (h *Header) HashNoNonce() common.Hash {
})
}
+// Size returns the approximate memory used by all internal contents. It is used
+// to approximate and limit the memory consumption of various caches.
+func (h *Header) Size() common.StorageSize {
+ return common.StorageSize(unsafe.Sizeof(*h)) + common.StorageSize(len(h.Extra)+(h.Difficulty.BitLen()+h.Number.BitLen()+h.Time.BitLen())/8)
+}
+
func rlpHash(x interface{}) (h common.Hash) {
hw := sha3.NewKeccak256()
rlp.Encode(hw, x)
@@ -322,6 +329,8 @@ func (b *Block) HashNoNonce() common.Hash {
return b.header.HashNoNonce()
}
+// Size returns the true RLP encoded storage size of the block, either by encoding
+// and returning it, or returning a previously cached value.
func (b *Block) Size() common.StorageSize {
if size := b.size.Load(); size != nil {
return size.(common.StorageSize)
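Header.Size is a deliberately cheap estimate: the fixed struct size plus the extra-data length plus the combined byte lengths of the Difficulty, Number and Time big integers. A hedged sketch of what a caller sees, with arbitrary field values and the imports of core/types, math/big and fmt assumed:

    // Hedged sketch: approximate in-memory footprint of a header.
    h := &types.Header{
        Number:     big.NewInt(1),
        Difficulty: big.NewInt(131072),
        Time:       big.NewInt(1500000000),
        Extra:      []byte("example"),
    }
    // sizeof(Header) + len(Extra) + (BitLen(Difficulty)+BitLen(Number)+BitLen(Time))/8
    fmt.Println(h.Size())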
diff --git a/core/types/receipt.go b/core/types/receipt.go
index 208d54aaa..f945f6f6a 100644
--- a/core/types/receipt.go
+++ b/core/types/receipt.go
@@ -20,6 +20,7 @@ import (
"bytes"
"fmt"
"io"
+ "unsafe"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
@@ -136,6 +137,18 @@ func (r *Receipt) statusEncoding() []byte {
return r.PostState
}
+// Size returns the approximate memory used by all internal contents. It is used
+// to approximate and limit the memory consumption of various caches.
+func (r *Receipt) Size() common.StorageSize {
+ size := common.StorageSize(unsafe.Sizeof(*r)) + common.StorageSize(len(r.PostState))
+
+ size += common.StorageSize(len(r.Logs)) * common.StorageSize(unsafe.Sizeof(Log{}))
+ for _, log := range r.Logs {
+ size += common.StorageSize(len(log.Topics)*common.HashLength + len(log.Data))
+ }
+ return size
+}
+
// String implements the Stringer interface.
func (r *Receipt) String() string {
if len(r.PostState) == 0 {
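Receipt.Size follows the same pattern: the fixed struct size, the post-state bytes, a fixed per-log overhead, and 32 bytes per topic plus the log data. A hedged sketch with the same assumed imports; the one-log receipt is arbitrary:

    // Hedged sketch: a receipt with one log carrying two topics and ten bytes
    // of data contributes 2*32 + 10 = 74 variable bytes on top of the fixed
    // receipt and log struct sizes.
    r := &types.Receipt{
        Logs: []*types.Log{{
            Topics: make([]common.Hash, 2),
            Data:   make([]byte, 10),
        }},
    }
    fmt.Println(r.Size())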
diff --git a/core/types/transaction.go b/core/types/transaction.go
index a7ed211e4..5660582ba 100644
--- a/core/types/transaction.go
+++ b/core/types/transaction.go
@@ -206,6 +206,8 @@ func (tx *Transaction) Hash() common.Hash {
return v
}
+// Size returns the true RLP encoded storage size of the transaction, either by
+// encoding and returning it, or returning a previously cached value.
func (tx *Transaction) Size() common.StorageSize {
if size := tx.size.Load(); size != nil {
return size.(common.StorageSize)
diff --git a/core/vm/contracts.go b/core/vm/contracts.go
index 7344b6043..237450ea9 100644
--- a/core/vm/contracts.go
+++ b/core/vm/contracts.go
@@ -251,26 +251,12 @@ func (c *bigModExp) Run(input []byte) ([]byte, error) {
return common.LeftPadBytes(base.Exp(base, exp, mod).Bytes(), int(modLen)), nil
}
-var (
- // errNotOnCurve is returned if a point being unmarshalled as a bn256 elliptic
- // curve point is not on the curve.
- errNotOnCurve = errors.New("point not on elliptic curve")
-
- // errInvalidCurvePoint is returned if a point being unmarshalled as a bn256
- // elliptic curve point is invalid.
- errInvalidCurvePoint = errors.New("invalid elliptic curve point")
-)
-
// newCurvePoint unmarshals a binary blob into a bn256 elliptic curve point,
// returning it, or an error if the point is invalid.
func newCurvePoint(blob []byte) (*bn256.G1, error) {
- p, onCurve := new(bn256.G1).Unmarshal(blob)
- if !onCurve {
- return nil, errNotOnCurve
- }
- gx, gy, _, _ := p.CurvePoints()
- if gx.Cmp(bn256.P) >= 0 || gy.Cmp(bn256.P) >= 0 {
- return nil, errInvalidCurvePoint
+ p := new(bn256.G1)
+ if _, err := p.Unmarshal(blob); err != nil {
+ return nil, err
}
return p, nil
}
@@ -278,14 +264,9 @@ func newCurvePoint(blob []byte) (*bn256.G1, error) {
// newTwistPoint unmarshals a binary blob into a bn256 elliptic curve point,
// returning it, or an error if the point is invalid.
func newTwistPoint(blob []byte) (*bn256.G2, error) {
- p, onCurve := new(bn256.G2).Unmarshal(blob)
- if !onCurve {
- return nil, errNotOnCurve
- }
- x2, y2, _, _ := p.CurvePoints()
- if x2.Real().Cmp(bn256.P) >= 0 || x2.Imag().Cmp(bn256.P) >= 0 ||
- y2.Real().Cmp(bn256.P) >= 0 || y2.Imag().Cmp(bn256.P) >= 0 {
- return nil, errInvalidCurvePoint
+ p := new(bn256.G2)
+ if _, err := p.Unmarshal(blob); err != nil {
+ return nil, err
}
return p, nil
}
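The Cloudflare-backed Unmarshal reports malformed points through an error instead of a boolean, so the manual on-curve and field-bound checks above could be dropped. A hedged sketch of decoding two 64-byte G1 encodings and adding them with the wrapper type introduced further down in crypto/bn256; the input layout and helper name are assumptions for illustration:

    // Hedged sketch: Unmarshal is expected to reject off-curve or
    // out-of-range encodings, so decoding reduces to an error check.
    func addG1(a, b []byte) (*bn256.G1, error) {
        p1, p2 := new(bn256.G1), new(bn256.G1)
        if _, err := p1.Unmarshal(a); err != nil {
            return nil, err
        }
        if _, err := p2.Unmarshal(b); err != nil {
            return nil, err
        }
        return new(bn256.G1).Add(p1, p2), nil
    }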
diff --git a/core/vm/contracts_test.go b/core/vm/contracts_test.go
index 513651835..96083337c 100644
--- a/core/vm/contracts_test.go
+++ b/core/vm/contracts_test.go
@@ -1,3 +1,19 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
package vm
import (
diff --git a/core/vm/instructions.go b/core/vm/instructions.go
index 766172501..66e804fb7 100644
--- a/core/vm/instructions.go
+++ b/core/vm/instructions.go
@@ -30,6 +30,8 @@ import (
var (
bigZero = new(big.Int)
+ tt255 = math.BigPow(2, 255)
+ tt256 = math.BigPow(2, 256)
errWriteProtection = errors.New("evm: write protection")
errReturnDataOutOfBounds = errors.New("evm: return data out of bounds")
errExecutionReverted = errors.New("evm: execution reverted")
@@ -191,50 +193,71 @@ func opGt(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack
}
func opSlt(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
- x, y := math.S256(stack.pop()), math.S256(stack.pop())
- if x.Cmp(math.S256(y)) < 0 {
- stack.push(evm.interpreter.intPool.get().SetUint64(1))
- } else {
- stack.push(new(big.Int))
- }
+ x, y := stack.pop(), stack.peek()
- evm.interpreter.intPool.put(x, y)
+ xSign := x.Cmp(tt255)
+ ySign := y.Cmp(tt255)
+
+ switch {
+ case xSign >= 0 && ySign < 0:
+ y.SetUint64(1)
+
+ case xSign < 0 && ySign >= 0:
+ y.SetUint64(0)
+
+ default:
+ if x.Cmp(y) < 0 {
+ y.SetUint64(1)
+ } else {
+ y.SetUint64(0)
+ }
+ }
+ evm.interpreter.intPool.put(x)
return nil, nil
}
func opSgt(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
- x, y := math.S256(stack.pop()), math.S256(stack.pop())
- if x.Cmp(y) > 0 {
- stack.push(evm.interpreter.intPool.get().SetUint64(1))
- } else {
- stack.push(new(big.Int))
- }
+ x, y := stack.pop(), stack.peek()
- evm.interpreter.intPool.put(x, y)
+ xSign := x.Cmp(tt255)
+ ySign := y.Cmp(tt255)
+
+ switch {
+ case xSign >= 0 && ySign < 0:
+ y.SetUint64(0)
+
+ case xSign < 0 && ySign >= 0:
+ y.SetUint64(1)
+
+ default:
+ if x.Cmp(y) > 0 {
+ y.SetUint64(1)
+ } else {
+ y.SetUint64(0)
+ }
+ }
+ evm.interpreter.intPool.put(x)
return nil, nil
}
func opEq(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
- x, y := stack.pop(), stack.pop()
+ x, y := stack.pop(), stack.peek()
if x.Cmp(y) == 0 {
- stack.push(evm.interpreter.intPool.get().SetUint64(1))
+ y.SetUint64(1)
} else {
- stack.push(new(big.Int))
+ y.SetUint64(0)
}
-
- evm.interpreter.intPool.put(x, y)
+ evm.interpreter.intPool.put(x)
return nil, nil
}
func opIszero(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
- x := stack.pop()
+ x := stack.peek()
if x.Sign() > 0 {
- stack.push(new(big.Int))
+ x.SetUint64(0)
} else {
- stack.push(evm.interpreter.intPool.get().SetUint64(1))
+ x.SetUint64(1)
}
-
- evm.interpreter.intPool.put(x)
return nil, nil
}
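The rewritten comparison opcodes no longer pop both operands and push a fresh big.Int; they pop the first operand and overwrite the second in place via stack.peek(), saving an allocation and an intPool round trip per instruction. A minimal sketch of the pattern on a toy stack, with illustrative names rather than the EVM types:

    // Hedged sketch of the peek-and-overwrite trick used by opEq/opIszero above.
    type miniStack struct{ data []*big.Int }

    func (s *miniStack) push(v *big.Int) { s.data = append(s.data, v) }
    func (s *miniStack) pop() *big.Int {
        v := s.data[len(s.data)-1]
        s.data = s.data[:len(s.data)-1]
        return v
    }
    func (s *miniStack) peek() *big.Int { return s.data[len(s.data)-1] }

    // eq pops x and reuses the allocation of the value left on top to hold
    // the 0/1 result; x would then be handed back to the interpreter's pool.
    func eq(s *miniStack) {
        x, y := s.pop(), s.peek()
        if x.Cmp(y) == 0 {
            y.SetUint64(1)
        } else {
            y.SetUint64(0)
        }
        _ = x
    }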
@@ -302,6 +325,66 @@ func opMulmod(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *S
return nil, nil
}
+// opSHL implements Shift Left
+// The SHL instruction (shift left) pops 2 values from the stack, first arg1 and then arg2,
+// and pushes on the stack arg2 shifted to the left by arg1 number of bits.
+func opSHL(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
+ // Note, second operand is left in the stack; accumulate result into it, and no need to push it afterwards
+ shift, value := math.U256(stack.pop()), math.U256(stack.peek())
+ defer evm.interpreter.intPool.put(shift) // First operand back into the pool
+
+ if shift.Cmp(common.Big256) >= 0 {
+ value.SetUint64(0)
+ return nil, nil
+ }
+ n := uint(shift.Uint64())
+ math.U256(value.Lsh(value, n))
+
+ return nil, nil
+}
+
+// opSHR implements Logical Shift Right
+// The SHR instruction (logical shift right) pops 2 values from the stack, first arg1 and then arg2,
+// and pushes on the stack arg2 shifted to the right by arg1 number of bits with zero fill.
+func opSHR(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
+ // Note, second operand is left in the stack; accumulate result into it, and no need to push it afterwards
+ shift, value := math.U256(stack.pop()), math.U256(stack.peek())
+ defer evm.interpreter.intPool.put(shift) // First operand back into the pool
+
+ if shift.Cmp(common.Big256) >= 0 {
+ value.SetUint64(0)
+ return nil, nil
+ }
+ n := uint(shift.Uint64())
+ math.U256(value.Rsh(value, n))
+
+ return nil, nil
+}
+
+// opSAR implements Arithmetic Shift Right
+// The SAR instruction (arithmetic shift right) pops 2 values from the stack, first arg1 and then arg2,
+// and pushes on the stack arg2 shifted to the right by arg1 number of bits with sign extension.
+func opSAR(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
+ // Note, S256 returns (potentially) a new bigint, so we're popping, not peeking this one
+ shift, value := math.U256(stack.pop()), math.S256(stack.pop())
+ defer evm.interpreter.intPool.put(shift) // First operand back into the pool
+
+ if shift.Cmp(common.Big256) >= 0 {
+ if value.Sign() > 0 {
+ value.SetUint64(0)
+ } else {
+ value.SetInt64(-1)
+ }
+ stack.push(math.U256(value))
+ return nil, nil
+ }
+ n := uint(shift.Uint64())
+ value.Rsh(value, n)
+ stack.push(math.U256(value))
+
+ return nil, nil
+}
+
func opSha3(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error) {
offset, size := stack.pop(), stack.pop()
data := memory.Get(offset.Int64(), size.Int64())
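All three shifts read the shift amount as an unsigned 256-bit value: anything of 256 or more yields zero, except SAR on a negative operand, which yields all ones. A standalone hedged sketch of the SAR arithmetic with plain math/big, reproducing one of the EIP-145 vectors exercised in the tests below:

    // Hedged sketch of EIP-145 arithmetic shift right on 256-bit words.
    package main

    import (
        "fmt"
        "math/big"
    )

    var mask256 = new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 256), big.NewInt(1))

    // u256 wraps a big.Int back into the unsigned 256-bit range.
    func u256(x *big.Int) *big.Int { return x.And(x, mask256) }

    // sar arithmetically shifts a 256-bit word right with sign extension.
    func sar(value, shift *big.Int) *big.Int {
        v := new(big.Int).Set(value)
        if v.Bit(255) == 1 { // interpret as negative two's complement
            v.Sub(v, new(big.Int).Lsh(big.NewInt(1), 256))
        }
        if shift.Cmp(big.NewInt(256)) >= 0 {
            if v.Sign() >= 0 {
                return big.NewInt(0)
            }
            return u256(big.NewInt(-1)) // all ones
        }
        v.Rsh(v, uint(shift.Uint64()))
        return u256(v)
    }

    func main() {
        x, _ := new(big.Int).SetString("8000000000000000000000000000000000000000000000000000000000000000", 16)
        fmt.Printf("%064x\n", sar(x, big.NewInt(1))) // c000...00, sign bit extended
    }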
diff --git a/core/vm/instructions_test.go b/core/vm/instructions_test.go
index 18644989c..134363bb7 100644
--- a/core/vm/instructions_test.go
+++ b/core/vm/instructions_test.go
@@ -1,3 +1,19 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
package vm
import (
@@ -8,6 +24,48 @@ import (
"github.com/ethereum/go-ethereum/params"
)
+type twoOperandTest struct {
+ x string
+ y string
+ expected string
+}
+
+func testTwoOperandOp(t *testing.T, tests []twoOperandTest, opFn func(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error)) {
+ var (
+ env = NewEVM(Context{}, nil, params.TestChainConfig, Config{EnableJit: false, ForceJit: false})
+ stack = newstack()
+ pc = uint64(0)
+ )
+ for i, test := range tests {
+ x := new(big.Int).SetBytes(common.Hex2Bytes(test.x))
+ shift := new(big.Int).SetBytes(common.Hex2Bytes(test.y))
+ expected := new(big.Int).SetBytes(common.Hex2Bytes(test.expected))
+ stack.push(x)
+ stack.push(shift)
+ opFn(&pc, env, nil, nil, stack)
+ actual := stack.pop()
+ if actual.Cmp(expected) != 0 {
+ t.Errorf("Testcase %d, expected %v, got %v", i, expected, actual)
+ }
+ // Check pool usage
+ // 1. pool is not allowed to contain anything on the stack
+ // 2. pool is not allowed to contain the same pointers twice
+ if env.interpreter.intPool.pool.len() > 0 {
+
+ poolvals := make(map[*big.Int]struct{})
+ poolvals[actual] = struct{}{}
+
+ for env.interpreter.intPool.pool.len() > 0 {
+ key := env.interpreter.intPool.get()
+ if _, exist := poolvals[key]; exist {
+ t.Errorf("Testcase %d, pool contains double-entry", i)
+ }
+ poolvals[key] = struct{}{}
+ }
+ }
+ }
+}
+
func TestByteOp(t *testing.T) {
var (
env = NewEVM(Context{}, nil, params.TestChainConfig, Config{EnableJit: false, ForceJit: false})
@@ -41,6 +99,103 @@ func TestByteOp(t *testing.T) {
}
}
+func TestSHL(t *testing.T) {
+ // Testcases from https://github.com/ethereum/EIPs/blob/master/EIPS/eip-145.md#shl-shift-left
+ tests := []twoOperandTest{
+ {"0000000000000000000000000000000000000000000000000000000000000001", "00", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "01", "0000000000000000000000000000000000000000000000000000000000000002"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "ff", "8000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "0100", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "0101", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "00", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "01", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ff", "8000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0100", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000000", "01", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "01", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"},
+ }
+ testTwoOperandOp(t, tests, opSHL)
+}
+
+func TestSHR(t *testing.T) {
+ // Testcases from https://github.com/ethereum/EIPs/blob/master/EIPS/eip-145.md#shr-logical-shift-right
+ tests := []twoOperandTest{
+ {"0000000000000000000000000000000000000000000000000000000000000001", "00", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "01", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "01", "4000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "ff", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "0100", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "0101", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "00", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "01", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ff", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0100", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000000", "01", "0000000000000000000000000000000000000000000000000000000000000000"},
+ }
+ testTwoOperandOp(t, tests, opSHR)
+}
+
+func TestSAR(t *testing.T) {
+ // Testcases from https://github.com/ethereum/EIPs/blob/master/EIPS/eip-145.md#sar-arithmetic-shift-right
+ tests := []twoOperandTest{
+ {"0000000000000000000000000000000000000000000000000000000000000001", "00", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "01", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "01", "c000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "ff", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "0100", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"8000000000000000000000000000000000000000000000000000000000000000", "0101", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "00", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "01", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ff", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0100", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"},
+ {"0000000000000000000000000000000000000000000000000000000000000000", "01", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"4000000000000000000000000000000000000000000000000000000000000000", "fe", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "f8", "000000000000000000000000000000000000000000000000000000000000007f"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "fe", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0100", "0000000000000000000000000000000000000000000000000000000000000000"},
+ }
+
+ testTwoOperandOp(t, tests, opSAR)
+}
+
+func TestSGT(t *testing.T) {
+ tests := []twoOperandTest{
+
+ {"0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000001", "8000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000001", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "8000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb", "0000000000000000000000000000000000000000000000000000000000000000"},
+ }
+ testTwoOperandOp(t, tests, opSgt)
+}
+
+func TestSLT(t *testing.T) {
+ tests := []twoOperandTest{
+ {"0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"0000000000000000000000000000000000000000000000000000000000000001", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"8000000000000000000000000000000000000000000000000000000000000001", "8000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"8000000000000000000000000000000000000000000000000000000000000001", "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "8000000000000000000000000000000000000000000000000000000000000001", "0000000000000000000000000000000000000000000000000000000000000001"},
+ {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd", "0000000000000000000000000000000000000000000000000000000000000000"},
+ {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb", "0000000000000000000000000000000000000000000000000000000000000001"},
+ }
+ testTwoOperandOp(t, tests, opSlt)
+}
+
func opBenchmark(bench *testing.B, op func(pc *uint64, evm *EVM, contract *Contract, memory *Memory, stack *Stack) ([]byte, error), args ...string) {
var (
env = NewEVM(Context{}, nil, params.TestChainConfig, Config{EnableJit: false, ForceJit: false})
@@ -199,7 +354,11 @@ func BenchmarkOpEq(b *testing.B) {
opBenchmark(b, opEq, x, y)
}
-
+func BenchmarkOpEq2(b *testing.B) {
+ x := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201ffffffff"
+ y := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201fffffffe"
+ opBenchmark(b, opEq, x, y)
+}
func BenchmarkOpAnd(b *testing.B) {
x := "ABCDEF090807060504030201ffffffffffffffffffffffffffffffffffffffff"
y := "ABCDEF090807060504030201ffffffffffffffffffffffffffffffffffffffff"
@@ -243,3 +402,26 @@ func BenchmarkOpMulmod(b *testing.B) {
opBenchmark(b, opMulmod, x, y, z)
}
+
+func BenchmarkOpSHL(b *testing.B) {
+ x := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201ffffffff"
+ y := "ff"
+
+ opBenchmark(b, opSHL, x, y)
+}
+func BenchmarkOpSHR(b *testing.B) {
+ x := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201ffffffff"
+ y := "ff"
+
+ opBenchmark(b, opSHR, x, y)
+}
+func BenchmarkOpSAR(b *testing.B) {
+ x := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201ffffffff"
+ y := "ff"
+
+ opBenchmark(b, opSAR, x, y)
+}
+func BenchmarkOpIsZero(b *testing.B) {
+ x := "FBCDEF090807060504030201ffffffffFBCDEF090807060504030201ffffffff"
+ opBenchmark(b, opIszero, x)
+}
diff --git a/core/vm/interpreter.go b/core/vm/interpreter.go
index 482e67a3a..95490adfc 100644
--- a/core/vm/interpreter.go
+++ b/core/vm/interpreter.go
@@ -20,9 +20,7 @@ import (
"fmt"
"sync/atomic"
- "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/math"
- "github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/params"
)
@@ -39,8 +37,6 @@ type Config struct {
// NoRecursion disabled Interpreter call, callcode,
// delegate call and create.
NoRecursion bool
- // Disable gas metering
- DisableGasMetering bool
// Enable recording of SHA3/keccak preimages
EnablePreimageRecording bool
// JumpTable contains the EVM instruction table. This
@@ -70,6 +66,8 @@ func NewInterpreter(evm *EVM, cfg Config) *Interpreter {
// we'll set the default jump table.
if !cfg.JumpTable[STOP].valid {
switch {
+ case evm.ChainConfig().IsConstantinople(evm.BlockNumber):
+ cfg.JumpTable = constantinopleInstructionSet
case evm.ChainConfig().IsByzantium(evm.BlockNumber):
cfg.JumpTable = byzantiumInstructionSet
case evm.ChainConfig().IsHomestead(evm.BlockNumber):
@@ -123,11 +121,6 @@ func (in *Interpreter) Run(contract *Contract, input []byte) (ret []byte, err er
return nil, nil
}
- codehash := contract.CodeHash // codehash is used when doing jump dest caching
- if codehash == (common.Hash{}) {
- codehash = crypto.Keccak256Hash(contract.Code)
- }
-
var (
op OpCode // current opcode
mem = NewMemory() // bound memory
@@ -194,14 +187,11 @@ func (in *Interpreter) Run(contract *Contract, input []byte) (ret []byte, err er
return nil, errGasUintOverflow
}
}
-
- if !in.cfg.DisableGasMetering {
- // consume the gas and return an error if not enough gas is available.
- // cost is explicitly set so that the capture state defer method cas get the proper cost
- cost, err = operation.gasCost(in.gasTable, in.evm, contract, stack, mem, memorySize)
- if err != nil || !contract.UseGas(cost) {
- return nil, ErrOutOfGas
- }
+ // consume the gas and return an error if not enough gas is available.
+ // cost is explicitly set so that the capture state defer method can get the proper cost
+ cost, err = operation.gasCost(in.gasTable, in.evm, contract, stack, mem, memorySize)
+ if err != nil || !contract.UseGas(cost) {
+ return nil, ErrOutOfGas
}
if memorySize > 0 {
mem.Resize(memorySize)
diff --git a/core/vm/jump_table.go b/core/vm/jump_table.go
index a1c5ad9c6..338994135 100644
--- a/core/vm/jump_table.go
+++ b/core/vm/jump_table.go
@@ -51,11 +51,38 @@ type operation struct {
}
var (
- frontierInstructionSet = NewFrontierInstructionSet()
- homesteadInstructionSet = NewHomesteadInstructionSet()
- byzantiumInstructionSet = NewByzantiumInstructionSet()
+ frontierInstructionSet = NewFrontierInstructionSet()
+ homesteadInstructionSet = NewHomesteadInstructionSet()
+ byzantiumInstructionSet = NewByzantiumInstructionSet()
+ constantinopleInstructionSet = NewConstantinopleInstructionSet()
)
+// NewConstantinopleInstructionSet returns the frontier, homestead,
+// byzantium and constantinople instructions.
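+// The shift operations added below are the EIP-145 additions: each costs
+// GasFastestStep and pops two stack items, pushing one.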
+func NewConstantinopleInstructionSet() [256]operation {
+ // instructions that can be executed during the byzantium phase.
+ instructionSet := NewByzantiumInstructionSet()
+ instructionSet[SHL] = operation{
+ execute: opSHL,
+ gasCost: constGasFunc(GasFastestStep),
+ validateStack: makeStackFunc(2, 1),
+ valid: true,
+ }
+ instructionSet[SHR] = operation{
+ execute: opSHR,
+ gasCost: constGasFunc(GasFastestStep),
+ validateStack: makeStackFunc(2, 1),
+ valid: true,
+ }
+ instructionSet[SAR] = operation{
+ execute: opSAR,
+ gasCost: constGasFunc(GasFastestStep),
+ validateStack: makeStackFunc(2, 1),
+ valid: true,
+ }
+ return instructionSet
+}
+
// NewByzantiumInstructionSet returns the frontier, homestead and
// byzantium instructions.
func NewByzantiumInstructionSet() [256]operation {
diff --git a/core/vm/opcodes.go b/core/vm/opcodes.go
index 0c6550735..7fe55b72f 100644
--- a/core/vm/opcodes.go
+++ b/core/vm/opcodes.go
@@ -63,6 +63,9 @@ const (
XOR
NOT
BYTE
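+ // EIP-145: bitwise shift instructions (SHL = 0x1b, SHR = 0x1c, SAR = 0x1d).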
+ SHL
+ SHR
+ SAR
SHA3 = 0x20
)
@@ -234,6 +237,9 @@ var opCodeToString = map[OpCode]string{
OR: "OR",
XOR: "XOR",
BYTE: "BYTE",
+ SHL: "SHL",
+ SHR: "SHR",
+ SAR: "SAR",
ADDMOD: "ADDMOD",
MULMOD: "MULMOD",
@@ -400,6 +406,9 @@ var stringToOp = map[string]OpCode{
"OR": OR,
"XOR": XOR,
"BYTE": BYTE,
+ "SHL": SHL,
+ "SHR": SHR,
+ "SAR": SAR,
"ADDMOD": ADDMOD,
"MULMOD": MULMOD,
"SHA3": SHA3,
diff --git a/crypto/bn256/bn256_amd64.go b/crypto/bn256/bn256_amd64.go
new file mode 100644
index 000000000..35b4839c2
--- /dev/null
+++ b/crypto/bn256/bn256_amd64.go
@@ -0,0 +1,63 @@
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+// +build amd64,!appengine,!gccgo
+
+// Package bn256 implements the Optimal Ate pairing over a 256-bit Barreto-Naehrig curve.
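+//
+// This file wraps the assembly-optimised "cloudflare" implementation, selected on
+// amd64 via the build tags above; other platforms use the fallback in bn256_other.go.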
+package bn256
+
+import (
+ "math/big"
+
+ "github.com/ethereum/go-ethereum/crypto/bn256/cloudflare"
+)
+
+// G1 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G1 struct {
+ bn256.G1
+}
+
+// Add sets e to a+b and then returns e.
+func (e *G1) Add(a, b *G1) *G1 {
+ e.G1.Add(&a.G1, &b.G1)
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 {
+ e.G1.ScalarMult(&a.G1, k)
+ return e
+}
+
+// G2 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G2 struct {
+ bn256.G2
+}
+
+// PairingCheck calculates the Optimal Ate pairing for a set of points.
+func PairingCheck(a []*G1, b []*G2) bool {
+ as := make([]*bn256.G1, len(a))
+ for i, p := range a {
+ as[i] = &p.G1
+ }
+ bs := make([]*bn256.G2, len(b))
+ for i, p := range b {
+ bs[i] = &p.G2
+ }
+ return bn256.PairingCheck(as, bs)
+}
diff --git a/crypto/bn256/bn256_other.go b/crypto/bn256/bn256_other.go
new file mode 100644
index 000000000..81977a0a8
--- /dev/null
+++ b/crypto/bn256/bn256_other.go
@@ -0,0 +1,63 @@
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+// +build !amd64 appengine gccgo
+
+// Package bn256 implements the Optimal Ate pairing over a 256-bit Barreto-Naehrig curve.
+package bn256
+
+import (
+ "math/big"
+
+ "github.com/ethereum/go-ethereum/crypto/bn256/google"
+)
+
+// G1 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G1 struct {
+ bn256.G1
+}
+
+// Add sets e to a+b and then returns e.
+func (e *G1) Add(a, b *G1) *G1 {
+ e.G1.Add(&a.G1, &b.G1)
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 {
+ e.G1.ScalarMult(&a.G1, k)
+ return e
+}
+
+// G2 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G2 struct {
+ bn256.G2
+}
+
+// PairingCheck calculates the Optimal Ate pairing for a set of points.
+func PairingCheck(a []*G1, b []*G2) bool {
+ as := make([]*bn256.G1, len(a))
+ for i, p := range a {
+ as[i] = &p.G1
+ }
+ bs := make([]*bn256.G2, len(b))
+ for i, p := range b {
+ bs[i] = &p.G2
+ }
+ return bn256.PairingCheck(as, bs)
+}
diff --git a/crypto/bn256/cloudflare/bn256.go b/crypto/bn256/cloudflare/bn256.go
new file mode 100644
index 000000000..c6ea2d07e
--- /dev/null
+++ b/crypto/bn256/cloudflare/bn256.go
@@ -0,0 +1,481 @@
+// Package bn256 implements a particular bilinear group at the 128-bit security
+// level.
+//
+// Bilinear groups are the basis of many of the new cryptographic protocols that
+// have been proposed over the past decade. They consist of a triplet of groups
+// (G₁, G₂ and GT) such that there exists a function e(g₁ˣ,g₂ʸ)=gTˣʸ (where gₓ
+// is a generator of the respective group). That function is called a pairing
+// function.
+//
+// This package specifically implements the Optimal Ate pairing over a 256-bit
+// Barreto-Naehrig curve as described in
+// http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible
+// with the implementation described in that paper.
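+//
+// The pairing is bilinear: e(aP, bQ) = e(P, Q)^(ab), which is the property the
+// tests in this package exercise.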
+package bn256
+
+import (
+ "crypto/rand"
+ "errors"
+ "io"
+ "math/big"
+)
+
+func randomK(r io.Reader) (k *big.Int, err error) {
+ for {
+ k, err = rand.Int(r, Order)
+ if k.Sign() > 0 || err != nil {
+ return
+ }
+ }
+}
+
+// G1 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G1 struct {
+ p *curvePoint
+}
+
+// RandomG1 returns x and g₁ˣ where x is a random, non-zero number read from r.
+func RandomG1(r io.Reader) (*big.Int, *G1, error) {
+ k, err := randomK(r)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return k, new(G1).ScalarBaseMult(k), nil
+}
+
+func (g *G1) String() string {
+ return "bn256.G1" + g.p.String()
+}
+
+// ScalarBaseMult sets e to g*k where g is the generator of the group and then
+// returns e.
+func (e *G1) ScalarBaseMult(k *big.Int) *G1 {
+ if e.p == nil {
+ e.p = &curvePoint{}
+ }
+ e.p.Mul(curveGen, k)
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 {
+ if e.p == nil {
+ e.p = &curvePoint{}
+ }
+ e.p.Mul(a.p, k)
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+func (e *G1) Add(a, b *G1) *G1 {
+ if e.p == nil {
+ e.p = &curvePoint{}
+ }
+ e.p.Add(a.p, b.p)
+ return e
+}
+
+// Neg sets e to -a and then returns e.
+func (e *G1) Neg(a *G1) *G1 {
+ if e.p == nil {
+ e.p = &curvePoint{}
+ }
+ e.p.Neg(a.p)
+ return e
+}
+
+// Set sets e to a and then returns e.
+func (e *G1) Set(a *G1) *G1 {
+ if e.p == nil {
+ e.p = &curvePoint{}
+ }
+ e.p.Set(a.p)
+ return e
+}
+
+// Marshal converts e to a byte slice.
+func (e *G1) Marshal() []byte {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ e.p.MakeAffine()
+ ret := make([]byte, numBytes*2)
+ if e.p.IsInfinity() {
+ return ret
+ }
+ temp := &gfP{}
+
+ montDecode(temp, &e.p.x)
+ temp.Marshal(ret)
+ montDecode(temp, &e.p.y)
+ temp.Marshal(ret[numBytes:])
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *G1) Unmarshal(m []byte) ([]byte, error) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+ if len(m) < 2*numBytes {
+ return nil, errors.New("bn256: not enough data")
+ }
+ // Unmarshal the points and check their caps
+ if e.p == nil {
+ e.p = &curvePoint{}
+ } else {
+ e.p.x, e.p.y = gfP{0}, gfP{0}
+ }
+ var err error
+ if err = e.p.x.Unmarshal(m); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.Unmarshal(m[numBytes:]); err != nil {
+ return nil, err
+ }
+ // Encode into Montgomery form and ensure it's on the curve
+ montEncode(&e.p.x, &e.p.x)
+ montEncode(&e.p.y, &e.p.y)
+
+ zero := gfP{0}
+ if e.p.x == zero && e.p.y == zero {
+ // This is the point at infinity.
+ e.p.y = *newGFp(1)
+ e.p.z = gfP{0}
+ e.p.t = gfP{0}
+ } else {
+ e.p.z = *newGFp(1)
+ e.p.t = *newGFp(1)
+
+ if !e.p.IsOnCurve() {
+ return nil, errors.New("bn256: malformed point")
+ }
+ }
+ return m[2*numBytes:], nil
+}
+
+// G2 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G2 struct {
+ p *twistPoint
+}
+
+// RandomG2 returns x and g₂ˣ where x is a random, non-zero number read from r.
+func RandomG2(r io.Reader) (*big.Int, *G2, error) {
+ k, err := randomK(r)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return k, new(G2).ScalarBaseMult(k), nil
+}
+
+func (e *G2) String() string {
+ return "bn256.G2" + e.p.String()
+}
+
+// ScalarBaseMult sets e to g*k where g is the generator of the group and then
+// returns e.
+func (e *G2) ScalarBaseMult(k *big.Int) *G2 {
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ e.p.Mul(twistGen, k)
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G2) ScalarMult(a *G2, k *big.Int) *G2 {
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ e.p.Mul(a.p, k)
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+func (e *G2) Add(a, b *G2) *G2 {
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ e.p.Add(a.p, b.p)
+ return e
+}
+
+// Neg sets e to -a and then returns e.
+func (e *G2) Neg(a *G2) *G2 {
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ e.p.Neg(a.p)
+ return e
+}
+
+// Set sets e to a and then returns e.
+func (e *G2) Set(a *G2) *G2 {
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ e.p.Set(a.p)
+ return e
+}
+
+// Marshal converts e into a byte slice.
+func (e *G2) Marshal() []byte {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+
+ e.p.MakeAffine()
+ ret := make([]byte, numBytes*4)
+ if e.p.IsInfinity() {
+ return ret
+ }
+ temp := &gfP{}
+
+ montDecode(temp, &e.p.x.x)
+ temp.Marshal(ret)
+ montDecode(temp, &e.p.x.y)
+ temp.Marshal(ret[numBytes:])
+ montDecode(temp, &e.p.y.x)
+ temp.Marshal(ret[2*numBytes:])
+ montDecode(temp, &e.p.y.y)
+ temp.Marshal(ret[3*numBytes:])
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *G2) Unmarshal(m []byte) ([]byte, error) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+ if len(m) < 4*numBytes {
+ return nil, errors.New("bn256: not enough data")
+ }
+ // Unmarshal the points and check their caps
+ if e.p == nil {
+ e.p = &twistPoint{}
+ }
+ var err error
+ if err = e.p.x.x.Unmarshal(m); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.y.Unmarshal(m[numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.x.Unmarshal(m[2*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.y.Unmarshal(m[3*numBytes:]); err != nil {
+ return nil, err
+ }
+ // Encode into Montgomery form and ensure it's on the curve
+ montEncode(&e.p.x.x, &e.p.x.x)
+ montEncode(&e.p.x.y, &e.p.x.y)
+ montEncode(&e.p.y.x, &e.p.y.x)
+ montEncode(&e.p.y.y, &e.p.y.y)
+
+ if e.p.x.IsZero() && e.p.y.IsZero() {
+ // This is the point at infinity.
+ e.p.y.SetOne()
+ e.p.z.SetZero()
+ e.p.t.SetZero()
+ } else {
+ e.p.z.SetOne()
+ e.p.t.SetOne()
+
+ if !e.p.IsOnCurve() {
+ return nil, errors.New("bn256: malformed point")
+ }
+ }
+ return m[4*numBytes:], nil
+}
+
+// GT is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type GT struct {
+ p *gfP12
+}
+
+// Pair calculates an Optimal Ate pairing.
+func Pair(g1 *G1, g2 *G2) *GT {
+ return &GT{optimalAte(g2.p, g1.p)}
+}
+
+// PairingCheck calculates the Optimal Ate pairing for a set of points.
+func PairingCheck(a []*G1, b []*G2) bool {
+ acc := new(gfP12)
+ acc.SetOne()
+
+ for i := 0; i < len(a); i++ {
+ if a[i].p.IsInfinity() || b[i].p.IsInfinity() {
+ continue
+ }
+ acc.Mul(acc, miller(b[i].p, a[i].p))
+ }
+ return finalExponentiation(acc).IsOne()
+}
+
+// Miller applies Miller's algorithm, which is a bilinear function from the
+// source groups to F_p^12. Miller(g1, g2).Finalize() is equivalent to Pair(g1,
+// g2).
+func Miller(g1 *G1, g2 *G2) *GT {
+ return &GT{miller(g2.p, g1.p)}
+}
+
+func (g *GT) String() string {
+ return "bn256.GT" + g.p.String()
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *GT) ScalarMult(a *GT, k *big.Int) *GT {
+ if e.p == nil {
+ e.p = &gfP12{}
+ }
+ e.p.Exp(a.p, k)
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+func (e *GT) Add(a, b *GT) *GT {
+ if e.p == nil {
+ e.p = &gfP12{}
+ }
+ e.p.Mul(a.p, b.p)
+ return e
+}
+
+// Neg sets e to -a and then returns e.
+func (e *GT) Neg(a *GT) *GT {
+ if e.p == nil {
+ e.p = &gfP12{}
+ }
+ e.p.Conjugate(a.p)
+ return e
+}
+
+// Set sets e to a and then returns e.
+func (e *GT) Set(a *GT) *GT {
+ if e.p == nil {
+ e.p = &gfP12{}
+ }
+ e.p.Set(a.p)
+ return e
+}
+
+// Finalize is a linear function from F_p^12 to GT.
+func (e *GT) Finalize() *GT {
+ ret := finalExponentiation(e.p)
+ e.p.Set(ret)
+ return e
+}
+
+// Marshal converts e into a byte slice.
+func (e *GT) Marshal() []byte {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ ret := make([]byte, numBytes*12)
+ temp := &gfP{}
+
+ montDecode(temp, &e.p.x.x.x)
+ temp.Marshal(ret)
+ montDecode(temp, &e.p.x.x.y)
+ temp.Marshal(ret[numBytes:])
+ montDecode(temp, &e.p.x.y.x)
+ temp.Marshal(ret[2*numBytes:])
+ montDecode(temp, &e.p.x.y.y)
+ temp.Marshal(ret[3*numBytes:])
+ montDecode(temp, &e.p.x.z.x)
+ temp.Marshal(ret[4*numBytes:])
+ montDecode(temp, &e.p.x.z.y)
+ temp.Marshal(ret[5*numBytes:])
+ montDecode(temp, &e.p.y.x.x)
+ temp.Marshal(ret[6*numBytes:])
+ montDecode(temp, &e.p.y.x.y)
+ temp.Marshal(ret[7*numBytes:])
+ montDecode(temp, &e.p.y.y.x)
+ temp.Marshal(ret[8*numBytes:])
+ montDecode(temp, &e.p.y.y.y)
+ temp.Marshal(ret[9*numBytes:])
+ montDecode(temp, &e.p.y.z.x)
+ temp.Marshal(ret[10*numBytes:])
+ montDecode(temp, &e.p.y.z.y)
+ temp.Marshal(ret[11*numBytes:])
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *GT) Unmarshal(m []byte) ([]byte, error) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ if len(m) < 12*numBytes {
+ return nil, errors.New("bn256: not enough data")
+ }
+
+ if e.p == nil {
+ e.p = &gfP12{}
+ }
+
+ var err error
+ if err = e.p.x.x.x.Unmarshal(m); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.x.y.Unmarshal(m[numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.y.x.Unmarshal(m[2*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.y.y.Unmarshal(m[3*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.z.x.Unmarshal(m[4*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.x.z.y.Unmarshal(m[5*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.x.x.Unmarshal(m[6*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.x.y.Unmarshal(m[7*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.y.x.Unmarshal(m[8*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.y.y.Unmarshal(m[9*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.z.x.Unmarshal(m[10*numBytes:]); err != nil {
+ return nil, err
+ }
+ if err = e.p.y.z.y.Unmarshal(m[11*numBytes:]); err != nil {
+ return nil, err
+ }
+ montEncode(&e.p.x.x.x, &e.p.x.x.x)
+ montEncode(&e.p.x.x.y, &e.p.x.x.y)
+ montEncode(&e.p.x.y.x, &e.p.x.y.x)
+ montEncode(&e.p.x.y.y, &e.p.x.y.y)
+ montEncode(&e.p.x.z.x, &e.p.x.z.x)
+ montEncode(&e.p.x.z.y, &e.p.x.z.y)
+ montEncode(&e.p.y.x.x, &e.p.y.x.x)
+ montEncode(&e.p.y.x.y, &e.p.y.x.y)
+ montEncode(&e.p.y.y.x, &e.p.y.y.x)
+ montEncode(&e.p.y.y.y, &e.p.y.y.y)
+ montEncode(&e.p.y.z.x, &e.p.y.z.x)
+ montEncode(&e.p.y.z.y, &e.p.y.z.y)
+
+ return m[12*numBytes:], nil
+}
diff --git a/crypto/bn256/cloudflare/bn256_test.go b/crypto/bn256/cloudflare/bn256_test.go
new file mode 100644
index 000000000..369a3edaa
--- /dev/null
+++ b/crypto/bn256/cloudflare/bn256_test.go
@@ -0,0 +1,118 @@
+// +build amd64,!appengine,!gccgo
+
+package bn256
+
+import (
+ "bytes"
+ "crypto/rand"
+ "testing"
+)
+
+func TestG1Marshal(t *testing.T) {
+ _, Ga, err := RandomG1(rand.Reader)
+ if err != nil {
+ t.Fatal(err)
+ }
+ ma := Ga.Marshal()
+
+ Gb := new(G1)
+ _, err = Gb.Unmarshal(ma)
+ if err != nil {
+ t.Fatal(err)
+ }
+ mb := Gb.Marshal()
+
+ if !bytes.Equal(ma, mb) {
+ t.Fatal("bytes are different")
+ }
+}
+
+func TestG2Marshal(t *testing.T) {
+ _, Ga, err := RandomG2(rand.Reader)
+ if err != nil {
+ t.Fatal(err)
+ }
+ ma := Ga.Marshal()
+
+ Gb := new(G2)
+ _, err = Gb.Unmarshal(ma)
+ if err != nil {
+ t.Fatal(err)
+ }
+ mb := Gb.Marshal()
+
+ if !bytes.Equal(ma, mb) {
+ t.Fatal("bytes are different")
+ }
+}
+
+func TestBilinearity(t *testing.T) {
+ for i := 0; i < 2; i++ {
+ a, p1, _ := RandomG1(rand.Reader)
+ b, p2, _ := RandomG2(rand.Reader)
+ e1 := Pair(p1, p2)
+
+ e2 := Pair(&G1{curveGen}, &G2{twistGen})
+ e2.ScalarMult(e2, a)
+ e2.ScalarMult(e2, b)
+
+ if *e1.p != *e2.p {
+ t.Fatalf("bad pairing result: %s", e1)
+ }
+ }
+}
+
+func TestTripartiteDiffieHellman(t *testing.T) {
+ a, _ := rand.Int(rand.Reader, Order)
+ b, _ := rand.Int(rand.Reader, Order)
+ c, _ := rand.Int(rand.Reader, Order)
+
+ pa, pb, pc := new(G1), new(G1), new(G1)
+ qa, qb, qc := new(G2), new(G2), new(G2)
+
+ pa.Unmarshal(new(G1).ScalarBaseMult(a).Marshal())
+ qa.Unmarshal(new(G2).ScalarBaseMult(a).Marshal())
+ pb.Unmarshal(new(G1).ScalarBaseMult(b).Marshal())
+ qb.Unmarshal(new(G2).ScalarBaseMult(b).Marshal())
+ pc.Unmarshal(new(G1).ScalarBaseMult(c).Marshal())
+ qc.Unmarshal(new(G2).ScalarBaseMult(c).Marshal())
+
+ k1 := Pair(pb, qc)
+ k1.ScalarMult(k1, a)
+ k1Bytes := k1.Marshal()
+
+ k2 := Pair(pc, qa)
+ k2.ScalarMult(k2, b)
+ k2Bytes := k2.Marshal()
+
+ k3 := Pair(pa, qb)
+ k3.ScalarMult(k3, c)
+ k3Bytes := k3.Marshal()
+
+ if !bytes.Equal(k1Bytes, k2Bytes) || !bytes.Equal(k2Bytes, k3Bytes) {
+ t.Errorf("keys didn't agree")
+ }
+}
+
+func BenchmarkG1(b *testing.B) {
+ x, _ := rand.Int(rand.Reader, Order)
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ new(G1).ScalarBaseMult(x)
+ }
+}
+
+func BenchmarkG2(b *testing.B) {
+ x, _ := rand.Int(rand.Reader, Order)
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ new(G2).ScalarBaseMult(x)
+ }
+}
+func BenchmarkPairing(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ Pair(&G1{curveGen}, &G2{twistGen})
+ }
+}
diff --git a/crypto/bn256/cloudflare/constants.go b/crypto/bn256/cloudflare/constants.go
new file mode 100644
index 000000000..5122aae64
--- /dev/null
+++ b/crypto/bn256/cloudflare/constants.go
@@ -0,0 +1,59 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "math/big"
+)
+
+func bigFromBase10(s string) *big.Int {
+ n, _ := new(big.Int).SetString(s, 10)
+ return n
+}
+
+// u is the BN parameter that determines the prime: 1868033³.
+var u = bigFromBase10("4965661367192848881")
+
+// Order is the number of elements in both G₁ and G₂: 36u⁴+36u³+18u²+6u+1.
+var Order = bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495617")
+
+// P is a prime over which we form a basic field: 36u⁴+36u³+24u²+6u+1.
+var P = bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208583")
+
+// p2 is p, represented as little-endian 64-bit words.
+var p2 = [4]uint64{0x3c208c16d87cfd47, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}
+
+// np is the negative inverse of p, mod 2^256.
+var np = [4]uint64{0x87d20782e4866389, 0x9ede7d651eca6ac9, 0xd8afcbd01833da80, 0xf57a22b791888c6b}
+
+// rN1 is R^-1 where R = 2^256 mod p.
+var rN1 = &gfP{0xed84884a014afa37, 0xeb2022850278edf8, 0xcf63e9cfb74492d9, 0x2e67157159e5c639}
+
+// r2 is R^2 where R = 2^256 mod p.
+var r2 = &gfP{0xf32cfc5b538afa89, 0xb5e71911d44501fb, 0x47ab1eff0a417ff6, 0x06d89f71cab8351f}
+
+// r3 is R^3 where R = 2^256 mod p.
+var r3 = &gfP{0xb1cd6dafda1530df, 0x62f210e6a7283db6, 0xef7f0b0c0ada0afb, 0x20fd6e902d592544}
+
+// xiToPMinus1Over6 is ξ^((p-1)/6) where ξ = i+9.
+var xiToPMinus1Over6 = &gfP2{gfP{0xa222ae234c492d72, 0xd00f02a4565de15b, 0xdc2ff3a253dfc926, 0x10a75716b3899551}, gfP{0xaf9ba69633144907, 0xca6b1d7387afb78a, 0x11bded5ef08a2087, 0x02f34d751a1f3a7c}}
+
+// xiToPMinus1Over3 is ξ^((p-1)/3) where ξ = i+9.
+var xiToPMinus1Over3 = &gfP2{gfP{0x6e849f1ea0aa4757, 0xaa1c7b6d89f89141, 0xb6e713cdfae0ca3a, 0x26694fbb4e82ebc3}, gfP{0xb5773b104563ab30, 0x347f91c8a9aa6454, 0x7a007127242e0991, 0x1956bcd8118214ec}}
+
+// xiToPMinus1Over2 is ξ^((p-1)/2) where ξ = i+9.
+var xiToPMinus1Over2 = &gfP2{gfP{0xa1d77ce45ffe77c7, 0x07affd117826d1db, 0x6d16bd27bb7edc6b, 0x2c87200285defecc}, gfP{0xe4bbdd0c2936b629, 0xbb30f162e133bacb, 0x31a9d1b6f9645366, 0x253570bea500f8dd}}
+
+// xiToPSquaredMinus1Over3 is ξ^((p²-1)/3) where ξ = i+9.
+var xiToPSquaredMinus1Over3 = &gfP{0x3350c88e13e80b9c, 0x7dce557cdb5e56b9, 0x6001b4b8b615564a, 0x2682e617020217e0}
+
+// xiTo2PSquaredMinus2Over3 is ξ^((2p²-2)/3) where ξ = i+9 (a cubic root of unity, mod p).
+var xiTo2PSquaredMinus2Over3 = &gfP{0x71930c11d782e155, 0xa6bb947cffbe3323, 0xaa303344d4741444, 0x2c3b3f0d26594943}
+
+// xiToPSquaredMinus1Over6 is ξ^((p²-1)/6) where ξ = i+9 (a cubic root of -1, mod p).
+var xiToPSquaredMinus1Over6 = &gfP{0xca8d800500fa1bf2, 0xf0c5d61468b39769, 0x0e201271ad0d4418, 0x04290f65bad856e6}
+
+// xiTo2PMinus2Over3 is ξ^((2p-2)/3) where ξ = i+9.
+var xiTo2PMinus2Over3 = &gfP2{gfP{0x5dddfd154bd8c949, 0x62cb29a5a4445b60, 0x37bc870a0c7dd2b9, 0x24830a9d3171f0fd}, gfP{0x7361d77f843abe92, 0xa5bb2bd3273411fb, 0x9c941f314b3e2399, 0x15df9cddbb9fd3ec}}
diff --git a/crypto/bn256/cloudflare/curve.go b/crypto/bn256/cloudflare/curve.go
new file mode 100644
index 000000000..b6aecc0a6
--- /dev/null
+++ b/crypto/bn256/cloudflare/curve.go
@@ -0,0 +1,229 @@
+package bn256
+
+import (
+ "math/big"
+)
+
+// curvePoint implements the elliptic curve y²=x³+3. Points are kept in Jacobian
+// form and t=z² when valid. G₁ is the set of points of this curve on GF(p).
+type curvePoint struct {
+ x, y, z, t gfP
+}
+
+var curveB = newGFp(3)
+
+// curveGen is the generator of G₁.
+var curveGen = &curvePoint{
+ x: *newGFp(1),
+ y: *newGFp(2),
+ z: *newGFp(1),
+ t: *newGFp(1),
+}
+
+func (c *curvePoint) String() string {
+ c.MakeAffine()
+ x, y := &gfP{}, &gfP{}
+ montDecode(x, &c.x)
+ montDecode(y, &c.y)
+ return "(" + x.String() + ", " + y.String() + ")"
+}
+
+func (c *curvePoint) Set(a *curvePoint) {
+ c.x.Set(&a.x)
+ c.y.Set(&a.y)
+ c.z.Set(&a.z)
+ c.t.Set(&a.t)
+}
+
+// IsOnCurve returns true iff c is on the curve.
+func (c *curvePoint) IsOnCurve() bool {
+ c.MakeAffine()
+ if c.IsInfinity() {
+ return true
+ }
+
+ y2, x3 := &gfP{}, &gfP{}
+ gfpMul(y2, &c.y, &c.y)
+ gfpMul(x3, &c.x, &c.x)
+ gfpMul(x3, x3, &c.x)
+ gfpAdd(x3, x3, curveB)
+
+ return *y2 == *x3
+}
+
+func (c *curvePoint) SetInfinity() {
+ c.x = gfP{0}
+ c.y = *newGFp(1)
+ c.z = gfP{0}
+ c.t = gfP{0}
+}
+
+func (c *curvePoint) IsInfinity() bool {
+ return c.z == gfP{0}
+}
+
+func (c *curvePoint) Add(a, b *curvePoint) {
+ if a.IsInfinity() {
+ c.Set(b)
+ return
+ }
+ if b.IsInfinity() {
+ c.Set(a)
+ return
+ }
+
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
+
+ // Normalize the points by replacing a = [x1:y1:z1] and b = [x2:y2:z2]
+ // by [u1:s1:z1·z2] and [u2:s2:z1·z2]
+ // where u1 = x1·z2², s1 = y1·z2³ and u2 = x2·z1², s2 = y2·z1³
+ z12, z22 := &gfP{}, &gfP{}
+ gfpMul(z12, &a.z, &a.z)
+ gfpMul(z22, &b.z, &b.z)
+
+ u1, u2 := &gfP{}, &gfP{}
+ gfpMul(u1, &a.x, z22)
+ gfpMul(u2, &b.x, z12)
+
+ t, s1 := &gfP{}, &gfP{}
+ gfpMul(t, &b.z, z22)
+ gfpMul(s1, &a.y, t)
+
+ s2 := &gfP{}
+ gfpMul(t, &a.z, z12)
+ gfpMul(s2, &b.y, t)
+
+ // Compute x = (2h)²(s²-u1-u2)
+ // where s = (s2-s1)/(u2-u1) is the slope of the line through
+ // (u1,s1) and (u2,s2). The extra factor 2h = 2(u2-u1) comes from the value of z below.
+ // This is also:
+ // 4(s2-s1)² - 4h²(u1+u2) = 4(s2-s1)² - 4h³ - 4h²(2u1)
+ // = r² - j - 2v
+ // with the notations below.
+ h := &gfP{}
+ gfpSub(h, u2, u1)
+ xEqual := *h == gfP{0}
+
+ gfpAdd(t, h, h)
+ // i = 4h²
+ i := &gfP{}
+ gfpMul(i, t, t)
+ // j = 4h³
+ j := &gfP{}
+ gfpMul(j, h, i)
+
+ gfpSub(t, s2, s1)
+ yEqual := *t == gfP{0}
+ if xEqual && yEqual {
+ c.Double(a)
+ return
+ }
+ r := &gfP{}
+ gfpAdd(r, t, t)
+
+ v := &gfP{}
+ gfpMul(v, u1, i)
+
+ // t4 = 4(s2-s1)²
+ t4, t6 := &gfP{}, &gfP{}
+ gfpMul(t4, r, r)
+ gfpAdd(t, v, v)
+ gfpSub(t6, t4, j)
+
+ gfpSub(&c.x, t6, t)
+
+ // Set y = -(2h)³(s1 + s*(x/4h²-u1))
+ // This is also
+ // y = - 2·s1·j - (s2-s1)(2x - 2i·u1) = r(v-x) - 2·s1·j
+ gfpSub(t, v, &c.x) // t7
+ gfpMul(t4, s1, j) // t8
+ gfpAdd(t6, t4, t4) // t9
+ gfpMul(t4, r, t) // t10
+ gfpSub(&c.y, t4, t6)
+
+ // Set z = 2(u2-u1)·z1·z2 = 2h·z1·z2
+ gfpAdd(t, &a.z, &b.z) // t11
+ gfpMul(t4, t, t) // t12
+ gfpSub(t, t4, z12) // t13
+ gfpSub(t4, t, z22) // t14
+ gfpMul(&c.z, t4, h)
+}
+
+func (c *curvePoint) Double(a *curvePoint) {
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
+ A, B, C := &gfP{}, &gfP{}, &gfP{}
+ gfpMul(A, &a.x, &a.x)
+ gfpMul(B, &a.y, &a.y)
+ gfpMul(C, B, B)
+
+ t, t2 := &gfP{}, &gfP{}
+ gfpAdd(t, &a.x, B)
+ gfpMul(t2, t, t)
+ gfpSub(t, t2, A)
+ gfpSub(t2, t, C)
+
+ d, e, f := &gfP{}, &gfP{}, &gfP{}
+ gfpAdd(d, t2, t2)
+ gfpAdd(t, A, A)
+ gfpAdd(e, t, A)
+ gfpMul(f, e, e)
+
+ gfpAdd(t, d, d)
+ gfpSub(&c.x, f, t)
+
+ gfpAdd(t, C, C)
+ gfpAdd(t2, t, t)
+ gfpAdd(t, t2, t2)
+ gfpSub(&c.y, d, &c.x)
+ gfpMul(t2, e, &c.y)
+ gfpSub(&c.y, t2, t)
+
+ gfpMul(t, &a.y, &a.z)
+ gfpAdd(&c.z, t, t)
+}
+
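+// Mul sets c to scalar·a using a simple double-and-add loop over the scalar bits,
+// most significant bit first.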
+func (c *curvePoint) Mul(a *curvePoint, scalar *big.Int) {
+ sum, t := &curvePoint{}, &curvePoint{}
+ sum.SetInfinity()
+
+ for i := scalar.BitLen(); i >= 0; i-- {
+ t.Double(sum)
+ if scalar.Bit(i) != 0 {
+ sum.Add(t, a)
+ } else {
+ sum.Set(t)
+ }
+ }
+ c.Set(sum)
+}
+
+func (c *curvePoint) MakeAffine() {
+ if c.z == *newGFp(1) {
+ return
+ } else if c.z == *newGFp(0) {
+ c.x = gfP{0}
+ c.y = *newGFp(1)
+ c.t = gfP{0}
+ return
+ }
+
+ zInv := &gfP{}
+ zInv.Invert(&c.z)
+
+ t, zInv2 := &gfP{}, &gfP{}
+ gfpMul(t, &c.y, zInv)
+ gfpMul(zInv2, zInv, zInv)
+
+ gfpMul(&c.x, &c.x, zInv2)
+ gfpMul(&c.y, t, zInv2)
+
+ c.z = *newGFp(1)
+ c.t = *newGFp(1)
+}
+
+func (c *curvePoint) Neg(a *curvePoint) {
+ c.x.Set(&a.x)
+ gfpNeg(&c.y, &a.y)
+ c.z.Set(&a.z)
+ c.t = gfP{0}
+}
diff --git a/crypto/bn256/cloudflare/example_test.go b/crypto/bn256/cloudflare/example_test.go
new file mode 100644
index 000000000..2ee545c67
--- /dev/null
+++ b/crypto/bn256/cloudflare/example_test.go
@@ -0,0 +1,45 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64,!appengine,!gccgo
+
+package bn256
+
+import (
+ "crypto/rand"
+)
+
+func ExamplePair() {
+ // This implements the tripartite Diffie-Hellman algorithm from "A One
+ // Round Protocol for Tripartite Diffie-Hellman", A. Joux.
+ // http://www.springerlink.com/content/cddc57yyva0hburb/fulltext.pdf
+
+ // Each of three parties, a, b and c, generate a private value.
+ a, _ := rand.Int(rand.Reader, Order)
+ b, _ := rand.Int(rand.Reader, Order)
+ c, _ := rand.Int(rand.Reader, Order)
+
+ // Then each party calculates g₁ and g₂ times their private value.
+ pa := new(G1).ScalarBaseMult(a)
+ qa := new(G2).ScalarBaseMult(a)
+
+ pb := new(G1).ScalarBaseMult(b)
+ qb := new(G2).ScalarBaseMult(b)
+
+ pc := new(G1).ScalarBaseMult(c)
+ qc := new(G2).ScalarBaseMult(c)
+
+ // Now each party exchanges its public values with the other two and
+ // all parties can calculate the shared key.
+ k1 := Pair(pb, qc)
+ k1.ScalarMult(k1, a)
+
+ k2 := Pair(pc, qa)
+ k2.ScalarMult(k2, b)
+
+ k3 := Pair(pa, qb)
+ k3.ScalarMult(k3, c)
+
+ // k1, k2 and k3 will all be equal.
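+ // By bilinearity each of them equals e(g₁, g₂)^(a·b·c).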
+}
diff --git a/crypto/bn256/cloudflare/gfp.go b/crypto/bn256/cloudflare/gfp.go
new file mode 100644
index 000000000..e8e84e7b3
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp.go
@@ -0,0 +1,81 @@
+package bn256
+
+import (
+ "errors"
+ "fmt"
+)
+
+type gfP [4]uint64
+
+func newGFp(x int64) (out *gfP) {
+ if x >= 0 {
+ out = &gfP{uint64(x)}
+ } else {
+ out = &gfP{uint64(-x)}
+ gfpNeg(out, out)
+ }
+
+ montEncode(out, out)
+ return out
+}
+
+func (e *gfP) String() string {
+ return fmt.Sprintf("%16.16x%16.16x%16.16x%16.16x", e[3], e[2], e[1], e[0])
+}
+
+func (e *gfP) Set(f *gfP) {
+ e[0] = f[0]
+ e[1] = f[1]
+ e[2] = f[2]
+ e[3] = f[3]
+}
+
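+// Invert sets e to f⁻¹. The bit pattern below is p−2, so this is Fermat inversion
+// (f^(p−2) mod p) by square-and-multiply; the final multiplication by r3 compensates
+// for the accumulated Montgomery factors.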
+func (e *gfP) Invert(f *gfP) {
+ bits := [4]uint64{0x3c208c16d87cfd45, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}
+
+ sum, power := &gfP{}, &gfP{}
+ sum.Set(rN1)
+ power.Set(f)
+
+ for word := 0; word < 4; word++ {
+ for bit := uint(0); bit < 64; bit++ {
+ if (bits[word]>>bit)&1 == 1 {
+ gfpMul(sum, sum, power)
+ }
+ gfpMul(power, power, power)
+ }
+ }
+
+ gfpMul(sum, sum, r3)
+ e.Set(sum)
+}
+
+func (e *gfP) Marshal(out []byte) {
+ for w := uint(0); w < 4; w++ {
+ for b := uint(0); b < 8; b++ {
+ out[8*w+b] = byte(e[3-w] >> (56 - 8*b))
+ }
+ }
+}
+
+func (e *gfP) Unmarshal(in []byte) error {
+ // Unmarshal the bytes into little endian form
+ for w := uint(0); w < 4; w++ {
+ for b := uint(0); b < 8; b++ {
+ e[3-w] += uint64(in[8*w+b]) << (56 - 8*b)
+ }
+ }
+ // Ensure the point respects the curve modulus
+ for i := 3; i >= 0; i-- {
+ if e[i] < p2[i] {
+ return nil
+ }
+ if e[i] > p2[i] {
+ return errors.New("bn256: coordinate exceeds modulus")
+ }
+ }
+ return errors.New("bn256: coordinate equals modulus")
+}
+
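+// gfpMul performs Montgomery multiplication (it computes a·b·R⁻¹ mod p), so
+// multiplying by r2 = R² encodes into Montgomery form and multiplying by 1 decodes.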
+func montEncode(c, a *gfP) { gfpMul(c, a, r2) }
+func montDecode(c, a *gfP) { gfpMul(c, a, &gfP{1}) }
diff --git a/crypto/bn256/cloudflare/gfp.h b/crypto/bn256/cloudflare/gfp.h
new file mode 100644
index 000000000..66f5a4d07
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp.h
@@ -0,0 +1,32 @@
+#define storeBlock(a0,a1,a2,a3, r) \
+ MOVQ a0, 0+r \
+ MOVQ a1, 8+r \
+ MOVQ a2, 16+r \
+ MOVQ a3, 24+r
+
+#define loadBlock(r, a0,a1,a2,a3) \
+ MOVQ 0+r, a0 \
+ MOVQ 8+r, a1 \
+ MOVQ 16+r, a2 \
+ MOVQ 24+r, a3
+
+#define gfpCarry(a0,a1,a2,a3,a4, b0,b1,b2,b3,b4) \
+ \ // b = a-p
+ MOVQ a0, b0 \
+ MOVQ a1, b1 \
+ MOVQ a2, b2 \
+ MOVQ a3, b3 \
+ MOVQ a4, b4 \
+ \
+ SUBQ ·p2+0(SB), b0 \
+ SBBQ ·p2+8(SB), b1 \
+ SBBQ ·p2+16(SB), b2 \
+ SBBQ ·p2+24(SB), b3 \
+ SBBQ $0, b4 \
+ \
+ \ // if b is negative then return a
+ \ // else return b
+ CMOVQCC b0, a0 \
+ CMOVQCC b1, a1 \
+ CMOVQCC b2, a2 \
+ CMOVQCC b3, a3
diff --git a/crypto/bn256/cloudflare/gfp12.go b/crypto/bn256/cloudflare/gfp12.go
new file mode 100644
index 000000000..93fb368a7
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp12.go
@@ -0,0 +1,160 @@
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+import (
+ "math/big"
+)
+
+// gfP12 implements the field of size p¹² as a quadratic extension of gfP6
+// where ω²=τ.
+type gfP12 struct {
+ x, y gfP6 // value is xω + y
+}
+
+func (e *gfP12) String() string {
+ return "(" + e.x.String() + "," + e.y.String() + ")"
+}
+
+func (e *gfP12) Set(a *gfP12) *gfP12 {
+ e.x.Set(&a.x)
+ e.y.Set(&a.y)
+ return e
+}
+
+func (e *gfP12) SetZero() *gfP12 {
+ e.x.SetZero()
+ e.y.SetZero()
+ return e
+}
+
+func (e *gfP12) SetOne() *gfP12 {
+ e.x.SetZero()
+ e.y.SetOne()
+ return e
+}
+
+func (e *gfP12) IsZero() bool {
+ return e.x.IsZero() && e.y.IsZero()
+}
+
+func (e *gfP12) IsOne() bool {
+ return e.x.IsZero() && e.y.IsOne()
+}
+
+func (e *gfP12) Conjugate(a *gfP12) *gfP12 {
+ e.x.Neg(&a.x)
+ e.y.Set(&a.y)
+ return e
+}
+
+func (e *gfP12) Neg(a *gfP12) *gfP12 {
+ e.x.Neg(&a.x)
+ e.y.Neg(&a.y)
+ return e
+}
+
+// Frobenius computes (xω+y)^p = x^p ω·ξ^((p-1)/6) + y^p
+func (e *gfP12) Frobenius(a *gfP12) *gfP12 {
+ e.x.Frobenius(&a.x)
+ e.y.Frobenius(&a.y)
+ e.x.MulScalar(&e.x, xiToPMinus1Over6)
+ return e
+}
+
+// FrobeniusP2 computes (xω+y)^p² = x^p² ω·ξ^((p²-1)/6) + y^p²
+func (e *gfP12) FrobeniusP2(a *gfP12) *gfP12 {
+ e.x.FrobeniusP2(&a.x)
+ e.x.MulGFP(&e.x, xiToPSquaredMinus1Over6)
+ e.y.FrobeniusP2(&a.y)
+ return e
+}
+
+func (e *gfP12) FrobeniusP4(a *gfP12) *gfP12 {
+ e.x.FrobeniusP4(&a.x)
+ e.x.MulGFP(&e.x, xiToPSquaredMinus1Over3)
+ e.y.FrobeniusP4(&a.y)
+ return e
+}
+
+func (e *gfP12) Add(a, b *gfP12) *gfP12 {
+ e.x.Add(&a.x, &b.x)
+ e.y.Add(&a.y, &b.y)
+ return e
+}
+
+func (e *gfP12) Sub(a, b *gfP12) *gfP12 {
+ e.x.Sub(&a.x, &b.x)
+ e.y.Sub(&a.y, &b.y)
+ return e
+}
+
+func (e *gfP12) Mul(a, b *gfP12) *gfP12 {
+ tx := (&gfP6{}).Mul(&a.x, &b.y)
+ t := (&gfP6{}).Mul(&b.x, &a.y)
+ tx.Add(tx, t)
+
+ ty := (&gfP6{}).Mul(&a.y, &b.y)
+ t.Mul(&a.x, &b.x).MulTau(t)
+
+ e.x.Set(tx)
+ e.y.Add(ty, t)
+ return e
+}
+
+func (e *gfP12) MulScalar(a *gfP12, b *gfP6) *gfP12 {
+ e.x.Mul(&e.x, b)
+ e.y.Mul(&e.y, b)
+ return e
+}
+
+func (c *gfP12) Exp(a *gfP12, power *big.Int) *gfP12 {
+ sum := (&gfP12{}).SetOne()
+ t := &gfP12{}
+
+ for i := power.BitLen() - 1; i >= 0; i-- {
+ t.Square(sum)
+ if power.Bit(i) != 0 {
+ sum.Mul(t, a)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+ return c
+}
+
+func (e *gfP12) Square(a *gfP12) *gfP12 {
+ // Complex squaring algorithm
+ v0 := (&gfP6{}).Mul(&a.x, &a.y)
+
+ t := (&gfP6{}).MulTau(&a.x)
+ t.Add(&a.y, t)
+ ty := (&gfP6{}).Add(&a.x, &a.y)
+ ty.Mul(ty, t).Sub(ty, v0)
+ t.MulTau(v0)
+ ty.Sub(ty, t)
+
+ e.x.Add(v0, v0)
+ e.y.Set(ty)
+ return e
+}
+
+func (e *gfP12) Invert(a *gfP12) *gfP12 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+ t1, t2 := &gfP6{}, &gfP6{}
+
+ t1.Square(&a.x)
+ t2.Square(&a.y)
+ t1.MulTau(t1).Sub(t2, t1)
+ t2.Invert(t1)
+
+ e.x.Neg(&a.x)
+ e.y.Set(&a.y)
+ e.MulScalar(e, t2)
+ return e
+}
diff --git a/crypto/bn256/cloudflare/gfp2.go b/crypto/bn256/cloudflare/gfp2.go
new file mode 100644
index 000000000..90a89e8b4
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp2.go
@@ -0,0 +1,156 @@
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+// gfP2 implements a field of size p² as a quadratic extension of the base field
+// where i²=-1.
+type gfP2 struct {
+ x, y gfP // value is xi+y.
+}
+
+func gfP2Decode(in *gfP2) *gfP2 {
+ out := &gfP2{}
+ montDecode(&out.x, &in.x)
+ montDecode(&out.y, &in.y)
+ return out
+}
+
+func (e *gfP2) String() string {
+ return "(" + e.x.String() + ", " + e.y.String() + ")"
+}
+
+func (e *gfP2) Set(a *gfP2) *gfP2 {
+ e.x.Set(&a.x)
+ e.y.Set(&a.y)
+ return e
+}
+
+func (e *gfP2) SetZero() *gfP2 {
+ e.x = gfP{0}
+ e.y = gfP{0}
+ return e
+}
+
+func (e *gfP2) SetOne() *gfP2 {
+ e.x = gfP{0}
+ e.y = *newGFp(1)
+ return e
+}
+
+func (e *gfP2) IsZero() bool {
+ zero := gfP{0}
+ return e.x == zero && e.y == zero
+}
+
+func (e *gfP2) IsOne() bool {
+ zero, one := gfP{0}, *newGFp(1)
+ return e.x == zero && e.y == one
+}
+
+func (e *gfP2) Conjugate(a *gfP2) *gfP2 {
+ e.y.Set(&a.y)
+ gfpNeg(&e.x, &a.x)
+ return e
+}
+
+func (e *gfP2) Neg(a *gfP2) *gfP2 {
+ gfpNeg(&e.x, &a.x)
+ gfpNeg(&e.y, &a.y)
+ return e
+}
+
+func (e *gfP2) Add(a, b *gfP2) *gfP2 {
+ gfpAdd(&e.x, &a.x, &b.x)
+ gfpAdd(&e.y, &a.y, &b.y)
+ return e
+}
+
+func (e *gfP2) Sub(a, b *gfP2) *gfP2 {
+ gfpSub(&e.x, &a.x, &b.x)
+ gfpSub(&e.y, &a.y, &b.y)
+ return e
+}
+
+// See "Multiplication and Squaring in Pairing-Friendly Fields",
+// http://eprint.iacr.org/2006/471.pdf
+func (e *gfP2) Mul(a, b *gfP2) *gfP2 {
+ tx, t := &gfP{}, &gfP{}
+ gfpMul(tx, &a.x, &b.y)
+ gfpMul(t, &b.x, &a.y)
+ gfpAdd(tx, tx, t)
+
+ ty := &gfP{}
+ gfpMul(ty, &a.y, &b.y)
+ gfpMul(t, &a.x, &b.x)
+ gfpSub(ty, ty, t)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+ return e
+}
+
+func (e *gfP2) MulScalar(a *gfP2, b *gfP) *gfP2 {
+ gfpMul(&e.x, &a.x, b)
+ gfpMul(&e.y, &a.y, b)
+ return e
+}
+
+// MulXi sets e=ξa where ξ=i+9 and then returns e.
+func (e *gfP2) MulXi(a *gfP2) *gfP2 {
+ // (xi+y)(i+9) = (9x+y)i+(9y-x)
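+ // (with i² = −1: xi² + 9xi + yi + 9y = (9x+y)i + (9y−x))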
+ tx := &gfP{}
+ gfpAdd(tx, &a.x, &a.x)
+ gfpAdd(tx, tx, tx)
+ gfpAdd(tx, tx, tx)
+ gfpAdd(tx, tx, &a.x)
+
+ gfpAdd(tx, tx, &a.y)
+
+ ty := &gfP{}
+ gfpAdd(ty, &a.y, &a.y)
+ gfpAdd(ty, ty, ty)
+ gfpAdd(ty, ty, ty)
+ gfpAdd(ty, ty, &a.y)
+
+ gfpSub(ty, ty, &a.x)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+ return e
+}
+
+func (e *gfP2) Square(a *gfP2) *gfP2 {
+ // Complex squaring algorithm:
+ // (xi+y)² = (x+y)(y-x) + 2*i*x*y
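+ // (expanding with i² = −1: x²i² + 2xyi + y² = (y²−x²) + 2xyi = (x+y)(y−x) + 2xyi)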
+ tx, ty := &gfP{}, &gfP{}
+ gfpSub(tx, &a.y, &a.x)
+ gfpAdd(ty, &a.x, &a.y)
+ gfpMul(ty, tx, ty)
+
+ gfpMul(tx, &a.x, &a.y)
+ gfpAdd(tx, tx, tx)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+ return e
+}
+
+func (e *gfP2) Invert(a *gfP2) *gfP2 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+ t1, t2 := &gfP{}, &gfP{}
+ gfpMul(t1, &a.x, &a.x)
+ gfpMul(t2, &a.y, &a.y)
+ gfpAdd(t1, t1, t2)
+
+ inv := &gfP{}
+ inv.Invert(t1)
+
+ gfpNeg(t1, &a.x)
+
+ gfpMul(&e.x, t1, inv)
+ gfpMul(&e.y, &a.y, inv)
+ return e
+}
diff --git a/crypto/bn256/cloudflare/gfp6.go b/crypto/bn256/cloudflare/gfp6.go
new file mode 100644
index 000000000..83d61b781
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp6.go
@@ -0,0 +1,213 @@
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+// gfP6 implements the field of size p⁶ as a cubic extension of gfP2 where τ³=ξ
+// and ξ=i+9.
+type gfP6 struct {
+ x, y, z gfP2 // value is xτ² + yτ + z
+}
+
+func (e *gfP6) String() string {
+ return "(" + e.x.String() + ", " + e.y.String() + ", " + e.z.String() + ")"
+}
+
+func (e *gfP6) Set(a *gfP6) *gfP6 {
+ e.x.Set(&a.x)
+ e.y.Set(&a.y)
+ e.z.Set(&a.z)
+ return e
+}
+
+func (e *gfP6) SetZero() *gfP6 {
+ e.x.SetZero()
+ e.y.SetZero()
+ e.z.SetZero()
+ return e
+}
+
+func (e *gfP6) SetOne() *gfP6 {
+ e.x.SetZero()
+ e.y.SetZero()
+ e.z.SetOne()
+ return e
+}
+
+func (e *gfP6) IsZero() bool {
+ return e.x.IsZero() && e.y.IsZero() && e.z.IsZero()
+}
+
+func (e *gfP6) IsOne() bool {
+ return e.x.IsZero() && e.y.IsZero() && e.z.IsOne()
+}
+
+func (e *gfP6) Neg(a *gfP6) *gfP6 {
+ e.x.Neg(&a.x)
+ e.y.Neg(&a.y)
+ e.z.Neg(&a.z)
+ return e
+}
+
+func (e *gfP6) Frobenius(a *gfP6) *gfP6 {
+ e.x.Conjugate(&a.x)
+ e.y.Conjugate(&a.y)
+ e.z.Conjugate(&a.z)
+
+ e.x.Mul(&e.x, xiTo2PMinus2Over3)
+ e.y.Mul(&e.y, xiToPMinus1Over3)
+ return e
+}
+
+// FrobeniusP2 computes (xτ²+yτ+z)^(p²) = xτ^(2p²) + yτ^(p²) + z
+func (e *gfP6) FrobeniusP2(a *gfP6) *gfP6 {
+ // τ^(2p²) = τ²τ^(2p²-2) = τ²ξ^((2p²-2)/3)
+ e.x.MulScalar(&a.x, xiTo2PSquaredMinus2Over3)
+ // τ^(p²) = ττ^(p²-1) = τξ^((p²-1)/3)
+ e.y.MulScalar(&a.y, xiToPSquaredMinus1Over3)
+ e.z.Set(&a.z)
+ return e
+}
+
+func (e *gfP6) FrobeniusP4(a *gfP6) *gfP6 {
+ e.x.MulScalar(&a.x, xiToPSquaredMinus1Over3)
+ e.y.MulScalar(&a.y, xiTo2PSquaredMinus2Over3)
+ e.z.Set(&a.z)
+ return e
+}
+
+func (e *gfP6) Add(a, b *gfP6) *gfP6 {
+ e.x.Add(&a.x, &b.x)
+ e.y.Add(&a.y, &b.y)
+ e.z.Add(&a.z, &b.z)
+ return e
+}
+
+func (e *gfP6) Sub(a, b *gfP6) *gfP6 {
+ e.x.Sub(&a.x, &b.x)
+ e.y.Sub(&a.y, &b.y)
+ e.z.Sub(&a.z, &b.z)
+ return e
+}
+
+func (e *gfP6) Mul(a, b *gfP6) *gfP6 {
+ // "Multiplication and Squaring on Pairing-Friendly Fields"
+ // Section 4, Karatsuba method.
+ // http://eprint.iacr.org/2006/471.pdf
+ v0 := (&gfP2{}).Mul(&a.z, &b.z)
+ v1 := (&gfP2{}).Mul(&a.y, &b.y)
+ v2 := (&gfP2{}).Mul(&a.x, &b.x)
+
+ t0 := (&gfP2{}).Add(&a.x, &a.y)
+ t1 := (&gfP2{}).Add(&b.x, &b.y)
+ tz := (&gfP2{}).Mul(t0, t1)
+ tz.Sub(tz, v1).Sub(tz, v2).MulXi(tz).Add(tz, v0)
+
+ t0.Add(&a.y, &a.z)
+ t1.Add(&b.y, &b.z)
+ ty := (&gfP2{}).Mul(t0, t1)
+ t0.MulXi(v2)
+ ty.Sub(ty, v0).Sub(ty, v1).Add(ty, t0)
+
+ t0.Add(&a.x, &a.z)
+ t1.Add(&b.x, &b.z)
+ tx := (&gfP2{}).Mul(t0, t1)
+ tx.Sub(tx, v0).Add(tx, v1).Sub(tx, v2)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+ e.z.Set(tz)
+ return e
+}
+
+func (e *gfP6) MulScalar(a *gfP6, b *gfP2) *gfP6 {
+ e.x.Mul(&a.x, b)
+ e.y.Mul(&a.y, b)
+ e.z.Mul(&a.z, b)
+ return e
+}
+
+func (e *gfP6) MulGFP(a *gfP6, b *gfP) *gfP6 {
+ e.x.MulScalar(&a.x, b)
+ e.y.MulScalar(&a.y, b)
+ e.z.MulScalar(&a.z, b)
+ return e
+}
+
+// MulTau computes τ·(aτ²+bτ+c) = bτ²+cτ+aξ
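+// (using τ³ = ξ from the definition of gfP6 above).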
+func (e *gfP6) MulTau(a *gfP6) *gfP6 {
+ tz := (&gfP2{}).MulXi(&a.x)
+ ty := (&gfP2{}).Set(&a.y)
+
+ e.y.Set(&a.z)
+ e.x.Set(ty)
+ e.z.Set(tz)
+ return e
+}
+
+func (e *gfP6) Square(a *gfP6) *gfP6 {
+ v0 := (&gfP2{}).Square(&a.z)
+ v1 := (&gfP2{}).Square(&a.y)
+ v2 := (&gfP2{}).Square(&a.x)
+
+ c0 := (&gfP2{}).Add(&a.x, &a.y)
+ c0.Square(c0).Sub(c0, v1).Sub(c0, v2).MulXi(c0).Add(c0, v0)
+
+ c1 := (&gfP2{}).Add(&a.y, &a.z)
+ c1.Square(c1).Sub(c1, v0).Sub(c1, v1)
+ xiV2 := (&gfP2{}).MulXi(v2)
+ c1.Add(c1, xiV2)
+
+ c2 := (&gfP2{}).Add(&a.x, &a.z)
+ c2.Square(c2).Sub(c2, v0).Add(c2, v1).Sub(c2, v2)
+
+ e.x.Set(c2)
+ e.y.Set(c1)
+ e.z.Set(c0)
+ return e
+}
+
+func (e *gfP6) Invert(a *gfP6) *gfP6 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+
+ // Here we can give a short explanation of how it works: let j be a cubic root of
+ // unity in GF(p²) so that 1+j+j²=0.
+ // Then (xτ² + yτ + z)(xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
+ // = (xτ² + yτ + z)(Cτ²+Bτ+A)
+ // = (x³ξ²+y³ξ+z³-3ξxyz) = F is an element of the base field (the norm).
+ //
+ // On the other hand (xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
+ // = τ²(y²-ξxz) + τ(ξx²-yz) + (z²-ξxy)
+ //
+ // So that's why A = (z²-ξxy), B = (ξx²-yz), C = (y²-ξxz)
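+ // The inverse is therefore (Cτ² + Bτ + A)/F; the code below multiplies each of
+ // C, B and A by F⁻¹.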
+ t1 := (&gfP2{}).Mul(&a.x, &a.y)
+ t1.MulXi(t1)
+
+ A := (&gfP2{}).Square(&a.z)
+ A.Sub(A, t1)
+
+ B := (&gfP2{}).Square(&a.x)
+ B.MulXi(B)
+ t1.Mul(&a.y, &a.z)
+ B.Sub(B, t1)
+
+ C := (&gfP2{}).Square(&a.y)
+ t1.Mul(&a.x, &a.z)
+ C.Sub(C, t1)
+
+ F := (&gfP2{}).Mul(C, &a.y)
+ F.MulXi(F)
+ t1.Mul(A, &a.z)
+ F.Add(F, t1)
+ t1.Mul(B, &a.x).MulXi(t1)
+ F.Add(F, t1)
+
+ F.Invert(F)
+
+ e.x.Mul(C, F)
+ e.y.Mul(B, F)
+ e.z.Mul(A, F)
+ return e
+}
diff --git a/crypto/bn256/cloudflare/gfp_amd64.go b/crypto/bn256/cloudflare/gfp_amd64.go
new file mode 100644
index 000000000..ac4f1a9c6
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp_amd64.go
@@ -0,0 +1,15 @@
+// +build amd64,!appengine,!gccgo
+
+package bn256
+
+//go:noescape
+func gfpNeg(c, a *gfP)
+
+//go:noescape
+func gfpAdd(c, a, b *gfP)
+
+//go:noescape
+func gfpSub(c, a, b *gfP)
+
+//go:noescape
+func gfpMul(c, a, b *gfP)
diff --git a/crypto/bn256/cloudflare/gfp_amd64.s b/crypto/bn256/cloudflare/gfp_amd64.s
new file mode 100644
index 000000000..2d0176f2e
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp_amd64.s
@@ -0,0 +1,97 @@
+// +build amd64,!appengine,!gccgo
+
+#include "gfp.h"
+#include "mul.h"
+#include "mul_bmi2.h"
+
+TEXT ·gfpNeg(SB),0,$0-16
+ MOVQ ·p2+0(SB), R8
+ MOVQ ·p2+8(SB), R9
+ MOVQ ·p2+16(SB), R10
+ MOVQ ·p2+24(SB), R11
+
+ MOVQ a+8(FP), DI
+ SUBQ 0(DI), R8
+ SBBQ 8(DI), R9
+ SBBQ 16(DI), R10
+ SBBQ 24(DI), R11
+
+ MOVQ $0, AX
+ gfpCarry(R8,R9,R10,R11,AX, R12,R13,R14,R15,BX)
+
+ MOVQ c+0(FP), DI
+ storeBlock(R8,R9,R10,R11, 0(DI))
+ RET
+
+TEXT ·gfpAdd(SB),0,$0-24
+ MOVQ a+8(FP), DI
+ MOVQ b+16(FP), SI
+
+ loadBlock(0(DI), R8,R9,R10,R11)
+ MOVQ $0, R12
+
+ ADDQ 0(SI), R8
+ ADCQ 8(SI), R9
+ ADCQ 16(SI), R10
+ ADCQ 24(SI), R11
+ ADCQ $0, R12
+
+ gfpCarry(R8,R9,R10,R11,R12, R13,R14,R15,AX,BX)
+
+ MOVQ c+0(FP), DI
+ storeBlock(R8,R9,R10,R11, 0(DI))
+ RET
+
+TEXT ·gfpSub(SB),0,$0-24
+ MOVQ a+8(FP), DI
+ MOVQ b+16(FP), SI
+
+ loadBlock(0(DI), R8,R9,R10,R11)
+
+ MOVQ ·p2+0(SB), R12
+ MOVQ ·p2+8(SB), R13
+ MOVQ ·p2+16(SB), R14
+ MOVQ ·p2+24(SB), R15
+ MOVQ $0, AX
+
+ SUBQ 0(SI), R8
+ SBBQ 8(SI), R9
+ SBBQ 16(SI), R10
+ SBBQ 24(SI), R11
+
+ CMOVQCC AX, R12
+ CMOVQCC AX, R13
+ CMOVQCC AX, R14
+ CMOVQCC AX, R15
+
+ ADDQ R12, R8
+ ADCQ R13, R9
+ ADCQ R14, R10
+ ADCQ R15, R11
+
+ MOVQ c+0(FP), DI
+ storeBlock(R8,R9,R10,R11, 0(DI))
+ RET
+
+TEXT ·gfpMul(SB),0,$160-24
+ MOVQ a+8(FP), DI
+ MOVQ b+16(FP), SI
+
+ // Jump to a slightly different implementation if MULX isn't supported.
+ CMPB runtime·support_bmi2(SB), $0
+ JE nobmi2Mul
+
+ mulBMI2(0(DI),8(DI),16(DI),24(DI), 0(SI))
+ storeBlock( R8, R9,R10,R11, 0(SP))
+ storeBlock(R12,R13,R14,R15, 32(SP))
+ gfpReduceBMI2()
+ JMP end
+
+nobmi2Mul:
+ mul(0(DI),8(DI),16(DI),24(DI), 0(SI), 0(SP))
+ gfpReduce(0(SP))
+
+end:
+ MOVQ c+0(FP), DI
+ storeBlock(R12,R13,R14,R15, 0(DI))
+ RET
diff --git a/crypto/bn256/cloudflare/gfp_pure.go b/crypto/bn256/cloudflare/gfp_pure.go
new file mode 100644
index 000000000..8fa5d3053
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp_pure.go
@@ -0,0 +1,19 @@
+// +build !amd64 appengine gccgo
+
+package bn256
+
+func gfpNeg(c, a *gfP) {
+ panic("unsupported architecture")
+}
+
+func gfpAdd(c, a, b *gfP) {
+ panic("unsupported architecture")
+}
+
+func gfpSub(c, a, b *gfP) {
+ panic("unsupported architecture")
+}
+
+func gfpMul(c, a, b *gfP) {
+ panic("unsupported architecture")
+}
diff --git a/crypto/bn256/cloudflare/gfp_test.go b/crypto/bn256/cloudflare/gfp_test.go
new file mode 100644
index 000000000..aff5e0531
--- /dev/null
+++ b/crypto/bn256/cloudflare/gfp_test.go
@@ -0,0 +1,62 @@
+// +build amd64,!appengine,!gccgo
+
+package bn256
+
+import (
+ "testing"
+)
+
+// Tests that negation works the same way on both assembly-optimized and pure Go
+// implementation.
+func TestGFpNeg(t *testing.T) {
+ n := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
+ w := &gfP{0xfedcba9876543211, 0x0123456789abcdef, 0x2152411021524110, 0x0114251201142512}
+ h := &gfP{}
+
+ gfpNeg(h, n)
+ if *h != *w {
+ t.Errorf("negation mismatch: have %#x, want %#x", *h, *w)
+ }
+}
+
+// Tests that addition works the same way on both assembly-optimized and pure Go
+// implementation.
+func TestGFpAdd(t *testing.T) {
+ a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
+ b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
+ w := &gfP{0xc3df73e9278302b8, 0x687e956e978e3572, 0x254954275c18417f, 0xad354b6afc67f9b4}
+ h := &gfP{}
+
+ gfpAdd(h, a, b)
+ if *h != *w {
+ t.Errorf("addition mismatch: have %#x, want %#x", *h, *w)
+ }
+}
+
+// Tests that subtraction works the same way on both assembly-optimized and pure Go
+// implementation.
+func TestGFpSub(t *testing.T) {
+ a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
+ b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
+ w := &gfP{0x02468acf13579bdf, 0xfdb97530eca86420, 0xdfc1e401dfc1e402, 0x203e1bfe203e1bfd}
+ h := &gfP{}
+
+ gfpSub(h, a, b)
+ if *h != *w {
+ t.Errorf("subtraction mismatch: have %#x, want %#x", *h, *w)
+ }
+}
+
+// Tests that multiplication works the same way on both assembly-optimized and pure Go
+// implementation.
+func TestGFpMul(t *testing.T) {
+ a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
+ b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
+ w := &gfP{0xcbcbd377f7ad22d3, 0x3b89ba5d849379bf, 0x87b61627bd38b6d2, 0xc44052a2a0e654b2}
+ h := &gfP{}
+
+ gfpMul(h, a, b)
+ if *h != *w {
+ t.Errorf("multiplication mismatch: have %#x, want %#x", *h, *w)
+ }
+}
diff --git a/crypto/bn256/cloudflare/main_test.go b/crypto/bn256/cloudflare/main_test.go
new file mode 100644
index 000000000..f0d59a404
--- /dev/null
+++ b/crypto/bn256/cloudflare/main_test.go
@@ -0,0 +1,73 @@
+// +build amd64,!appengine,!gccgo
+
+package bn256
+
+import (
+ "testing"
+
+ "crypto/rand"
+)
+
+func TestRandomG2Marshal(t *testing.T) {
+ for i := 0; i < 10; i++ {
+ n, g2, err := RandomG2(rand.Reader)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+ t.Logf("%d: %x\n", n, g2.Marshal())
+ }
+}
+
+func TestPairings(t *testing.T) {
+ a1 := new(G1).ScalarBaseMult(bigFromBase10("1"))
+ a2 := new(G1).ScalarBaseMult(bigFromBase10("2"))
+ a37 := new(G1).ScalarBaseMult(bigFromBase10("37"))
+ an1 := new(G1).ScalarBaseMult(bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495616"))
+
+ b0 := new(G2).ScalarBaseMult(bigFromBase10("0"))
+ b1 := new(G2).ScalarBaseMult(bigFromBase10("1"))
+ b2 := new(G2).ScalarBaseMult(bigFromBase10("2"))
+ b27 := new(G2).ScalarBaseMult(bigFromBase10("27"))
+ b999 := new(G2).ScalarBaseMult(bigFromBase10("999"))
+ bn1 := new(G2).ScalarBaseMult(bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495616"))
+
+ p1 := Pair(a1, b1)
+ pn1 := Pair(a1, bn1)
+ np1 := Pair(an1, b1)
+ if pn1.String() != np1.String() {
+ t.Error("Pairing mismatch: e(a, -b) != e(-a, b)")
+ }
+ if !PairingCheck([]*G1{a1, an1}, []*G2{b1, b1}) {
+ t.Error("MultiAte check gave false negative!")
+ }
+ p0 := new(GT).Add(p1, pn1)
+ p0_2 := Pair(a1, b0)
+ if p0.String() != p0_2.String() {
+ t.Error("Pairing mismatch: e(a, b) * e(a, -b) != 1")
+ }
+ p0_3 := new(GT).ScalarMult(p1, bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495617"))
+ if p0.String() != p0_3.String() {
+ t.Error("Pairing mismatch: e(a, b) has wrong order")
+ }
+ p2 := Pair(a2, b1)
+ p2_2 := Pair(a1, b2)
+ p2_3 := new(GT).ScalarMult(p1, bigFromBase10("2"))
+ if p2.String() != p2_2.String() {
+ t.Error("Pairing mismatch: e(a, b * 2) != e(a * 2, b)")
+ }
+ if p2.String() != p2_3.String() {
+ t.Error("Pairing mismatch: e(a, b * 2) != e(a, b) ** 2")
+ }
+ if p2.String() == p1.String() {
+ t.Error("Pairing is degenerate!")
+ }
+ if PairingCheck([]*G1{a1, a1}, []*G2{b1, b1}) {
+ t.Error("MultiAte check gave false positive!")
+ }
+ p999 := Pair(a37, b27)
+ p999_2 := Pair(a1, b999)
+ if p999.String() != p999_2.String() {
+ t.Error("Pairing mismatch: e(a * 37, b * 27) != e(a, b * 999)")
+ }
+}
diff --git a/crypto/bn256/cloudflare/mul.h b/crypto/bn256/cloudflare/mul.h
new file mode 100644
index 000000000..bab5da831
--- /dev/null
+++ b/crypto/bn256/cloudflare/mul.h
@@ -0,0 +1,181 @@
+#define mul(a0,a1,a2,a3, rb, stack) \
+ MOVQ a0, AX \
+ MULQ 0+rb \
+ MOVQ AX, R8 \
+ MOVQ DX, R9 \
+ MOVQ a0, AX \
+ MULQ 8+rb \
+ ADDQ AX, R9 \
+ ADCQ $0, DX \
+ MOVQ DX, R10 \
+ MOVQ a0, AX \
+ MULQ 16+rb \
+ ADDQ AX, R10 \
+ ADCQ $0, DX \
+ MOVQ DX, R11 \
+ MOVQ a0, AX \
+ MULQ 24+rb \
+ ADDQ AX, R11 \
+ ADCQ $0, DX \
+ MOVQ DX, R12 \
+ \
+ storeBlock(R8,R9,R10,R11, 0+stack) \
+ MOVQ R12, 32+stack \
+ \
+ MOVQ a1, AX \
+ MULQ 0+rb \
+ MOVQ AX, R8 \
+ MOVQ DX, R9 \
+ MOVQ a1, AX \
+ MULQ 8+rb \
+ ADDQ AX, R9 \
+ ADCQ $0, DX \
+ MOVQ DX, R10 \
+ MOVQ a1, AX \
+ MULQ 16+rb \
+ ADDQ AX, R10 \
+ ADCQ $0, DX \
+ MOVQ DX, R11 \
+ MOVQ a1, AX \
+ MULQ 24+rb \
+ ADDQ AX, R11 \
+ ADCQ $0, DX \
+ MOVQ DX, R12 \
+ \
+ ADDQ 8+stack, R8 \
+ ADCQ 16+stack, R9 \
+ ADCQ 24+stack, R10 \
+ ADCQ 32+stack, R11 \
+ ADCQ $0, R12 \
+ storeBlock(R8,R9,R10,R11, 8+stack) \
+ MOVQ R12, 40+stack \
+ \
+ MOVQ a2, AX \
+ MULQ 0+rb \
+ MOVQ AX, R8 \
+ MOVQ DX, R9 \
+ MOVQ a2, AX \
+ MULQ 8+rb \
+ ADDQ AX, R9 \
+ ADCQ $0, DX \
+ MOVQ DX, R10 \
+ MOVQ a2, AX \
+ MULQ 16+rb \
+ ADDQ AX, R10 \
+ ADCQ $0, DX \
+ MOVQ DX, R11 \
+ MOVQ a2, AX \
+ MULQ 24+rb \
+ ADDQ AX, R11 \
+ ADCQ $0, DX \
+ MOVQ DX, R12 \
+ \
+ ADDQ 16+stack, R8 \
+ ADCQ 24+stack, R9 \
+ ADCQ 32+stack, R10 \
+ ADCQ 40+stack, R11 \
+ ADCQ $0, R12 \
+ storeBlock(R8,R9,R10,R11, 16+stack) \
+ MOVQ R12, 48+stack \
+ \
+ MOVQ a3, AX \
+ MULQ 0+rb \
+ MOVQ AX, R8 \
+ MOVQ DX, R9 \
+ MOVQ a3, AX \
+ MULQ 8+rb \
+ ADDQ AX, R9 \
+ ADCQ $0, DX \
+ MOVQ DX, R10 \
+ MOVQ a3, AX \
+ MULQ 16+rb \
+ ADDQ AX, R10 \
+ ADCQ $0, DX \
+ MOVQ DX, R11 \
+ MOVQ a3, AX \
+ MULQ 24+rb \
+ ADDQ AX, R11 \
+ ADCQ $0, DX \
+ MOVQ DX, R12 \
+ \
+ ADDQ 24+stack, R8 \
+ ADCQ 32+stack, R9 \
+ ADCQ 40+stack, R10 \
+ ADCQ 48+stack, R11 \
+ ADCQ $0, R12 \
+ storeBlock(R8,R9,R10,R11, 24+stack) \
+ MOVQ R12, 56+stack
+
+#define gfpReduce(stack) \
+ \ // m = (T * N') mod R, store m in R8:R9:R10:R11
+ MOVQ ·np+0(SB), AX \
+ MULQ 0+stack \
+ MOVQ AX, R8 \
+ MOVQ DX, R9 \
+ MOVQ ·np+0(SB), AX \
+ MULQ 8+stack \
+ ADDQ AX, R9 \
+ ADCQ $0, DX \
+ MOVQ DX, R10 \
+ MOVQ ·np+0(SB), AX \
+ MULQ 16+stack \
+ ADDQ AX, R10 \
+ ADCQ $0, DX \
+ MOVQ DX, R11 \
+ MOVQ ·np+0(SB), AX \
+ MULQ 24+stack \
+ ADDQ AX, R11 \
+ \
+ MOVQ ·np+8(SB), AX \
+ MULQ 0+stack \
+ MOVQ AX, R12 \
+ MOVQ DX, R13 \
+ MOVQ ·np+8(SB), AX \
+ MULQ 8+stack \
+ ADDQ AX, R13 \
+ ADCQ $0, DX \
+ MOVQ DX, R14 \
+ MOVQ ·np+8(SB), AX \
+ MULQ 16+stack \
+ ADDQ AX, R14 \
+ \
+ ADDQ R12, R9 \
+ ADCQ R13, R10 \
+ ADCQ R14, R11 \
+ \
+ MOVQ ·np+16(SB), AX \
+ MULQ 0+stack \
+ MOVQ AX, R12 \
+ MOVQ DX, R13 \
+ MOVQ ·np+16(SB), AX \
+ MULQ 8+stack \
+ ADDQ AX, R13 \
+ \
+ ADDQ R12, R10 \
+ ADCQ R13, R11 \
+ \
+ MOVQ ·np+24(SB), AX \
+ MULQ 0+stack \
+ ADDQ AX, R11 \
+ \
+ storeBlock(R8,R9,R10,R11, 64+stack) \
+ \
+ \ // m * N
+ mul(·p2+0(SB),·p2+8(SB),·p2+16(SB),·p2+24(SB), 64+stack, 96+stack) \
+ \
+ \ // Add the 512-bit intermediate to m*N
+ loadBlock(96+stack, R8,R9,R10,R11) \
+ loadBlock(128+stack, R12,R13,R14,R15) \
+ \
+ MOVQ $0, AX \
+ ADDQ 0+stack, R8 \
+ ADCQ 8+stack, R9 \
+ ADCQ 16+stack, R10 \
+ ADCQ 24+stack, R11 \
+ ADCQ 32+stack, R12 \
+ ADCQ 40+stack, R13 \
+ ADCQ 48+stack, R14 \
+ ADCQ 56+stack, R15 \
+ ADCQ $0, AX \
+ \
+ gfpCarry(R12,R13,R14,R15,AX, R8,R9,R10,R11,BX)
diff --git a/crypto/bn256/cloudflare/mul_bmi2.h b/crypto/bn256/cloudflare/mul_bmi2.h
new file mode 100644
index 000000000..71ad0499a
--- /dev/null
+++ b/crypto/bn256/cloudflare/mul_bmi2.h
@@ -0,0 +1,112 @@
+#define mulBMI2(a0,a1,a2,a3, rb) \
+ MOVQ a0, DX \
+ MOVQ $0, R13 \
+ MULXQ 0+rb, R8, R9 \
+ MULXQ 8+rb, AX, R10 \
+ ADDQ AX, R9 \
+ MULXQ 16+rb, AX, R11 \
+ ADCQ AX, R10 \
+ MULXQ 24+rb, AX, R12 \
+ ADCQ AX, R11 \
+ ADCQ $0, R12 \
+ ADCQ $0, R13 \
+ \
+ MOVQ a1, DX \
+ MOVQ $0, R14 \
+ MULXQ 0+rb, AX, BX \
+ ADDQ AX, R9 \
+ ADCQ BX, R10 \
+ MULXQ 16+rb, AX, BX \
+ ADCQ AX, R11 \
+ ADCQ BX, R12 \
+ ADCQ $0, R13 \
+ MULXQ 8+rb, AX, BX \
+ ADDQ AX, R10 \
+ ADCQ BX, R11 \
+ MULXQ 24+rb, AX, BX \
+ ADCQ AX, R12 \
+ ADCQ BX, R13 \
+ ADCQ $0, R14 \
+ \
+ MOVQ a2, DX \
+ MOVQ $0, R15 \
+ MULXQ 0+rb, AX, BX \
+ ADDQ AX, R10 \
+ ADCQ BX, R11 \
+ MULXQ 16+rb, AX, BX \
+ ADCQ AX, R12 \
+ ADCQ BX, R13 \
+ ADCQ $0, R14 \
+ MULXQ 8+rb, AX, BX \
+ ADDQ AX, R11 \
+ ADCQ BX, R12 \
+ MULXQ 24+rb, AX, BX \
+ ADCQ AX, R13 \
+ ADCQ BX, R14 \
+ ADCQ $0, R15 \
+ \
+ MOVQ a3, DX \
+ MULXQ 0+rb, AX, BX \
+ ADDQ AX, R11 \
+ ADCQ BX, R12 \
+ MULXQ 16+rb, AX, BX \
+ ADCQ AX, R13 \
+ ADCQ BX, R14 \
+ ADCQ $0, R15 \
+ MULXQ 8+rb, AX, BX \
+ ADDQ AX, R12 \
+ ADCQ BX, R13 \
+ MULXQ 24+rb, AX, BX \
+ ADCQ AX, R14 \
+ ADCQ BX, R15
+
+#define gfpReduceBMI2() \
+ \ // m = (T * N') mod R, store m in R8:R9:R10:R11
+ MOVQ ·np+0(SB), DX \
+ MULXQ 0(SP), R8, R9 \
+ MULXQ 8(SP), AX, R10 \
+ ADDQ AX, R9 \
+ MULXQ 16(SP), AX, R11 \
+ ADCQ AX, R10 \
+ MULXQ 24(SP), AX, BX \
+ ADCQ AX, R11 \
+ \
+ MOVQ ·np+8(SB), DX \
+ MULXQ 0(SP), AX, BX \
+ ADDQ AX, R9 \
+ ADCQ BX, R10 \
+ MULXQ 16(SP), AX, BX \
+ ADCQ AX, R11 \
+ MULXQ 8(SP), AX, BX \
+ ADDQ AX, R10 \
+ ADCQ BX, R11 \
+ \
+ MOVQ ·np+16(SB), DX \
+ MULXQ 0(SP), AX, BX \
+ ADDQ AX, R10 \
+ ADCQ BX, R11 \
+ MULXQ 8(SP), AX, BX \
+ ADDQ AX, R11 \
+ \
+ MOVQ ·np+24(SB), DX \
+ MULXQ 0(SP), AX, BX \
+ ADDQ AX, R11 \
+ \
+ storeBlock(R8,R9,R10,R11, 64(SP)) \
+ \
+ \ // m * N
+ mulBMI2(·p2+0(SB),·p2+8(SB),·p2+16(SB),·p2+24(SB), 64(SP)) \
+ \
+ \ // Add the 512-bit intermediate to m*N
+ MOVQ $0, AX \
+ ADDQ 0(SP), R8 \
+ ADCQ 8(SP), R9 \
+ ADCQ 16(SP), R10 \
+ ADCQ 24(SP), R11 \
+ ADCQ 32(SP), R12 \
+ ADCQ 40(SP), R13 \
+ ADCQ 48(SP), R14 \
+ ADCQ 56(SP), R15 \
+ ADCQ $0, AX \
+ \
+ gfpCarry(R12,R13,R14,R15,AX, R8,R9,R10,R11,BX)
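The BMI2 variant is the same schoolbook multiplication, but `MULXQ` takes one factor implicitly from DX and writes the full 128-bit product into two explicit registers without clobbering the flags, so the ADD/ADC carry chains above can be interleaved freely. The Go analogue of a single such step is `math/bits.Mul64`; a small illustration (not part of the package):

```go
package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// bits.Mul64 returns the full 128-bit product as (hi, lo), the same shape
	// a MULXQ instruction produces in the macros above. Illustration only.
	a := uint64(0xfedcba9876543210)
	b := uint64(0x0123456789abcdef)
	hi, lo := bits.Mul64(a, b)
	fmt.Printf("hi=%#x lo=%#x\n", hi, lo)
}
```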
diff --git a/crypto/bn256/cloudflare/optate.go b/crypto/bn256/cloudflare/optate.go
new file mode 100644
index 000000000..b71e50e3a
--- /dev/null
+++ b/crypto/bn256/cloudflare/optate.go
@@ -0,0 +1,271 @@
+package bn256
+
+func lineFunctionAdd(r, p *twistPoint, q *curvePoint, r2 *gfP2) (a, b, c *gfP2, rOut *twistPoint) {
+ // See the mixed addition algorithm from "Faster Computation of the
+ // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
+ B := (&gfP2{}).Mul(&p.x, &r.t)
+
+ D := (&gfP2{}).Add(&p.y, &r.z)
+ D.Square(D).Sub(D, r2).Sub(D, &r.t).Mul(D, &r.t)
+
+ H := (&gfP2{}).Sub(B, &r.x)
+ I := (&gfP2{}).Square(H)
+
+ E := (&gfP2{}).Add(I, I)
+ E.Add(E, E)
+
+ J := (&gfP2{}).Mul(H, E)
+
+ L1 := (&gfP2{}).Sub(D, &r.y)
+ L1.Sub(L1, &r.y)
+
+ V := (&gfP2{}).Mul(&r.x, E)
+
+ rOut = &twistPoint{}
+ rOut.x.Square(L1).Sub(&rOut.x, J).Sub(&rOut.x, V).Sub(&rOut.x, V)
+
+ rOut.z.Add(&r.z, H).Square(&rOut.z).Sub(&rOut.z, &r.t).Sub(&rOut.z, I)
+
+ t := (&gfP2{}).Sub(V, &rOut.x)
+ t.Mul(t, L1)
+ t2 := (&gfP2{}).Mul(&r.y, J)
+ t2.Add(t2, t2)
+ rOut.y.Sub(t, t2)
+
+ rOut.t.Square(&rOut.z)
+
+ t.Add(&p.y, &rOut.z).Square(t).Sub(t, r2).Sub(t, &rOut.t)
+
+ t2.Mul(L1, &p.x)
+ t2.Add(t2, t2)
+ a = (&gfP2{}).Sub(t2, t)
+
+ c = (&gfP2{}).MulScalar(&rOut.z, &q.y)
+ c.Add(c, c)
+
+ b = (&gfP2{}).Neg(L1)
+ b.MulScalar(b, &q.x).Add(b, b)
+
+ return
+}
+
+func lineFunctionDouble(r *twistPoint, q *curvePoint) (a, b, c *gfP2, rOut *twistPoint) {
+ // See the doubling algorithm for a=0 from "Faster Computation of the
+ // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
+ A := (&gfP2{}).Square(&r.x)
+ B := (&gfP2{}).Square(&r.y)
+ C := (&gfP2{}).Square(B)
+
+ D := (&gfP2{}).Add(&r.x, B)
+ D.Square(D).Sub(D, A).Sub(D, C).Add(D, D)
+
+ E := (&gfP2{}).Add(A, A)
+ E.Add(E, A)
+
+ G := (&gfP2{}).Square(E)
+
+ rOut = &twistPoint{}
+ rOut.x.Sub(G, D).Sub(&rOut.x, D)
+
+ rOut.z.Add(&r.y, &r.z).Square(&rOut.z).Sub(&rOut.z, B).Sub(&rOut.z, &r.t)
+
+ rOut.y.Sub(D, &rOut.x).Mul(&rOut.y, E)
+ t := (&gfP2{}).Add(C, C)
+ t.Add(t, t).Add(t, t)
+ rOut.y.Sub(&rOut.y, t)
+
+ rOut.t.Square(&rOut.z)
+
+ t.Mul(E, &r.t).Add(t, t)
+ b = (&gfP2{}).Neg(t)
+ b.MulScalar(b, &q.x)
+
+ a = (&gfP2{}).Add(&r.x, E)
+ a.Square(a).Sub(a, A).Sub(a, G)
+ t.Add(B, B).Add(t, t)
+ a.Sub(a, t)
+
+ c = (&gfP2{}).Mul(&rOut.z, &r.t)
+ c.Add(c, c).MulScalar(c, &q.y)
+
+ return
+}
+
+func mulLine(ret *gfP12, a, b, c *gfP2) {
+ a2 := &gfP6{}
+ a2.y.Set(a)
+ a2.z.Set(b)
+ a2.Mul(a2, &ret.x)
+ t3 := (&gfP6{}).MulScalar(&ret.y, c)
+
+ t := (&gfP2{}).Add(b, c)
+ t2 := &gfP6{}
+ t2.y.Set(a)
+ t2.z.Set(t)
+ ret.x.Add(&ret.x, &ret.y)
+
+ ret.y.Set(t3)
+
+ ret.x.Mul(&ret.x, t2).Sub(&ret.x, a2).Sub(&ret.x, &ret.y)
+ a2.MulTau(a2)
+ ret.y.Add(&ret.y, a2)
+}
+
+// sixuPlus2NAF is 6u+2 in non-adjacent form.
+var sixuPlus2NAF = []int8{0, 0, 0, 1, 0, 1, 0, -1, 0, 0, 1, -1, 0, 0, 1, 0,
+ 0, 1, 1, 0, -1, 0, 0, 1, 0, -1, 0, 0, 0, 0, 1, 1,
+ 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, -1, 0, 0, 1,
+ 1, 0, 0, -1, 0, 0, 0, 1, 1, 0, -1, 0, 0, 1, 0, 1, 1}
+
+// miller implements the Miller loop for calculating the Optimal Ate pairing.
+// See algorithm 1 from http://cryptojedi.org/papers/dclxvi-20100714.pdf
+func miller(q *twistPoint, p *curvePoint) *gfP12 {
+ ret := (&gfP12{}).SetOne()
+
+ aAffine := &twistPoint{}
+ aAffine.Set(q)
+ aAffine.MakeAffine()
+
+ bAffine := &curvePoint{}
+ bAffine.Set(p)
+ bAffine.MakeAffine()
+
+ minusA := &twistPoint{}
+ minusA.Neg(aAffine)
+
+ r := &twistPoint{}
+ r.Set(aAffine)
+
+ r2 := (&gfP2{}).Square(&aAffine.y)
+
+ for i := len(sixuPlus2NAF) - 1; i > 0; i-- {
+ a, b, c, newR := lineFunctionDouble(r, bAffine)
+ if i != len(sixuPlus2NAF)-1 {
+ ret.Square(ret)
+ }
+
+ mulLine(ret, a, b, c)
+ r = newR
+
+ switch sixuPlus2NAF[i-1] {
+ case 1:
+ a, b, c, newR = lineFunctionAdd(r, aAffine, bAffine, r2)
+ case -1:
+ a, b, c, newR = lineFunctionAdd(r, minusA, bAffine, r2)
+ default:
+ continue
+ }
+
+ mulLine(ret, a, b, c)
+ r = newR
+ }
+
+ // In order to calculate Q1 we have to convert q from the sextic twist
+ // to the full GF(p^12) group, apply the Frobenius there, and convert
+ // back.
+ //
+ // The twist isomorphism is (x', y') -> (xω², yω³). If we consider just
+ // x for a moment, then after applying the Frobenius, we have x̄ω^(2p)
+ // where x̄ is the conjugate of x. If we are going to apply the inverse
+ // isomorphism we need a value with a single coefficient of ω² so we
+ // rewrite this as x̄ω^(2p-2)ω². ξ⁶ = ω and, due to the construction of
+ // p, 2p-2 is a multiple of six. Therefore we can rewrite as
+ // x̄ξ^((p-1)/3)ω² and applying the inverse isomorphism eliminates the
+ // ω².
+ //
+ // A similar argument can be made for the y value.
+
+ q1 := &twistPoint{}
+ q1.x.Conjugate(&aAffine.x).Mul(&q1.x, xiToPMinus1Over3)
+ q1.y.Conjugate(&aAffine.y).Mul(&q1.y, xiToPMinus1Over2)
+ q1.z.SetOne()
+ q1.t.SetOne()
+
+ // For Q2 we are applying the p² Frobenius. The two conjugations cancel
+ // out and we are left only with the factors from the isomorphism. In
+ // the case of x, we end up with a pure number which is why
+ // xiToPSquaredMinus1Over3 is ∈ GF(p). With y we get a factor of -1. We
+ // ignore this to end up with -Q2.
+
+ minusQ2 := &twistPoint{}
+ minusQ2.x.MulScalar(&aAffine.x, xiToPSquaredMinus1Over3)
+ minusQ2.y.Set(&aAffine.y)
+ minusQ2.z.SetOne()
+ minusQ2.t.SetOne()
+
+ r2.Square(&q1.y)
+ a, b, c, newR := lineFunctionAdd(r, q1, bAffine, r2)
+ mulLine(ret, a, b, c)
+ r = newR
+
+ r2.Square(&minusQ2.y)
+ a, b, c, newR = lineFunctionAdd(r, minusQ2, bAffine, r2)
+ mulLine(ret, a, b, c)
+ r = newR
+
+ return ret
+}
+
+// finalExponentiation computes the (p¹²-1)/Order-th power of an element of
+// GF(p¹²) to obtain an element of GT (steps 13-15 of algorithm 1 from
+// http://cryptojedi.org/papers/dclxvi-20100714.pdf)
+func finalExponentiation(in *gfP12) *gfP12 {
+ t1 := &gfP12{}
+
+ // This is the p^6-Frobenius
+ t1.x.Neg(&in.x)
+ t1.y.Set(&in.y)
+
+ inv := &gfP12{}
+ inv.Invert(in)
+ t1.Mul(t1, inv)
+
+ t2 := (&gfP12{}).FrobeniusP2(t1)
+ t1.Mul(t1, t2)
+
+ fp := (&gfP12{}).Frobenius(t1)
+ fp2 := (&gfP12{}).FrobeniusP2(t1)
+ fp3 := (&gfP12{}).Frobenius(fp2)
+
+ fu := (&gfP12{}).Exp(t1, u)
+ fu2 := (&gfP12{}).Exp(fu, u)
+ fu3 := (&gfP12{}).Exp(fu2, u)
+
+ y3 := (&gfP12{}).Frobenius(fu)
+ fu2p := (&gfP12{}).Frobenius(fu2)
+ fu3p := (&gfP12{}).Frobenius(fu3)
+ y2 := (&gfP12{}).FrobeniusP2(fu2)
+
+ y0 := &gfP12{}
+ y0.Mul(fp, fp2).Mul(y0, fp3)
+
+ y1 := (&gfP12{}).Conjugate(t1)
+ y5 := (&gfP12{}).Conjugate(fu2)
+ y3.Conjugate(y3)
+ y4 := (&gfP12{}).Mul(fu, fu2p)
+ y4.Conjugate(y4)
+
+ y6 := (&gfP12{}).Mul(fu3, fu3p)
+ y6.Conjugate(y6)
+
+ t0 := (&gfP12{}).Square(y6)
+ t0.Mul(t0, y4).Mul(t0, y5)
+ t1.Mul(y3, y5).Mul(t1, t0)
+ t0.Mul(t0, y2)
+ t1.Square(t1).Mul(t1, t0).Square(t1)
+ t0.Mul(t1, y1)
+ t1.Mul(t1, y0)
+ t0.Square(t0).Mul(t0, t1)
+
+ return t0
+}
+
+func optimalAte(a *twistPoint, b *curvePoint) *gfP12 {
+ e := miller(a, b)
+ ret := finalExponentiation(e)
+
+ if a.IsInfinity() || b.IsInfinity() {
+ ret.SetOne()
+ }
+ return ret
+}
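The Miller loop walks `sixuPlus2NAF`, the signed-digit (non-adjacent form) expansion of 6u+2: it squares `ret` and doubles `r` at every position, and folds in a line-function addition only where the digit is ±1. A hedged sketch of how such a digit table can be computed (illustrative only; it is not how the constant in this file was generated):

```go
package main

import (
	"fmt"
	"math/big"
)

// naf returns the non-adjacent form of k, least significant digit first:
// every digit is in {-1, 0, 1} and no two adjacent digits are both non-zero.
func naf(k *big.Int) []int8 {
	n := new(big.Int).Set(k)
	four := big.NewInt(4)
	var digits []int8
	for n.Sign() > 0 {
		if n.Bit(0) == 1 {
			// d = 2 - (n mod 4), i.e. +1 or -1
			d := int8(2 - new(big.Int).Mod(n, four).Int64())
			digits = append(digits, d)
			n.Sub(n, big.NewInt(int64(d)))
		} else {
			digits = append(digits, 0)
		}
		n.Rsh(n, 1)
	}
	return digits
}

func main() {
	fmt.Println(naf(big.NewInt(7))) // [-1 0 0 1], since 7 = 8 - 1
}
```

Because no two consecutive digits are non-zero, the number of addition steps inside the loop stays low relative to a plain binary expansion.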
diff --git a/crypto/bn256/cloudflare/twist.go b/crypto/bn256/cloudflare/twist.go
new file mode 100644
index 000000000..0c2f80d4e
--- /dev/null
+++ b/crypto/bn256/cloudflare/twist.go
@@ -0,0 +1,204 @@
+package bn256
+
+import (
+ "math/big"
+)
+
+// twistPoint implements the elliptic curve y²=x³+3/ξ over GF(p²). Points are
+// kept in Jacobian form and t=z² when valid. The group G₂ is the set of
+// n-torsion points of this curve over GF(p²) (where n = Order)
+type twistPoint struct {
+ x, y, z, t gfP2
+}
+
+var twistB = &gfP2{
+ gfP{0x38e7ecccd1dcff67, 0x65f0b37d93ce0d3e, 0xd749d0dd22ac00aa, 0x0141b9ce4a688d4d},
+ gfP{0x3bf938e377b802a8, 0x020b1b273633535d, 0x26b7edf049755260, 0x2514c6324384a86d},
+}
+
+// twistGen is the generator of group G₂.
+var twistGen = &twistPoint{
+ gfP2{
+ gfP{0xafb4737da84c6140, 0x6043dd5a5802d8c4, 0x09e950fc52a02f86, 0x14fef0833aea7b6b},
+ gfP{0x8e83b5d102bc2026, 0xdceb1935497b0172, 0xfbb8264797811adf, 0x19573841af96503b},
+ },
+ gfP2{
+ gfP{0x64095b56c71856ee, 0xdc57f922327d3cbb, 0x55f935be33351076, 0x0da4a0e693fd6482},
+ gfP{0x619dfa9d886be9f6, 0xfe7fd297f59e9b78, 0xff9e1a62231b7dfe, 0x28fd7eebae9e4206},
+ },
+ gfP2{*newGFp(0), *newGFp(1)},
+ gfP2{*newGFp(0), *newGFp(1)},
+}
+
+func (c *twistPoint) String() string {
+ c.MakeAffine()
+ x, y := gfP2Decode(&c.x), gfP2Decode(&c.y)
+ return "(" + x.String() + ", " + y.String() + ")"
+}
+
+func (c *twistPoint) Set(a *twistPoint) {
+ c.x.Set(&a.x)
+ c.y.Set(&a.y)
+ c.z.Set(&a.z)
+ c.t.Set(&a.t)
+}
+
+// IsOnCurve returns true iff c is on the curve.
+func (c *twistPoint) IsOnCurve() bool {
+ c.MakeAffine()
+ if c.IsInfinity() {
+ return true
+ }
+
+ y2, x3 := &gfP2{}, &gfP2{}
+ y2.Square(&c.y)
+ x3.Square(&c.x).Mul(x3, &c.x).Add(x3, twistB)
+
+ if *y2 != *x3 {
+ return false
+ }
+ cneg := &twistPoint{}
+ cneg.Mul(c, Order)
+ return cneg.z.IsZero()
+}
+
+func (c *twistPoint) SetInfinity() {
+ c.x.SetZero()
+ c.y.SetOne()
+ c.z.SetZero()
+ c.t.SetZero()
+}
+
+func (c *twistPoint) IsInfinity() bool {
+ return c.z.IsZero()
+}
+
+func (c *twistPoint) Add(a, b *twistPoint) {
+ // For additional comments, see the same function in curve.go.
+
+ if a.IsInfinity() {
+ c.Set(b)
+ return
+ }
+ if b.IsInfinity() {
+ c.Set(a)
+ return
+ }
+
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
+ z12 := (&gfP2{}).Square(&a.z)
+ z22 := (&gfP2{}).Square(&b.z)
+ u1 := (&gfP2{}).Mul(&a.x, z22)
+ u2 := (&gfP2{}).Mul(&b.x, z12)
+
+ t := (&gfP2{}).Mul(&b.z, z22)
+ s1 := (&gfP2{}).Mul(&a.y, t)
+
+ t.Mul(&a.z, z12)
+ s2 := (&gfP2{}).Mul(&b.y, t)
+
+ h := (&gfP2{}).Sub(u2, u1)
+ xEqual := h.IsZero()
+
+ t.Add(h, h)
+ i := (&gfP2{}).Square(t)
+ j := (&gfP2{}).Mul(h, i)
+
+ t.Sub(s2, s1)
+ yEqual := t.IsZero()
+ if xEqual && yEqual {
+ c.Double(a)
+ return
+ }
+ r := (&gfP2{}).Add(t, t)
+
+ v := (&gfP2{}).Mul(u1, i)
+
+ t4 := (&gfP2{}).Square(r)
+ t.Add(v, v)
+ t6 := (&gfP2{}).Sub(t4, j)
+ c.x.Sub(t6, t)
+
+ t.Sub(v, &c.x) // t7
+ t4.Mul(s1, j) // t8
+ t6.Add(t4, t4) // t9
+ t4.Mul(r, t) // t10
+ c.y.Sub(t4, t6)
+
+ t.Add(&a.z, &b.z) // t11
+ t4.Square(t) // t12
+ t.Sub(t4, z12) // t13
+ t4.Sub(t, z22) // t14
+ c.z.Mul(t4, h)
+}
+
+func (c *twistPoint) Double(a *twistPoint) {
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
+ A := (&gfP2{}).Square(&a.x)
+ B := (&gfP2{}).Square(&a.y)
+ C := (&gfP2{}).Square(B)
+
+ t := (&gfP2{}).Add(&a.x, B)
+ t2 := (&gfP2{}).Square(t)
+ t.Sub(t2, A)
+ t2.Sub(t, C)
+ d := (&gfP2{}).Add(t2, t2)
+ t.Add(A, A)
+ e := (&gfP2{}).Add(t, A)
+ f := (&gfP2{}).Square(e)
+
+ t.Add(d, d)
+ c.x.Sub(f, t)
+
+ t.Add(C, C)
+ t2.Add(t, t)
+ t.Add(t2, t2)
+ c.y.Sub(d, &c.x)
+ t2.Mul(e, &c.y)
+ c.y.Sub(t2, t)
+
+ t.Mul(&a.y, &a.z)
+ c.z.Add(t, t)
+}
+
+func (c *twistPoint) Mul(a *twistPoint, scalar *big.Int) {
+ sum, t := &twistPoint{}, &twistPoint{}
+
+ for i := scalar.BitLen(); i >= 0; i-- {
+ t.Double(sum)
+ if scalar.Bit(i) != 0 {
+ sum.Add(t, a)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+}
+
+func (c *twistPoint) MakeAffine() {
+ if c.z.IsOne() {
+ return
+ } else if c.z.IsZero() {
+ c.x.SetZero()
+ c.y.SetOne()
+ c.t.SetZero()
+ return
+ }
+
+ zInv := (&gfP2{}).Invert(&c.z)
+ t := (&gfP2{}).Mul(&c.y, zInv)
+ zInv2 := (&gfP2{}).Square(zInv)
+ c.y.Mul(t, zInv2)
+ t.Mul(&c.x, zInv2)
+ c.x.Set(t)
+ c.z.SetOne()
+ c.t.SetOne()
+}
+
+func (c *twistPoint) Neg(a *twistPoint) {
+ c.x.Set(&a.x)
+ c.y.Neg(&a.y)
+ c.z.Set(&a.z)
+ c.t.SetZero()
+}
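`Mul` above is a plain MSB-first double-and-add; starting the scan at `BitLen()` rather than `BitLen()-1` only costs one extra doubling of the identity. The same control flow applies in any group, e.g. modular exponentiation, as in this sketch (illustration only; real code would use `big.Int.Exp`):

```go
package main

import (
	"fmt"
	"math/big"
)

// powMod mirrors the MSB-first double-and-add loop in twistPoint.Mul, with
// "double" as squaring and "add" as multiplication by the base.
func powMod(base, exp, mod *big.Int) *big.Int {
	result := big.NewInt(1)
	for i := exp.BitLen(); i >= 0; i-- {
		result.Mul(result, result).Mod(result, mod) // "double"
		if exp.Bit(i) != 0 {
			result.Mul(result, base).Mod(result, mod) // "add"
		}
	}
	return result
}

func main() {
	fmt.Println(powMod(big.NewInt(3), big.NewInt(10), big.NewInt(1000))) // 49
}
```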
diff --git a/crypto/bn256/bn256.go b/crypto/bn256/google/bn256.go
index 7144c31a8..5da83e033 100644
--- a/crypto/bn256/bn256.go
+++ b/crypto/bn256/google/bn256.go
@@ -18,6 +18,7 @@ package bn256
import (
"crypto/rand"
+ "errors"
"io"
"math/big"
)
@@ -115,21 +116,25 @@ func (n *G1) Marshal() []byte {
// Unmarshal sets e to the result of converting the output of Marshal back into
// a group element and then returns e.
-func (e *G1) Unmarshal(m []byte) (*G1, bool) {
+func (e *G1) Unmarshal(m []byte) ([]byte, error) {
// Each value is a 256-bit number.
const numBytes = 256 / 8
-
if len(m) != 2*numBytes {
- return nil, false
+ return nil, errors.New("bn256: not enough data")
}
-
+ // Unmarshal the coordinates and check that they are below the modulus
if e.p == nil {
e.p = newCurvePoint(nil)
}
-
e.p.x.SetBytes(m[0*numBytes : 1*numBytes])
+ if e.p.x.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
e.p.y.SetBytes(m[1*numBytes : 2*numBytes])
-
+ if e.p.y.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
+ // Ensure the point is on the curve
if e.p.x.Sign() == 0 && e.p.y.Sign() == 0 {
// This is the point at infinity.
e.p.y.SetInt64(1)
@@ -140,11 +145,10 @@ func (e *G1) Unmarshal(m []byte) (*G1, bool) {
e.p.t.SetInt64(1)
if !e.p.IsOnCurve() {
- return nil, false
+ return nil, errors.New("bn256: malformed point")
}
}
-
- return e, true
+ return m[2*numBytes:], nil
}
// G2 is an abstract cyclic group. The zero value is suitable for use as the
@@ -233,23 +237,33 @@ func (n *G2) Marshal() []byte {
// Unmarshal sets e to the result of converting the output of Marshal back into
// a group element and then returns e.
-func (e *G2) Unmarshal(m []byte) (*G2, bool) {
+func (e *G2) Unmarshal(m []byte) ([]byte, error) {
// Each value is a 256-bit number.
const numBytes = 256 / 8
-
if len(m) != 4*numBytes {
- return nil, false
+ return nil, errors.New("bn256: not enough data")
}
-
+ // Unmarshal the coordinates and check that they are below the modulus
if e.p == nil {
e.p = newTwistPoint(nil)
}
-
e.p.x.x.SetBytes(m[0*numBytes : 1*numBytes])
+ if e.p.x.x.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
e.p.x.y.SetBytes(m[1*numBytes : 2*numBytes])
+ if e.p.x.y.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
e.p.y.x.SetBytes(m[2*numBytes : 3*numBytes])
+ if e.p.y.x.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
e.p.y.y.SetBytes(m[3*numBytes : 4*numBytes])
-
+ if e.p.y.y.Cmp(P) >= 0 {
+ return nil, errors.New("bn256: coordinate exceeds modulus")
+ }
+ // Ensure the point is on the curve
if e.p.x.x.Sign() == 0 &&
e.p.x.y.Sign() == 0 &&
e.p.y.x.Sign() == 0 &&
@@ -263,11 +277,10 @@ func (e *G2) Unmarshal(m []byte) (*G2, bool) {
e.p.t.SetOne()
if !e.p.IsOnCurve() {
- return nil, false
+ return nil, errors.New("bn256: malformed point")
}
}
-
- return e, true
+ return m[4*numBytes:], nil
}
// GT is an abstract cyclic group. The zero value is suitable for use as the
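Besides moving the package under `crypto/bn256/google`, the diff changes `Unmarshal` on both `G1` and `G2` from returning `(*G1, bool)` to returning the unconsumed bytes and an `error`, and it now rejects coordinates that are not reduced modulo P. A round-trip sketch of the new calling convention (import path and alias are assumptions based on the new file location):

```go
package main

import (
	"fmt"
	"log"
	"math/big"

	bn256 "github.com/ethereum/go-ethereum/crypto/bn256/google"
)

func main() {
	// Marshal a point and read it back through the error-returning API.
	g := new(bn256.G1).ScalarBaseMult(big.NewInt(1))
	buf := g.Marshal()

	out := new(bn256.G1)
	if _, err := out.Unmarshal(buf); err != nil { // the bool is gone; check err
		log.Fatalf("unmarshal failed: %v", err)
	}
	fmt.Println("round-trip ok")
}
```

Callers that previously checked the boolean now check `err`, as the updated tests below do.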
diff --git a/crypto/bn256/bn256_test.go b/crypto/bn256/google/bn256_test.go
index 866065d0c..a4497ada9 100644
--- a/crypto/bn256/bn256_test.go
+++ b/crypto/bn256/google/bn256_test.go
@@ -219,15 +219,16 @@ func TestBilinearity(t *testing.T) {
func TestG1Marshal(t *testing.T) {
g := new(G1).ScalarBaseMult(new(big.Int).SetInt64(1))
form := g.Marshal()
- _, ok := new(G1).Unmarshal(form)
- if !ok {
+ _, err := new(G1).Unmarshal(form)
+ if err != nil {
t.Fatalf("failed to unmarshal")
}
g.ScalarBaseMult(Order)
form = g.Marshal()
- g2, ok := new(G1).Unmarshal(form)
- if !ok {
+
+ g2 := new(G1)
+ if _, err = g2.Unmarshal(form); err != nil {
t.Fatalf("failed to unmarshal ∞")
}
if !g2.p.IsInfinity() {
@@ -238,15 +239,15 @@ func TestG1Marshal(t *testing.T) {
func TestG2Marshal(t *testing.T) {
g := new(G2).ScalarBaseMult(new(big.Int).SetInt64(1))
form := g.Marshal()
- _, ok := new(G2).Unmarshal(form)
- if !ok {
+ _, err := new(G2).Unmarshal(form)
+ if err != nil {
t.Fatalf("failed to unmarshal")
}
g.ScalarBaseMult(Order)
form = g.Marshal()
- g2, ok := new(G2).Unmarshal(form)
- if !ok {
+ g2 := new(G2)
+ if _, err = g2.Unmarshal(form); err != nil {
t.Fatalf("failed to unmarshal ∞")
}
if !g2.p.IsInfinity() {
@@ -273,12 +274,18 @@ func TestTripartiteDiffieHellman(t *testing.T) {
b, _ := rand.Int(rand.Reader, Order)
c, _ := rand.Int(rand.Reader, Order)
- pa, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(a).Marshal())
- qa, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(a).Marshal())
- pb, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(b).Marshal())
- qb, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(b).Marshal())
- pc, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(c).Marshal())
- qc, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(c).Marshal())
+ pa := new(G1)
+ pa.Unmarshal(new(G1).ScalarBaseMult(a).Marshal())
+ qa := new(G2)
+ qa.Unmarshal(new(G2).ScalarBaseMult(a).Marshal())
+ pb := new(G1)
+ pb.Unmarshal(new(G1).ScalarBaseMult(b).Marshal())
+ qb := new(G2)
+ qb.Unmarshal(new(G2).ScalarBaseMult(b).Marshal())
+ pc := new(G1)
+ pc.Unmarshal(new(G1).ScalarBaseMult(c).Marshal())
+ qc := new(G2)
+ qc.Unmarshal(new(G2).ScalarBaseMult(c).Marshal())
k1 := Pair(pb, qc)
k1.ScalarMult(k1, a)
diff --git a/crypto/bn256/constants.go b/crypto/bn256/google/constants.go
index ab649d7f3..ab649d7f3 100644
--- a/crypto/bn256/constants.go
+++ b/crypto/bn256/google/constants.go
diff --git a/crypto/bn256/curve.go b/crypto/bn256/google/curve.go
index 3e679fdc7..3e679fdc7 100644
--- a/crypto/bn256/curve.go
+++ b/crypto/bn256/google/curve.go
diff --git a/crypto/bn256/example_test.go b/crypto/bn256/google/example_test.go
index b2d19807a..b2d19807a 100644
--- a/crypto/bn256/example_test.go
+++ b/crypto/bn256/google/example_test.go
diff --git a/crypto/bn256/gfp12.go b/crypto/bn256/google/gfp12.go
index f084eddf2..f084eddf2 100644
--- a/crypto/bn256/gfp12.go
+++ b/crypto/bn256/google/gfp12.go
diff --git a/crypto/bn256/gfp2.go b/crypto/bn256/google/gfp2.go
index 3981f6cb4..3981f6cb4 100644
--- a/crypto/bn256/gfp2.go
+++ b/crypto/bn256/google/gfp2.go
diff --git a/crypto/bn256/gfp6.go b/crypto/bn256/google/gfp6.go
index 218856617..218856617 100644
--- a/crypto/bn256/gfp6.go
+++ b/crypto/bn256/google/gfp6.go
diff --git a/crypto/bn256/main_test.go b/crypto/bn256/google/main_test.go
index 0230f1b19..0230f1b19 100644
--- a/crypto/bn256/main_test.go
+++ b/crypto/bn256/google/main_test.go
diff --git a/crypto/bn256/optate.go b/crypto/bn256/google/optate.go
index 9d6957062..9d6957062 100644
--- a/crypto/bn256/optate.go
+++ b/crypto/bn256/google/optate.go
diff --git a/crypto/bn256/twist.go b/crypto/bn256/google/twist.go
index 95b966e2e..1f5a4d9de 100644
--- a/crypto/bn256/twist.go
+++ b/crypto/bn256/google/twist.go
@@ -76,7 +76,13 @@ func (c *twistPoint) IsOnCurve() bool {
yy.Sub(yy, xxx)
yy.Sub(yy, twistB)
yy.Minimal()
- return yy.x.Sign() == 0 && yy.y.Sign() == 0
+
+ if yy.x.Sign() != 0 || yy.y.Sign() != 0 {
+ return false
+ }
+ cneg := newTwistPoint(pool)
+ cneg.Mul(c, Order, pool)
+ return cneg.z.IsZero()
}
func (c *twistPoint) SetInfinity() {
diff --git a/dashboard/README.md b/dashboard/README.md
index e010095ab..641c5f44b 100644
--- a/dashboard/README.md
+++ b/dashboard/README.md
@@ -12,28 +12,27 @@ The client's UI uses [React][React] with JSX syntax, which is validated by the [
As the dashboard depends on certain NPM packages (which are not included in the `go-ethereum` repo), these need to be installed first:
```
-$ (cd dashboard/assets && npm install)
-$ (cd dashboard/assets && ./node_modules/.bin/flow-typed install)
+$ (cd dashboard/assets && yarn install && yarn flow)
```
-Normally the dashboard assets are bundled into Geth via `go-bindata` to avoid external dependencies. Rebuilding Geth after each UI modification however is not feasible from a developer perspective. Instead, we can run `webpack` in watch mode to automatically rebundle the UI, and ask `geth` to use external assets to not rely on compiled resources:
+Normally the dashboard assets are bundled into Geth via `go-bindata` to avoid external dependencies. Rebuilding Geth after each UI modification, however, is not feasible from a developer's perspective. Instead, we can run `yarn dev` to watch for file system changes and refresh the browser automatically.
```
-$ (cd dashboard/assets && ./node_modules/.bin/webpack --watch)
-$ geth --dashboard --dashboard.assets=dashboard/assets --vmodule=dashboard=5
+$ geth --dashboard --vmodule=dashboard=5
+$ (cd dashboard/assets && yarn dev)
```
To bundle up the final UI into Geth, run `go generate`:
```
-$ go generate ./dashboard
+$ (cd dashboard && go generate)
```
### Static type checking
Since JavaScript doesn't provide type safety, [Flow][Flow] is used to check types. These are only useful during development, so at the end of the process Babel will strip them.
-To take advantage of static type checking, your IDE needs to be prepared for it. In case of [Atom][Atom] a configuration guide can be found [here][Atom config]: Install the [Nuclide][Nuclide] package for Flow support, making sure it installs all of its support packages by enabling `Install Recommended Packages on Startup`, and set the path of the `flow-bin` which were installed previously by `npm`.
+To take advantage of static type checking, your IDE needs to be prepared for it. In the case of [Atom][Atom], a configuration guide can be found [here][Atom config]: install the [Nuclide][Nuclide] package for Flow support, making sure it installs all of its support packages by enabling `Install Recommended Packages on Startup`, and set the path of the `flow-bin` binary which was installed previously by `yarn`.
For more IDE support, install the `linter-eslint` package too, which finds the `.eslintrc` file and provides real-time linting. Atom warns that these two packages are incompatible, but they seem to work well together. For third-party library errors and auto-completion, [flow-typed][flow-typed] is used.
@@ -41,7 +40,7 @@ For more IDE support install the `linter-eslint` package too, which finds the `.
[Webpack][Webpack] offers handy tools for visualizing the bundle's dependency tree and space usage.
-* Generate the bundle's profile running `webpack --profile --json > stats.json`
+* Generate the bundle's profile running `yarn stats`
* For the _dependency tree_ go to [Webpack Analyze][WA], and import `stats.json`
* For the _space usage_ go to [Webpack Visualizer][WV], and import `stats.json`
diff --git a/dashboard/assets.go b/dashboard/assets.go
index 8337cf080..521d134a6 100644
--- a/dashboard/assets.go
+++ b/dashboard/assets.go
@@ -1,6 +1,6 @@
// Code generated by go-bindata. DO NOT EDIT.
// sources:
-// assets/dashboard.html
+// assets/index.html
// assets/bundle.js
package dashboard
@@ -48,7 +48,7 @@ func (fi bindataFileInfo) Sys() interface{} {
}
//nolint:misspell
-var _dashboardHtml = []byte(`<!DOCTYPE html>
+var _indexHtml = []byte(`<!DOCTYPE html>
<html lang="en" style="height: 100%">
<head>
<meta charset="UTF-8">
@@ -73,17 +73,17 @@ var _dashboardHtml = []byte(`<!DOCTYPE html>
</html>
`)
-func dashboardHtmlBytes() ([]byte, error) {
- return _dashboardHtml, nil
+func indexHtmlBytes() ([]byte, error) {
+ return _indexHtml, nil
}
-func dashboardHtml() (*asset, error) {
- bytes, err := dashboardHtmlBytes()
+func indexHtml() (*asset, error) {
+ bytes, err := indexHtmlBytes()
if err != nil {
return nil, err
}
- info := bindataFileInfo{name: "dashboard.html", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)}
+ info := bindataFileInfo{name: "index.html", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6b, 0xd9, 0xa6, 0xeb, 0x32, 0x49, 0x9b, 0xe5, 0x3a, 0xcb, 0x99, 0xd3, 0xb6, 0x69, 0x7f, 0xde, 0x35, 0x9d, 0x5, 0x96, 0x84, 0xc0, 0x14, 0xef, 0xbe, 0x58, 0x10, 0x5e, 0x40, 0xf2, 0x12, 0x97}}
return a, nil
}
@@ -116,11 +116,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return __webpack_require__.d(getter, "a", getter), getter;
}, __webpack_require__.o = function(object, property) {
return Object.prototype.hasOwnProperty.call(object, property);
- }, __webpack_require__.p = "", __webpack_require__(__webpack_require__.s = 331);
+ }, __webpack_require__.p = "", __webpack_require__(__webpack_require__.s = 336);
}([ function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
- "production" === process.env.NODE_ENV ? module.exports = __webpack_require__(332) : module.exports = __webpack_require__(333);
+ "production" === process.env.NODE_ENV ? module.exports = __webpack_require__(337) : module.exports = __webpack_require__(338);
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
(function(process) {
@@ -128,8 +128,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var REACT_ELEMENT_TYPE = "function" == typeof Symbol && Symbol.for && Symbol.for("react.element") || 60103, isValidElement = function(object) {
return "object" == typeof object && null !== object && object.$$typeof === REACT_ELEMENT_TYPE;
};
- module.exports = __webpack_require__(374)(isValidElement, !0);
- } else module.exports = __webpack_require__(375)();
+ module.exports = __webpack_require__(379)(isValidElement, !0);
+ } else module.exports = __webpack_require__(380)();
}).call(exports, __webpack_require__(2));
}, function(module, exports) {
function defaultSetTimout() {
@@ -289,7 +289,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "o", function() {
return parseChildIndex;
});
- var __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_lodash_isString__ = __webpack_require__(164), __WEBPACK_IMPORTED_MODULE_1_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isString__), __WEBPACK_IMPORTED_MODULE_2_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_2_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_3_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_3_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7__DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_8__PureRender__ = __webpack_require__(5), PRESENTATION_ATTRIBUTES = {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_lodash_isString__ = __webpack_require__(163), __WEBPACK_IMPORTED_MODULE_1_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isString__), __WEBPACK_IMPORTED_MODULE_2_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_2_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_3_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_3_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7__DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_8__PureRender__ = __webpack_require__(5), PRESENTATION_ATTRIBUTES = {
alignmentBaseline: __WEBPACK_IMPORTED_MODULE_6_prop_types___default.a.string,
angle: __WEBPACK_IMPORTED_MODULE_6_prop_types___default.a.number,
baselineShift: __WEBPACK_IMPORTED_MODULE_6_prop_types___default.a.string,
@@ -434,7 +434,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, validateWidthHeight = function(el) {
if (!el || !el.props) return !1;
var _el$props = el.props, width = _el$props.width, height = _el$props.height;
- return !(!Object(__WEBPACK_IMPORTED_MODULE_7__DataUtils__.g)(width) || width <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_7__DataUtils__.g)(height) || height <= 0);
+ return !(!Object(__WEBPACK_IMPORTED_MODULE_7__DataUtils__.h)(width) || width <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_7__DataUtils__.h)(height) || height <= 0);
}, isSsr = function() {
return !("undefined" != typeof window && window.document && window.document.createElement && window.setTimeout);
}, SVG_TAGS = [ "a", "altGlyph", "altGlyphDef", "altGlyphItem", "animate", "animateColor", "animateMotion", "animateTransform", "circle", "clipPath", "color-profile", "cursor", "defs", "desc", "ellipse", "feBlend", "feColormatrix", "feComponentTransfer", "feComposite", "feConvolveMatrix", "feDiffuseLighting", "feDisplacementMap", "feDistantLight", "feFlood", "feFuncA", "feFuncB", "feFuncG", "feFuncR", "feGaussianBlur", "feImage", "feMerge", "feMergeNode", "feMorphology", "feOffset", "fePointLight", "feSpecularLighting", "feSpotLight", "feTile", "feTurbulence", "filter", "font", "font-face", "font-face-format", "font-face-name", "font-face-url", "foreignObject", "g", "glyph", "glyphRef", "hkern", "image", "line", "lineGradient", "marker", "mask", "metadata", "missing-glyph", "mpath", "path", "pattern", "polygon", "polyline", "radialGradient", "rect", "script", "set", "stop", "style", "svg", "switch", "symbol", "text", "textPath", "title", "tref", "tspan", "use", "view", "vkern" ], isSvgElement = function(child) {
@@ -501,15 +501,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.b = shallowEqual, __webpack_exports__.a = pureRenderDecorator;
}, function(module, exports, __webpack_require__) {
"use strict";
- exports.__esModule = !0, exports.default = function(obj, keys) {
- var target = {};
- for (var i in obj) keys.indexOf(i) >= 0 || Object.prototype.hasOwnProperty.call(obj, i) && (target[i] = obj[i]);
- return target;
- };
-}, function(module, exports, __webpack_require__) {
- "use strict";
exports.__esModule = !0;
- var _assign = __webpack_require__(205), _assign2 = function(obj) {
+ var _assign = __webpack_require__(204), _assign2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -522,37 +515,46 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return target;
};
}, function(module, exports, __webpack_require__) {
+ "use strict";
+ exports.__esModule = !0, exports.default = function(obj, keys) {
+ var target = {};
+ for (var i in obj) keys.indexOf(i) >= 0 || Object.prototype.hasOwnProperty.call(obj, i) && (target[i] = obj[i]);
+ return target;
+ };
+}, function(module, exports, __webpack_require__) {
function isFunction(value) {
if (!isObject(value)) return !1;
var tag = baseGetTag(value);
return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag;
}
- var baseGetTag = __webpack_require__(42), isObject = __webpack_require__(31), asyncTag = "[object AsyncFunction]", funcTag = "[object Function]", genTag = "[object GeneratorFunction]", proxyTag = "[object Proxy]";
+ var baseGetTag = __webpack_require__(41), isObject = __webpack_require__(31), asyncTag = "[object AsyncFunction]", funcTag = "[object Function]", genTag = "[object GeneratorFunction]", proxyTag = "[object Proxy]";
module.exports = isFunction;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__.d(__webpack_exports__, "i", function() {
+ __webpack_require__.d(__webpack_exports__, "j", function() {
return mathSign;
- }), __webpack_require__.d(__webpack_exports__, "h", function() {
+ }), __webpack_require__.d(__webpack_exports__, "i", function() {
return isPercent;
- }), __webpack_require__.d(__webpack_exports__, "g", function() {
+ }), __webpack_require__.d(__webpack_exports__, "h", function() {
return isNumber;
- }), __webpack_require__.d(__webpack_exports__, "f", function() {
+ }), __webpack_require__.d(__webpack_exports__, "g", function() {
return isNumOrStr;
- }), __webpack_require__.d(__webpack_exports__, "j", function() {
+ }), __webpack_require__.d(__webpack_exports__, "k", function() {
return uniqueId;
- }), __webpack_require__.d(__webpack_exports__, "c", function() {
+ }), __webpack_require__.d(__webpack_exports__, "d", function() {
return getPercentValue;
}), __webpack_require__.d(__webpack_exports__, "b", function() {
return getAnyElementOfObject;
- }), __webpack_require__.d(__webpack_exports__, "d", function() {
- return hasDuplicate;
}), __webpack_require__.d(__webpack_exports__, "e", function() {
+ return hasDuplicate;
+ }), __webpack_require__.d(__webpack_exports__, "f", function() {
return interpolateNumber;
}), __webpack_require__.d(__webpack_exports__, "a", function() {
return findEntryInArray;
+ }), __webpack_require__.d(__webpack_exports__, "c", function() {
+ return getLinearRegression;
});
- var __WEBPACK_IMPORTED_MODULE_0_lodash_get__ = __webpack_require__(165), __WEBPACK_IMPORTED_MODULE_0_lodash_get___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_get__), __WEBPACK_IMPORTED_MODULE_1_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_1_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNumber__ = __webpack_require__(170), __WEBPACK_IMPORTED_MODULE_3_lodash_isNumber___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNumber__), __WEBPACK_IMPORTED_MODULE_4_lodash_isString__ = __webpack_require__(164), __WEBPACK_IMPORTED_MODULE_4_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isString__), mathSign = function(value) {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_get__ = __webpack_require__(164), __WEBPACK_IMPORTED_MODULE_0_lodash_get___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_get__), __WEBPACK_IMPORTED_MODULE_1_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_1_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__ = __webpack_require__(116), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNumber__ = __webpack_require__(169), __WEBPACK_IMPORTED_MODULE_3_lodash_isNumber___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNumber__), __WEBPACK_IMPORTED_MODULE_4_lodash_isString__ = __webpack_require__(163), __WEBPACK_IMPORTED_MODULE_4_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isString__), mathSign = function(value) {
return 0 === value ? 0 : value > 0 ? 1 : -1;
}, isPercent = function(value) {
return __WEBPACK_IMPORTED_MODULE_4_lodash_isString___default()(value) && value.indexOf("%") === value.length - 1;
@@ -594,6 +596,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return ary && ary.length ? ary.find(function(entry) {
return entry && __WEBPACK_IMPORTED_MODULE_0_lodash_get___default()(entry, specifiedKey) === specifiedValue;
}) : null;
+ }, getLinearRegression = function(data) {
+ if (!data || !data.length) return null;
+ for (var len = data.length, xsum = 0, ysum = 0, xysum = 0, xxsum = 0, xmin = 1 / 0, xmax = -1 / 0, i = 0; i < len; i++) xsum += data[i].cx,
+ ysum += data[i].cy, xysum += data[i].cx * data[i].cy, xxsum += data[i].cx * data[i].cx,
+ xmin = Math.min(xmin, data[i].cx), xmax = Math.max(xmax, data[i].cx);
+ var a = len * xxsum != xsum * xsum ? (len * xysum - xsum * ysum) / (len * xxsum - xsum * xsum) : 0;
+ return {
+ xmin: xmin,
+ xmax: xmax,
+ a: a,
+ b: (ysum - a * xsum) / len
+ };
};
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -609,12 +623,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.sheetsManager = void 0;
- var _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _map = __webpack_require__(397), _map2 = _interopRequireDefault(_map), _minSafeInteger = __webpack_require__(413), _minSafeInteger2 = _interopRequireDefault(_minSafeInteger), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _hoistNonReactStatics = __webpack_require__(152), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _getDisplayName = __webpack_require__(226), _getDisplayName2 = _interopRequireDefault(_getDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _contextTypes = __webpack_require__(416), _contextTypes2 = _interopRequireDefault(_contextTypes), _jss = __webpack_require__(228), _ns = __webpack_require__(227), ns = function(obj) {
+ var _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _map = __webpack_require__(401), _map2 = _interopRequireDefault(_map), _minSafeInteger = __webpack_require__(417), _minSafeInteger2 = _interopRequireDefault(_minSafeInteger), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _hoistNonReactStatics = __webpack_require__(151), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _getDisplayName = __webpack_require__(226), _getDisplayName2 = _interopRequireDefault(_getDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _contextTypes = __webpack_require__(420), _contextTypes2 = _interopRequireDefault(_contextTypes), _jss = __webpack_require__(228), _ns = __webpack_require__(227), ns = function(obj) {
if (obj && obj.__esModule) return obj;
var newObj = {};
if (null != obj) for (var key in obj) Object.prototype.hasOwnProperty.call(obj, key) && (newObj[key] = obj[key]);
return newObj.default = obj, newObj;
- }(_ns), _jssPreset = __webpack_require__(439), _jssPreset2 = _interopRequireDefault(_jssPreset), _createMuiTheme = __webpack_require__(151), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _themeListener = __webpack_require__(150), _themeListener2 = _interopRequireDefault(_themeListener), _createGenerateClassName = __webpack_require__(451), _createGenerateClassName2 = _interopRequireDefault(_createGenerateClassName), _getStylesCreator = __webpack_require__(452), _getStylesCreator2 = _interopRequireDefault(_getStylesCreator), jss = (0,
+ }(_ns), _jssPreset = __webpack_require__(442), _jssPreset2 = _interopRequireDefault(_jssPreset), _createMuiTheme = __webpack_require__(150), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _themeListener = __webpack_require__(149), _themeListener2 = _interopRequireDefault(_themeListener), _createGenerateClassName = __webpack_require__(455), _createGenerateClassName2 = _interopRequireDefault(_createGenerateClassName), _getStylesCreator = __webpack_require__(456), _getStylesCreator2 = _interopRequireDefault(_getStylesCreator), jss = (0,
_jss.create)((0, _jssPreset2.default)()), generateClassName = (0, _createGenerateClassName2.default)(), indexCounter = _minSafeInteger2.default, sheetsManager = exports.sheetsManager = new _map2.default(), noopTheme = {}, defaultTheme = void 0, withStyles = function(stylesOrCreator) {
var options = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {};
return function(Component) {
@@ -747,6 +761,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
exports.default = withStyles;
}).call(exports, __webpack_require__(2));
+}, function(module, exports) {
+ var isArray = Array.isArray;
+ module.exports = isArray;
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
@@ -768,13 +785,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), module.exports = warning;
}).call(exports, __webpack_require__(2));
-}, function(module, exports) {
- var isArray = Array.isArray;
- module.exports = isArray;
}, function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = !0;
- var _defineProperty = __webpack_require__(143), _defineProperty2 = function(obj) {
+ var _defineProperty = __webpack_require__(142), _defineProperty2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -812,7 +826,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
Layer.propTypes = propTypes, __webpack_exports__.a = Layer;
}, function(module, exports, __webpack_require__) {
- var global = __webpack_require__(158), core = __webpack_require__(159), hide = __webpack_require__(244), redefine = __webpack_require__(534), ctx = __webpack_require__(537), $export = function(type, name, source) {
+ var global = __webpack_require__(157), core = __webpack_require__(158), hide = __webpack_require__(245), redefine = __webpack_require__(536), ctx = __webpack_require__(539), $export = function(type, name, source) {
var key, own, out, exp, IS_FORCED = type & $export.F, IS_GLOBAL = type & $export.G, IS_STATIC = type & $export.S, IS_PROTO = type & $export.P, IS_BIND = type & $export.B, target = IS_GLOBAL ? global : IS_STATIC ? global[name] || (global[name] = {}) : (global[name] || {}).prototype, exports = IS_GLOBAL ? core : core[name] || (core[name] = {}), expProto = exports.prototype || (exports.prototype = {});
IS_GLOBAL && (source = name);
for (key in source) own = !IS_FORCED && target && void 0 !== target[key], out = (own ? target : source)[key],
@@ -900,37 +914,36 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "y", function() {
return parseDomainOfCategoryAxis;
});
- var __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_sortBy__ = __webpack_require__(281), __WEBPACK_IMPORTED_MODULE_1_lodash_sortBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_sortBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_3_lodash_isString__ = __webpack_require__(164), __WEBPACK_IMPORTED_MODULE_3_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isString__), __WEBPACK_IMPORTED_MODULE_4_lodash_max__ = __webpack_require__(702), __WEBPACK_IMPORTED_MODULE_4_lodash_max___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_max__), __WEBPACK_IMPORTED_MODULE_5_lodash_min__ = __webpack_require__(284), __WEBPACK_IMPORTED_MODULE_5_lodash_min___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_lodash_min__), __WEBPACK_IMPORTED_MODULE_6_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_6_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_8_lodash_get__ = __webpack_require__(165), __WEBPACK_IMPORTED_MODULE_8_lodash_get___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_8_lodash_get__), __WEBPACK_IMPORTED_MODULE_9_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_9_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_10_recharts_scale__ = __webpack_require__(703), __WEBPACK_IMPORTED_MODULE_11_d3_scale__ = (__webpack_require__.n(__WEBPACK_IMPORTED_MODULE_10_recharts_scale__),
- __webpack_require__(287)), __WEBPACK_IMPORTED_MODULE_12_d3_shape__ = __webpack_require__(173), __WEBPACK_IMPORTED_MODULE_13__DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_14__cartesian_ReferenceDot__ = __webpack_require__(320), __WEBPACK_IMPORTED_MODULE_15__cartesian_ReferenceLine__ = __webpack_require__(321), __WEBPACK_IMPORTED_MODULE_16__cartesian_ReferenceArea__ = __webpack_require__(322), __WEBPACK_IMPORTED_MODULE_17__cartesian_ErrorBar__ = __webpack_require__(91), __WEBPACK_IMPORTED_MODULE_18__component_Legend__ = __webpack_require__(171), __WEBPACK_IMPORTED_MODULE_19__ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_sortBy__ = __webpack_require__(284), __WEBPACK_IMPORTED_MODULE_1_lodash_sortBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_sortBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__ = __webpack_require__(116), __WEBPACK_IMPORTED_MODULE_2_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_3_lodash_isString__ = __webpack_require__(163), __WEBPACK_IMPORTED_MODULE_3_lodash_isString___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isString__), __WEBPACK_IMPORTED_MODULE_4_lodash_max__ = __webpack_require__(700), __WEBPACK_IMPORTED_MODULE_4_lodash_max___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_max__), __WEBPACK_IMPORTED_MODULE_5_lodash_min__ = __webpack_require__(289), __WEBPACK_IMPORTED_MODULE_5_lodash_min___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_lodash_min__), __WEBPACK_IMPORTED_MODULE_6_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_6_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_7_lodash_flatMap__ = __webpack_require__(701), __WEBPACK_IMPORTED_MODULE_7_lodash_flatMap___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_lodash_flatMap__), __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_8_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_9_lodash_get__ = __webpack_require__(164), __WEBPACK_IMPORTED_MODULE_9_lodash_get___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_9_lodash_get__), __WEBPACK_IMPORTED_MODULE_10_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_10_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_11_recharts_scale__ = __webpack_require__(703), __WEBPACK_IMPORTED_MODULE_12_d3_scale__ = (__webpack_require__.n(__WEBPACK_IMPORTED_MODULE_11_recharts_scale__),
+ __webpack_require__(292)), __WEBPACK_IMPORTED_MODULE_13_d3_shape__ = __webpack_require__(172), __WEBPACK_IMPORTED_MODULE_14__DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_15__cartesian_ReferenceDot__ = __webpack_require__(325), __WEBPACK_IMPORTED_MODULE_16__cartesian_ReferenceLine__ = __webpack_require__(326), __WEBPACK_IMPORTED_MODULE_17__cartesian_ReferenceArea__ = __webpack_require__(327), __WEBPACK_IMPORTED_MODULE_18__cartesian_ErrorBar__ = __webpack_require__(92), __WEBPACK_IMPORTED_MODULE_19__component_Legend__ = __webpack_require__(170), __WEBPACK_IMPORTED_MODULE_20__ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
}
return target;
}, getValueByDataKey = function(obj, dataKey, defaultValue) {
- return __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(obj) || __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(dataKey) ? defaultValue : Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.f)(dataKey) ? __WEBPACK_IMPORTED_MODULE_8_lodash_get___default()(obj, dataKey, defaultValue) : __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(dataKey) ? dataKey(obj) : defaultValue;
+ return __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(obj) || __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(dataKey) ? defaultValue : Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.g)(dataKey) ? __WEBPACK_IMPORTED_MODULE_9_lodash_get___default()(obj, dataKey, defaultValue) : __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(dataKey) ? dataKey(obj) : defaultValue;
}, getDomainOfDataByKey = function(data, key, type, filterNil) {
- var flattenData = data.reduce(function(result, entry) {
- var value = getValueByDataKey(entry, key);
- return __WEBPACK_IMPORTED_MODULE_6_lodash_isArray___default()(value) ? [].concat(_toConsumableArray(result), _toConsumableArray(value)) : [].concat(_toConsumableArray(result), [ value ]);
- }, []);
+ var flattenData = __WEBPACK_IMPORTED_MODULE_7_lodash_flatMap___default()(data, function(entry) {
+ return getValueByDataKey(entry, key);
+ });
if ("number" === type) {
- var domain = flattenData.filter(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g);
+ var domain = flattenData.filter(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h);
return [ Math.min.apply(null, domain), Math.max.apply(null, domain) ];
}
return (filterNil ? flattenData.filter(function(entry) {
- return !__WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(entry);
+ return !__WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(entry);
}) : flattenData).map(function(entry) {
- return Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.f)(entry) ? entry : "";
+ return Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.g)(entry) ? entry : "";
});
}, calculateActiveTickIndex = function(coordinate, ticks, unsortedTicks, axis) {
var index = -1, len = ticks.length;
if (len > 1) {
if (axis && "angleAxis" === axis.axisType && Math.abs(Math.abs(axis.range[1] - axis.range[0]) - 360) <= 1e-6) for (var range = axis.range, i = 0; i < len; i++) {
var before = i > 0 ? unsortedTicks[i - 1].coordinate : unsortedTicks[len - 1].coordinate, cur = unsortedTicks[i].coordinate, after = i >= len - 1 ? unsortedTicks[0].coordinate : unsortedTicks[i + 1].coordinate, sameDirectionCoord = void 0;
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.i)(cur - before) !== Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.i)(after - cur)) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.j)(cur - before) !== Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.j)(after - cur)) {
var diffInterval = [];
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.i)(after - cur) === Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.i)(range[1] - range[0])) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.j)(after - cur) === Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.j)(range[1] - range[0])) {
sameDirectionCoord = after;
var curInRange = cur + range[1] - range[0];
diffInterval[0] = Math.min(curInRange, (curInRange + before) / 2), diffInterval[1] = Math.max(curInRange, (curInRange + before) / 2);
@@ -971,7 +984,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return result;
}, getLegendProps = function(_ref) {
- var children = _ref.children, formatedGraphicalItems = _ref.formatedGraphicalItems, legendWidth = _ref.legendWidth, legendContent = _ref.legendContent, legendItem = Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.i)(children, __WEBPACK_IMPORTED_MODULE_18__component_Legend__.a);
+ var children = _ref.children, formatedGraphicalItems = _ref.formatedGraphicalItems, legendWidth = _ref.legendWidth, legendContent = _ref.legendContent, legendItem = Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.i)(children, __WEBPACK_IMPORTED_MODULE_19__component_Legend__.a);
if (!legendItem) return null;
var legendData = void 0;
return legendData = legendItem.props && legendItem.props.payload ? legendItem.props && legendItem.props.payload : "children" === legendContent ? (formatedGraphicalItems || []).reduce(function(result, _ref2) {
@@ -994,7 +1007,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
value: name || dataKey,
payload: item.props
};
- }), _extends({}, legendItem.props, __WEBPACK_IMPORTED_MODULE_18__component_Legend__.a.getWithHeight(legendItem, legendWidth), {
+ }), _extends({}, legendItem.props, __WEBPACK_IMPORTED_MODULE_19__component_Legend__.a.getWithHeight(legendItem, legendWidth), {
payload: legendData,
item: legendItem
});
@@ -1003,14 +1016,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (!stackGroups) return {};
for (var result = {}, numericAxisIds = Object.keys(stackGroups), i = 0, len = numericAxisIds.length; i < len; i++) for (var sgs = stackGroups[numericAxisIds[i]].stackGroups, stackIds = Object.keys(sgs), j = 0, sLen = stackIds.length; j < sLen; j++) {
var _sgs$stackIds$j = sgs[stackIds[j]], items = _sgs$stackIds$j.items, cateAxisId = _sgs$stackIds$j.cateAxisId, barItems = items.filter(function(item) {
- return Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.j)(item.type).indexOf("Bar") >= 0;
+ return Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.j)(item.type).indexOf("Bar") >= 0;
});
if (barItems && barItems.length) {
var selfSize = barItems[0].props.barSize, cateId = barItems[0].props[cateAxisId];
result[cateId] || (result[cateId] = []), result[cateId].push({
item: barItems[0],
stackList: barItems.slice(1),
- barSize: __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(selfSize) ? globalSize : selfSize
+ barSize: __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(selfSize) ? globalSize : selfSize
});
}
}
@@ -1018,7 +1031,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, getBarPosition = function(_ref5) {
var barGap = _ref5.barGap, barCategoryGap = _ref5.barCategoryGap, bandSize = _ref5.bandSize, _ref5$sizeList = _ref5.sizeList, sizeList = void 0 === _ref5$sizeList ? [] : _ref5$sizeList, maxBarSize = _ref5.maxBarSize, len = sizeList.length;
if (len < 1) return null;
- var realBarGap = Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.c)(barGap, bandSize, 0, !0), result = void 0;
+ var realBarGap = Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.d)(barGap, bandSize, 0, !0), result = void 0;
if (sizeList[0].barSize === +sizeList[0].barSize) {
var useFull = !1, fullBarSize = bandSize / len, sum = sizeList.reduce(function(res, entry) {
return res + entry.barSize || 0;
@@ -1046,7 +1059,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), newRes;
}, []);
} else {
- var _offset = Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.c)(barCategoryGap, bandSize, 0, !0);
+ var _offset = Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.d)(barCategoryGap, bandSize, 0, !0);
bandSize - 2 * _offset - (len - 1) * realBarGap <= 0 && (realBarGap = 0);
var originalSize = (bandSize - 2 * _offset - (len - 1) * realBarGap) / len;
originalSize > 1 && (originalSize >>= 0);
@@ -1077,14 +1090,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), newOffset = offset;
if (legendProps) {
var box = legendBox || {}, align = legendProps.align, verticalAlign = legendProps.verticalAlign, layout = legendProps.layout;
- ("vertical" === layout || "horizontal" === layout && "center" === verticalAlign) && Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(offset[align]) && (newOffset = _extends({}, offset, _defineProperty({}, align, newOffset[align] + (box.width || 0)))),
- ("horizontal" === layout || "vertical" === layout && "center" === align) && Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(offset[verticalAlign]) && (newOffset = _extends({}, offset, _defineProperty({}, verticalAlign, newOffset[verticalAlign] + (box.height || 0))));
+ ("vertical" === layout || "horizontal" === layout && "center" === verticalAlign) && Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(offset[align]) && (newOffset = _extends({}, offset, _defineProperty({}, align, newOffset[align] + (box.width || 0)))),
+ ("horizontal" === layout || "vertical" === layout && "center" === align) && Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(offset[verticalAlign]) && (newOffset = _extends({}, offset, _defineProperty({}, verticalAlign, newOffset[verticalAlign] + (box.height || 0))));
}
return newOffset;
}, getDomainOfErrorBars = function(data, item, dataKey, axisType) {
- var children = item.props.children, errorBars = Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_17__cartesian_ErrorBar__.a).filter(function(errorBarChild) {
+ var children = item.props.children, errorBars = Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_18__cartesian_ErrorBar__.a).filter(function(errorBarChild) {
var direction = errorBarChild.props.direction;
- return !(!__WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(direction) && !__WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(axisType)) || axisType.indexOf(direction) >= 0;
+ return !(!__WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(direction) && !__WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(axisType)) || axisType.indexOf(direction) >= 0;
});
if (errorBars && errorBars.length) {
var keys = errorBars.map(function(errorBarChild) {
@@ -1103,7 +1116,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var domains = items.map(function(item) {
return getDomainOfErrorBars(data, item, dataKey, axisType);
}).filter(function(entry) {
- return !__WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(entry);
+ return !__WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(entry);
});
return domains && domains.length ? domains.reduce(function(result, entry) {
return [ Math.min(result[0], entry[0]), Math.max(result[1], entry[1]) ];
@@ -1133,7 +1146,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, getTicksOfAxis = function(axis, isGrid, isAll) {
if (!axis) return null;
var scale = axis.scale, duplicateDomain = axis.duplicateDomain, type = axis.type, range = axis.range, offset = (isGrid || isAll) && "category" === type && scale.bandwidth ? scale.bandwidth() / 2 : 0;
- return offset = "angleAxis" === axis.axisType ? 2 * Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.i)(range[0] - range[1]) * offset : offset,
+ return offset = "angleAxis" === axis.axisType ? 2 * Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.j)(range[0] - range[1]) * offset : offset,
isGrid && (axis.ticks || axis.niceTicks) ? (axis.ticks || axis.niceTicks).map(function(entry) {
var scaleContent = duplicateDomain ? duplicateDomain.indexOf(entry) : entry;
return {
@@ -1164,40 +1177,40 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, combineEventHandlers = function(defaultHandler, parentHandler, childHandler) {
var customizedHandler = void 0;
- return __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(childHandler) ? customizedHandler = childHandler : __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(parentHandler) && (customizedHandler = parentHandler),
- __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(defaultHandler) || customizedHandler ? function(arg1, arg2, arg3, arg4) {
- __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(defaultHandler) && defaultHandler(arg1, arg2, arg3, arg4),
- __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(customizedHandler) && customizedHandler(arg1, arg2, arg3, arg4);
+ return __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(childHandler) ? customizedHandler = childHandler : __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(parentHandler) && (customizedHandler = parentHandler),
+ __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(defaultHandler) || customizedHandler ? function(arg1, arg2, arg3, arg4) {
+ __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(defaultHandler) && defaultHandler(arg1, arg2, arg3, arg4),
+ __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(customizedHandler) && customizedHandler(arg1, arg2, arg3, arg4);
} : null;
}, parseScale = function(axis, chartType) {
var scale = axis.scale, type = axis.type, layout = axis.layout, axisType = axis.axisType;
if ("auto" === scale) return "radial" === layout && "radiusAxis" === axisType ? {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scaleBand(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scaleBand(),
realScaleType: "band"
} : "radial" === layout && "angleAxis" === axisType ? {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scaleLinear(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scaleLinear(),
realScaleType: "linear"
} : "category" === type && chartType && (chartType.indexOf("LineChart") >= 0 || chartType.indexOf("AreaChart") >= 0) ? {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scalePoint(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scalePoint(),
realScaleType: "point"
} : "category" === type ? {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scaleBand(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scaleBand(),
realScaleType: "band"
} : {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scaleLinear(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scaleLinear(),
realScaleType: "linear"
};
if (__WEBPACK_IMPORTED_MODULE_3_lodash_isString___default()(scale)) {
var name = "scale" + scale.slice(0, 1).toUpperCase() + scale.slice(1);
return {
- scale: (__WEBPACK_IMPORTED_MODULE_11_d3_scale__[name] || __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scalePoint)(),
- realScaleType: __WEBPACK_IMPORTED_MODULE_11_d3_scale__[name] ? name : "point"
+ scale: (__WEBPACK_IMPORTED_MODULE_12_d3_scale__[name] || __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scalePoint)(),
+ realScaleType: __WEBPACK_IMPORTED_MODULE_12_d3_scale__[name] ? name : "point"
};
}
- return __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(scale) ? {
+ return __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(scale) ? {
scale: scale
} : {
- scale: __WEBPACK_IMPORTED_MODULE_11_d3_scale__.scalePoint(),
+ scale: __WEBPACK_IMPORTED_MODULE_12_d3_scale__.scalePoint(),
realScaleType: "point"
};
}, checkDomainOfScale = function(scale) {
@@ -1211,10 +1224,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var i = 0, len = barPosition.length; i < len; i++) if (barPosition[i].item === child) return barPosition[i].position;
return null;
}, truncateByDomain = function(value, domain) {
- if (!domain || 2 !== domain.length || !Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(domain[0]) || !Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(domain[1])) return value;
+ if (!domain || 2 !== domain.length || !Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(domain[0]) || !Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(domain[1])) return value;
var min = Math.min(domain[0], domain[1]), max = Math.max(domain[0], domain[1]), result = [ value[0], value[1] ];
- return (!Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(value[0]) || value[0] < min) && (result[0] = min),
- (!Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(value[1]) || value[1] > max) && (result[1] = max),
+ return (!Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(value[0]) || value[0] < min) && (result[0] = min),
+ (!Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(value[1]) || value[1] > max) && (result[1] = max),
result[0] > max && (result[0] = max), result[1] < min && (result[1] = min), result;
}, offsetSign = function(series) {
var n = series.length;
@@ -1225,17 +1238,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}, STACK_OFFSET_MAP = {
sign: offsetSign,
- expand: __WEBPACK_IMPORTED_MODULE_12_d3_shape__.o,
- none: __WEBPACK_IMPORTED_MODULE_12_d3_shape__.p,
- silhouette: __WEBPACK_IMPORTED_MODULE_12_d3_shape__.q,
- wiggle: __WEBPACK_IMPORTED_MODULE_12_d3_shape__.r
+ expand: __WEBPACK_IMPORTED_MODULE_13_d3_shape__.o,
+ none: __WEBPACK_IMPORTED_MODULE_13_d3_shape__.p,
+ silhouette: __WEBPACK_IMPORTED_MODULE_13_d3_shape__.q,
+ wiggle: __WEBPACK_IMPORTED_MODULE_13_d3_shape__.r
}, getStackedData = function(data, stackItems, offsetType) {
var dataKeys = stackItems.map(function(item) {
return item.props.dataKey;
});
- return Object(__WEBPACK_IMPORTED_MODULE_12_d3_shape__.n)().keys(dataKeys).value(function(d, key) {
+ return Object(__WEBPACK_IMPORTED_MODULE_13_d3_shape__.n)().keys(dataKeys).value(function(d, key) {
return +getValueByDataKey(d, key, 0);
- }).order(__WEBPACK_IMPORTED_MODULE_12_d3_shape__.s).offset(STACK_OFFSET_MAP[offsetType])(data);
+ }).order(__WEBPACK_IMPORTED_MODULE_13_d3_shape__.s).offset(STACK_OFFSET_MAP[offsetType])(data);
}, getStackGroupsByAxisId = function(data, _items, numericAxisId, cateAxisId, offsetType, reverseStackOrder) {
if (!data) return null;
var items = reverseStackOrder ? _items.reverse() : _items, stackGroups = items.reduce(function(result, item) {
@@ -1245,14 +1258,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
hasStack: !1,
stackGroups: {}
};
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.f)(stackId)) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.g)(stackId)) {
var childGroup = parentGroup.stackGroups[stackId] || {
numericAxisId: numericAxisId,
cateAxisId: cateAxisId,
items: []
};
childGroup.items.push(item), parentGroup.hasStack = !0, parentGroup.stackGroups[stackId] = childGroup;
- } else parentGroup.stackGroups[Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.j)("_stackId_")] = {
+ } else parentGroup.stackGroups[Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.k)("_stackId_")] = {
numericAxisId: numericAxisId,
cateAxisId: cateAxisId,
items: [ item ]
@@ -1277,7 +1290,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var realScaleType = opts.realScaleType, type = opts.type, tickCount = opts.tickCount, originalDomain = opts.originalDomain, allowDecimals = opts.allowDecimals, scaleType = realScaleType || opts.scale;
if ("auto" !== scaleType && "linear" !== scaleType) return null;
if (tickCount && "number" === type && originalDomain && ("auto" === originalDomain[0] || "auto" === originalDomain[1])) {
- var domain = scale.domain(), tickValues = Object(__WEBPACK_IMPORTED_MODULE_10_recharts_scale__.getNiceTickValues)(domain, tickCount, allowDecimals);
+ var domain = scale.domain(), tickValues = Object(__WEBPACK_IMPORTED_MODULE_11_recharts_scale__.getNiceTickValues)(domain, tickCount, allowDecimals);
return scale.domain(calculateDomainOfTicks(tickValues, type)), {
niceTicks: tickValues
};
@@ -1285,26 +1298,26 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (tickCount && "number" === type) {
var _domain = scale.domain();
return {
- niceTicks: Object(__WEBPACK_IMPORTED_MODULE_10_recharts_scale__.getTickValuesFixedDomain)(_domain, tickCount, allowDecimals)
+ niceTicks: Object(__WEBPACK_IMPORTED_MODULE_11_recharts_scale__.getTickValuesFixedDomain)(_domain, tickCount, allowDecimals)
};
}
return null;
}, getCateCoordinateOfLine = function(_ref6) {
var axis = _ref6.axis, ticks = _ref6.ticks, bandSize = _ref6.bandSize, entry = _ref6.entry, index = _ref6.index;
if ("category" === axis.type) {
- if (!axis.allowDuplicatedCategory && axis.dataKey && !__WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(entry[axis.dataKey])) {
- var matchedTick = Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.a)(ticks, "value", entry[axis.dataKey]);
+ if (!axis.allowDuplicatedCategory && axis.dataKey && !__WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(entry[axis.dataKey])) {
+ var matchedTick = Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.a)(ticks, "value", entry[axis.dataKey]);
if (matchedTick) return matchedTick.coordinate + bandSize / 2;
}
return ticks[index] ? ticks[index].coordinate + bandSize / 2 : null;
}
var value = getValueByDataKey(entry, axis.dataKey);
- return __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(value) ? null : axis.scale(value);
+ return __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(value) ? null : axis.scale(value);
}, getCateCoordinateOfBar = function(_ref7) {
var axis = _ref7.axis, ticks = _ref7.ticks, offset = _ref7.offset, bandSize = _ref7.bandSize, entry = _ref7.entry, index = _ref7.index;
if ("category" === axis.type) return ticks[index] ? ticks[index].coordinate + offset : null;
var value = getValueByDataKey(entry, axis.dataKey, axis.domain[index]);
- return __WEBPACK_IMPORTED_MODULE_9_lodash_isNil___default()(value) ? null : axis.scale(value) - bandSize / 2 + offset;
+ return __WEBPACK_IMPORTED_MODULE_10_lodash_isNil___default()(value) ? null : axis.scale(value) - bandSize / 2 + offset;
}, getBaseValueOfBar = function(_ref8) {
var numericAxis = _ref8.numericAxis, domain = numericAxis.scale.domain();
if ("number" === numericAxis.type) {
@@ -1313,9 +1326,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return domain[0];
}, detectReferenceElementsDomain = function(children, domain, axisId, axisType, specifiedTicks) {
- var lines = Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_15__cartesian_ReferenceLine__.a), dots = Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_14__cartesian_ReferenceDot__.a), elements = lines.concat(dots), areas = Object(__WEBPACK_IMPORTED_MODULE_19__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_16__cartesian_ReferenceArea__.a), idKey = axisType + "Id", valueKey = axisType[0], finalDomain = domain;
+ var lines = Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_16__cartesian_ReferenceLine__.a), dots = Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_15__cartesian_ReferenceDot__.a), elements = lines.concat(dots), areas = Object(__WEBPACK_IMPORTED_MODULE_20__ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_17__cartesian_ReferenceArea__.a), idKey = axisType + "Id", valueKey = axisType[0], finalDomain = domain;
if (elements.length && (finalDomain = elements.reduce(function(result, el) {
- if (el.props[idKey] === axisId && el.props.alwaysShow && Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(el.props[valueKey])) {
+ if (el.props[idKey] === axisId && el.props.alwaysShow && Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(el.props[valueKey])) {
var value = el.props[valueKey];
return [ Math.min(result[0], value), Math.max(result[1], value) ];
}
@@ -1323,7 +1336,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, finalDomain)), areas.length) {
var key1 = valueKey + "1", key2 = valueKey + "2";
finalDomain = areas.reduce(function(result, el) {
- if (el.props[idKey] === axisId && el.props.alwaysShow && Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(el.props[key1]) && Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(el.props[key2])) {
+ if (el.props[idKey] === axisId && el.props.alwaysShow && Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(el.props[key1]) && Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(el.props[key2])) {
var value1 = el.props[key1], value2 = el.props[key2];
return [ Math.min(result[0], value1, value2), Math.max(result[1], value1, value2) ];
}
@@ -1331,11 +1344,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, finalDomain);
}
return specifiedTicks && specifiedTicks.length && (finalDomain = specifiedTicks.reduce(function(result, tick) {
- return Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(tick) ? [ Math.min(result[0], tick), Math.max(result[1], tick) ] : result;
+ return Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(tick) ? [ Math.min(result[0], tick), Math.max(result[1], tick) ] : result;
}, finalDomain)), finalDomain;
}, getStackedDataOfItem = function(item, stackGroups) {
var stackId = item.props.stackId;
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.f)(stackId)) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.g)(stackId)) {
var group = stackGroups[stackId];
if (group && group.items.length) {
for (var itemIndex = -1, i = 0, len = group.items.length; i < len; i++) if (group.items[i] === item) {
@@ -1348,7 +1361,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return null;
}, getDomainOfSingle = function(data) {
return data.reduce(function(result, entry) {
- return [ Math.min.apply(null, entry.concat([ result[0] ]).filter(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)), Math.max.apply(null, entry.concat([ result[1] ]).filter(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)) ];
+ return [ Math.min.apply(null, entry.concat([ result[0] ]).filter(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)), Math.max.apply(null, entry.concat([ result[1] ]).filter(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)) ];
}, [ 1 / 0, -1 / 0 ]);
}, getDomainOfStackGroups = function(stackGroups, startIndex, endIndex) {
return Object.keys(stackGroups).reduce(function(result, stackId) {
@@ -1363,14 +1376,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, MIN_VALUE_REG = /^dataMin[\s]*-[\s]*([0-9]+([.]{1}[0-9]+){0,1})$/, MAX_VALUE_REG = /^dataMax[\s]*\+[\s]*([0-9]+([.]{1}[0-9]+){0,1})$/, parseSpecifiedDomain = function(specifiedDomain, dataDomain, allowDataOverflow) {
if (!__WEBPACK_IMPORTED_MODULE_6_lodash_isArray___default()(specifiedDomain)) return dataDomain;
var domain = [];
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(specifiedDomain[0])) domain[0] = allowDataOverflow ? specifiedDomain[0] : Math.min(specifiedDomain[0], dataDomain[0]); else if (MIN_VALUE_REG.test(specifiedDomain[0])) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(specifiedDomain[0])) domain[0] = allowDataOverflow ? specifiedDomain[0] : Math.min(specifiedDomain[0], dataDomain[0]); else if (MIN_VALUE_REG.test(specifiedDomain[0])) {
var value = +MIN_VALUE_REG.exec(specifiedDomain[0])[1];
domain[0] = dataDomain[0] - value;
- } else __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(specifiedDomain[0]) ? domain[0] = specifiedDomain[0](dataDomain[0]) : domain[0] = dataDomain[0];
- if (Object(__WEBPACK_IMPORTED_MODULE_13__DataUtils__.g)(specifiedDomain[1])) domain[1] = allowDataOverflow ? specifiedDomain[1] : Math.max(specifiedDomain[1], dataDomain[1]); else if (MAX_VALUE_REG.test(specifiedDomain[1])) {
+ } else __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(specifiedDomain[0]) ? domain[0] = specifiedDomain[0](dataDomain[0]) : domain[0] = dataDomain[0];
+ if (Object(__WEBPACK_IMPORTED_MODULE_14__DataUtils__.h)(specifiedDomain[1])) domain[1] = allowDataOverflow ? specifiedDomain[1] : Math.max(specifiedDomain[1], dataDomain[1]); else if (MAX_VALUE_REG.test(specifiedDomain[1])) {
var _value = +MAX_VALUE_REG.exec(specifiedDomain[1])[1];
domain[1] = dataDomain[1] + _value;
- } else __WEBPACK_IMPORTED_MODULE_7_lodash_isFunction___default()(specifiedDomain[1]) ? domain[1] = specifiedDomain[1](dataDomain[1]) : domain[1] = dataDomain[1];
+ } else __WEBPACK_IMPORTED_MODULE_8_lodash_isFunction___default()(specifiedDomain[1]) ? domain[1] = specifiedDomain[1](dataDomain[1]) : domain[1] = dataDomain[1];
return domain;
}, validateCoordinateInRange = function(coordinate, scale) {
if (!scale) return !1;
@@ -1389,7 +1402,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return 0;
}, parseDomainOfCategoryAxis = function(specifiedDomain, calculatedDomain, axisChild) {
- return specifiedDomain && specifiedDomain.length ? __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default()(specifiedDomain, __WEBPACK_IMPORTED_MODULE_8_lodash_get___default()(axisChild, "type.defaultProps.domain")) ? calculatedDomain : specifiedDomain : calculatedDomain;
+ return specifiedDomain && specifiedDomain.length ? __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default()(specifiedDomain, __WEBPACK_IMPORTED_MODULE_9_lodash_get___default()(axisChild, "type.defaultProps.domain")) ? calculatedDomain : specifiedDomain : calculatedDomain;
};
}, function(module, exports) {
var core = module.exports = {
@@ -1436,7 +1449,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = newInterval;
var t0 = new Date(), t1 = new Date();
}, function(module, exports, __webpack_require__) {
- var global = __webpack_require__(24), core = __webpack_require__(17), ctx = __webpack_require__(47), hide = __webpack_require__(40), $export = function(type, name, source) {
+ var global = __webpack_require__(24), core = __webpack_require__(17), ctx = __webpack_require__(46), hide = __webpack_require__(40), $export = function(type, name, source) {
var key, own, out, IS_FORCED = type & $export.F, IS_GLOBAL = type & $export.G, IS_STATIC = type & $export.S, IS_PROTO = type & $export.P, IS_BIND = type & $export.B, IS_WRAP = type & $export.W, exports = IS_GLOBAL ? core : core[name] || (core[name] = {}), expProto = exports.prototype, target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {}).prototype;
IS_GLOBAL && (source = name);
for (key in source) (own = !IS_FORCED && target && void 0 !== target[key]) && key in exports || (out = own ? target[key] : source[key],
@@ -1469,12 +1482,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
module.exports = isNil;
}, function(module, exports, __webpack_require__) {
- var store = __webpack_require__(140)("wks"), uid = __webpack_require__(98), Symbol = __webpack_require__(24).Symbol, USE_SYMBOL = "function" == typeof Symbol;
+ var store = __webpack_require__(139)("wks"), uid = __webpack_require__(99), Symbol = __webpack_require__(24).Symbol, USE_SYMBOL = "function" == typeof Symbol;
(module.exports = function(name) {
return store[name] || (store[name] = USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)("Symbol." + name));
}).store = store;
}, function(module, exports, __webpack_require__) {
- var anObject = __webpack_require__(48), IE8_DOM_DEFINE = __webpack_require__(207), toPrimitive = __webpack_require__(134), dP = Object.defineProperty;
+ var anObject = __webpack_require__(47), IE8_DOM_DEFINE = __webpack_require__(206), toPrimitive = __webpack_require__(133), dP = Object.defineProperty;
exports.f = __webpack_require__(25) ? Object.defineProperty : function(O, P, Attributes) {
if (anObject(O), P = toPrimitive(P, !0), anObject(Attributes), IE8_DOM_DEFINE) try {
return dP(O, P, Attributes);
@@ -1525,7 +1538,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
return Math.min(Math.abs(width - (offset.left || 0) - (offset.right || 0)), Math.abs(height - (offset.top || 0) - (offset.bottom || 0))) / 2;
}, formatAxisMap = function(props, axisMap, offset, axisType, chartName) {
- var width = props.width, height = props.height, startAngle = props.startAngle, endAngle = props.endAngle, cx = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.c)(props.cx, width, width / 2), cy = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.c)(props.cy, height, height / 2), maxRadius = getMaxRadius(width, height, offset), innerRadius = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.c)(props.innerRadius, maxRadius, 0), outerRadius = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.c)(props.outerRadius, maxRadius, .8 * maxRadius);
+ var width = props.width, height = props.height, startAngle = props.startAngle, endAngle = props.endAngle, cx = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.d)(props.cx, width, width / 2), cy = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.d)(props.cy, height, height / 2), maxRadius = getMaxRadius(width, height, offset), innerRadius = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.d)(props.innerRadius, maxRadius, 0), outerRadius = Object(__WEBPACK_IMPORTED_MODULE_1__DataUtils__.d)(props.outerRadius, maxRadius, .8 * maxRadius);
return Object.keys(axisMap).reduce(function(result, id) {
var axis = axisMap[id], domain = axis.domain, reversed = axis.reversed, range = void 0;
__WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default()(axis.range) ? ("angleAxis" === axisType ? range = [ startAngle, endAngle ] : "radiusAxis" === axisType && (range = [ innerRadius, outerRadius ]),
@@ -1604,7 +1617,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var global = module.exports = "undefined" != typeof window && window.Math == Math ? window : "undefined" != typeof self && self.Math == Math ? self : Function("return this")();
"number" == typeof __g && (__g = global);
}, function(module, exports, __webpack_require__) {
- module.exports = !__webpack_require__(49)(function() {
+ module.exports = !__webpack_require__(48)(function() {
return 7 != Object.defineProperty({}, "a", {
get: function() {
return 7;
@@ -1613,7 +1626,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(350),
+ default: __webpack_require__(355),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
@@ -1624,7 +1637,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = !0;
- var _defineProperty = __webpack_require__(143), _defineProperty2 = function(obj) {
+ var _defineProperty = __webpack_require__(142), _defineProperty2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -1645,7 +1658,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = !0;
- var _typeof2 = __webpack_require__(100), _typeof3 = function(obj) {
+ var _typeof2 = __webpack_require__(101), _typeof3 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -1662,7 +1675,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
exports.__esModule = !0;
- var _setPrototypeOf = __webpack_require__(367), _setPrototypeOf2 = _interopRequireDefault(_setPrototypeOf), _create = __webpack_require__(371), _create2 = _interopRequireDefault(_create), _typeof2 = __webpack_require__(100), _typeof3 = _interopRequireDefault(_typeof2);
+ var _setPrototypeOf = __webpack_require__(372), _setPrototypeOf2 = _interopRequireDefault(_setPrototypeOf), _create = __webpack_require__(376), _create2 = _interopRequireDefault(_create), _typeof2 = __webpack_require__(101), _typeof3 = _interopRequireDefault(_typeof2);
exports.default = function(subClass, superClass) {
if ("function" != typeof superClass && null !== superClass) throw new TypeError("Super expression must either be null or a function, not " + (void 0 === superClass ? "undefined" : (0,
_typeof3.default)(superClass)));
@@ -1682,7 +1695,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
module.exports = isObject;
}, function(module, exports, __webpack_require__) {
- var freeGlobal = __webpack_require__(242), freeSelf = "object" == typeof self && self && self.Object === Object && self, root = freeGlobal || freeSelf || Function("return this")();
+ var freeGlobal = __webpack_require__(243), freeSelf = "object" == typeof self && self && self.Object === Object && self, root = freeGlobal || freeSelf || Function("return this")();
module.exports = root;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -1694,7 +1707,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.translateStyle = exports.AnimateGroup = exports.configBezier = exports.configSpring = void 0;
- var _Animate = __webpack_require__(263), _Animate2 = _interopRequireDefault(_Animate), _easing = __webpack_require__(275), _util = __webpack_require__(123), _AnimateGroup = __webpack_require__(679), _AnimateGroup2 = _interopRequireDefault(_AnimateGroup);
+ var _Animate = __webpack_require__(264), _Animate2 = _interopRequireDefault(_Animate), _easing = __webpack_require__(277), _util = __webpack_require__(122), _AnimateGroup = __webpack_require__(681), _AnimateGroup2 = _interopRequireDefault(_AnimateGroup);
exports.configSpring = _easing.configSpring, exports.configBezier = _easing.configBezier,
exports.AnimateGroup = _AnimateGroup2.default, exports.translateStyle = _util.translateStyle,
exports.default = _Animate2.default;
@@ -1702,7 +1715,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function isEqual(value, other) {
return baseIsEqual(value, other);
}
- var baseIsEqual = __webpack_require__(178);
+ var baseIsEqual = __webpack_require__(177);
module.exports = isEqual;
}, function(module, exports) {
module.exports = function(it) {
@@ -1715,7 +1728,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = isObjectLike;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__src_bisect__ = __webpack_require__(288);
+ var __WEBPACK_IMPORTED_MODULE_0__src_bisect__ = __webpack_require__(293);
__webpack_require__.d(__webpack_exports__, "b", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_bisect__.a;
});
@@ -1723,24 +1736,24 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_1__src_ascending__.a;
});
- var __WEBPACK_IMPORTED_MODULE_2__src_bisector__ = __webpack_require__(289);
+ var __WEBPACK_IMPORTED_MODULE_2__src_bisector__ = __webpack_require__(294);
__webpack_require__.d(__webpack_exports__, "c", function() {
return __WEBPACK_IMPORTED_MODULE_2__src_bisector__.a;
});
var __WEBPACK_IMPORTED_MODULE_18__src_quantile__ = (__webpack_require__(707), __webpack_require__(708),
- __webpack_require__(291), __webpack_require__(293), __webpack_require__(709), __webpack_require__(712),
- __webpack_require__(713), __webpack_require__(297), __webpack_require__(714), __webpack_require__(715),
- __webpack_require__(716), __webpack_require__(717), __webpack_require__(298), __webpack_require__(290),
- __webpack_require__(718), __webpack_require__(185));
+ __webpack_require__(296), __webpack_require__(298), __webpack_require__(709), __webpack_require__(712),
+ __webpack_require__(713), __webpack_require__(302), __webpack_require__(714), __webpack_require__(715),
+ __webpack_require__(716), __webpack_require__(717), __webpack_require__(303), __webpack_require__(295),
+ __webpack_require__(718), __webpack_require__(184));
__webpack_require__.d(__webpack_exports__, "d", function() {
return __WEBPACK_IMPORTED_MODULE_18__src_quantile__.a;
});
- var __WEBPACK_IMPORTED_MODULE_19__src_range__ = __webpack_require__(295);
+ var __WEBPACK_IMPORTED_MODULE_19__src_range__ = __webpack_require__(300);
__webpack_require__.d(__webpack_exports__, "e", function() {
return __WEBPACK_IMPORTED_MODULE_19__src_range__.a;
});
var __WEBPACK_IMPORTED_MODULE_23__src_ticks__ = (__webpack_require__(719), __webpack_require__(720),
- __webpack_require__(721), __webpack_require__(296));
+ __webpack_require__(721), __webpack_require__(301));
__webpack_require__.d(__webpack_exports__, "h", function() {
return __WEBPACK_IMPORTED_MODULE_23__src_ticks__.a;
}), __webpack_require__.d(__webpack_exports__, "f", function() {
@@ -1748,7 +1761,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "g", function() {
return __WEBPACK_IMPORTED_MODULE_23__src_ticks__.c;
});
- __webpack_require__(299), __webpack_require__(292), __webpack_require__(722);
+ __webpack_require__(304), __webpack_require__(297), __webpack_require__(722);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.d(__webpack_exports__, "d", function() {
@@ -1786,15 +1799,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return object[key] = value, object;
};
}, function(module, exports, __webpack_require__) {
- module.exports = {
- default: __webpack_require__(378),
- __esModule: !0
- };
-}, function(module, exports, __webpack_require__) {
function baseGetTag(value) {
return null == value ? void 0 === value ? undefinedTag : nullTag : symToStringTag && symToStringTag in Object(value) ? getRawTag(value) : objectToString(value);
}
- var Symbol = __webpack_require__(77), getRawTag = __webpack_require__(520), objectToString = __webpack_require__(521), nullTag = "[object Null]", undefinedTag = "[object Undefined]", symToStringTag = Symbol ? Symbol.toStringTag : void 0;
+ var Symbol = __webpack_require__(78), getRawTag = __webpack_require__(522), objectToString = __webpack_require__(523), nullTag = "[object Null]", undefinedTag = "[object Undefined]", symToStringTag = Symbol ? Symbol.toStringTag : void 0;
module.exports = baseGetTag;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -1820,7 +1828,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
className: __WEBPACK_IMPORTED_MODULE_5_classnames___default()("recharts-label", className)
}, attrs, positionAttrs), label);
}
- var __WEBPACK_IMPORTED_MODULE_0_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -1853,13 +1861,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var value = props.value, formatter = props.formatter, label = __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default()(props.children) ? value : props.children;
return __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default()(formatter) ? formatter(label) : label;
}, getDeltaAngle = function(startAngle, endAngle) {
- return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.i)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
+ return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.j)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
}, renderRadialLabel = function(labelProps, label, attrs) {
var position = labelProps.position, viewBox = labelProps.viewBox, offset = labelProps.offset, className = labelProps.className, cx = viewBox.cx, cy = viewBox.cy, innerRadius = viewBox.innerRadius, outerRadius = viewBox.outerRadius, startAngle = viewBox.startAngle, endAngle = viewBox.endAngle, clockWise = viewBox.clockWise, radius = (innerRadius + outerRadius) / 2, deltaAngle = getDeltaAngle(startAngle, endAngle), sign = deltaAngle >= 0 ? 1 : -1, labelAngle = void 0, direction = void 0;
"insideStart" === position ? (labelAngle = startAngle + sign * offset, direction = clockWise) : "insideEnd" === position ? (labelAngle = endAngle - sign * offset,
direction = !clockWise) : "end" === position && (labelAngle = endAngle + sign * offset,
direction = clockWise), direction = deltaAngle <= 0 ? direction : !direction;
- var startPoint = Object(__WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__.e)(cx, cy, radius, labelAngle), endPoint = Object(__WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__.e)(cx, cy, radius, labelAngle + 359 * (direction ? 1 : -1)), path = "M" + startPoint.x + "," + startPoint.y + "\n A" + radius + "," + radius + ",0,1," + (direction ? 0 : 1) + ",\n " + endPoint.x + "," + endPoint.y, id = __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default()(labelProps.id) ? Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.j)("recharts-radial-line-") : labelProps.id;
+ var startPoint = Object(__WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__.e)(cx, cy, radius, labelAngle), endPoint = Object(__WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__.e)(cx, cy, radius, labelAngle + 359 * (direction ? 1 : -1)), path = "M" + startPoint.x + "," + startPoint.y + "\n A" + radius + "," + radius + ",0,1," + (direction ? 0 : 1) + ",\n " + endPoint.x + "," + endPoint.y, id = __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default()(labelProps.id) ? Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.k)("recharts-radial-line-") : labelProps.id;
return __WEBPACK_IMPORTED_MODULE_3_react___default.a.createElement("text", _extends({}, attrs, {
dominantBaseline: "central",
className: __WEBPACK_IMPORTED_MODULE_5_classnames___default()("recharts-radial-bar-label", className)
@@ -1955,9 +1963,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
y: y + height - sign * offset,
textAnchor: "end",
verticalAnchor: "end"
- } : __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default()(position) && (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(position.x) || Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(position.x)) && (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(position.y) || Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(position.y)) ? {
- x: x + Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.c)(position.x, width),
- y: y + Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.c)(position.y, height),
+ } : __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default()(position) && (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(position.x) || Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.i)(position.x)) && (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(position.y) || Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.i)(position.y)) ? {
+ x: x + Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.d)(position.x, width),
+ y: y + Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.d)(position.y, height),
textAnchor: "end",
verticalAnchor: "end"
} : {
@@ -1967,31 +1975,31 @@ var _bundleJs = []byte((((((((((`!function(modules) {
verticalAnchor: "middle"
};
}, isPolar = function(viewBox) {
- return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(viewBox.cx);
+ return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(viewBox.cx);
};
Label.displayName = "Label", Label.defaultProps = defaultProps, Label.propTypes = propTypes;
var parseViewBox = function(props) {
var cx = props.cx, cy = props.cy, angle = props.angle, startAngle = props.startAngle, endAngle = props.endAngle, r = props.r, radius = props.radius, innerRadius = props.innerRadius, outerRadius = props.outerRadius, x = props.x, y = props.y, top = props.top, left = props.left, width = props.width, height = props.height, clockWise = props.clockWise;
- if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(width) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(height)) {
- if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(x) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(y)) return {
+ if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(width) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(height)) {
+ if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(x) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(y)) return {
x: x,
y: y,
width: width,
height: height
};
- if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(top) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(left)) return {
+ if (Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(top) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(left)) return {
x: top,
y: left,
width: width,
height: height
};
}
- return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(x) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(y) ? {
+ return Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(x) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(y) ? {
x: x,
y: y,
width: 0,
height: 0
- } : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(cx) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(cy) ? {
+ } : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(cx) && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(cy) ? {
cx: cx,
cy: cy,
startAngle: startAngle || angle || 0,
@@ -2004,7 +2012,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return label ? !0 === label ? __WEBPACK_IMPORTED_MODULE_3_react___default.a.createElement(Label, {
key: "label-implicit",
viewBox: viewBox
- }) : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(label) ? __WEBPACK_IMPORTED_MODULE_3_react___default.a.createElement(Label, {
+ }) : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(label) ? __WEBPACK_IMPORTED_MODULE_3_react___default.a.createElement(Label, {
key: "label-implicit",
viewBox: viewBox,
value: label
@@ -2032,7 +2040,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = Label;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__src_color__ = __webpack_require__(188);
+ var __WEBPACK_IMPORTED_MODULE_0__src_color__ = __webpack_require__(187);
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_color__.e;
}), __webpack_require__.d(__webpack_exports__, "f", function() {
@@ -2082,7 +2090,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}));
})) : null;
}
- var __WEBPACK_IMPORTED_MODULE_0_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_lodash_last__ = __webpack_require__(781), __WEBPACK_IMPORTED_MODULE_3_lodash_last___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_last__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7__Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_isObject__ = __webpack_require__(31), __WEBPACK_IMPORTED_MODULE_0_lodash_isObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isObject__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_lodash_last__ = __webpack_require__(781), __WEBPACK_IMPORTED_MODULE_3_lodash_last___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_last__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7__Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -2166,7 +2174,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var __WEBPACK_IMPORTED_MODULE_0_lodash_sortBy__ = __webpack_require__(281), __WEBPACK_IMPORTED_MODULE_0_lodash_sortBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_sortBy__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_range__ = __webpack_require__(329), __WEBPACK_IMPORTED_MODULE_2_lodash_range___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_range__), __WEBPACK_IMPORTED_MODULE_3_lodash_throttle__ = __webpack_require__(790), __WEBPACK_IMPORTED_MODULE_3_lodash_throttle___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_throttle__), __WEBPACK_IMPORTED_MODULE_4_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_4_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8__container_Surface__ = __webpack_require__(78), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__component_Tooltip__ = __webpack_require__(122), __WEBPACK_IMPORTED_MODULE_11__component_Legend__ = __webpack_require__(171), __WEBPACK_IMPORTED_MODULE_12__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_13__shape_Cross__ = __webpack_require__(323), __WEBPACK_IMPORTED_MODULE_14__shape_Sector__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_15__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_16__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_17__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_18__cartesian_CartesianAxis__ = __webpack_require__(330), __WEBPACK_IMPORTED_MODULE_19__cartesian_Brush__ = __webpack_require__(328), __WEBPACK_IMPORTED_MODULE_20__util_DOMUtils__ = __webpack_require__(184), __WEBPACK_IMPORTED_MODULE_21__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_22__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_23__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_24__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_25__util_Events__ = __webpack_require__(791), _extends = Object.assign || function(target) {
+ var __WEBPACK_IMPORTED_MODULE_0_lodash_sortBy__ = __webpack_require__(284), __WEBPACK_IMPORTED_MODULE_0_lodash_sortBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_sortBy__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_range__ = __webpack_require__(334), __WEBPACK_IMPORTED_MODULE_2_lodash_range___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_range__), __WEBPACK_IMPORTED_MODULE_3_lodash_throttle__ = __webpack_require__(790), __WEBPACK_IMPORTED_MODULE_3_lodash_throttle___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_throttle__), __WEBPACK_IMPORTED_MODULE_4_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_4_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8__container_Surface__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__component_Tooltip__ = __webpack_require__(121), __WEBPACK_IMPORTED_MODULE_11__component_Legend__ = __webpack_require__(170), __WEBPACK_IMPORTED_MODULE_12__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_13__shape_Cross__ = __webpack_require__(328), __WEBPACK_IMPORTED_MODULE_14__shape_Sector__ = __webpack_require__(127), __WEBPACK_IMPORTED_MODULE_15__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_16__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_17__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_18__cartesian_CartesianAxis__ = __webpack_require__(335), __WEBPACK_IMPORTED_MODULE_19__cartesian_Brush__ = __webpack_require__(333), __WEBPACK_IMPORTED_MODULE_20__util_DOMUtils__ = __webpack_require__(183), __WEBPACK_IMPORTED_MODULE_21__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_22__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_23__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_24__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_25__util_Events__ = __webpack_require__(791), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -2204,7 +2212,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
props: props
}, defaultState, {
updateId: 0
- }))), _this.uniqueChartId = __WEBPACK_IMPORTED_MODULE_4_lodash_isNil___default()(props.id) ? Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.j)("recharts") : props.id,
+ }))), _this.uniqueChartId = __WEBPACK_IMPORTED_MODULE_4_lodash_isNil___default()(props.id) ? Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.k)("recharts") : props.id,
props.throttleDelay && (_this.triggeredAfterMouseMove = __WEBPACK_IMPORTED_MODULE_3_lodash_throttle___default()(_this.triggeredAfterMouseMove, props.throttleDelay)),
_this;
}
@@ -2289,7 +2297,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (dataKey) {
if (domain = Object(__WEBPACK_IMPORTED_MODULE_22__util_ChartUtils__.n)(displayedData, dataKey, type),
"category" === type && isCategorial) {
- var duplicate = Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.d)(domain);
+ var duplicate = Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.e)(domain);
allowDuplicatedCategory && duplicate ? (duplicateDomain = domain, domain = __WEBPACK_IMPORTED_MODULE_2_lodash_range___default()(0, len)) : allowDuplicatedCategory || (domain = Object(__WEBPACK_IMPORTED_MODULE_22__util_ChartUtils__.y)(child.props.domain, domain, child).reduce(function(finalDomain, entry) {
return finalDomain.indexOf(entry) >= 0 ? finalDomain : [].concat(_toConsumableArray(finalDomain), [ entry ]);
}, []));
@@ -2908,7 +2916,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (itemsData && itemsData.length > 0) return itemsData;
if (item && item.props && item.props.data && item.props.data.length > 0) return item.props.data;
var data = props.data;
- return data && data.length && Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(dataStartIndex) && Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(dataEndIndex) ? data.slice(dataStartIndex, dataEndIndex + 1) : [];
+ return data && data.length && Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(dataStartIndex) && Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(dataEndIndex) ? data.slice(dataStartIndex, dataEndIndex + 1) : [];
}, _initialiseProps = function() {
var _this7 = this;
this.handleLegendBBoxUpdate = function(box) {
@@ -3110,10 +3118,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var _state12 = _this7.state, xAxisMap = _state12.xAxisMap, yAxisMap = _state12.yAxisMap, offset = _state12.offset, _props6 = _this7.props, width = _props6.width, height = _props6.height, xAxis = Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.b)(xAxisMap), yAxis = Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.b)(yAxisMap), props = element.props || {};
return Object(__WEBPACK_IMPORTED_MODULE_5_react__.cloneElement)(element, {
key: element.key || "grid",
- x: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(props.x) ? props.x : offset.left,
- y: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(props.y) ? props.y : offset.top,
- width: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(props.width) ? props.width : offset.width,
- height: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(props.height) ? props.height : offset.height,
+ x: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(props.x) ? props.x : offset.left,
+ y: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(props.y) ? props.y : offset.top,
+ width: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(props.width) ? props.width : offset.width,
+ height: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(props.height) ? props.height : offset.height,
xAxis: xAxis,
yAxis: yAxis,
offset: offset,
@@ -3143,9 +3151,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: element.key || "_recharts-brush",
onChange: Object(__WEBPACK_IMPORTED_MODULE_22__util_ChartUtils__.d)(_this7.handleBrushChange, null, element.props.onChange),
data: data,
- x: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(element.props.x) ? element.props.x : offset.left,
- y: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(element.props.y) ? element.props.y : offset.top + offset.height + offset.brushBottom - (margin.bottom || 0),
- width: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.g)(element.props.width) ? element.props.width : offset.width,
+ x: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(element.props.x) ? element.props.x : offset.left,
+ y: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(element.props.y) ? element.props.y : offset.top + offset.height + offset.brushBottom - (margin.bottom || 0),
+ width: Object(__WEBPACK_IMPORTED_MODULE_21__util_DataUtils__.h)(element.props.width) ? element.props.width : offset.width,
startIndex: dataStartIndex,
endIndex: dataEndIndex,
updateId: "brush-" + updateId
@@ -3186,7 +3194,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
__webpack_exports__.a = generateCategoricalChart;
}, function(module, exports, __webpack_require__) {
- var aFunction = __webpack_require__(206);
+ var aFunction = __webpack_require__(205);
module.exports = function(fn, that, length) {
if (aFunction(fn), void 0 === that) return fn;
switch (length) {
@@ -3228,17 +3236,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = function(it, key) {
return hasOwnProperty.call(it, key);
};
-}, function(module, exports) {
- var g;
- g = function() {
- return this;
- }();
- try {
- g = g || Function("return this")() || (0, eval)("this");
- } catch (e) {
- "object" == typeof window && (g = window);
- }
- module.exports = g;
+}, function(module, exports, __webpack_require__) {
+ module.exports = {
+ default: __webpack_require__(382),
+ __esModule: !0
+ };
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
@@ -3247,9 +3249,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- function capitalizeFirstLetter(string) {
- return "production" !== process.env.NODE_ENV && (0, _warning2.default)("string" == typeof string, "Material-UI: capitalizeFirstLetter(string) expects a string argument."),
- string.charAt(0).toUpperCase() + string.slice(1);
+ function capitalize(string) {
+ if ("production" !== process.env.NODE_ENV && "string" != typeof string) throw new Error("Material-UI: capitalize(string) expects a string argument.");
+ return string.charAt(0).toUpperCase() + string.slice(1);
}
function contains(obj, pred) {
return (0, _keys2.default)(pred).every(function(key) {
@@ -3283,17 +3285,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _typeof2 = __webpack_require__(100), _typeof3 = _interopRequireDefault(_typeof2), _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys);
- exports.capitalizeFirstLetter = capitalizeFirstLetter, exports.contains = contains,
- exports.findIndex = findIndex, exports.find = find, exports.createChainedFunction = createChainedFunction;
- var _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning);
+ var _typeof2 = __webpack_require__(101), _typeof3 = _interopRequireDefault(_typeof2), _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys);
+ exports.capitalize = capitalize, exports.contains = contains, exports.findIndex = findIndex,
+ exports.find = find, exports.createChainedFunction = createChainedFunction;
+ var _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning);
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
function getNative(object, key) {
var value = getValue(object, key);
return baseIsNative(value) ? value : void 0;
}
- var baseIsNative = __webpack_require__(562), getValue = __webpack_require__(565);
+ var baseIsNative = __webpack_require__(564), getValue = __webpack_require__(567);
module.exports = getNative;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -3327,7 +3329,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_reduce_css_calc__ = __webpack_require__(686), __WEBPACK_IMPORTED_MODULE_3_reduce_css_calc___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_reduce_css_calc__), __WEBPACK_IMPORTED_MODULE_4_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_4_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_classnames__), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__util_DOMUtils__ = __webpack_require__(184), _extends = Object.assign || function(target) {
+ var _class, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_reduce_css_calc__ = __webpack_require__(688), __WEBPACK_IMPORTED_MODULE_3_reduce_css_calc___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_reduce_css_calc__), __WEBPACK_IMPORTED_MODULE_4_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_4_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_classnames__), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__util_DOMUtils__ = __webpack_require__(183), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -3427,8 +3429,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "render",
value: function() {
var _props = this.props, dx = _props.dx, dy = _props.dy, textAnchor = _props.textAnchor, verticalAnchor = _props.verticalAnchor, scaleToFit = _props.scaleToFit, angle = _props.angle, lineHeight = _props.lineHeight, capHeight = _props.capHeight, className = _props.className, textProps = _objectWithoutProperties(_props, [ "dx", "dy", "textAnchor", "verticalAnchor", "scaleToFit", "angle", "lineHeight", "capHeight", "className" ]), wordsByLines = this.state.wordsByLines;
- if (!Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.f)(textProps.x) || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.f)(textProps.y)) return null;
- var x = textProps.x + (Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(dx) ? dx : 0), y = textProps.y + (Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(dy) ? dy : 0), startDy = void 0;
+ if (!Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(textProps.x) || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(textProps.y)) return null;
+ var x = textProps.x + (Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(dx) ? dx : 0), y = textProps.y + (Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(dy) ? dy : 0), startDy = void 0;
switch (verticalAnchor) {
case "start":
startDy = __WEBPACK_IMPORTED_MODULE_3_reduce_css_calc___default()("calc(" + capHeight + ")");
@@ -3547,12 +3549,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, _class = _temp)) || _class;
__webpack_exports__.a = Dot;
}, function(module, exports, __webpack_require__) {
- var IObject = __webpack_require__(135), defined = __webpack_require__(137);
+ var IObject = __webpack_require__(134), defined = __webpack_require__(136);
module.exports = function(it) {
return IObject(defined(it));
};
}, function(module, exports, __webpack_require__) {
- var defined = __webpack_require__(137);
+ var defined = __webpack_require__(136);
module.exports = function(it) {
return Object(defined(it));
};
@@ -3591,7 +3593,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _toCss = __webpack_require__(153), _toCss2 = _interopRequireDefault(_toCss), _toCssValue = __webpack_require__(105), _toCssValue2 = _interopRequireDefault(_toCssValue), StyleRule = function() {
+ }(), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _toCss = __webpack_require__(152), _toCss2 = _interopRequireDefault(_toCss), _toCssValue = __webpack_require__(106), _toCssValue2 = _interopRequireDefault(_toCssValue), StyleRule = function() {
function StyleRule(key, style, options) {
_classCallCheck(this, StyleRule), this.type = "style", this.isProcessed = !1;
var sheet = options.sheet, Renderer = options.Renderer, selector = options.selector;
@@ -3600,22 +3602,24 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return _createClass(StyleRule, [ {
key: "prop",
- value: function(name, nextValue) {
- if (null != nextValue) {
- if (this.style[name] !== nextValue) if (nextValue = this.options.jss.plugins.onChangeValue(nextValue, name, this),
- this.style[name] = nextValue, this.renderable) this.renderer.setStyle(this.renderable, name, nextValue); else {
- var sheet = this.options.sheet;
- sheet && sheet.attached && (0, _warning2.default)(!1, 'Rule is not linked. Missing sheet option "link: true".');
- }
- return this;
- }
- return this.style[name];
+ value: function(name, value) {
+ if (void 0 === value) return this.style[name];
+ if (this.style[name] === value) return this;
+ value = this.options.jss.plugins.onChangeValue(value, name, this);
+ var isEmpty = null == value || !1 === value, isDefined = name in this.style;
+ if (isEmpty && !isDefined) return this;
+ var remove = isEmpty && isDefined;
+ if (remove ? delete this.style[name] : this.style[name] = value, this.renderable) return remove ? this.renderer.removeProperty(this.renderable, name) : this.renderer.setProperty(this.renderable, name, value),
+ this;
+ var sheet = this.options.sheet;
+ return sheet && sheet.attached && (0, _warning2.default)(!1, 'Rule is not linked. Missing sheet option "link: true".'),
+ this;
}
}, {
key: "applyTo",
value: function(renderable) {
var json = this.toJSON();
- for (var prop in json) this.renderer.setStyle(renderable, prop, json[prop]);
+ for (var prop in json) this.renderer.setProperty(renderable, prop, json[prop]);
return this;
}
}, {
@@ -3653,75 +3657,34 @@ var _bundleJs = []byte((((((((((`!function(modules) {
} ]), StyleRule;
}();
exports.default = StyleRule;
-}, function(module, exports, __webpack_require__) {
- "use strict";
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var _extends = Object.assign || function(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i];
- for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
- }
- return target;
- }, menuSkeletons = [ {
- id: "home",
- menu: {
- title: "Home",
- icon: "home"
- }
- }, {
- id: "chain",
- menu: {
- title: "Chain",
- icon: "link"
- }
- }, {
- id: "txpool",
- menu: {
- title: "TxPool",
- icon: "credit-card"
- }
- }, {
- id: "network",
- menu: {
- title: "Network",
- icon: "globe"
- }
- }, {
- id: "system",
- menu: {
- title: "System",
- icon: "tachometer"
- }
- }, {
- id: "logs",
- menu: {
- title: "Logs",
- icon: "list"
- }
- } ];
- exports.MENU = new Map(menuSkeletons.map(function(_ref) {
- var id = _ref.id, menu = _ref.menu;
- return [ id, _extends({
- id: id
- }, menu) ];
- })), exports.DURATION = 200, exports.styles = {
- light: {
- color: "rgba(255, 255, 255, 0.54)"
- }
- };
+}, function(module, exports) {
+ var g;
+ g = function() {
+ return this;
+ }();
+ try {
+ g = g || Function("return this")() || (0, eval)("this");
+ } catch (e) {
+ "object" == typeof window && (g = window);
+ }
+ module.exports = g;
}, function(module, exports, __webpack_require__) {
function isSymbol(value) {
return "symbol" == typeof value || isObjectLike(value) && baseGetTag(value) == symbolTag;
}
- var baseGetTag = __webpack_require__(42), isObjectLike = __webpack_require__(36), symbolTag = "[object Symbol]";
+ var baseGetTag = __webpack_require__(41), isObjectLike = __webpack_require__(36), symbolTag = "[object Symbol]";
module.exports = isSymbol;
}, function(module, exports) {
function identity(value) {
return value;
}
module.exports = identity;
+}, function(module, exports, __webpack_require__) {
+ function baseIteratee(value) {
+ return "function" == typeof value ? value : null == value ? identity : "object" == typeof value ? isArray(value) ? baseMatchesProperty(value[0], value[1]) : baseMatches(value) : property(value);
+ }
+ var baseMatches = __webpack_require__(671), baseMatchesProperty = __webpack_require__(674), identity = __webpack_require__(62), isArray = __webpack_require__(11), property = __webpack_require__(678);
+ module.exports = baseIteratee;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_exports__.a = function(a, b) {
@@ -3892,7 +3855,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_d3_shape__ = __webpack_require__(173), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_d3_shape__ = __webpack_require__(172), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -3954,7 +3917,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return d.base.y;
}), lineFunction.defined(defined).curve(curveFactory), lineFunction(areaPoints);
}
- return lineFunction = "vertical" === layout && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(baseLine) ? Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.a)().y(getY).x1(getX).x0(baseLine) : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(baseLine) ? Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.a)().x(getX).y1(getY).y0(baseLine) : Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.m)().x(getX).y(getY),
+ return lineFunction = "vertical" === layout && Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(baseLine) ? Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.a)().y(getY).x1(getX).x0(baseLine) : Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.h)(baseLine) ? Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.a)().x(getX).y1(getY).y0(baseLine) : Object(__WEBPACK_IMPORTED_MODULE_4_d3_shape__.m)().x(getX).y(getY),
lineFunction.defined(defined).curve(curveFactory), lineFunction(formatPoints);
}
}, {
@@ -4240,7 +4203,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var $keys = __webpack_require__(209), enumBugKeys = __webpack_require__(141);
+ var $keys = __webpack_require__(208), enumBugKeys = __webpack_require__(140);
module.exports = Object.keys || function(O) {
return $keys(O, enumBugKeys);
};
@@ -4292,7 +4255,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.keys = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
exports.default = createBreakpoints;
var keys = exports.keys = [ "xs", "sm", "md", "lg", "xl" ];
}, function(module, exports, __webpack_require__) {
@@ -4337,7 +4300,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _createRule = __webpack_require__(106), _createRule2 = _interopRequireDefault(_createRule), _linkRule = __webpack_require__(231), _linkRule2 = _interopRequireDefault(_linkRule), _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _escape = __webpack_require__(424), _escape2 = _interopRequireDefault(_escape), RuleList = function() {
+ }(), _createRule = __webpack_require__(107), _createRule2 = _interopRequireDefault(_createRule), _linkRule = __webpack_require__(231), _linkRule2 = _interopRequireDefault(_linkRule), _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _escape = __webpack_require__(428), _escape2 = _interopRequireDefault(_escape), RuleList = function() {
function RuleList(options) {
_classCallCheck(this, RuleList), this.map = {}, this.raw = {}, this.index = [],
this.options = options, this.classes = options.classes;
@@ -4424,6 +4387,64 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}();
exports.default = RuleList;
}, function(module, exports, __webpack_require__) {
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ });
+ var _extends = Object.assign || function(target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = arguments[i];
+ for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
+ }
+ return target;
+ }, menuSkeletons = [ {
+ id: "home",
+ menu: {
+ title: "Home",
+ icon: "home"
+ }
+ }, {
+ id: "chain",
+ menu: {
+ title: "Chain",
+ icon: "link"
+ }
+ }, {
+ id: "txpool",
+ menu: {
+ title: "TxPool",
+ icon: "credit-card"
+ }
+ }, {
+ id: "network",
+ menu: {
+ title: "Network",
+ icon: "globe"
+ }
+ }, {
+ id: "system",
+ menu: {
+ title: "System",
+ icon: "tachometer"
+ }
+ }, {
+ id: "logs",
+ menu: {
+ title: "Logs",
+ icon: "list"
+ }
+ } ];
+ exports.MENU = new Map(menuSkeletons.map(function(_ref) {
+ var id = _ref.id, menu = _ref.menu;
+ return [ id, _extends({
+ id: id
+ }, menu) ];
+ })), exports.DURATION = 200, exports.styles = {
+ light: {
+ color: "rgba(255, 255, 255, 0.54)"
+ }
+ };
+}, function(module, exports, __webpack_require__) {
var root = __webpack_require__(32), Symbol = root.Symbol;
module.exports = Symbol;
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -4469,9 +4490,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
children: __WEBPACK_IMPORTED_MODULE_1_prop_types___default.a.oneOfType([ __WEBPACK_IMPORTED_MODULE_1_prop_types___default.a.arrayOf(__WEBPACK_IMPORTED_MODULE_1_prop_types___default.a.node), __WEBPACK_IMPORTED_MODULE_1_prop_types___default.a.node ])
};
Surface.propTypes = propTypes, __webpack_exports__.a = Surface;
+}, function(module, exports) {
+ function arrayMap(array, iteratee) {
+ for (var index = -1, length = null == array ? 0 : array.length, result = Array(length); ++index < length; ) result[index] = iteratee(array[index], index, array);
+ return result;
+ }
+ module.exports = arrayMap;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__src_path__ = __webpack_require__(584);
+ var __WEBPACK_IMPORTED_MODULE_0__src_path__ = __webpack_require__(586);
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_path__.a;
});
@@ -4523,14 +4550,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function isArrayLike(value) {
return null != value && isLength(value.length) && !isFunction(value);
}
- var isFunction = __webpack_require__(8), isLength = __webpack_require__(182);
+ var isFunction = __webpack_require__(8), isLength = __webpack_require__(181);
module.exports = isArrayLike;
-}, function(module, exports, __webpack_require__) {
- function baseIteratee(value) {
- return "function" == typeof value ? value : null == value ? identity : "object" == typeof value ? isArray(value) ? baseMatchesProperty(value[0], value[1]) : baseMatches(value) : property(value);
- }
- var baseMatches = __webpack_require__(669), baseMatchesProperty = __webpack_require__(672), identity = __webpack_require__(63), isArray = __webpack_require__(12), property = __webpack_require__(676);
- module.exports = baseIteratee;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function Cell() {
@@ -4580,24 +4601,24 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, linearish(scale);
}
__webpack_exports__.b = linearish, __webpack_exports__.a = linear;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(88), __WEBPACK_IMPORTED_MODULE_2__continuous__ = __webpack_require__(126), __WEBPACK_IMPORTED_MODULE_3__tickFormat__ = __webpack_require__(742);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(89), __WEBPACK_IMPORTED_MODULE_2__continuous__ = __webpack_require__(125), __WEBPACK_IMPORTED_MODULE_3__tickFormat__ = __webpack_require__(742);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__src_value__ = __webpack_require__(187);
+ var __WEBPACK_IMPORTED_MODULE_0__src_value__ = __webpack_require__(186);
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_value__.a;
});
- var __WEBPACK_IMPORTED_MODULE_5__src_number__ = (__webpack_require__(305), __webpack_require__(190),
- __webpack_require__(303), __webpack_require__(306), __webpack_require__(125));
+ var __WEBPACK_IMPORTED_MODULE_5__src_number__ = (__webpack_require__(310), __webpack_require__(189),
+ __webpack_require__(308), __webpack_require__(311), __webpack_require__(124));
__webpack_require__.d(__webpack_exports__, "c", function() {
return __WEBPACK_IMPORTED_MODULE_5__src_number__.a;
});
- var __WEBPACK_IMPORTED_MODULE_7__src_round__ = (__webpack_require__(307), __webpack_require__(732));
+ var __WEBPACK_IMPORTED_MODULE_7__src_round__ = (__webpack_require__(312), __webpack_require__(732));
__webpack_require__.d(__webpack_exports__, "d", function() {
return __WEBPACK_IMPORTED_MODULE_7__src_round__.a;
});
- var __WEBPACK_IMPORTED_MODULE_15__src_cubehelix__ = (__webpack_require__(308), __webpack_require__(733),
- __webpack_require__(736), __webpack_require__(302), __webpack_require__(737), __webpack_require__(738),
+ var __WEBPACK_IMPORTED_MODULE_15__src_cubehelix__ = (__webpack_require__(313), __webpack_require__(733),
+ __webpack_require__(736), __webpack_require__(307), __webpack_require__(737), __webpack_require__(738),
__webpack_require__(739), __webpack_require__(740));
__webpack_require__.d(__webpack_exports__, "b", function() {
return __WEBPACK_IMPORTED_MODULE_15__src_cubehelix__.a;
@@ -4629,7 +4650,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return d ? linear(a, d) : Object(__WEBPACK_IMPORTED_MODULE_0__constant__.a)(isNaN(a) ? b : a);
}
__webpack_exports__.c = hue, __webpack_exports__.b = gamma, __webpack_exports__.a = nogamma;
- var __WEBPACK_IMPORTED_MODULE_0__constant__ = __webpack_require__(304);
+ var __WEBPACK_IMPORTED_MODULE_0__constant__ = __webpack_require__(309);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_exports__.a = function(s) {
@@ -4859,7 +4880,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}
}
- "production" === process.env.NODE_ENV ? (checkDCE(), module.exports = __webpack_require__(334)) : module.exports = __webpack_require__(337);
+ "production" === process.env.NODE_ENV ? (checkDCE(), module.exports = __webpack_require__(339)) : module.exports = __webpack_require__(342);
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -4877,7 +4898,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var hasOwnProperty = Object.prototype.hasOwnProperty;
module.exports = shallowEqual;
}, function(module, exports, __webpack_require__) {
- var toInteger = __webpack_require__(138), min = Math.min;
+ var toInteger = __webpack_require__(137), min = Math.min;
module.exports = function(it) {
return it > 0 ? min(toInteger(it), 9007199254740991) : 0;
};
@@ -4896,7 +4917,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
exports.__esModule = !0;
- var _iterator = __webpack_require__(352), _iterator2 = _interopRequireDefault(_iterator), _symbol = __webpack_require__(360), _symbol2 = _interopRequireDefault(_symbol), _typeof = "function" == typeof _symbol2.default && "symbol" == typeof _iterator2.default ? function(obj) {
+ var _iterator = __webpack_require__(357), _iterator2 = _interopRequireDefault(_iterator), _symbol = __webpack_require__(365), _symbol2 = _interopRequireDefault(_symbol), _typeof = "function" == typeof _symbol2.default && "symbol" == typeof _iterator2.default ? function(obj) {
return typeof obj;
} : function(obj) {
return obj && "function" == typeof _symbol2.default && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : typeof obj;
@@ -4907,9 +4928,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return obj && "function" == typeof _symbol2.default && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : void 0 === obj ? "undefined" : _typeof(obj);
};
}, function(module, exports, __webpack_require__) {
- var anObject = __webpack_require__(48), dPs = __webpack_require__(356), enumBugKeys = __webpack_require__(141), IE_PROTO = __webpack_require__(139)("IE_PROTO"), Empty = function() {}, createDict = function() {
- var iframeDocument, iframe = __webpack_require__(208)("iframe"), i = enumBugKeys.length;
- for (iframe.style.display = "none", __webpack_require__(357).appendChild(iframe),
+ var anObject = __webpack_require__(47), dPs = __webpack_require__(361), enumBugKeys = __webpack_require__(140), IE_PROTO = __webpack_require__(138)("IE_PROTO"), Empty = function() {}, createDict = function() {
+ var iframeDocument, iframe = __webpack_require__(207)("iframe"), i = enumBugKeys.length;
+ for (iframe.style.display = "none", __webpack_require__(362).appendChild(iframe),
iframe.src = "javascript:", iframeDocument = iframe.contentWindow.document, iframeDocument.open(),
iframeDocument.write("<script>document.F=Object<\/script>"), iframeDocument.close(),
createDict = iframeDocument.F; i--; ) delete createDict.prototype[enumBugKeys[i]];
@@ -4921,7 +4942,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
result[IE_PROTO] = O) : result = createDict(), void 0 === Properties ? result : dPs(result, Properties);
};
}, function(module, exports, __webpack_require__) {
- var def = __webpack_require__(22).f, has = __webpack_require__(50), TAG = __webpack_require__(21)("toStringTag");
+ var def = __webpack_require__(22).f, has = __webpack_require__(49), TAG = __webpack_require__(21)("toStringTag");
module.exports = function(it, tag, stat) {
it && !has(it = stat ? it : it.prototype, TAG) && def(it, TAG, {
configurable: !0,
@@ -4981,7 +5002,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var deepmerge_1 = deepmerge;
__webpack_exports__.default = deepmerge_1;
}, function(module, exports, __webpack_require__) {
- var ctx = __webpack_require__(47), call = __webpack_require__(221), isArrayIter = __webpack_require__(222), anObject = __webpack_require__(48), toLength = __webpack_require__(97), getIterFn = __webpack_require__(223), BREAK = {}, RETURN = {}, exports = module.exports = function(iterable, entries, fn, that, ITERATOR) {
+ var ctx = __webpack_require__(46), call = __webpack_require__(221), isArrayIter = __webpack_require__(222), anObject = __webpack_require__(47), toLength = __webpack_require__(98), getIterFn = __webpack_require__(223), BREAK = {}, RETURN = {}, exports = module.exports = function(iterable, entries, fn, that, ITERATOR) {
var length, step, iterator, result, iterFn = ITERATOR ? function() {
return iterable;
} : getIterFn(iterable), f = ctx(fn, that, entries ? 2 : 1), index = 0;
@@ -5026,7 +5047,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = createRule;
- var _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _cloneStyle = __webpack_require__(420), _cloneStyle2 = _interopRequireDefault(_cloneStyle);
+ var _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _cloneStyle = __webpack_require__(424), _cloneStyle2 = _interopRequireDefault(_cloneStyle);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", {
@@ -5042,191 +5063,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.default = isBrowser;
}, function(module, exports, __webpack_require__) {
"use strict";
- (function(process) {
- function _interopRequireDefault(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
- function _objectWithoutProperties(obj, keys) {
- var target = {};
- for (var i in obj) keys.indexOf(i) >= 0 || Object.prototype.hasOwnProperty.call(obj, i) && (target[i] = obj[i]);
- return target;
- }
- function _classCallCheck(instance, Constructor) {
- if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
- }
- function _possibleConstructorReturn(self, call) {
- if (!self) throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
- return !call || "object" != typeof call && "function" != typeof call ? self : call;
- }
- function _inherits(subClass, superClass) {
- if ("function" != typeof superClass && null !== superClass) throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
- subClass.prototype = Object.create(superClass && superClass.prototype, {
- constructor: {
- value: subClass,
- enumerable: !1,
- writable: !0,
- configurable: !0
- }
- }), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
- }
- function noop() {}
- exports.__esModule = !0, exports.EXITING = exports.ENTERED = exports.ENTERING = exports.EXITED = exports.UNMOUNTED = void 0;
- var _propTypes = __webpack_require__(1), PropTypes = function(obj) {
- if (obj && obj.__esModule) return obj;
- var newObj = {};
- if (null != obj) for (var key in obj) Object.prototype.hasOwnProperty.call(obj, key) && (newObj[key] = obj[key]);
- return newObj.default = obj, newObj;
- }(_propTypes), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _reactDom = __webpack_require__(95), _reactDom2 = _interopRequireDefault(_reactDom), _PropTypes = __webpack_require__(460), UNMOUNTED = exports.UNMOUNTED = "unmounted", EXITED = exports.EXITED = "exited", ENTERING = exports.ENTERING = "entering", ENTERED = exports.ENTERED = "entered", EXITING = exports.EXITING = "exiting", Transition = function(_React$Component) {
- function Transition(props, context) {
- _classCallCheck(this, Transition);
- var _this = _possibleConstructorReturn(this, _React$Component.call(this, props, context)), parentGroup = context.transitionGroup, appear = parentGroup && !parentGroup.isMounting ? props.enter : props.appear, initialStatus = void 0;
- return _this.nextStatus = null, props.in ? appear ? (initialStatus = EXITED, _this.nextStatus = ENTERING) : initialStatus = ENTERED : initialStatus = props.unmountOnExit || props.mountOnEnter ? UNMOUNTED : EXITED,
- _this.state = {
- status: initialStatus
- }, _this.nextCallback = null, _this;
- }
- return _inherits(Transition, _React$Component), Transition.prototype.getChildContext = function() {
- return {
- transitionGroup: null
- };
- }, Transition.prototype.componentDidMount = function() {
- this.updateStatus(!0);
- }, Transition.prototype.componentWillReceiveProps = function(nextProps) {
- var _ref = this.pendingState || this.state, status = _ref.status;
- nextProps.in ? (status === UNMOUNTED && this.setState({
- status: EXITED
- }), status !== ENTERING && status !== ENTERED && (this.nextStatus = ENTERING)) : status !== ENTERING && status !== ENTERED || (this.nextStatus = EXITING);
- }, Transition.prototype.componentDidUpdate = function() {
- this.updateStatus();
- }, Transition.prototype.componentWillUnmount = function() {
- this.cancelNextCallback();
- }, Transition.prototype.getTimeouts = function() {
- var timeout = this.props.timeout, exit = void 0, enter = void 0, appear = void 0;
- return exit = enter = appear = timeout, null != timeout && "number" != typeof timeout && (exit = timeout.exit,
- enter = timeout.enter, appear = timeout.appear), {
- exit: exit,
- enter: enter,
- appear: appear
- };
- }, Transition.prototype.updateStatus = function() {
- var mounting = arguments.length > 0 && void 0 !== arguments[0] && arguments[0], nextStatus = this.nextStatus;
- if (null !== nextStatus) {
- this.nextStatus = null, this.cancelNextCallback();
- var node = _reactDom2.default.findDOMNode(this);
- nextStatus === ENTERING ? this.performEnter(node, mounting) : this.performExit(node);
- } else this.props.unmountOnExit && this.state.status === EXITED && this.setState({
- status: UNMOUNTED
- });
- }, Transition.prototype.performEnter = function(node, mounting) {
- var _this2 = this, enter = this.props.enter, appearing = this.context.transitionGroup ? this.context.transitionGroup.isMounting : mounting, timeouts = this.getTimeouts();
- if (!mounting && !enter) return void this.safeSetState({
- status: ENTERED
- }, function() {
- _this2.props.onEntered(node);
- });
- this.props.onEnter(node, appearing), this.safeSetState({
- status: ENTERING
- }, function() {
- _this2.props.onEntering(node, appearing), _this2.onTransitionEnd(node, timeouts.enter, function() {
- _this2.safeSetState({
- status: ENTERED
- }, function() {
- _this2.props.onEntered(node, appearing);
- });
- });
- });
- }, Transition.prototype.performExit = function(node) {
- var _this3 = this, exit = this.props.exit, timeouts = this.getTimeouts();
- if (!exit) return void this.safeSetState({
- status: EXITED
- }, function() {
- _this3.props.onExited(node);
- });
- this.props.onExit(node), this.safeSetState({
- status: EXITING
- }, function() {
- _this3.props.onExiting(node), _this3.onTransitionEnd(node, timeouts.exit, function() {
- _this3.safeSetState({
- status: EXITED
- }, function() {
- _this3.props.onExited(node);
- });
- });
- });
- }, Transition.prototype.cancelNextCallback = function() {
- null !== this.nextCallback && (this.nextCallback.cancel(), this.nextCallback = null);
- }, Transition.prototype.safeSetState = function(nextState, callback) {
- var _this4 = this;
- this.pendingState = nextState, callback = this.setNextCallback(callback), this.setState(nextState, function() {
- _this4.pendingState = null, callback();
- });
- }, Transition.prototype.setNextCallback = function(callback) {
- var _this5 = this, active = !0;
- return this.nextCallback = function(event) {
- active && (active = !1, _this5.nextCallback = null, callback(event));
- }, this.nextCallback.cancel = function() {
- active = !1;
- }, this.nextCallback;
- }, Transition.prototype.onTransitionEnd = function(node, timeout, handler) {
- this.setNextCallback(handler), node ? (this.props.addEndListener && this.props.addEndListener(node, this.nextCallback),
- null != timeout && setTimeout(this.nextCallback, timeout)) : setTimeout(this.nextCallback, 0);
- }, Transition.prototype.render = function() {
- var status = this.state.status;
- if (status === UNMOUNTED) return null;
- var _props = this.props, children = _props.children, childProps = _objectWithoutProperties(_props, [ "children" ]);
- if (delete childProps.in, delete childProps.mountOnEnter, delete childProps.unmountOnExit,
- delete childProps.appear, delete childProps.enter, delete childProps.exit, delete childProps.timeout,
- delete childProps.addEndListener, delete childProps.onEnter, delete childProps.onEntering,
- delete childProps.onEntered, delete childProps.onExit, delete childProps.onExiting,
- delete childProps.onExited, "function" == typeof children) return children(status, childProps);
- var child = _react2.default.Children.only(children);
- return _react2.default.cloneElement(child, childProps);
- }, Transition;
- }(_react2.default.Component);
- Transition.contextTypes = {
- transitionGroup: PropTypes.object
- }, Transition.childContextTypes = {
- transitionGroup: function() {}
- }, Transition.propTypes = "production" !== process.env.NODE_ENV ? {
- children: PropTypes.oneOfType([ PropTypes.func.isRequired, PropTypes.element.isRequired ]).isRequired,
- in: PropTypes.bool,
- mountOnEnter: PropTypes.bool,
- unmountOnExit: PropTypes.bool,
- appear: PropTypes.bool,
- enter: PropTypes.bool,
- exit: PropTypes.bool,
- timeout: function(props) {
- for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) args[_key - 1] = arguments[_key];
- var pt = _PropTypes.timeoutsShape;
- return props.addEndListener || (pt = pt.isRequired), pt.apply(void 0, [ props ].concat(args));
- },
- addEndListener: PropTypes.func,
- onEnter: PropTypes.func,
- onEntering: PropTypes.func,
- onEntered: PropTypes.func,
- onExit: PropTypes.func,
- onExiting: PropTypes.func,
- onExited: PropTypes.func
- } : {}, Transition.defaultProps = {
- in: !1,
- mountOnEnter: !1,
- unmountOnExit: !1,
- appear: !1,
- enter: !0,
- exit: !0,
- onEnter: noop,
- onEntering: noop,
- onEntered: noop,
- onExit: noop,
- onExiting: noop,
- onExited: noop
- }, Transition.UNMOUNTED = 0, Transition.EXITED = 1, Transition.ENTERING = 2, Transition.ENTERED = 3,
- Transition.EXITING = 4, exports.default = Transition;
- }).call(exports, __webpack_require__(2));
-}, function(module, exports, __webpack_require__) {
- "use strict";
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
@@ -5235,7 +5071,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _Typography = __webpack_require__(480);
+ var _Typography = __webpack_require__(489);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -5251,7 +5087,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
};
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), nativeCreate = getNative(Object, "create");
+ var getNative = __webpack_require__(52), nativeCreate = getNative(Object, "create");
module.exports = nativeCreate;
}, function(module, exports, __webpack_require__) {
function ListCache(entries) {
@@ -5261,7 +5097,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.set(entry[0], entry[1]);
}
}
- var listCacheClear = __webpack_require__(570), listCacheDelete = __webpack_require__(571), listCacheGet = __webpack_require__(572), listCacheHas = __webpack_require__(573), listCacheSet = __webpack_require__(574);
+ var listCacheClear = __webpack_require__(572), listCacheDelete = __webpack_require__(573), listCacheGet = __webpack_require__(574), listCacheHas = __webpack_require__(575), listCacheSet = __webpack_require__(576);
ListCache.prototype.clear = listCacheClear, ListCache.prototype.delete = listCacheDelete,
ListCache.prototype.get = listCacheGet, ListCache.prototype.has = listCacheHas,
ListCache.prototype.set = listCacheSet, module.exports = ListCache;
@@ -5270,34 +5106,28 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var length = array.length; length--; ) if (eq(array[length][0], key)) return length;
return -1;
}
- var eq = __webpack_require__(168);
+ var eq = __webpack_require__(167);
module.exports = assocIndexOf;
}, function(module, exports, __webpack_require__) {
function getMapData(map, key) {
var data = map.__data__;
return isKeyable(key) ? data["string" == typeof key ? "string" : "hash"] : data.map;
}
- var isKeyable = __webpack_require__(576);
+ var isKeyable = __webpack_require__(578);
module.exports = getMapData;
-}, function(module, exports) {
- function arrayMap(array, iteratee) {
- for (var index = -1, length = null == array ? 0 : array.length, result = Array(length); ++index < length; ) result[index] = iteratee(array[index], index, array);
- return result;
- }
- module.exports = arrayMap;
}, function(module, exports, __webpack_require__) {
function toKey(value) {
if ("string" == typeof value || isSymbol(value)) return value;
var result = value + "";
return "0" == result && 1 / value == -INFINITY ? "-0" : result;
}
- var isSymbol = __webpack_require__(62), INFINITY = 1 / 0;
+ var isSymbol = __webpack_require__(61), INFINITY = 1 / 0;
module.exports = toKey;
}, function(module, exports, __webpack_require__) {
function isNaN(value) {
return isNumber(value) && value != +value;
}
- var isNumber = __webpack_require__(170);
+ var isNumber = __webpack_require__(169);
module.exports = isNaN;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -5466,7 +5296,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_5__DefaultTooltipContent__ = (__webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_smooth__),
- __webpack_require__(681)), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_8__util_PureRender__ = __webpack_require__(5), _extends = Object.assign || function(target) {
+ __webpack_require__(683)), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_8__util_PureRender__ = __webpack_require__(5), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -5596,11 +5426,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
position: "absolute",
top: 0
}, wrapperStyle), translateX = void 0, translateY = void 0;
- if (position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.g)(position.x) && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.g)(position.y)) translateX = position.x,
+ if (position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.h)(position.x) && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.h)(position.y)) translateX = position.x,
translateY = position.y; else {
var _state2 = this.state, boxWidth = _state2.boxWidth, boxHeight = _state2.boxHeight;
- boxWidth > 0 && boxHeight > 0 && coordinate ? (translateX = position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.g)(position.x) ? position.x : Math.max(coordinate.x + boxWidth + offset > viewBox.x + viewBox.width ? coordinate.x - boxWidth - offset : coordinate.x + offset, viewBox.x),
- translateY = position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.g)(position.y) ? position.y : Math.max(coordinate.y + boxHeight + offset > viewBox.y + viewBox.height ? coordinate.y - boxHeight - offset : coordinate.y + offset, viewBox.y)) : outerStyle.visibility = "hidden";
+ boxWidth > 0 && boxHeight > 0 && coordinate ? (translateX = position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.h)(position.x) ? position.x : Math.max(coordinate.x + boxWidth + offset > viewBox.x + viewBox.width ? coordinate.x - boxWidth - offset : coordinate.x + offset, viewBox.x),
+ translateY = position && Object(__WEBPACK_IMPORTED_MODULE_7__util_DataUtils__.h)(position.y) ? position.y : Math.max(coordinate.y + boxHeight + offset > viewBox.y + viewBox.height ? coordinate.y - boxHeight - offset : coordinate.y + offset, viewBox.y)) : outerStyle.visibility = "hidden";
}
return outerStyle = _extends({}, outerStyle, Object(__WEBPACK_IMPORTED_MODULE_4_react_smooth__.translateStyle)({
transform: this.props.useTranslate3d ? "translate3d(" + translateX + "px, " + translateY + "px, 0)" : "translate(" + translateX + "px, " + translateY + "px)"
@@ -5634,7 +5464,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.warn = exports.getTransitionVal = exports.compose = exports.translateStyle = exports.mapObject = exports.debugf = exports.debug = exports.log = exports.generatePrefixStyle = exports.getDashCase = exports.identity = exports.getIntersectionKeys = void 0;
- var _intersection2 = __webpack_require__(645), _intersection3 = function(obj) {
+ var _intersection2 = __webpack_require__(647), _intersection3 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -5714,7 +5544,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return result;
}
- var isSymbol = __webpack_require__(62);
+ var isSymbol = __webpack_require__(61);
module.exports = baseExtremum;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -5792,10 +5622,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, rescale();
}
__webpack_exports__.c = deinterpolateLinear, __webpack_exports__.a = copy, __webpack_exports__.b = continuous;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(88), __WEBPACK_IMPORTED_MODULE_2__array__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_3__constant__ = __webpack_require__(191), __WEBPACK_IMPORTED_MODULE_4__number__ = __webpack_require__(309), unit = [ 0, 1 ];
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(89), __WEBPACK_IMPORTED_MODULE_2__array__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_3__constant__ = __webpack_require__(190), __WEBPACK_IMPORTED_MODULE_4__number__ = __webpack_require__(314), unit = [ 0, 1 ];
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(192);
+ var __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(191);
__webpack_exports__.a = function(x) {
return x = Object(__WEBPACK_IMPORTED_MODULE_0__formatDecimal__.a)(Math.abs(x)),
x ? x[1] : NaN;
@@ -5839,7 +5669,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Constructor;
};
}(), getDeltaAngle = function(startAngle, endAngle) {
- return Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.i)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 359.999);
+ return Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.j)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 359.999);
}, getTangentCircle = function(_ref) {
var cx = _ref.cx, cy = _ref.cy, radius = _ref.radius, angle = _ref.angle, sign = _ref.sign, isExternal = _ref.isExternal, cornerRadius = _ref.cornerRadius, centerRadius = cornerRadius * (isExternal ? 1 : -1) + radius, theta = Math.asin(cornerRadius / centerRadius) / __WEBPACK_IMPORTED_MODULE_5__util_PolarUtils__.a, centerAngle = angle + sign * theta;
return {
@@ -5856,7 +5686,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
} else path += "L " + cx + "," + cy + " Z";
return path;
}, getSectorWithCorner = function(_ref3) {
- var cx = _ref3.cx, cy = _ref3.cy, innerRadius = _ref3.innerRadius, outerRadius = _ref3.outerRadius, cornerRadius = _ref3.cornerRadius, startAngle = _ref3.startAngle, endAngle = _ref3.endAngle, sign = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.i)(endAngle - startAngle), _getTangentCircle = getTangentCircle({
+ var cx = _ref3.cx, cy = _ref3.cy, innerRadius = _ref3.innerRadius, outerRadius = _ref3.outerRadius, cornerRadius = _ref3.cornerRadius, startAngle = _ref3.startAngle, endAngle = _ref3.endAngle, sign = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.j)(endAngle - startAngle), _getTangentCircle = getTangentCircle({
cx: cx,
cy: cy,
radius: outerRadius,
@@ -5911,7 +5741,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
value: function() {
var _props = this.props, cx = _props.cx, cy = _props.cy, innerRadius = _props.innerRadius, outerRadius = _props.outerRadius, cornerRadius = _props.cornerRadius, startAngle = _props.startAngle, endAngle = _props.endAngle, className = _props.className;
if (outerRadius < innerRadius || startAngle === endAngle) return null;
- var layerClass = __WEBPACK_IMPORTED_MODULE_2_classnames___default()("recharts-sector", className), deltaRadius = outerRadius - innerRadius, cr = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.c)(cornerRadius, deltaRadius, 0, !0), path = void 0;
+ var layerClass = __WEBPACK_IMPORTED_MODULE_2_classnames___default()("recharts-sector", className), deltaRadius = outerRadius - innerRadius, cr = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.d)(cornerRadius, deltaRadius, 0, !0), path = void 0;
return path = cr > 0 && Math.abs(startAngle - endAngle) < 360 ? getSectorWithCorner({
cx: cx,
cy: cy,
@@ -5978,7 +5808,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_minBy__ = __webpack_require__(783), __WEBPACK_IMPORTED_MODULE_1_lodash_minBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_minBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_maxBy__ = __webpack_require__(324), __WEBPACK_IMPORTED_MODULE_2_lodash_maxBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_maxBy__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_6__component_Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_7__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_minBy__ = __webpack_require__(783), __WEBPACK_IMPORTED_MODULE_1_lodash_minBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_minBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_maxBy__ = __webpack_require__(329), __WEBPACK_IMPORTED_MODULE_2_lodash_maxBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_maxBy__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_6__component_Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_7__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -6157,7 +5987,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_6__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_7__shape_Polygon__ = __webpack_require__(195), __WEBPACK_IMPORTED_MODULE_8__component_Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_6__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_7__shape_Polygon__ = __webpack_require__(194), __WEBPACK_IMPORTED_MODULE_8__component_Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -6378,7 +6208,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}
}
- if ("production" !== process.env.NODE_ENV) var invariant = __webpack_require__(70), warning = __webpack_require__(94), ReactPropTypesSecret = __webpack_require__(133), loggedTypeFailures = {};
+ if ("production" !== process.env.NODE_ENV) var invariant = __webpack_require__(70), warning = __webpack_require__(95), ReactPropTypesSecret = __webpack_require__(132), loggedTypeFailures = {};
module.exports = checkPropTypes;
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
@@ -6395,7 +6225,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
throw TypeError("Can't convert object to primitive value");
};
}, function(module, exports, __webpack_require__) {
- var cof = __webpack_require__(136);
+ var cof = __webpack_require__(135);
module.exports = Object("z").propertyIsEnumerable(0) ? Object : function(it) {
return "String" == cof(it) ? it.split("") : Object(it);
};
@@ -6415,7 +6245,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};
}, function(module, exports, __webpack_require__) {
- var shared = __webpack_require__(140)("keys"), uid = __webpack_require__(98);
+ var shared = __webpack_require__(139)("keys"), uid = __webpack_require__(99);
module.exports = function(key) {
return shared[key] || (shared[key] = uid(key));
};
@@ -6430,13 +6260,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
exports.f = Object.getOwnPropertySymbols;
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(348),
+ default: __webpack_require__(353),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
"use strict";
- var $at = __webpack_require__(354)(!0);
- __webpack_require__(145)(String, "String", function(iterated) {
+ var $at = __webpack_require__(359)(!0);
+ __webpack_require__(144)(String, "String", function(iterated) {
this._t = String(iterated), this._i = 0;
}, function() {
var point, O = this._t, index = this._i;
@@ -6450,7 +6280,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, exports, __webpack_require__) {
"use strict";
- var LIBRARY = __webpack_require__(146), $export = __webpack_require__(19), redefine = __webpack_require__(212), hide = __webpack_require__(40), has = __webpack_require__(50), Iterators = __webpack_require__(73), $iterCreate = __webpack_require__(355), setToStringTag = __webpack_require__(102), getPrototypeOf = __webpack_require__(210), ITERATOR = __webpack_require__(21)("iterator"), BUGGY = !([].keys && "next" in [].keys()), returnThis = function() {
+ var LIBRARY = __webpack_require__(145), $export = __webpack_require__(19), redefine = __webpack_require__(211), hide = __webpack_require__(40), has = __webpack_require__(49), Iterators = __webpack_require__(73), $iterCreate = __webpack_require__(360), setToStringTag = __webpack_require__(103), getPrototypeOf = __webpack_require__(209), ITERATOR = __webpack_require__(21)("iterator"), BUGGY = !([].keys && "next" in [].keys()), returnThis = function() {
return this;
};
module.exports = function(Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) {
@@ -6485,9 +6315,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
exports.f = __webpack_require__(21);
}, function(module, exports, __webpack_require__) {
- var META = __webpack_require__(98)("meta"), isObject = __webpack_require__(35), has = __webpack_require__(50), setDesc = __webpack_require__(22).f, id = 0, isExtensible = Object.isExtensible || function() {
+ var META = __webpack_require__(99)("meta"), isObject = __webpack_require__(35), has = __webpack_require__(49), setDesc = __webpack_require__(22).f, id = 0, isExtensible = Object.isExtensible || function() {
return !0;
- }, FREEZE = !__webpack_require__(49)(function() {
+ }, FREEZE = !__webpack_require__(48)(function() {
return isExtensible(Object.preventExtensions({}));
}), setMeta = function(it) {
setDesc(it, META, {
@@ -6522,7 +6352,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
onFreeze: onFreeze
};
}, function(module, exports, __webpack_require__) {
- var global = __webpack_require__(24), core = __webpack_require__(17), LIBRARY = __webpack_require__(146), wksExt = __webpack_require__(147), defineProperty = __webpack_require__(22).f;
+ var global = __webpack_require__(24), core = __webpack_require__(17), LIBRARY = __webpack_require__(145), wksExt = __webpack_require__(146), defineProperty = __webpack_require__(22).f;
module.exports = function(name) {
var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {});
"_" == name.charAt(0) || name in $Symbol || defineProperty($Symbol, name, {
@@ -6582,50 +6412,55 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _deepmerge = __webpack_require__(103), _deepmerge2 = _interopRequireDefault(_deepmerge), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _createTypography = __webpack_require__(380), _createTypography2 = _interopRequireDefault(_createTypography), _createBreakpoints = __webpack_require__(74), _createBreakpoints2 = _interopRequireDefault(_createBreakpoints), _createPalette = __webpack_require__(381), _createPalette2 = _interopRequireDefault(_createPalette), _createMixins = __webpack_require__(388), _createMixins2 = _interopRequireDefault(_createMixins), _shadows = __webpack_require__(389), _shadows2 = _interopRequireDefault(_shadows), _transitions = __webpack_require__(390), _transitions2 = _interopRequireDefault(_transitions), _zIndex = __webpack_require__(394), _zIndex2 = _interopRequireDefault(_zIndex), _spacing = __webpack_require__(395), _spacing2 = _interopRequireDefault(_spacing);
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _deepmerge = __webpack_require__(104), _deepmerge2 = _interopRequireDefault(_deepmerge), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _createTypography = __webpack_require__(384), _createTypography2 = _interopRequireDefault(_createTypography), _createBreakpoints = __webpack_require__(74), _createBreakpoints2 = _interopRequireDefault(_createBreakpoints), _createPalette = __webpack_require__(385), _createPalette2 = _interopRequireDefault(_createPalette), _createMixins = __webpack_require__(392), _createMixins2 = _interopRequireDefault(_createMixins), _shadows = __webpack_require__(393), _shadows2 = _interopRequireDefault(_shadows), _transitions = __webpack_require__(394), _transitions2 = _interopRequireDefault(_transitions), _zIndex = __webpack_require__(398), _zIndex2 = _interopRequireDefault(_zIndex), _spacing = __webpack_require__(399), _spacing2 = _interopRequireDefault(_spacing);
exports.default = createMuiTheme;
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
- "use strict";
- var REACT_STATICS = {
- childContextTypes: !0,
- contextTypes: !0,
- defaultProps: !0,
- displayName: !0,
- getDefaultProps: !0,
- mixins: !0,
- propTypes: !0,
- type: !0
- }, KNOWN_STATICS = {
- name: !0,
- length: !0,
- prototype: !0,
- caller: !0,
- callee: !0,
- arguments: !0,
- arity: !0
- }, defineProperty = Object.defineProperty, getOwnPropertyNames = Object.getOwnPropertyNames, getOwnPropertySymbols = Object.getOwnPropertySymbols, getOwnPropertyDescriptor = Object.getOwnPropertyDescriptor, getPrototypeOf = Object.getPrototypeOf, objectPrototype = getPrototypeOf && getPrototypeOf(Object);
- module.exports = function hoistNonReactStatics(targetComponent, sourceComponent, blacklist) {
- if ("string" != typeof sourceComponent) {
- if (objectPrototype) {
- var inheritedComponent = getPrototypeOf(sourceComponent);
- inheritedComponent && inheritedComponent !== objectPrototype && hoistNonReactStatics(targetComponent, inheritedComponent, blacklist);
- }
- var keys = getOwnPropertyNames(sourceComponent);
- getOwnPropertySymbols && (keys = keys.concat(getOwnPropertySymbols(sourceComponent)));
- for (var i = 0; i < keys.length; ++i) {
- var key = keys[i];
- if (!(REACT_STATICS[key] || KNOWN_STATICS[key] || blacklist && blacklist[key])) {
- var descriptor = getOwnPropertyDescriptor(sourceComponent, key);
- try {
- defineProperty(targetComponent, key, descriptor);
- } catch (e) {}
+ !function(global, factory) {
+ module.exports = factory();
+ }(0, function() {
+ "use strict";
+ var REACT_STATICS = {
+ childContextTypes: !0,
+ contextTypes: !0,
+ defaultProps: !0,
+ displayName: !0,
+ getDefaultProps: !0,
+ getDerivedStateFromProps: !0,
+ mixins: !0,
+ propTypes: !0,
+ type: !0
+ }, KNOWN_STATICS = {
+ name: !0,
+ length: !0,
+ prototype: !0,
+ caller: !0,
+ callee: !0,
+ arguments: !0,
+ arity: !0
+ }, defineProperty = Object.defineProperty, getOwnPropertyNames = Object.getOwnPropertyNames, getOwnPropertySymbols = Object.getOwnPropertySymbols, getOwnPropertyDescriptor = Object.getOwnPropertyDescriptor, getPrototypeOf = Object.getPrototypeOf, objectPrototype = getPrototypeOf && getPrototypeOf(Object);
+ return function hoistNonReactStatics(targetComponent, sourceComponent, blacklist) {
+ if ("string" != typeof sourceComponent) {
+ if (objectPrototype) {
+ var inheritedComponent = getPrototypeOf(sourceComponent);
+ inheritedComponent && inheritedComponent !== objectPrototype && hoistNonReactStatics(targetComponent, inheritedComponent, blacklist);
+ }
+ var keys = getOwnPropertyNames(sourceComponent);
+ getOwnPropertySymbols && (keys = keys.concat(getOwnPropertySymbols(sourceComponent)));
+ for (var i = 0; i < keys.length; ++i) {
+ var key = keys[i];
+ if (!(REACT_STATICS[key] || KNOWN_STATICS[key] || blacklist && blacklist[key])) {
+ var descriptor = getOwnPropertyDescriptor(sourceComponent, key);
+ try {
+ defineProperty(targetComponent, key, descriptor);
+ } catch (e) {}
+ }
}
+ return targetComponent;
}
return targetComponent;
- }
- return targetComponent;
- };
+ };
+ });
}, function(module, exports, __webpack_require__) {
"use strict";
function indentStr(str, indent) {
@@ -6656,26 +6491,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = toCss;
- var _toCssValue = __webpack_require__(105), _toCssValue2 = function(obj) {
+ var _toCssValue = __webpack_require__(106), _toCssValue2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}(_toCssValue);
-}, function(module, exports) {
- module.exports = function(module) {
- return module.webpackPolyfill || (module.deprecate = function() {}, module.paths = [],
- module.children || (module.children = []), Object.defineProperty(module, "loaded", {
- enumerable: !0,
- get: function() {
- return module.l;
- }
- }), Object.defineProperty(module, "id", {
- enumerable: !0,
- get: function() {
- return module.i;
- }
- }), module.webpackPolyfill = 1), module;
- };
}, function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
@@ -6692,7 +6512,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _isInBrowser = __webpack_require__(107), _isInBrowser2 = function(obj) {
+ var _isInBrowser = __webpack_require__(108), _isInBrowser2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -6714,6 +6534,191 @@ var _bundleJs = []byte((((((((((`!function(modules) {
css: css
};
}, function(module, exports, __webpack_require__) {
+ "use strict";
+ (function(process) {
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ function _objectWithoutProperties(obj, keys) {
+ var target = {};
+ for (var i in obj) keys.indexOf(i) >= 0 || Object.prototype.hasOwnProperty.call(obj, i) && (target[i] = obj[i]);
+ return target;
+ }
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
+ }
+ function _possibleConstructorReturn(self, call) {
+ if (!self) throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
+ return !call || "object" != typeof call && "function" != typeof call ? self : call;
+ }
+ function _inherits(subClass, superClass) {
+ if ("function" != typeof superClass && null !== superClass) throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
+ subClass.prototype = Object.create(superClass && superClass.prototype, {
+ constructor: {
+ value: subClass,
+ enumerable: !1,
+ writable: !0,
+ configurable: !0
+ }
+ }), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
+ }
+ function noop() {}
+ exports.__esModule = !0, exports.EXITING = exports.ENTERED = exports.ENTERING = exports.EXITED = exports.UNMOUNTED = void 0;
+ var _propTypes = __webpack_require__(1), PropTypes = function(obj) {
+ if (obj && obj.__esModule) return obj;
+ var newObj = {};
+ if (null != obj) for (var key in obj) Object.prototype.hasOwnProperty.call(obj, key) && (newObj[key] = obj[key]);
+ return newObj.default = obj, newObj;
+ }(_propTypes), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _reactDom = __webpack_require__(96), _reactDom2 = _interopRequireDefault(_reactDom), _PropTypes = __webpack_require__(480), UNMOUNTED = exports.UNMOUNTED = "unmounted", EXITED = exports.EXITED = "exited", ENTERING = exports.ENTERING = "entering", ENTERED = exports.ENTERED = "entered", EXITING = exports.EXITING = "exiting", Transition = function(_React$Component) {
+ function Transition(props, context) {
+ _classCallCheck(this, Transition);
+ var _this = _possibleConstructorReturn(this, _React$Component.call(this, props, context)), parentGroup = context.transitionGroup, appear = parentGroup && !parentGroup.isMounting ? props.enter : props.appear, initialStatus = void 0;
+ return _this.nextStatus = null, props.in ? appear ? (initialStatus = EXITED, _this.nextStatus = ENTERING) : initialStatus = ENTERED : initialStatus = props.unmountOnExit || props.mountOnEnter ? UNMOUNTED : EXITED,
+ _this.state = {
+ status: initialStatus
+ }, _this.nextCallback = null, _this;
+ }
+ return _inherits(Transition, _React$Component), Transition.prototype.getChildContext = function() {
+ return {
+ transitionGroup: null
+ };
+ }, Transition.prototype.componentDidMount = function() {
+ this.updateStatus(!0);
+ }, Transition.prototype.componentWillReceiveProps = function(nextProps) {
+ var _ref = this.pendingState || this.state, status = _ref.status;
+ nextProps.in ? (status === UNMOUNTED && this.setState({
+ status: EXITED
+ }), status !== ENTERING && status !== ENTERED && (this.nextStatus = ENTERING)) : status !== ENTERING && status !== ENTERED || (this.nextStatus = EXITING);
+ }, Transition.prototype.componentDidUpdate = function() {
+ this.updateStatus();
+ }, Transition.prototype.componentWillUnmount = function() {
+ this.cancelNextCallback();
+ }, Transition.prototype.getTimeouts = function() {
+ var timeout = this.props.timeout, exit = void 0, enter = void 0, appear = void 0;
+ return exit = enter = appear = timeout, null != timeout && "number" != typeof timeout && (exit = timeout.exit,
+ enter = timeout.enter, appear = timeout.appear), {
+ exit: exit,
+ enter: enter,
+ appear: appear
+ };
+ }, Transition.prototype.updateStatus = function() {
+ var mounting = arguments.length > 0 && void 0 !== arguments[0] && arguments[0], nextStatus = this.nextStatus;
+ if (null !== nextStatus) {
+ this.nextStatus = null, this.cancelNextCallback();
+ var node = _reactDom2.default.findDOMNode(this);
+ nextStatus === ENTERING ? this.performEnter(node, mounting) : this.performExit(node);
+ } else this.props.unmountOnExit && this.state.status === EXITED && this.setState({
+ status: UNMOUNTED
+ });
+ }, Transition.prototype.performEnter = function(node, mounting) {
+ var _this2 = this, enter = this.props.enter, appearing = this.context.transitionGroup ? this.context.transitionGroup.isMounting : mounting, timeouts = this.getTimeouts();
+ if (!mounting && !enter) return void this.safeSetState({
+ status: ENTERED
+ }, function() {
+ _this2.props.onEntered(node);
+ });
+ this.props.onEnter(node, appearing), this.safeSetState({
+ status: ENTERING
+ }, function() {
+ _this2.props.onEntering(node, appearing), _this2.onTransitionEnd(node, timeouts.enter, function() {
+ _this2.safeSetState({
+ status: ENTERED
+ }, function() {
+ _this2.props.onEntered(node, appearing);
+ });
+ });
+ });
+ }, Transition.prototype.performExit = function(node) {
+ var _this3 = this, exit = this.props.exit, timeouts = this.getTimeouts();
+ if (!exit) return void this.safeSetState({
+ status: EXITED
+ }, function() {
+ _this3.props.onExited(node);
+ });
+ this.props.onExit(node), this.safeSetState({
+ status: EXITING
+ }, function() {
+ _this3.props.onExiting(node), _this3.onTransitionEnd(node, timeouts.exit, function() {
+ _this3.safeSetState({
+ status: EXITED
+ }, function() {
+ _this3.props.onExited(node);
+ });
+ });
+ });
+ }, Transition.prototype.cancelNextCallback = function() {
+ null !== this.nextCallback && (this.nextCallback.cancel(), this.nextCallback = null);
+ }, Transition.prototype.safeSetState = function(nextState, callback) {
+ var _this4 = this;
+ this.pendingState = nextState, callback = this.setNextCallback(callback), this.setState(nextState, function() {
+ _this4.pendingState = null, callback();
+ });
+ }, Transition.prototype.setNextCallback = function(callback) {
+ var _this5 = this, active = !0;
+ return this.nextCallback = function(event) {
+ active && (active = !1, _this5.nextCallback = null, callback(event));
+ }, this.nextCallback.cancel = function() {
+ active = !1;
+ }, this.nextCallback;
+ }, Transition.prototype.onTransitionEnd = function(node, timeout, handler) {
+ this.setNextCallback(handler), node ? (this.props.addEndListener && this.props.addEndListener(node, this.nextCallback),
+ null != timeout && setTimeout(this.nextCallback, timeout)) : setTimeout(this.nextCallback, 0);
+ }, Transition.prototype.render = function() {
+ var status = this.state.status;
+ if (status === UNMOUNTED) return null;
+ var _props = this.props, children = _props.children, childProps = _objectWithoutProperties(_props, [ "children" ]);
+ if (delete childProps.in, delete childProps.mountOnEnter, delete childProps.unmountOnExit,
+ delete childProps.appear, delete childProps.enter, delete childProps.exit, delete childProps.timeout,
+ delete childProps.addEndListener, delete childProps.onEnter, delete childProps.onEntering,
+ delete childProps.onEntered, delete childProps.onExit, delete childProps.onExiting,
+ delete childProps.onExited, "function" == typeof children) return children(status, childProps);
+ var child = _react2.default.Children.only(children);
+ return _react2.default.cloneElement(child, childProps);
+ }, Transition;
+ }(_react2.default.Component);
+ Transition.contextTypes = {
+ transitionGroup: PropTypes.object
+ }, Transition.childContextTypes = {
+ transitionGroup: function() {}
+ }, Transition.propTypes = "production" !== process.env.NODE_ENV ? {
+ children: PropTypes.oneOfType([ PropTypes.func.isRequired, PropTypes.element.isRequired ]).isRequired,
+ in: PropTypes.bool,
+ mountOnEnter: PropTypes.bool,
+ unmountOnExit: PropTypes.bool,
+ appear: PropTypes.bool,
+ enter: PropTypes.bool,
+ exit: PropTypes.bool,
+ timeout: function(props) {
+ for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) args[_key - 1] = arguments[_key];
+ var pt = _PropTypes.timeoutsShape;
+ return props.addEndListener || (pt = pt.isRequired), pt.apply(void 0, [ props ].concat(args));
+ },
+ addEndListener: PropTypes.func,
+ onEnter: PropTypes.func,
+ onEntering: PropTypes.func,
+ onEntered: PropTypes.func,
+ onExit: PropTypes.func,
+ onExiting: PropTypes.func,
+ onExited: PropTypes.func
+ } : {}, Transition.defaultProps = {
+ in: !1,
+ mountOnEnter: !1,
+ unmountOnExit: !1,
+ appear: !1,
+ enter: !0,
+ exit: !0,
+ onEnter: noop,
+ onEntering: noop,
+ onEntered: noop,
+ onExit: noop,
+ onExiting: noop,
+ onExited: noop
+ }, Transition.UNMOUNTED = 0, Transition.EXITED = 1, Transition.ENTERING = 2, Transition.ENTERED = 3,
+ Transition.EXITING = 4, exports.default = Transition;
+ }).call(exports, __webpack_require__(2));
+}, function(module, exports, __webpack_require__) {
function debounce(func, wait, options) {
function invokeFunc(time) {
var args = lastArgs, thisArg = lastThis;
@@ -6723,8 +6728,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return lastInvokeTime = time, timerId = setTimeout(timerExpired, wait), leading ? invokeFunc(time) : result;
}
function remainingWait(time) {
- var timeSinceLastCall = time - lastCallTime, timeSinceLastInvoke = time - lastInvokeTime, result = wait - timeSinceLastCall;
- return maxing ? nativeMin(result, maxWait - timeSinceLastInvoke) : result;
+ var timeSinceLastCall = time - lastCallTime, timeSinceLastInvoke = time - lastInvokeTime, timeWaiting = wait - timeSinceLastCall;
+ return maxing ? nativeMin(timeWaiting, maxWait - timeSinceLastInvoke) : timeWaiting;
}
function shouldInvoke(time) {
var timeSinceLastCall = time - lastCallTime, timeSinceLastInvoke = time - lastInvokeTime;
@@ -6760,7 +6765,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
trailing = "trailing" in options ? !!options.trailing : trailing), debounced.cancel = cancel,
debounced.flush = flush, debounced;
}
- var isObject = __webpack_require__(31), now = __webpack_require__(519), toNumber = __webpack_require__(243), FUNC_ERROR_TEXT = "Expected a function", nativeMax = Math.max, nativeMin = Math.min;
+ var isObject = __webpack_require__(31), now = __webpack_require__(521), toNumber = __webpack_require__(244), FUNC_ERROR_TEXT = "Expected a function", nativeMax = Math.max, nativeMin = Math.min;
module.exports = debounce;
}, function(module, exports) {
var global = module.exports = "undefined" != typeof window && window.Math == Math ? window : "undefined" != typeof self && self.Math == Math ? self : Function("return this")();
@@ -6795,14 +6800,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function isString(value) {
return "string" == typeof value || !isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag;
}
- var baseGetTag = __webpack_require__(42), isArray = __webpack_require__(12), isObjectLike = __webpack_require__(36), stringTag = "[object String]";
+ var baseGetTag = __webpack_require__(41), isArray = __webpack_require__(11), isObjectLike = __webpack_require__(36), stringTag = "[object String]";
module.exports = isString;
}, function(module, exports, __webpack_require__) {
function get(object, path, defaultValue) {
var result = null == object ? void 0 : baseGet(object, path);
return void 0 === result ? defaultValue : result;
}
- var baseGet = __webpack_require__(246);
+ var baseGet = __webpack_require__(247);
module.exports = get;
}, function(module, exports, __webpack_require__) {
function isKey(value, object) {
@@ -6810,7 +6815,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var type = typeof value;
return !("number" != type && "symbol" != type && "boolean" != type && null != value && !isSymbol(value)) || (reIsPlainProp.test(value) || !reIsDeepProp.test(value) || null != object && value in Object(object));
}
- var isArray = __webpack_require__(12), isSymbol = __webpack_require__(62), reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/;
+ var isArray = __webpack_require__(11), isSymbol = __webpack_require__(61), reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/;
module.exports = isKey;
}, function(module, exports, __webpack_require__) {
function MapCache(entries) {
@@ -6820,7 +6825,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.set(entry[0], entry[1]);
}
}
- var mapCacheClear = __webpack_require__(559), mapCacheDelete = __webpack_require__(575), mapCacheGet = __webpack_require__(577), mapCacheHas = __webpack_require__(578), mapCacheSet = __webpack_require__(579);
+ var mapCacheClear = __webpack_require__(561), mapCacheDelete = __webpack_require__(577), mapCacheGet = __webpack_require__(579), mapCacheHas = __webpack_require__(580), mapCacheSet = __webpack_require__(581);
MapCache.prototype.clear = mapCacheClear, MapCache.prototype.delete = mapCacheDelete,
MapCache.prototype.get = mapCacheGet, MapCache.prototype.has = mapCacheHas, MapCache.prototype.set = mapCacheSet,
module.exports = MapCache;
@@ -6830,13 +6835,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
module.exports = eq;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), root = __webpack_require__(32), Map = getNative(root, "Map");
+ var getNative = __webpack_require__(52), root = __webpack_require__(32), Map = getNative(root, "Map");
module.exports = Map;
}, function(module, exports, __webpack_require__) {
function isNumber(value) {
return "number" == typeof value || isObjectLike(value) && baseGetTag(value) == numberTag;
}
- var baseGetTag = __webpack_require__(42), isObjectLike = __webpack_require__(36), numberTag = "[object Number]";
+ var baseGetTag = __webpack_require__(41), isObjectLike = __webpack_require__(36), numberTag = "[object Number]";
module.exports = isNumber;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -6858,7 +6863,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__DefaultLegendContent__ = __webpack_require__(582), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__DefaultLegendContent__ = __webpack_require__(584), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -6979,7 +6984,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "getWithHeight",
value: function(item, chartWidth) {
var layout = item.props.layout;
- return "vertical" === layout && Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(item.props.height) ? {
+ return "vertical" === layout && Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(item.props.height) ? {
height: item.props.height
} : "horizontal" === layout ? {
width: item.props.width || chartWidth
@@ -7042,7 +7047,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_0_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_react__), __WEBPACK_IMPORTED_MODULE_1_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_1_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_prop_types__), __WEBPACK_IMPORTED_MODULE_2_d3_shape__ = __webpack_require__(173), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_0_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_react__), __WEBPACK_IMPORTED_MODULE_1_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_1_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_prop_types__), __WEBPACK_IMPORTED_MODULE_2_d3_shape__ = __webpack_require__(172), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -7133,80 +7138,80 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = Symbols;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_1__src_area__ = (__webpack_require__(583), __webpack_require__(249));
+ var __WEBPACK_IMPORTED_MODULE_1__src_area__ = (__webpack_require__(585), __webpack_require__(250));
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_1__src_area__.a;
});
- var __WEBPACK_IMPORTED_MODULE_2__src_line__ = __webpack_require__(174);
+ var __WEBPACK_IMPORTED_MODULE_2__src_line__ = __webpack_require__(173);
__webpack_require__.d(__webpack_exports__, "m", function() {
return __WEBPACK_IMPORTED_MODULE_2__src_line__.a;
});
- var __WEBPACK_IMPORTED_MODULE_8__src_symbol__ = (__webpack_require__(585), __webpack_require__(588),
- __webpack_require__(251), __webpack_require__(252), __webpack_require__(589), __webpack_require__(590));
+ var __WEBPACK_IMPORTED_MODULE_8__src_symbol__ = (__webpack_require__(587), __webpack_require__(590),
+ __webpack_require__(252), __webpack_require__(253), __webpack_require__(591), __webpack_require__(592));
__webpack_require__.d(__webpack_exports__, "t", function() {
return __WEBPACK_IMPORTED_MODULE_8__src_symbol__.a;
});
- var __WEBPACK_IMPORTED_MODULE_9__src_symbol_circle__ = __webpack_require__(254);
+ var __WEBPACK_IMPORTED_MODULE_9__src_symbol_circle__ = __webpack_require__(255);
__webpack_require__.d(__webpack_exports__, "u", function() {
return __WEBPACK_IMPORTED_MODULE_9__src_symbol_circle__.a;
});
- var __WEBPACK_IMPORTED_MODULE_10__src_symbol_cross__ = __webpack_require__(255);
+ var __WEBPACK_IMPORTED_MODULE_10__src_symbol_cross__ = __webpack_require__(256);
__webpack_require__.d(__webpack_exports__, "v", function() {
return __WEBPACK_IMPORTED_MODULE_10__src_symbol_cross__.a;
});
- var __WEBPACK_IMPORTED_MODULE_11__src_symbol_diamond__ = __webpack_require__(256);
+ var __WEBPACK_IMPORTED_MODULE_11__src_symbol_diamond__ = __webpack_require__(257);
__webpack_require__.d(__webpack_exports__, "w", function() {
return __WEBPACK_IMPORTED_MODULE_11__src_symbol_diamond__.a;
});
- var __WEBPACK_IMPORTED_MODULE_12__src_symbol_square__ = __webpack_require__(258);
+ var __WEBPACK_IMPORTED_MODULE_12__src_symbol_square__ = __webpack_require__(259);
__webpack_require__.d(__webpack_exports__, "x", function() {
return __WEBPACK_IMPORTED_MODULE_12__src_symbol_square__.a;
});
- var __WEBPACK_IMPORTED_MODULE_13__src_symbol_star__ = __webpack_require__(257);
+ var __WEBPACK_IMPORTED_MODULE_13__src_symbol_star__ = __webpack_require__(258);
__webpack_require__.d(__webpack_exports__, "y", function() {
return __WEBPACK_IMPORTED_MODULE_13__src_symbol_star__.a;
});
- var __WEBPACK_IMPORTED_MODULE_14__src_symbol_triangle__ = __webpack_require__(259);
+ var __WEBPACK_IMPORTED_MODULE_14__src_symbol_triangle__ = __webpack_require__(260);
__webpack_require__.d(__webpack_exports__, "z", function() {
return __WEBPACK_IMPORTED_MODULE_14__src_symbol_triangle__.a;
});
- var __WEBPACK_IMPORTED_MODULE_15__src_symbol_wye__ = __webpack_require__(260);
+ var __WEBPACK_IMPORTED_MODULE_15__src_symbol_wye__ = __webpack_require__(261);
__webpack_require__.d(__webpack_exports__, "A", function() {
return __WEBPACK_IMPORTED_MODULE_15__src_symbol_wye__.a;
});
- var __WEBPACK_IMPORTED_MODULE_16__src_curve_basisClosed__ = __webpack_require__(591);
+ var __WEBPACK_IMPORTED_MODULE_16__src_curve_basisClosed__ = __webpack_require__(593);
__webpack_require__.d(__webpack_exports__, "c", function() {
return __WEBPACK_IMPORTED_MODULE_16__src_curve_basisClosed__.a;
});
- var __WEBPACK_IMPORTED_MODULE_17__src_curve_basisOpen__ = __webpack_require__(592);
+ var __WEBPACK_IMPORTED_MODULE_17__src_curve_basisOpen__ = __webpack_require__(594);
__webpack_require__.d(__webpack_exports__, "d", function() {
return __WEBPACK_IMPORTED_MODULE_17__src_curve_basisOpen__.a;
});
- var __WEBPACK_IMPORTED_MODULE_18__src_curve_basis__ = __webpack_require__(120);
+ var __WEBPACK_IMPORTED_MODULE_18__src_curve_basis__ = __webpack_require__(119);
__webpack_require__.d(__webpack_exports__, "b", function() {
return __WEBPACK_IMPORTED_MODULE_18__src_curve_basis__.b;
});
- var __WEBPACK_IMPORTED_MODULE_26__src_curve_linearClosed__ = (__webpack_require__(593),
- __webpack_require__(261), __webpack_require__(262), __webpack_require__(121), __webpack_require__(594),
- __webpack_require__(595), __webpack_require__(176), __webpack_require__(596));
+ var __WEBPACK_IMPORTED_MODULE_26__src_curve_linearClosed__ = (__webpack_require__(595),
+ __webpack_require__(262), __webpack_require__(263), __webpack_require__(120), __webpack_require__(596),
+ __webpack_require__(597), __webpack_require__(175), __webpack_require__(598));
__webpack_require__.d(__webpack_exports__, "f", function() {
return __WEBPACK_IMPORTED_MODULE_26__src_curve_linearClosed__.a;
});
- var __WEBPACK_IMPORTED_MODULE_27__src_curve_linear__ = __webpack_require__(118);
+ var __WEBPACK_IMPORTED_MODULE_27__src_curve_linear__ = __webpack_require__(117);
__webpack_require__.d(__webpack_exports__, "e", function() {
return __WEBPACK_IMPORTED_MODULE_27__src_curve_linear__.a;
});
- var __WEBPACK_IMPORTED_MODULE_28__src_curve_monotone__ = __webpack_require__(597);
+ var __WEBPACK_IMPORTED_MODULE_28__src_curve_monotone__ = __webpack_require__(599);
__webpack_require__.d(__webpack_exports__, "g", function() {
return __WEBPACK_IMPORTED_MODULE_28__src_curve_monotone__.a;
}), __webpack_require__.d(__webpack_exports__, "h", function() {
return __WEBPACK_IMPORTED_MODULE_28__src_curve_monotone__.b;
});
- var __WEBPACK_IMPORTED_MODULE_29__src_curve_natural__ = __webpack_require__(598);
+ var __WEBPACK_IMPORTED_MODULE_29__src_curve_natural__ = __webpack_require__(600);
__webpack_require__.d(__webpack_exports__, "i", function() {
return __WEBPACK_IMPORTED_MODULE_29__src_curve_natural__.a;
});
- var __WEBPACK_IMPORTED_MODULE_30__src_curve_step__ = __webpack_require__(599);
+ var __WEBPACK_IMPORTED_MODULE_30__src_curve_step__ = __webpack_require__(601);
__webpack_require__.d(__webpack_exports__, "j", function() {
return __WEBPACK_IMPORTED_MODULE_30__src_curve_step__.a;
}), __webpack_require__.d(__webpack_exports__, "k", function() {
@@ -7214,36 +7219,36 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "l", function() {
return __WEBPACK_IMPORTED_MODULE_30__src_curve_step__.c;
});
- var __WEBPACK_IMPORTED_MODULE_31__src_stack__ = __webpack_require__(600);
+ var __WEBPACK_IMPORTED_MODULE_31__src_stack__ = __webpack_require__(602);
__webpack_require__.d(__webpack_exports__, "n", function() {
return __WEBPACK_IMPORTED_MODULE_31__src_stack__.a;
});
- var __WEBPACK_IMPORTED_MODULE_32__src_offset_expand__ = __webpack_require__(601);
+ var __WEBPACK_IMPORTED_MODULE_32__src_offset_expand__ = __webpack_require__(603);
__webpack_require__.d(__webpack_exports__, "o", function() {
return __WEBPACK_IMPORTED_MODULE_32__src_offset_expand__.a;
});
- var __WEBPACK_IMPORTED_MODULE_34__src_offset_none__ = (__webpack_require__(602),
- __webpack_require__(81));
+ var __WEBPACK_IMPORTED_MODULE_34__src_offset_none__ = (__webpack_require__(604),
+ __webpack_require__(83));
__webpack_require__.d(__webpack_exports__, "p", function() {
return __WEBPACK_IMPORTED_MODULE_34__src_offset_none__.a;
});
- var __WEBPACK_IMPORTED_MODULE_35__src_offset_silhouette__ = __webpack_require__(603);
+ var __WEBPACK_IMPORTED_MODULE_35__src_offset_silhouette__ = __webpack_require__(605);
__webpack_require__.d(__webpack_exports__, "q", function() {
return __WEBPACK_IMPORTED_MODULE_35__src_offset_silhouette__.a;
});
- var __WEBPACK_IMPORTED_MODULE_36__src_offset_wiggle__ = __webpack_require__(604);
+ var __WEBPACK_IMPORTED_MODULE_36__src_offset_wiggle__ = __webpack_require__(606);
__webpack_require__.d(__webpack_exports__, "r", function() {
return __WEBPACK_IMPORTED_MODULE_36__src_offset_wiggle__.a;
});
- var __WEBPACK_IMPORTED_MODULE_40__src_order_none__ = (__webpack_require__(177),
- __webpack_require__(605), __webpack_require__(606), __webpack_require__(82));
+ var __WEBPACK_IMPORTED_MODULE_40__src_order_none__ = (__webpack_require__(176),
+ __webpack_require__(607), __webpack_require__(608), __webpack_require__(84));
__webpack_require__.d(__webpack_exports__, "s", function() {
return __WEBPACK_IMPORTED_MODULE_40__src_order_none__.a;
});
- __webpack_require__(607);
+ __webpack_require__(609);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_2__curve_linear__ = __webpack_require__(118), __WEBPACK_IMPORTED_MODULE_3__point__ = __webpack_require__(175);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(81), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(53), __WEBPACK_IMPORTED_MODULE_2__curve_linear__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_3__point__ = __webpack_require__(174);
__webpack_exports__.a = function() {
function line(data) {
var i, d, buffer, n = data.length, defined0 = !1;
@@ -7297,7 +7302,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this._context = context, this._alpha = alpha;
}
__webpack_exports__.a = point;
- var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(80), __WEBPACK_IMPORTED_MODULE_1__cardinal__ = __webpack_require__(121);
+ var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(82), __WEBPACK_IMPORTED_MODULE_1__cardinal__ = __webpack_require__(120);
CatmullRom.prototype = {
areaStart: function() {
this._line = 0;
@@ -7360,7 +7365,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return s;
}
__webpack_exports__.b = sum;
- var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(82);
+ var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(84);
__webpack_exports__.a = function(series) {
var sums = series.map(sum);
return Object(__WEBPACK_IMPORTED_MODULE_0__none__.a)(series).sort(function(a, b) {
@@ -7371,16 +7376,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function baseIsEqual(value, other, bitmask, customizer, stack) {
return value === other || (null == value || null == other || !isObjectLike(value) && !isObjectLike(other) ? value !== value && other !== other : baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack));
}
- var baseIsEqualDeep = __webpack_require__(608), isObjectLike = __webpack_require__(36);
+ var baseIsEqualDeep = __webpack_require__(610), isObjectLike = __webpack_require__(36);
module.exports = baseIsEqual;
}, function(module, exports, __webpack_require__) {
function keys(object) {
return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object);
}
- var arrayLikeKeys = __webpack_require__(626), baseKeys = __webpack_require__(632), isArrayLike = __webpack_require__(83);
+ var arrayLikeKeys = __webpack_require__(628), baseKeys = __webpack_require__(634), isArrayLike = __webpack_require__(85);
module.exports = keys;
}, function(module, exports, __webpack_require__) {
- var baseIsArguments = __webpack_require__(628), isObjectLike = __webpack_require__(36), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty, propertyIsEnumerable = objectProto.propertyIsEnumerable, isArguments = baseIsArguments(function() {
+ var baseIsArguments = __webpack_require__(630), isObjectLike = __webpack_require__(36), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty, propertyIsEnumerable = objectProto.propertyIsEnumerable, isArguments = baseIsArguments(function() {
return arguments;
}()) ? baseIsArguments : function(value) {
return isObjectLike(value) && hasOwnProperty.call(value, "callee") && !propertyIsEnumerable.call(value, "callee");
@@ -7388,7 +7393,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = isArguments;
}, function(module, exports) {
function isIndex(value, length) {
- return !!(length = null == length ? MAX_SAFE_INTEGER : length) && ("number" == typeof value || reIsUint.test(value)) && value > -1 && value % 1 == 0 && value < length;
+ var type = typeof value;
+ return !!(length = null == length ? MAX_SAFE_INTEGER : length) && ("number" == type || "symbol" != type && reIsUint.test(value)) && value > -1 && value % 1 == 0 && value < length;
}
var MAX_SAFE_INTEGER = 9007199254740991, reIsUint = /^(?:0|[1-9]\d*)$/;
module.exports = isIndex;
@@ -7461,8 +7467,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
try {
var measurementSpan = document.getElementById("recharts_measurement_span");
measurementSpan || (measurementSpan = document.createElement("span"), measurementSpan.setAttribute("id", "recharts_measurement_span"),
- document.body.appendChild(measurementSpan)), measurementSpan.setAttribute("style", getStyleString(_extends({}, SPAN_STYLE, style))),
- measurementSpan.textContent = str;
+ document.body.appendChild(measurementSpan));
+ var measurementSpanStyle = _extends({}, SPAN_STYLE, style);
+ Object.keys(measurementSpanStyle).map(function(styleKey) {
+ return measurementSpan.style[styleKey] = measurementSpanStyle[styleKey], styleKey;
+ }), measurementSpan.textContent = str;
var rect = measurementSpan.getBoundingClientRect(), result = {
width: rect.width,
height: rect.height
@@ -7493,7 +7502,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(86);
+ var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(87);
__webpack_exports__.a = function(values, p, valueof) {
if (null == valueof && (valueof = __WEBPACK_IMPORTED_MODULE_0__number__.a), n = values.length) {
if ((p = +p) <= 0 || n < 2) return +valueof(values[0], 0, values);
@@ -7570,7 +7579,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, __webpack_exports__.a = map;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1__rgb__ = __webpack_require__(302), __WEBPACK_IMPORTED_MODULE_2__array__ = __webpack_require__(305), __WEBPACK_IMPORTED_MODULE_3__date__ = __webpack_require__(306), __WEBPACK_IMPORTED_MODULE_4__number__ = __webpack_require__(125), __WEBPACK_IMPORTED_MODULE_5__object__ = __webpack_require__(307), __WEBPACK_IMPORTED_MODULE_6__string__ = __webpack_require__(308), __WEBPACK_IMPORTED_MODULE_7__constant__ = __webpack_require__(304);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1__rgb__ = __webpack_require__(307), __WEBPACK_IMPORTED_MODULE_2__array__ = __webpack_require__(310), __WEBPACK_IMPORTED_MODULE_3__date__ = __webpack_require__(311), __WEBPACK_IMPORTED_MODULE_4__number__ = __webpack_require__(124), __WEBPACK_IMPORTED_MODULE_5__object__ = __webpack_require__(312), __WEBPACK_IMPORTED_MODULE_6__string__ = __webpack_require__(313), __WEBPACK_IMPORTED_MODULE_7__constant__ = __webpack_require__(309);
__webpack_exports__.a = function(a, b) {
var c, t = typeof b;
return null == b || "boolean" === t ? Object(__WEBPACK_IMPORTED_MODULE_7__constant__.a)(b) : ("number" === t ? __WEBPACK_IMPORTED_MODULE_4__number__.a : "string" === t ? (c = Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.a)(b)) ? (b = c,
@@ -7628,7 +7637,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return brighter;
}), __webpack_exports__.e = color, __webpack_exports__.h = rgbConvert, __webpack_exports__.g = rgb,
__webpack_exports__.b = Rgb, __webpack_exports__.f = hsl;
- var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(189), darker = .7, brighter = 1 / darker, reI = "\\s*([+-]?\\d+)\\s*", reN = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)\\s*", reP = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)%\\s*", reHex3 = /^#([0-9a-f]{3})$/, reHex6 = /^#([0-9a-f]{6})$/, reRgbInteger = new RegExp("^rgb\\(" + [ reI, reI, reI ] + "\\)$"), reRgbPercent = new RegExp("^rgb\\(" + [ reP, reP, reP ] + "\\)$"), reRgbaInteger = new RegExp("^rgba\\(" + [ reI, reI, reI, reN ] + "\\)$"), reRgbaPercent = new RegExp("^rgba\\(" + [ reP, reP, reP, reN ] + "\\)$"), reHslPercent = new RegExp("^hsl\\(" + [ reN, reP, reP ] + "\\)$"), reHslaPercent = new RegExp("^hsla\\(" + [ reN, reP, reP, reN ] + "\\)$"), named = {
+ var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(188), darker = .7, brighter = 1 / darker, reI = "\\s*([+-]?\\d+)\\s*", reN = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)\\s*", reP = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)%\\s*", reHex3 = /^#([0-9a-f]{3})$/, reHex6 = /^#([0-9a-f]{6})$/, reRgbInteger = new RegExp("^rgb\\(" + [ reI, reI, reI ] + "\\)$"), reRgbPercent = new RegExp("^rgb\\(" + [ reP, reP, reP ] + "\\)$"), reRgbaInteger = new RegExp("^rgba\\(" + [ reI, reI, reI, reN ] + "\\)$"), reRgbaPercent = new RegExp("^rgba\\(" + [ reP, reP, reP, reN ] + "\\)$"), reHslPercent = new RegExp("^hsl\\(" + [ reN, reP, reP ] + "\\)$"), reHslaPercent = new RegExp("^hsla\\(" + [ reN, reP, reP, reN ] + "\\)$"), named = {
aliceblue: 15792383,
antiquewhite: 16444375,
aqua: 65535,
@@ -7937,7 +7946,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "c", function() {
return utcParse;
});
- var locale, timeFormat, timeParse, utcFormat, utcParse, __WEBPACK_IMPORTED_MODULE_0__locale__ = __webpack_require__(318);
+ var locale, timeFormat, timeParse, utcFormat, utcParse, __WEBPACK_IMPORTED_MODULE_0__locale__ = __webpack_require__(323);
!function(definition) {
locale = Object(__WEBPACK_IMPORTED_MODULE_0__locale__.a)(definition), timeFormat = locale.format,
timeParse = locale.parse, utcFormat = locale.utcFormat, utcParse = locale.utcParse;
@@ -8051,7 +8060,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_5_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react_smooth__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_9__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_10__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_11__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_12__ErrorBar__ = __webpack_require__(91), __WEBPACK_IMPORTED_MODULE_13__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_14__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_15__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_5_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react_smooth__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_9__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_10__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_11__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_12__ErrorBar__ = __webpack_require__(92), __WEBPACK_IMPORTED_MODULE_13__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_14__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_15__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -8078,7 +8087,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_this.state = {
isAnimationFinished: !0,
totalLength: 0
- }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.j)("recharts-line-"),
+ }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.k)("recharts-line-"),
_this.cachePrevData = function(points) {
_this.setState({
prevPoints: points
@@ -8243,13 +8252,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (prevPoints) {
var stepData = points.map(function(entry, index) {
if (prevPoints[index]) {
- var prev = prevPoints[index], _interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(prev.x, entry.x), _interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(prev.y, entry.y);
+ var prev = prevPoints[index], _interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(prev.x, entry.x), _interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(prev.y, entry.y);
return _extends({}, entry, {
x: _interpolatorX(t),
y: _interpolatorY(t)
});
}
- var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(2 * width, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(height / 2, entry.y);
+ var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(2 * width, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(height / 2, entry.y);
return _extends({}, entry, {
x: interpolatorX(t),
y: interpolatorY(t)
@@ -8257,7 +8266,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
return _this3.renderCurveStatically(stepData, needClip);
}
- var interpolator = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(0, totalLength), curLength = interpolator(t), currentStrokeDasharray = void 0;
+ var interpolator = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(0, totalLength), curLength = interpolator(t), currentStrokeDasharray = void 0;
if (strokeDasharray) {
var lines = strokeDasharray.split(/[,\s]+/gim).map(function(num) {
return parseFloat(num);
@@ -8397,7 +8406,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_8_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_8_react_smooth__), __WEBPACK_IMPORTED_MODULE_9__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_10__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_11__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_12__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_13__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_14__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_15__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_16__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__ = __webpack_require__(116), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_5_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_5_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react__), __WEBPACK_IMPORTED_MODULE_6_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_6_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_prop_types__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_8_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_8_react_smooth__), __WEBPACK_IMPORTED_MODULE_9__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_10__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_11__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_12__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_13__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_14__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_15__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_16__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -8423,7 +8432,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _temp = _this = _possibleConstructorReturn(this, (_ref = Area.__proto__ || Object.getPrototypeOf(Area)).call.apply(_ref, [ this ].concat(args))),
_this.state = {
isAnimationFinished: !0
- }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.j)("recharts-area-"),
+ }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.k)("recharts-area-"),
_this.cachePrevData = function(points, baseLine) {
_this.setState({
prevPoints: points,
@@ -8476,9 +8485,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var _props3 = this.props, baseLine = _props3.baseLine, points = _props3.points, strokeWidth = _props3.strokeWidth, startX = points[0].x, endX = points[points.length - 1].x, width = alpha * Math.abs(startX - endX), maxY = Math.max.apply(null, points.map(function(entry) {
return entry.y || 0;
}));
- return Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(baseLine) ? maxY = Math.max(baseLine, maxY) : baseLine && __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default()(baseLine) && baseLine.length && (maxY = Math.max(Math.max.apply(null, baseLine.map(function(entry) {
+ return Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(baseLine) ? maxY = Math.max(baseLine, maxY) : baseLine && __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default()(baseLine) && baseLine.length && (maxY = Math.max(Math.max.apply(null, baseLine.map(function(entry) {
return entry.y || 0;
- })), maxY)), Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(maxY) ? __WEBPACK_IMPORTED_MODULE_5_react___default.a.createElement("rect", {
+ })), maxY)), Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(maxY) ? __WEBPACK_IMPORTED_MODULE_5_react___default.a.createElement("rect", {
x: startX < endX ? startX : startX - width,
y: 0,
width: width,
@@ -8491,9 +8500,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var _props4 = this.props, baseLine = _props4.baseLine, points = _props4.points, strokeWidth = _props4.strokeWidth, startY = points[0].y, endY = points[points.length - 1].y, height = alpha * Math.abs(startY - endY), maxX = Math.max.apply(null, points.map(function(entry) {
return entry.x || 0;
}));
- return Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(baseLine) ? maxX = Math.max(baseLine, maxX) : baseLine && __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default()(baseLine) && baseLine.length && (maxX = Math.max(Math.max.apply(null, baseLine.map(function(entry) {
+ return Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(baseLine) ? maxX = Math.max(baseLine, maxX) : baseLine && __WEBPACK_IMPORTED_MODULE_4_lodash_isArray___default()(baseLine) && baseLine.length && (maxX = Math.max(Math.max.apply(null, baseLine.map(function(entry) {
return entry.x || 0;
- })), maxX)), Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(maxX) ? __WEBPACK_IMPORTED_MODULE_5_react___default.a.createElement("rect", {
+ })), maxX)), Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(maxX) ? __WEBPACK_IMPORTED_MODULE_5_react___default.a.createElement("rect", {
x: 0,
y: startY < endY ? startY : startY - height,
width: maxX + (strokeWidth || 1),
@@ -8555,7 +8564,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (prevPoints) {
var stepPoints = points.map(function(entry, index) {
if (prevPoints[index]) {
- var prev = prevPoints[index], interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prev.y, entry.y);
+ var prev = prevPoints[index], interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prev.y, entry.y);
return _extends({}, entry, {
x: interpolatorX(t),
y: interpolatorY(t)
@@ -8563,14 +8572,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return entry;
}), stepBaseLine = void 0;
- if (Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(baseLine)) {
- stepBaseLine = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prevBaseLine, baseLine)(t);
+ if (Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(baseLine)) {
+ stepBaseLine = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prevBaseLine, baseLine)(t);
} else if (__WEBPACK_IMPORTED_MODULE_3_lodash_isNil___default()(baseLine) || __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN___default()(baseLine)) {
- var _interpolator = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prevBaseLine, 0);
+ var _interpolator = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prevBaseLine, 0);
stepBaseLine = _interpolator(t);
} else stepBaseLine = baseLine.map(function(entry, index) {
if (prevBaseLine[index]) {
- var prev = prevBaseLine[index], interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.e)(prev.y, entry.y);
+ var prev = prevBaseLine[index], interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.f)(prev.y, entry.y);
return _extends({}, entry, {
x: interpolatorX(t),
y: interpolatorY(t)
@@ -8663,7 +8672,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
animationEasing: "ease"
}, _class2.getBaseValue = function(props, xAxis, yAxis) {
var layout = props.layout, baseValue = props.baseValue;
- if (Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.g)(baseValue)) return baseValue;
+ if (Object(__WEBPACK_IMPORTED_MODULE_15__util_DataUtils__.h)(baseValue)) return baseValue;
var numericAxis = "horizontal" === layout ? yAxis : xAxis, domain = numericAxis.scale.domain();
if ("number" === numericAxis.type) {
var max = Math.max(domain[0], domain[1]), min = Math.min(domain[0], domain[1]);
@@ -8742,7 +8751,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_1_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_3_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_7_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_react_smooth__), __WEBPACK_IMPORTED_MODULE_8__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__ErrorBar__ = __webpack_require__(91), __WEBPACK_IMPORTED_MODULE_11__component_Cell__ = __webpack_require__(85), __WEBPACK_IMPORTED_MODULE_12__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_13__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_14__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_15__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_16__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_0_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_1_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_1_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_3_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_7_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_react_smooth__), __WEBPACK_IMPORTED_MODULE_8__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__ErrorBar__ = __webpack_require__(92), __WEBPACK_IMPORTED_MODULE_11__component_Cell__ = __webpack_require__(86), __WEBPACK_IMPORTED_MODULE_12__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_13__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_14__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_15__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_16__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -8768,7 +8777,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _temp = _this = _possibleConstructorReturn(this, (_ref = Bar.__proto__ || Object.getPrototypeOf(Bar)).call.apply(_ref, [ this ].concat(args))),
_this.state = {
isAnimationFinished: !1
- }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.j)("recharts-bar-"),
+ }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.k)("recharts-bar-"),
_this.cachePrevData = function(data) {
_this.setState({
prevData: data
@@ -8832,7 +8841,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var t = _ref2.t, stepData = data.map(function(entry, index) {
var prev = prevData && prevData[index];
if (prev) {
- var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(prev.y, entry.y), interpolatorWidth = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(prev.width, entry.width), interpolatorHeight = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(prev.height, entry.height);
+ var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(prev.x, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(prev.y, entry.y), interpolatorWidth = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(prev.width, entry.width), interpolatorHeight = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(prev.height, entry.height);
return _extends({}, entry, {
x: interpolatorX(t),
y: interpolatorY(t),
@@ -8841,13 +8850,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}
if ("horizontal" === layout) {
- var _interpolatorHeight = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(0, entry.height), h = _interpolatorHeight(t);
+ var _interpolatorHeight = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(0, entry.height), h = _interpolatorHeight(t);
return _extends({}, entry, {
y: entry.y + entry.height - h,
height: h
});
}
- var interpolator = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.e)(0, entry.width), w = interpolator(t);
+ var interpolator = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.f)(0, entry.width), w = interpolator(t);
return _extends({}, entry, {
width: w
});
@@ -8998,7 +9007,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
width: width,
height: yAxis.height
}, Math.abs(minPointSize) > 0 && Math.abs(height) < Math.abs(minPointSize)) {
- var delta = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.i)(height || minPointSize) * (Math.abs(minPointSize) - Math.abs(height));
+ var delta = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.j)(height || minPointSize) * (Math.abs(minPointSize) - Math.abs(height));
y -= delta, height += delta;
}
} else if (x = xAxis.scale(value[0]), y = Object(__WEBPACK_IMPORTED_MODULE_16__util_ChartUtils__.k)({
@@ -9014,7 +9023,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
width: xAxis.width,
height: height
}, Math.abs(minPointSize) > 0 && Math.abs(width) < Math.abs(minPointSize)) {
- var _delta = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.i)(width || minPointSize) * (Math.abs(minPointSize) - Math.abs(width));
+ var _delta = Object(__WEBPACK_IMPORTED_MODULE_14__util_DataUtils__.j)(width || minPointSize) * (Math.abs(minPointSize) - Math.abs(width));
width += _delta;
}
return _extends({}, entry, {
@@ -9053,7 +9062,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_5_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react_smooth__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_10__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_11__ZAxis__ = __webpack_require__(131), __WEBPACK_IMPORTED_MODULE_12__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_13__shape_Symbols__ = __webpack_require__(172), __WEBPACK_IMPORTED_MODULE_14__ErrorBar__ = __webpack_require__(91), __WEBPACK_IMPORTED_MODULE_15__component_Cell__ = __webpack_require__(85), __WEBPACK_IMPORTED_MODULE_16__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_17__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_2_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_5_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_react_smooth__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_10__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_11__ZAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_12__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_13__shape_Symbols__ = __webpack_require__(171), __WEBPACK_IMPORTED_MODULE_14__ErrorBar__ = __webpack_require__(92), __WEBPACK_IMPORTED_MODULE_15__component_Cell__ = __webpack_require__(86), __WEBPACK_IMPORTED_MODULE_16__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_17__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -9091,7 +9100,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_this.setState({
isAnimationFinished: !1
});
- }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.j)("recharts-scatter-"),
+ }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.k)("recharts-scatter-"),
_ret = _temp, _possibleConstructorReturn(_this, _ret);
}
return _inherits(Scatter, _Component), _createClass(Scatter, [ {
@@ -9144,14 +9153,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var t = _ref2.t, stepData = points.map(function(entry, index) {
var prev = prevPoints && prevPoints[index];
if (prev) {
- var interpolatorCx = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.e)(prev.cx, entry.cx), interpolatorCy = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.e)(prev.cy, entry.cy), interpolatorSize = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.e)(prev.size, entry.size);
+ var interpolatorCx = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.f)(prev.cx, entry.cx), interpolatorCy = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.f)(prev.cy, entry.cy), interpolatorSize = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.f)(prev.size, entry.size);
return _extends({}, entry, {
cx: interpolatorCx(t),
cy: interpolatorCy(t),
size: interpolatorSize(t)
});
}
- var interpolator = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.e)(0, entry.size);
+ var interpolator = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.f)(0, entry.size);
return _extends({}, entry, {
size: interpolator(t)
});
@@ -9202,12 +9211,23 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "renderLine",
value: function() {
var _props6 = this.props, points = _props6.points, line = _props6.line, lineType = _props6.lineType, lineJointType = _props6.lineJointType, scatterProps = Object(__WEBPACK_IMPORTED_MODULE_10__util_ReactUtils__.k)(this.props), customLineProps = Object(__WEBPACK_IMPORTED_MODULE_10__util_ReactUtils__.k)(line), linePoints = void 0, lineItem = void 0;
- "joint" === lineType && (linePoints = points.map(function(entry) {
+ if ("joint" === lineType) linePoints = points.map(function(entry) {
return {
x: entry.cx,
y: entry.cy
};
- }));
+ }); else if ("fitting" === lineType) {
+ var _getLinearRegression = Object(__WEBPACK_IMPORTED_MODULE_16__util_DataUtils__.c)(points), xmin = _getLinearRegression.xmin, xmax = _getLinearRegression.xmax, a = _getLinearRegression.a, b = _getLinearRegression.b, linearExp = function(x) {
+ return a * x + b;
+ };
+ linePoints = [ {
+ x: xmin,
+ y: linearExp(xmin)
+ }, {
+ x: xmax,
+ y: linearExp(xmax)
+ } ];
+ }
var lineProps = _extends({}, scatterProps, {
fill: "none",
stroke: scatterProps && scatterProps.fill
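[Editor's note on the hunk above] Aside from the module-id renumbering that dominates this part of the bundle diff, the hunk ending here is the one behavioural change: Scatter's renderLine gains a "fitting" lineType that asks a DataUtils helper (minified as export .c) for { xmin, xmax, a, b } and draws the straight line y = a*x + b between xmin and xmax. The helper's body is not part of this diff; the ES5 sketch below is an assumed ordinary least-squares fit with the same output shape, reading cx/cy off the scatter points (also an assumption), and is illustrative only, not the library's actual implementation.

    // Assumed least-squares fit over scatter points; returns the same shape
    // ({ xmin, xmax, a, b }) that the new "fitting" branch destructures above.
    function getLinearRegressionSketch(points) {
        if (!points || !points.length) return null;
        var n = points.length, xsum = 0, ysum = 0, xysum = 0, xxsum = 0;
        var xmin = Infinity, xmax = -Infinity;
        for (var i = 0; i < n; i++) {
            // cx/cy are assumed here; the minified branch passes the raw points through.
            var x = points[i].cx || 0, y = points[i].cy || 0;
            xsum += x; ysum += y; xysum += x * y; xxsum += x * x;
            if (x < xmin) xmin = x;
            if (x > xmax) xmax = x;
        }
        // Slope a and intercept b of the fitted line y = a*x + b.
        var denom = n * xxsum - xsum * xsum;
        var a = denom ? (n * xysum - xsum * ysum) / denom : 0;
        var b = (ysum - a * xsum) / n;
        return { xmin: xmin, xmax: xmax, a: a, b: b };
    }

    // Usage mirroring the new branch: the fitted line is rendered from just its two endpoints.
    var fit = getLinearRegressionSketch([{ cx: 0, cy: 1 }, { cx: 1, cy: 3 }, { cx: 2, cy: 5 }]);
    var linePoints = [
        { x: fit.xmin, y: fit.a * fit.xmin + fit.b },
        { x: fit.xmax, y: fit.a * fit.xmax + fit.b }
    ];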
@@ -9407,7 +9427,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function containsNode(outerNode, innerNode) {
return !(!outerNode || !innerNode) && (outerNode === innerNode || !isTextNode(outerNode) && (isTextNode(innerNode) ? containsNode(outerNode, innerNode.parentNode) : "contains" in outerNode ? outerNode.contains(innerNode) : !!outerNode.compareDocumentPosition && !!(16 & outerNode.compareDocumentPosition(innerNode))));
}
- var isTextNode = __webpack_require__(335);
+ var isTextNode = __webpack_require__(340);
module.exports = containsNode;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -9419,7 +9439,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = focusNode;
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(343),
+ default: __webpack_require__(348),
__esModule: !0
};
}, function(module, exports) {
@@ -9428,8 +9448,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return it;
};
}, function(module, exports, __webpack_require__) {
- module.exports = !__webpack_require__(25) && !__webpack_require__(49)(function() {
- return 7 != Object.defineProperty(__webpack_require__(208)("div"), "a", {
+ module.exports = !__webpack_require__(25) && !__webpack_require__(48)(function() {
+ return 7 != Object.defineProperty(__webpack_require__(207)("div"), "a", {
get: function() {
return 7;
}
@@ -9441,7 +9461,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return is ? document.createElement(it) : {};
};
}, function(module, exports, __webpack_require__) {
- var has = __webpack_require__(50), toIObject = __webpack_require__(58), arrayIndexOf = __webpack_require__(346)(!1), IE_PROTO = __webpack_require__(139)("IE_PROTO");
+ var has = __webpack_require__(49), toIObject = __webpack_require__(57), arrayIndexOf = __webpack_require__(351)(!1), IE_PROTO = __webpack_require__(138)("IE_PROTO");
module.exports = function(object, names) {
var key, O = toIObject(object), i = 0, result = [];
for (key in O) key != IE_PROTO && has(O, key) && result.push(key);
@@ -9449,12 +9469,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return result;
};
}, function(module, exports, __webpack_require__) {
- var has = __webpack_require__(50), toObject = __webpack_require__(59), IE_PROTO = __webpack_require__(139)("IE_PROTO"), ObjectProto = Object.prototype;
+ var has = __webpack_require__(49), toObject = __webpack_require__(58), IE_PROTO = __webpack_require__(138)("IE_PROTO"), ObjectProto = Object.prototype;
module.exports = Object.getPrototypeOf || function(O) {
return O = toObject(O), has(O, IE_PROTO) ? O[IE_PROTO] : "function" == typeof O.constructor && O instanceof O.constructor ? O.constructor.prototype : O instanceof Object ? ObjectProto : null;
};
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(19), core = __webpack_require__(17), fails = __webpack_require__(49);
+ var $export = __webpack_require__(19), core = __webpack_require__(17), fails = __webpack_require__(48);
module.exports = function(KEY, exec) {
var fn = (core.Object || {})[KEY] || Object[KEY], exp = {};
exp[KEY] = exec(fn), $export($export.S + $export.F * fails(function() {
@@ -9464,7 +9484,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(40);
}, function(module, exports, __webpack_require__) {
- __webpack_require__(358);
+ __webpack_require__(363);
for (var global = __webpack_require__(24), hide = __webpack_require__(40), Iterators = __webpack_require__(73), TO_STRING_TAG = __webpack_require__(21)("toStringTag"), DOMIterables = "CSSRuleList,CSSStyleDeclaration,CSSValueList,ClientRectList,DOMRectList,DOMStringList,DOMTokenList,DataTransferItemList,FileList,HTMLAllCollection,HTMLCollection,HTMLFormElement,HTMLSelectElement,MediaList,MimeTypeArray,NamedNodeMap,NodeList,PaintRequestList,Plugin,PluginArray,SVGLengthList,SVGNumberList,SVGPathSegList,SVGPointList,SVGStringList,SVGTransformList,SourceBufferList,StyleSheetList,TextTrackCueList,TextTrackList,TouchList".split(","), i = 0; i < DOMIterables.length; i++) {
var NAME = DOMIterables[i], Collection = global[NAME], proto = Collection && Collection.prototype;
proto && !proto[TO_STRING_TAG] && hide(proto, TO_STRING_TAG, NAME), Iterators[NAME] = Iterators.Array;
@@ -9477,17 +9497,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var cof = __webpack_require__(136);
+ var cof = __webpack_require__(135);
module.exports = Array.isArray || function(arg) {
return "Array" == cof(arg);
};
}, function(module, exports, __webpack_require__) {
- var $keys = __webpack_require__(209), hiddenKeys = __webpack_require__(141).concat("length", "prototype");
+ var $keys = __webpack_require__(208), hiddenKeys = __webpack_require__(140).concat("length", "prototype");
exports.f = Object.getOwnPropertyNames || function(O) {
return $keys(O, hiddenKeys);
};
}, function(module, exports, __webpack_require__) {
- var pIE = __webpack_require__(99), createDesc = __webpack_require__(71), toIObject = __webpack_require__(58), toPrimitive = __webpack_require__(134), has = __webpack_require__(50), IE8_DOM_DEFINE = __webpack_require__(207), gOPD = Object.getOwnPropertyDescriptor;
+ var pIE = __webpack_require__(100), createDesc = __webpack_require__(71), toIObject = __webpack_require__(57), toPrimitive = __webpack_require__(133), has = __webpack_require__(49), IE8_DOM_DEFINE = __webpack_require__(206), gOPD = Object.getOwnPropertyDescriptor;
exports.f = __webpack_require__(25) ? gOPD : function(O, P) {
if (O = toIObject(O), P = toPrimitive(P, !0), IE8_DOM_DEFINE) try {
return gOPD(O, P);
@@ -9495,6 +9515,27 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (has(O, P)) return createDesc(!pIE.f.call(O, P), O[P]);
};
}, function(module, exports) {}, function(module, exports, __webpack_require__) {
+ "use strict";
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ function exactProp(propTypes, componentNameInError) {
+ return (0, _extends4.default)({}, propTypes, (0, _defineProperty3.default)({}, specialProperty, function(props) {
+ var unknownProps = (0, _keys2.default)(props).filter(function(prop) {
+ return !propTypes.hasOwnProperty(prop);
+ });
+ return unknownProps.length > 0 ? new TypeError(componentNameInError + ": unknown props found: " + unknownProps.join(", ") + ". Please remove the unknown properties.") : null;
+ }));
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ }), exports.specialProperty = void 0;
+ var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _extends3 = __webpack_require__(6), _extends4 = _interopRequireDefault(_extends3);
+ exports.default = exactProp;
+ var specialProperty = exports.specialProperty = "exact-prop: ​";
+}, function(module, exports, __webpack_require__) {
var hide = __webpack_require__(40);
module.exports = function(target, src, safe) {
for (var key in src) safe && target[key] ? target[key] = src[key] : hide(target, key, src[key]);
@@ -9506,7 +9547,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return it;
};
}, function(module, exports, __webpack_require__) {
- var anObject = __webpack_require__(48);
+ var anObject = __webpack_require__(47);
module.exports = function(iterator, fn, value, entries) {
try {
return entries ? fn(anObject(value)[0], value[1]) : fn(value);
@@ -9526,7 +9567,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (void 0 != it) return it[ITERATOR] || it["@@iterator"] || Iterators[classof(it)];
};
}, function(module, exports, __webpack_require__) {
- var cof = __webpack_require__(136), TAG = __webpack_require__(21)("toStringTag"), ARG = "Arguments" == cof(function() {
+ var cof = __webpack_require__(135), TAG = __webpack_require__(21)("toStringTag"), ARG = "Arguments" == cof(function() {
return arguments;
}()), tryGet = function(it, key) {
try {
@@ -9568,14 +9609,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.create = exports.createGenerateClassName = exports.sheets = exports.RuleList = exports.SheetsManager = exports.SheetsRegistry = exports.toCssValue = exports.getDynamicStyles = void 0;
- var _getDynamicStyles = __webpack_require__(418);
+ var _getDynamicStyles = __webpack_require__(422);
Object.defineProperty(exports, "getDynamicStyles", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_getDynamicStyles).default;
}
});
- var _toCssValue = __webpack_require__(105);
+ var _toCssValue = __webpack_require__(106);
Object.defineProperty(exports, "toCssValue", {
enumerable: !0,
get: function() {
@@ -9589,7 +9630,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _interopRequireDefault(_SheetsRegistry).default;
}
});
- var _SheetsManager = __webpack_require__(419);
+ var _SheetsManager = __webpack_require__(423);
Object.defineProperty(exports, "SheetsManager", {
enumerable: !0,
get: function() {
@@ -9603,7 +9644,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _interopRequireDefault(_RuleList).default;
}
});
- var _sheets = __webpack_require__(155);
+ var _sheets = __webpack_require__(153);
Object.defineProperty(exports, "sheets", {
enumerable: !0,
get: function() {
@@ -9617,7 +9658,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _interopRequireDefault(_createGenerateClassName).default;
}
});
- var _Jss = __webpack_require__(426), _Jss2 = _interopRequireDefault(_Jss), create = exports.create = function(options) {
+ var _Jss = __webpack_require__(430), _Jss2 = _interopRequireDefault(_Jss), create = exports.create = function(options) {
return new _Jss2.default(options);
};
exports.default = create();
@@ -9687,7 +9728,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _symbolObservable = __webpack_require__(421), _symbolObservable2 = function(obj) {
+ var _symbolObservable = __webpack_require__(425), _symbolObservable2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -9714,8 +9755,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _StyleSheet = __webpack_require__(233), _moduleId = (_interopRequireDefault(_StyleSheet),
- __webpack_require__(425)), _moduleId2 = _interopRequireDefault(_moduleId), env = process.env.NODE_ENV;
+ var _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _StyleSheet = __webpack_require__(233), _moduleId = (_interopRequireDefault(_StyleSheet),
+ __webpack_require__(429)), _moduleId2 = _interopRequireDefault(_moduleId), env = process.env.NODE_ENV;
exports.default = function() {
var ruleCounter = 0, defaultPrefix = "production" === env ? "c" : "";
return function(rule, sheet) {
@@ -9852,7 +9893,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _ButtonBase = __webpack_require__(463);
+ var _ButtonBase = __webpack_require__(466);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -9945,6 +9986,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var alias in aliases) codes[alias] = aliases[alias];
}, function(module, exports, __webpack_require__) {
"use strict";
+ function ownerDocument(node) {
+ return node && node.ownerDocument || document;
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ }), exports.default = ownerDocument, module.exports = exports.default;
+}, function(module, exports, __webpack_require__) {
+ "use strict";
(function(process) {
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
@@ -9981,7 +10030,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
}
return target;
- }, _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _ChildMapping = __webpack_require__(475), values = Object.values || function(obj) {
+ }, _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _ChildMapping = __webpack_require__(478), values = Object.values || function(obj) {
return Object.keys(obj).map(function(k) {
return obj[k];
});
@@ -10081,27 +10130,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var _Icon = __webpack_require__(478);
- Object.defineProperty(exports, "default", {
- enumerable: !0,
- get: function() {
- return _interopRequireDefault(_Icon).default;
- }
- });
-}, function(module, exports, __webpack_require__) {
- "use strict";
function cloneChildrenWithClassName(children, className) {
- return _react.Children.map(children, function(child) {
- return (0, _react.isValidElement)(child) && (0, _react.cloneElement)(child, {
- className: child.props.hasOwnProperty("className") ? child.props.className + " " + className : className
+ return _react2.default.Children.map(children, function(child) {
+ return _react2.default.isValidElement(child) && _react2.default.cloneElement(child, {
+ className: (0, _classnames2.default)(child.props.className, className)
});
});
}
function isMuiElement(element, muiNames) {
- return (0, _react.isValidElement)(element) && -1 !== muiNames.indexOf(element.type.muiName);
+ return _react2.default.isValidElement(element) && -1 !== muiNames.indexOf(element.type.muiName);
}
function isMuiComponent(element, muiNames) {
return -1 !== muiNames.indexOf(element.muiName);
@@ -10110,7 +10147,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
value: !0
}), exports.cloneChildrenWithClassName = cloneChildrenWithClassName, exports.isMuiElement = isMuiElement,
exports.isMuiComponent = isMuiComponent;
- var _react = __webpack_require__(0);
+ var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames);
}, function(module, exports, __webpack_require__) {
"use strict";
function _interopRequireDefault(obj) {
@@ -10121,7 +10158,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _SvgIcon = __webpack_require__(479);
+ var _SvgIcon = __webpack_require__(482);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -10130,8 +10167,25 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, exports, __webpack_require__) {
"use strict";
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ });
+ var _Icon = __webpack_require__(483);
+ Object.defineProperty(exports, "default", {
+ enumerable: !0,
+ get: function() {
+ return _interopRequireDefault(_Icon).default;
+ }
+ });
+}, function(module, exports, __webpack_require__) {
+ "use strict";
exports.__esModule = !0;
- var _setStatic = __webpack_require__(484), _setStatic2 = function(obj) {
+ var _setStatic = __webpack_require__(487), _setStatic2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -10149,7 +10203,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _Grid = __webpack_require__(509);
+ var _Grid = __webpack_require__(513);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -10160,7 +10214,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
(function(global) {
var freeGlobal = "object" == typeof global && global && global.Object === Object && global;
module.exports = freeGlobal;
- }).call(exports, __webpack_require__(51));
+ }).call(exports, __webpack_require__(60));
}, function(module, exports, __webpack_require__) {
function toNumber(value) {
if ("number" == typeof value) return value;
@@ -10174,11 +10228,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var isBinary = reIsBinary.test(value);
return isBinary || reIsOctal.test(value) ? freeParseInt(value.slice(2), isBinary ? 2 : 8) : reIsBadHex.test(value) ? NAN : +value;
}
- var isObject = __webpack_require__(31), isSymbol = __webpack_require__(62), NAN = NaN, reTrim = /^\s+|\s+$/g, reIsBadHex = /^[-+]0x[0-9a-f]+$/i, reIsBinary = /^0b[01]+$/i, reIsOctal = /^0o[0-7]+$/i, freeParseInt = parseInt;
+ var isObject = __webpack_require__(31), isSymbol = __webpack_require__(61), NAN = NaN, reTrim = /^\s+|\s+$/g, reIsBadHex = /^[-+]0x[0-9a-f]+$/i, reIsBinary = /^0b[01]+$/i, reIsOctal = /^0o[0-7]+$/i, freeParseInt = parseInt;
module.exports = toNumber;
}, function(module, exports, __webpack_require__) {
- var dP = __webpack_require__(528), createDesc = __webpack_require__(533);
- module.exports = __webpack_require__(161) ? function(object, key, value) {
+ var dP = __webpack_require__(530), createDesc = __webpack_require__(535);
+ module.exports = __webpack_require__(160) ? function(object, key, value) {
return dP.f(object, key, createDesc(1, value));
} : function(object, key, value) {
return object[key] = value, object;
@@ -10193,13 +10247,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var index = 0, length = path.length; null != object && index < length; ) object = object[toKey(path[index++])];
return index && index == length ? object : void 0;
}
- var castPath = __webpack_require__(247), toKey = __webpack_require__(116);
+ var castPath = __webpack_require__(248), toKey = __webpack_require__(115);
module.exports = baseGet;
}, function(module, exports, __webpack_require__) {
function castPath(value, object) {
return isArray(value) ? value : isKey(value, object) ? [ value ] : stringToPath(toString(value));
}
- var isArray = __webpack_require__(12), isKey = __webpack_require__(166), stringToPath = __webpack_require__(556), toString = __webpack_require__(580);
+ var isArray = __webpack_require__(11), isKey = __webpack_require__(165), stringToPath = __webpack_require__(558), toString = __webpack_require__(582);
module.exports = castPath;
}, function(module, exports) {
function toSource(func) {
@@ -10217,7 +10271,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = toSource;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_2__curve_linear__ = __webpack_require__(118), __WEBPACK_IMPORTED_MODULE_3__line__ = __webpack_require__(174), __WEBPACK_IMPORTED_MODULE_4__point__ = __webpack_require__(175);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(81), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(53), __WEBPACK_IMPORTED_MODULE_2__curve_linear__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_3__line__ = __webpack_require__(173), __WEBPACK_IMPORTED_MODULE_4__point__ = __webpack_require__(174);
__webpack_exports__.a = function() {
function area(data) {
var i, j, k, d, buffer, n = data.length, defined0 = !1, x0z = new Array(n), y0z = new Array(n);
@@ -10285,7 +10339,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "a", function() {
return curveRadialLinear;
}), __webpack_exports__.b = curveRadial;
- var __WEBPACK_IMPORTED_MODULE_0__linear__ = __webpack_require__(118), curveRadialLinear = curveRadial(__WEBPACK_IMPORTED_MODULE_0__linear__.a);
+ var __WEBPACK_IMPORTED_MODULE_0__linear__ = __webpack_require__(117), curveRadialLinear = curveRadial(__WEBPACK_IMPORTED_MODULE_0__linear__.a);
Radial.prototype = {
areaStart: function() {
this._curve.areaStart();
@@ -10312,8 +10366,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, l;
}
__webpack_exports__.a = lineRadial;
- var __WEBPACK_IMPORTED_MODULE_0__curve_radial__ = __webpack_require__(250);
- __webpack_require__(174);
+ var __WEBPACK_IMPORTED_MODULE_0__curve_radial__ = __webpack_require__(251);
+ __webpack_require__(173);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_exports__.a = function(x, y) {
@@ -10327,7 +10381,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var slice = Array.prototype.slice;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(80);
+ var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(82);
__webpack_exports__.a = {
draw: function(context, size) {
var r = Math.sqrt(size / __WEBPACK_IMPORTED_MODULE_0__math__.j);
@@ -10357,7 +10411,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(80), kr = Math.sin(__WEBPACK_IMPORTED_MODULE_0__math__.j / 10) / Math.sin(7 * __WEBPACK_IMPORTED_MODULE_0__math__.j / 10), kx = Math.sin(__WEBPACK_IMPORTED_MODULE_0__math__.m / 10) * kr, ky = -Math.cos(__WEBPACK_IMPORTED_MODULE_0__math__.m / 10) * kr;
+ var __WEBPACK_IMPORTED_MODULE_0__math__ = __webpack_require__(82), kr = Math.sin(__WEBPACK_IMPORTED_MODULE_0__math__.j / 10) / Math.sin(7 * __WEBPACK_IMPORTED_MODULE_0__math__.j / 10), kx = Math.sin(__WEBPACK_IMPORTED_MODULE_0__math__.m / 10) * kr, ky = -Math.cos(__WEBPACK_IMPORTED_MODULE_0__math__.m / 10) * kr;
__webpack_exports__.a = {
draw: function(context, size) {
var r = Math.sqrt(.8908130915292852 * size), x = kx * r, y = ky * r;
@@ -10405,7 +10459,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this._context = context, this._k = (1 - tension) / 6;
}
__webpack_exports__.a = CardinalClosed;
- var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(119), __WEBPACK_IMPORTED_MODULE_1__cardinal__ = __webpack_require__(121);
+ var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(118), __WEBPACK_IMPORTED_MODULE_1__cardinal__ = __webpack_require__(120);
CardinalClosed.prototype = {
areaStart: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
areaEnd: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
@@ -10462,7 +10516,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this._context = context, this._k = (1 - tension) / 6;
}
__webpack_exports__.a = CardinalOpen;
- var __WEBPACK_IMPORTED_MODULE_0__cardinal__ = __webpack_require__(121);
+ var __WEBPACK_IMPORTED_MODULE_0__cardinal__ = __webpack_require__(120);
CardinalOpen.prototype = {
areaStart: function() {
this._line = 0;
@@ -10575,7 +10629,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _AnimateManager = __webpack_require__(640), _AnimateManager2 = _interopRequireDefault(_AnimateManager), _PureRender = __webpack_require__(643), _PureRender2 = _interopRequireDefault(_PureRender), _easing = __webpack_require__(275), _configUpdate = __webpack_require__(662), _configUpdate2 = _interopRequireDefault(_configUpdate), _util = __webpack_require__(123), Animate = (0,
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _AnimateManager = __webpack_require__(642), _AnimateManager2 = _interopRequireDefault(_AnimateManager), _PureRender = __webpack_require__(645), _PureRender2 = _interopRequireDefault(_PureRender), _easing = __webpack_require__(277), _configUpdate = __webpack_require__(664), _configUpdate2 = _interopRequireDefault(_configUpdate), _util = __webpack_require__(122), Animate = (0,
_PureRender2.default)((_temp = _class2 = function(_Component) {
function Animate(props, context) {
_classCallCheck(this, Animate);
@@ -10753,7 +10807,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var data = this.__data__ = new ListCache(entries);
this.size = data.size;
}
- var ListCache = __webpack_require__(112), stackClear = __webpack_require__(609), stackDelete = __webpack_require__(610), stackGet = __webpack_require__(611), stackHas = __webpack_require__(612), stackSet = __webpack_require__(613);
+ var ListCache = __webpack_require__(112), stackClear = __webpack_require__(611), stackDelete = __webpack_require__(612), stackGet = __webpack_require__(613), stackHas = __webpack_require__(614), stackSet = __webpack_require__(615);
Stack.prototype.clear = stackClear, Stack.prototype.delete = stackDelete, Stack.prototype.get = stackGet,
Stack.prototype.has = stackHas, Stack.prototype.set = stackSet, module.exports = Stack;
}, function(module, exports, __webpack_require__) {
@@ -10785,14 +10839,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return stack.delete(array), stack.delete(other), result;
}
- var SetCache = __webpack_require__(266), arraySome = __webpack_require__(616), cacheHas = __webpack_require__(267), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
+ var SetCache = __webpack_require__(267), arraySome = __webpack_require__(618), cacheHas = __webpack_require__(268), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
module.exports = equalArrays;
}, function(module, exports, __webpack_require__) {
function SetCache(values) {
var index = -1, length = null == values ? 0 : values.length;
for (this.__data__ = new MapCache(); ++index < length; ) this.add(values[index]);
}
- var MapCache = __webpack_require__(167), setCacheAdd = __webpack_require__(614), setCacheHas = __webpack_require__(615);
+ var MapCache = __webpack_require__(166), setCacheAdd = __webpack_require__(616), setCacheHas = __webpack_require__(617);
SetCache.prototype.add = SetCache.prototype.push = setCacheAdd, SetCache.prototype.has = setCacheHas,
module.exports = SetCache;
}, function(module, exports) {
@@ -10817,11 +10871,26 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = arrayFilter;
}, function(module, exports, __webpack_require__) {
(function(module) {
- var root = __webpack_require__(32), stubFalse = __webpack_require__(629), freeExports = "object" == typeof exports && exports && !exports.nodeType && exports, freeModule = freeExports && "object" == typeof module && module && !module.nodeType && module, moduleExports = freeModule && freeModule.exports === freeExports, Buffer = moduleExports ? root.Buffer : void 0, nativeIsBuffer = Buffer ? Buffer.isBuffer : void 0, isBuffer = nativeIsBuffer || stubFalse;
+ var root = __webpack_require__(32), stubFalse = __webpack_require__(631), freeExports = "object" == typeof exports && exports && !exports.nodeType && exports, freeModule = freeExports && "object" == typeof module && module && !module.nodeType && module, moduleExports = freeModule && freeModule.exports === freeExports, Buffer = moduleExports ? root.Buffer : void 0, nativeIsBuffer = Buffer ? Buffer.isBuffer : void 0, isBuffer = nativeIsBuffer || stubFalse;
module.exports = isBuffer;
- }).call(exports, __webpack_require__(154)(module));
+ }).call(exports, __webpack_require__(272)(module));
+}, function(module, exports) {
+ module.exports = function(module) {
+ return module.webpackPolyfill || (module.deprecate = function() {}, module.paths = [],
+ module.children || (module.children = []), Object.defineProperty(module, "loaded", {
+ enumerable: !0,
+ get: function() {
+ return module.l;
+ }
+ }), Object.defineProperty(module, "id", {
+ enumerable: !0,
+ get: function() {
+ return module.i;
+ }
+ }), module.webpackPolyfill = 1), module;
+ };
}, function(module, exports, __webpack_require__) {
- var baseIsTypedArray = __webpack_require__(630), baseUnary = __webpack_require__(183), nodeUtil = __webpack_require__(631), nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray, isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray;
+ var baseIsTypedArray = __webpack_require__(632), baseUnary = __webpack_require__(182), nodeUtil = __webpack_require__(633), nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray, isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray;
module.exports = isTypedArray;
}, function(module, exports) {
function overArg(func, transform) {
@@ -10832,7 +10901,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = overArg;
}, function(module, exports, __webpack_require__) {
(function(global) {
- for (var now = __webpack_require__(642), root = "undefined" == typeof window ? global : window, vendors = [ "moz", "webkit" ], suffix = "AnimationFrame", raf = root["request" + suffix], caf = root["cancel" + suffix] || root["cancelRequest" + suffix], i = 0; !raf && i < vendors.length; i++) raf = root[vendors[i] + "Request" + suffix],
+ for (var now = __webpack_require__(644), root = "undefined" == typeof window ? global : window, vendors = [ "moz", "webkit" ], suffix = "AnimationFrame", raf = root["request" + suffix], caf = root["cancel" + suffix] || root["cancelRequest" + suffix], i = 0; !raf && i < vendors.length; i++) raf = root[vendors[i] + "Request" + suffix],
caf = root[vendors[i] + "Cancel" + suffix] || root[vendors[i] + "CancelRequest" + suffix];
if (!raf || !caf) {
var last = 0, id = 0, queue = [];
@@ -10867,7 +10936,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, module.exports.polyfill = function(object) {
object || (object = root), object.requestAnimationFrame = raf, object.cancelAnimationFrame = caf;
};
- }).call(exports, __webpack_require__(51));
+ }).call(exports, __webpack_require__(60));
}, function(module, exports, __webpack_require__) {
function isPlainObject(value) {
if (!isObjectLike(value) || baseGetTag(value) != objectTag) return !1;
@@ -10876,7 +10945,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var Ctor = hasOwnProperty.call(proto, "constructor") && proto.constructor;
return "function" == typeof Ctor && Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString;
}
- var baseGetTag = __webpack_require__(42), getPrototype = __webpack_require__(644), isObjectLike = __webpack_require__(36), objectTag = "[object Object]", funcProto = Function.prototype, objectProto = Object.prototype, funcToString = funcProto.toString, hasOwnProperty = objectProto.hasOwnProperty, objectCtorString = funcToString.call(Object);
+ var baseGetTag = __webpack_require__(41), getPrototype = __webpack_require__(646), isObjectLike = __webpack_require__(36), objectTag = "[object Object]", funcProto = Function.prototype, objectProto = Object.prototype, funcToString = funcProto.toString, hasOwnProperty = objectProto.hasOwnProperty, objectCtorString = funcToString.call(Object);
module.exports = isPlainObject;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -10890,7 +10959,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.configEasing = exports.configSpring = exports.configBezier = void 0;
- var _util = __webpack_require__(123), cubicBezierFactor = function(c1, c2) {
+ var _util = __webpack_require__(122), cubicBezierFactor = function(c1, c2) {
return [ 0, 3 * c1, 3 * c2 - 6 * c1, 3 * c1 - 3 * c2 + 1 ];
}, multyTime = function(params, t) {
return params.map(function(param, i) {
@@ -10982,10 +11051,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function baseRest(func, start) {
return setToString(overRest(func, start, identity), func + "");
}
- var identity = __webpack_require__(63), overRest = __webpack_require__(653), setToString = __webpack_require__(655);
+ var identity = __webpack_require__(62), overRest = __webpack_require__(655), setToString = __webpack_require__(657);
module.exports = baseRest;
}, function(module, exports, __webpack_require__) {
- var baseForOwn = __webpack_require__(665), createBaseEach = __webpack_require__(668), baseEach = createBaseEach(baseForOwn);
+ var baseForOwn = __webpack_require__(667), createBaseEach = __webpack_require__(670), baseEach = createBaseEach(baseForOwn);
module.exports = baseEach;
}, function(module, exports, __webpack_require__) {
function isStrictComparable(value) {
@@ -11016,8 +11085,35 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
};
}).call(__webpack_exports__, __webpack_require__(2));
+}, function(module, exports) {
+ function balanced(a, b, str) {
+ a instanceof RegExp && (a = maybeMatch(a, str)), b instanceof RegExp && (b = maybeMatch(b, str));
+ var r = range(a, b, str);
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+ }
+ function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+ }
+ function range(a, b, str) {
+ var begs, beg, left, right, result, ai = str.indexOf(a), bi = str.indexOf(b, ai + 1), i = ai;
+ if (ai >= 0 && bi > 0) {
+ for (begs = [], left = str.length; i >= 0 && !result; ) i == ai ? (begs.push(i),
+ ai = str.indexOf(a, i + 1)) : 1 == begs.length ? result = [ begs.pop(), bi ] : (beg = begs.pop(),
+ beg < left && (left = beg, right = bi), bi = str.indexOf(b, i + 1)), i = ai < bi && ai >= 0 ? ai : bi;
+ begs.length && (result = [ left, right ]);
+ }
+ return result;
+ }
+ module.exports = balanced, balanced.range = range;
}, function(module, exports, __webpack_require__) {
- var baseFlatten = __webpack_require__(695), baseOrderBy = __webpack_require__(697), baseRest = __webpack_require__(276), isIterateeCall = __webpack_require__(282), sortBy = baseRest(function(collection, iteratees) {
+ var baseFlatten = __webpack_require__(285), baseOrderBy = __webpack_require__(696), baseRest = __webpack_require__(278), isIterateeCall = __webpack_require__(287), sortBy = baseRest(function(collection, iteratees) {
if (null == collection) return [];
var length = iteratees.length;
return length > 1 && isIterateeCall(collection, iteratees[0], iteratees[1]) ? iteratees = [] : length > 2 && isIterateeCall(iteratees[0], iteratees[1], iteratees[2]) && (iteratees = [ iteratees[0] ]),
@@ -11025,12 +11121,32 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
module.exports = sortBy;
}, function(module, exports, __webpack_require__) {
+ function baseFlatten(array, depth, predicate, isStrict, result) {
+ var index = -1, length = array.length;
+ for (predicate || (predicate = isFlattenable), result || (result = []); ++index < length; ) {
+ var value = array[index];
+ depth > 0 && predicate(value) ? depth > 1 ? baseFlatten(value, depth - 1, predicate, isStrict, result) : arrayPush(result, value) : isStrict || (result[result.length] = value);
+ }
+ return result;
+ }
+ var arrayPush = __webpack_require__(269), isFlattenable = __webpack_require__(695);
+ module.exports = baseFlatten;
+}, function(module, exports, __webpack_require__) {
+ function baseMap(collection, iteratee) {
+ var index = -1, result = isArrayLike(collection) ? Array(collection.length) : [];
+ return baseEach(collection, function(value, key, collection) {
+ result[++index] = iteratee(value, key, collection);
+ }), result;
+ }
+ var baseEach = __webpack_require__(279), isArrayLike = __webpack_require__(85);
+ module.exports = baseMap;
+}, function(module, exports, __webpack_require__) {
function isIterateeCall(value, index, object) {
if (!isObject(object)) return !1;
var type = typeof index;
return !!("number" == type ? isArrayLike(object) && isIndex(index, object.length) : "string" == type && index in object) && eq(object[index], value);
}
- var eq = __webpack_require__(168), isArrayLike = __webpack_require__(83), isIndex = __webpack_require__(181), isObject = __webpack_require__(31);
+ var eq = __webpack_require__(167), isArrayLike = __webpack_require__(85), isIndex = __webpack_require__(180), isObject = __webpack_require__(31);
module.exports = isIterateeCall;
}, function(module, exports) {
function baseGt(value, other) {
@@ -11041,7 +11157,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function min(array) {
return array && array.length ? baseExtremum(array, identity, baseLt) : void 0;
}
- var baseExtremum = __webpack_require__(124), baseLt = __webpack_require__(285), identity = __webpack_require__(63);
+ var baseExtremum = __webpack_require__(123), baseLt = __webpack_require__(290), identity = __webpack_require__(62);
module.exports = min;
}, function(module, exports) {
function baseLt(value, other) {
@@ -11129,7 +11245,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "scaleIdentity", function() {
return __WEBPACK_IMPORTED_MODULE_1__src_identity__.a;
});
- var __WEBPACK_IMPORTED_MODULE_2__src_linear__ = __webpack_require__(87);
+ var __WEBPACK_IMPORTED_MODULE_2__src_linear__ = __webpack_require__(88);
__webpack_require__.d(__webpack_exports__, "scaleLinear", function() {
return __WEBPACK_IMPORTED_MODULE_2__src_linear__.a;
});
@@ -11137,7 +11253,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "scaleLog", function() {
return __WEBPACK_IMPORTED_MODULE_3__src_log__.a;
});
- var __WEBPACK_IMPORTED_MODULE_4__src_ordinal__ = __webpack_require__(300);
+ var __WEBPACK_IMPORTED_MODULE_4__src_ordinal__ = __webpack_require__(305);
__webpack_require__.d(__webpack_exports__, "scaleOrdinal", function() {
return __WEBPACK_IMPORTED_MODULE_4__src_ordinal__.a;
}), __webpack_require__.d(__webpack_exports__, "scaleImplicit", function() {
@@ -11161,7 +11277,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "scaleThreshold", function() {
return __WEBPACK_IMPORTED_MODULE_8__src_threshold__.a;
});
- var __WEBPACK_IMPORTED_MODULE_9__src_time__ = __webpack_require__(316);
+ var __WEBPACK_IMPORTED_MODULE_9__src_time__ = __webpack_require__(321);
__webpack_require__.d(__webpack_exports__, "scaleTime", function() {
return __WEBPACK_IMPORTED_MODULE_9__src_time__.b;
});
@@ -11213,7 +11329,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__ascending__ = __webpack_require__(64), __WEBPACK_IMPORTED_MODULE_1__bisector__ = __webpack_require__(289), ascendingBisect = Object(__WEBPACK_IMPORTED_MODULE_1__bisector__.a)(__WEBPACK_IMPORTED_MODULE_0__ascending__.a), bisectRight = ascendingBisect.right;
+ var __WEBPACK_IMPORTED_MODULE_0__ascending__ = __webpack_require__(64), __WEBPACK_IMPORTED_MODULE_1__bisector__ = __webpack_require__(294), ascendingBisect = Object(__WEBPACK_IMPORTED_MODULE_1__bisector__.a)(__WEBPACK_IMPORTED_MODULE_0__ascending__.a), bisectRight = ascendingBisect.right;
ascendingBisect.left;
__webpack_exports__.a = bisectRight;
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -11250,14 +11366,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = pair;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__variance__ = __webpack_require__(292);
+ var __WEBPACK_IMPORTED_MODULE_0__variance__ = __webpack_require__(297);
__webpack_exports__.a = function(array, f) {
var v = Object(__WEBPACK_IMPORTED_MODULE_0__variance__.a)(array, f);
return v ? Math.sqrt(v) : v;
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(86);
+ var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(87);
__webpack_exports__.a = function(values, valueof) {
var value, delta, n = values.length, m = 0, i = -1, mean = 0, sum = 0;
if (null == valueof) for (;++i < n; ) isNaN(value = Object(__WEBPACK_IMPORTED_MODULE_0__number__.a)(values[i])) || (delta = value - mean,
@@ -11333,7 +11449,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function length(d) {
return d.length;
}
- var __WEBPACK_IMPORTED_MODULE_0__min__ = __webpack_require__(298);
+ var __WEBPACK_IMPORTED_MODULE_0__min__ = __webpack_require__(303);
__webpack_exports__.a = function(matrix) {
if (!(n = matrix.length)) return [];
for (var i = -1, m = Object(__WEBPACK_IMPORTED_MODULE_0__min__.a)(matrix, length), transpose = new Array(m); ++i < m; ) for (var n, j = -1, row = transpose[i] = new Array(n); ++j < n; ) row[j] = matrix[j][i];
@@ -11369,7 +11485,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "b", function() {
return implicit;
}), __webpack_exports__.a = ordinal;
- var __WEBPACK_IMPORTED_MODULE_0_d3_collection__ = __webpack_require__(723), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(56), implicit = {
+ var __WEBPACK_IMPORTED_MODULE_0_d3_collection__ = __webpack_require__(723), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(55), implicit = {
name: "implicit"
};
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -11392,7 +11508,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1__basis__ = __webpack_require__(190), __WEBPACK_IMPORTED_MODULE_2__basisClosed__ = __webpack_require__(303), __WEBPACK_IMPORTED_MODULE_3__color__ = __webpack_require__(89);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1__basis__ = __webpack_require__(189), __WEBPACK_IMPORTED_MODULE_2__basisClosed__ = __webpack_require__(308), __WEBPACK_IMPORTED_MODULE_3__color__ = __webpack_require__(90);
__webpack_exports__.a = function rgbGamma(y) {
function rgb(start, end) {
var r = color((start = Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.f)(start)).r, (end = Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.f)(end)).r), g = color(start.g, end.g), b = color(start.b, end.b), opacity = Object(__WEBPACK_IMPORTED_MODULE_3__color__.a)(start.opacity, end.opacity);
@@ -11407,7 +11523,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
rgbSpline(__WEBPACK_IMPORTED_MODULE_1__basis__.b), rgbSpline(__WEBPACK_IMPORTED_MODULE_2__basisClosed__.a);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(190);
+ var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(189);
__webpack_exports__.a = function(values) {
var n = values.length;
return function(t) {
@@ -11424,7 +11540,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__value__ = __webpack_require__(187);
+ var __WEBPACK_IMPORTED_MODULE_0__value__ = __webpack_require__(186);
__webpack_exports__.a = function(a, b) {
var i, nb = b ? b.length : 0, na = a ? Math.min(nb, a.length) : 0, x = new Array(na), c = new Array(nb);
for (i = 0; i < na; ++i) x[i] = Object(__WEBPACK_IMPORTED_MODULE_0__value__.a)(a[i], b[i]);
@@ -11444,7 +11560,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__value__ = __webpack_require__(187);
+ var __WEBPACK_IMPORTED_MODULE_0__value__ = __webpack_require__(186);
__webpack_exports__.a = function(a, b) {
var k, i = {}, c = {};
null !== a && "object" == typeof a || (a = {}), null !== b && "object" == typeof b || (b = {});
@@ -11466,7 +11582,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return b(t) + "";
};
}
- var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(125), reA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, reB = new RegExp(reA.source, "g");
+ var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(124), reA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, reB = new RegExp(reA.source, "g");
__webpack_exports__.a = function(a, b) {
var am, bm, bs, bi = reA.lastIndex = reB.lastIndex = 0, i = -1, s = [], q = [];
for (a += "", b += ""; (am = reA.exec(a)) && (bm = reB.exec(b)); ) (bs = bm.index) > bi && (bs = b.slice(bi, bs),
@@ -11494,8 +11610,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "b", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_defaultLocale__.b;
});
- var __WEBPACK_IMPORTED_MODULE_2__src_formatSpecifier__ = (__webpack_require__(311),
- __webpack_require__(312));
+ var __WEBPACK_IMPORTED_MODULE_2__src_formatSpecifier__ = (__webpack_require__(316),
+ __webpack_require__(317));
__webpack_require__.d(__webpack_exports__, "c", function() {
return __WEBPACK_IMPORTED_MODULE_2__src_formatSpecifier__.a;
});
@@ -11513,7 +11629,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(127), __WEBPACK_IMPORTED_MODULE_1__formatGroup__ = __webpack_require__(744), __WEBPACK_IMPORTED_MODULE_2__formatNumerals__ = __webpack_require__(745), __WEBPACK_IMPORTED_MODULE_3__formatSpecifier__ = __webpack_require__(312), __WEBPACK_IMPORTED_MODULE_4__formatTypes__ = __webpack_require__(313), __WEBPACK_IMPORTED_MODULE_5__formatPrefixAuto__ = __webpack_require__(314), __WEBPACK_IMPORTED_MODULE_6__identity__ = __webpack_require__(748), prefixes = [ "y", "z", "a", "f", "p", "n", "µ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ];
+ var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(126), __WEBPACK_IMPORTED_MODULE_1__formatGroup__ = __webpack_require__(744), __WEBPACK_IMPORTED_MODULE_2__formatNumerals__ = __webpack_require__(745), __WEBPACK_IMPORTED_MODULE_3__formatSpecifier__ = __webpack_require__(317), __WEBPACK_IMPORTED_MODULE_4__formatTypes__ = __webpack_require__(318), __WEBPACK_IMPORTED_MODULE_5__formatPrefixAuto__ = __webpack_require__(319), __WEBPACK_IMPORTED_MODULE_6__identity__ = __webpack_require__(748), prefixes = [ "y", "z", "a", "f", "p", "n", "µ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ];
__webpack_exports__.a = function(locale) {
function newFormat(specifier) {
function format(value) {
@@ -11585,13 +11701,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.width = width, this.comma = comma, this.precision = precision, this.type = type;
}
__webpack_exports__.a = formatSpecifier;
- var __WEBPACK_IMPORTED_MODULE_0__formatTypes__ = __webpack_require__(313), re = /^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;
+ var __WEBPACK_IMPORTED_MODULE_0__formatTypes__ = __webpack_require__(318), re = /^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;
formatSpecifier.prototype = FormatSpecifier.prototype, FormatSpecifier.prototype.toString = function() {
return this.fill + this.align + this.sign + this.symbol + (this.zero ? "0" : "") + (null == this.width ? "" : Math.max(1, 0 | this.width)) + (this.comma ? "," : "") + (null == this.precision ? "" : "." + Math.max(0, 0 | this.precision)) + this.type;
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__formatDefault__ = __webpack_require__(746), __WEBPACK_IMPORTED_MODULE_1__formatPrefixAuto__ = __webpack_require__(314), __WEBPACK_IMPORTED_MODULE_2__formatRounded__ = __webpack_require__(747);
+ var __WEBPACK_IMPORTED_MODULE_0__formatDefault__ = __webpack_require__(746), __WEBPACK_IMPORTED_MODULE_1__formatPrefixAuto__ = __webpack_require__(319), __WEBPACK_IMPORTED_MODULE_2__formatRounded__ = __webpack_require__(747);
__webpack_exports__.a = {
"": __WEBPACK_IMPORTED_MODULE_0__formatDefault__.a,
"%": function(x, p) {
@@ -11635,7 +11751,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "b", function() {
return prefixExponent;
});
- var prefixExponent, __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(192);
+ var prefixExponent, __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(191);
__webpack_exports__.a = function(x, p) {
var d = Object(__WEBPACK_IMPORTED_MODULE_0__formatDecimal__.a)(x, p);
if (!d) return x + "";
@@ -11693,19 +11809,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, scale;
}
__webpack_exports__.a = calendar;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(88), __WEBPACK_IMPORTED_MODULE_2_d3_time__ = __webpack_require__(193), __WEBPACK_IMPORTED_MODULE_3_d3_time_format__ = __webpack_require__(317), __WEBPACK_IMPORTED_MODULE_4__array__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_5__continuous__ = __webpack_require__(126), __WEBPACK_IMPORTED_MODULE_6__nice__ = __webpack_require__(315), durationSecond = 1e3, durationMinute = 60 * durationSecond, durationHour = 60 * durationMinute, durationDay = 24 * durationHour, durationWeek = 7 * durationDay, durationMonth = 30 * durationDay, durationYear = 365 * durationDay;
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(89), __WEBPACK_IMPORTED_MODULE_2_d3_time__ = __webpack_require__(192), __WEBPACK_IMPORTED_MODULE_3_d3_time_format__ = __webpack_require__(322), __WEBPACK_IMPORTED_MODULE_4__array__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_5__continuous__ = __webpack_require__(125), __WEBPACK_IMPORTED_MODULE_6__nice__ = __webpack_require__(320), durationSecond = 1e3, durationMinute = 60 * durationSecond, durationHour = 60 * durationMinute, durationDay = 24 * durationHour, durationWeek = 7 * durationDay, durationMonth = 30 * durationDay, durationYear = 365 * durationDay;
__webpack_exports__.b = function() {
return calendar(__WEBPACK_IMPORTED_MODULE_2_d3_time__.k, __WEBPACK_IMPORTED_MODULE_2_d3_time__.f, __WEBPACK_IMPORTED_MODULE_2_d3_time__.j, __WEBPACK_IMPORTED_MODULE_2_d3_time__.a, __WEBPACK_IMPORTED_MODULE_2_d3_time__.b, __WEBPACK_IMPORTED_MODULE_2_d3_time__.d, __WEBPACK_IMPORTED_MODULE_2_d3_time__.g, __WEBPACK_IMPORTED_MODULE_2_d3_time__.c, __WEBPACK_IMPORTED_MODULE_3_d3_time_format__.a).domain([ new Date(2e3, 0, 1), new Date(2e3, 0, 2) ]);
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__src_defaultLocale__ = __webpack_require__(194);
+ var __WEBPACK_IMPORTED_MODULE_0__src_defaultLocale__ = __webpack_require__(193);
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_defaultLocale__.a;
}), __webpack_require__.d(__webpack_exports__, "b", function() {
return __WEBPACK_IMPORTED_MODULE_0__src_defaultLocale__.b;
});
- __webpack_require__(318), __webpack_require__(319), __webpack_require__(771);
+ __webpack_require__(323), __webpack_require__(324), __webpack_require__(771);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function localDate(d) {
@@ -12161,7 +12277,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return Math.floor(+d / 1e3);
}
__webpack_exports__.a = formatLocale;
- var __WEBPACK_IMPORTED_MODULE_0_d3_time__ = __webpack_require__(193), pads = {
+ var __WEBPACK_IMPORTED_MODULE_0_d3_time__ = __webpack_require__(192), pads = {
"-": "",
_: " ",
"0": "0"
@@ -12174,7 +12290,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "a", function() {
return isoSpecifier;
});
- var __WEBPACK_IMPORTED_MODULE_0__defaultLocale__ = __webpack_require__(194), isoSpecifier = "%Y-%m-%dT%H:%M:%S.%LZ";
+ var __WEBPACK_IMPORTED_MODULE_0__defaultLocale__ = __webpack_require__(193), isoSpecifier = "%Y-%m-%dT%H:%M:%S.%LZ";
Date.prototype.toISOString || Object(__WEBPACK_IMPORTED_MODULE_0__defaultLocale__.b)(isoSpecifier);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -12196,7 +12312,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_9__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_9__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -12239,7 +12355,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _props2 = this.props, x = _props2.x, y = _props2.y, r = _props2.r, isX = Object(__WEBPACK_IMPORTED_MODULE_9__util_DataUtils__.f)(x), isY = Object(__WEBPACK_IMPORTED_MODULE_9__util_DataUtils__.f)(y);
+ var _props2 = this.props, x = _props2.x, y = _props2.y, r = _props2.r, isX = Object(__WEBPACK_IMPORTED_MODULE_9__util_DataUtils__.g)(x), isY = Object(__WEBPACK_IMPORTED_MODULE_9__util_DataUtils__.g)(y);
if (!isX || !isY) return null;
var coordinate = this.getCoordinate();
if (!coordinate) return null;
@@ -12303,7 +12419,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_ChartUtils__ = __webpack_require__(16), _slicedToArray = function() {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_7__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_ChartUtils__ = __webpack_require__(16), _slicedToArray = function() {
function sliceIterator(arr, i) {
var _arr = [], _n = !0, _d = !1, _e = void 0;
try {
@@ -12391,7 +12507,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _props2 = this.props, x = _props2.x, y = _props2.y, shape = _props2.shape, className = _props2.className, isX = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(x), isY = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(y);
+ var _props2 = this.props, x = _props2.x, y = _props2.y, shape = _props2.shape, className = _props2.className, isX = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(x), isY = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(y);
if (!isX && !isY) return null;
var endPoints = this.getEndPoints(isX, isY);
if (!endPoints) return null;
@@ -12460,7 +12576,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_10__shape_Rectangle__ = __webpack_require__(65), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_6__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_7__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_8__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_9__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_10__shape_Rectangle__ = __webpack_require__(65), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -12505,7 +12621,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _props2 = this.props, x1 = _props2.x1, x2 = _props2.x2, y1 = _props2.y1, y2 = _props2.y2, className = _props2.className, hasX1 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(x1), hasX2 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(x2), hasY1 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(y1), hasY2 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.f)(y2);
+ var _props2 = this.props, x1 = _props2.x1, x2 = _props2.x2, y1 = _props2.y1, y2 = _props2.y2, className = _props2.className, hasX1 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(x1), hasX2 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(x2), hasY1 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(y1), hasY2 = Object(__WEBPACK_IMPORTED_MODULE_8__util_DataUtils__.g)(y2);
if (!(hasX1 || hasX2 || hasY1 || hasY2)) return null;
var rect = this.getRect(hasX1, hasX2, hasY1, hasY2);
if (!rect) return null;
@@ -12598,7 +12714,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "render",
value: function() {
var _props = this.props, x = _props.x, y = _props.y, width = _props.width, height = _props.height, top = _props.top, left = _props.left, className = _props.className;
- return Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(x) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(y) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(width) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(height) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(top) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(left) ? __WEBPACK_IMPORTED_MODULE_0_react___default.a.createElement("path", _extends({}, Object(__WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__.k)(this.props), {
+ return Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(x) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(y) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(width) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(height) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(top) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.h)(left) ? __WEBPACK_IMPORTED_MODULE_0_react___default.a.createElement("path", _extends({}, Object(__WEBPACK_IMPORTED_MODULE_5__util_ReactUtils__.k)(this.props), {
className: __WEBPACK_IMPORTED_MODULE_2_classnames___default()("recharts-cross", className),
d: this.getPath(x, y, width, height, top, left)
})) : null;
@@ -12626,7 +12742,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function maxBy(array, iteratee) {
return array && array.length ? baseExtremum(array, baseIteratee(iteratee, 2), baseGt) : void 0;
}
- var baseExtremum = __webpack_require__(124), baseGt = __webpack_require__(283), baseIteratee = __webpack_require__(84);
+ var baseExtremum = __webpack_require__(123), baseGt = __webpack_require__(288), baseIteratee = __webpack_require__(63);
module.exports = maxBy;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -12648,7 +12764,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject__ = __webpack_require__(274), __WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_6_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_react_smooth__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__shape_Sector__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_11__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_12__component_Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_13__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_14__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_15__component_Cell__ = __webpack_require__(85), __WEBPACK_IMPORTED_MODULE_16__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_17__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_18__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_20__util_LogUtils__ = __webpack_require__(280), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject__ = __webpack_require__(276), __WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isPlainObject__), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_2_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil__ = __webpack_require__(20), __WEBPACK_IMPORTED_MODULE_3_lodash_isNil___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_isNil__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_6_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_react_smooth__), __WEBPACK_IMPORTED_MODULE_7_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_7_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_7_classnames__), __WEBPACK_IMPORTED_MODULE_8__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_9__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_10__shape_Sector__ = __webpack_require__(127), __WEBPACK_IMPORTED_MODULE_11__shape_Curve__ = __webpack_require__(66), __WEBPACK_IMPORTED_MODULE_12__component_Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_13__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_14__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_15__component_Cell__ = __webpack_require__(86), __WEBPACK_IMPORTED_MODULE_16__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_17__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_18__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_20__util_LogUtils__ = __webpack_require__(282), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -12675,7 +12791,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_this.state = {
isAnimationFinished: !1,
isAnimationStarted: !1
- }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.j)("recharts-pie-"),
+ }, _this.id = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.k)("recharts-pie-"),
_this.cachePrevData = function(sectors) {
_this.setState({
prevSectors: sectors
@@ -12792,13 +12908,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return sectors.forEach(function(entry, index) {
var prev = prevSectors && prevSectors[index], paddingAngle = index > 0 ? entry.paddingAngle : 0;
if (prev) {
- var angleIp = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.e)(prev.endAngle - prev.startAngle, entry.endAngle - entry.startAngle), latest = _extends({}, entry, {
+ var angleIp = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.f)(prev.endAngle - prev.startAngle, entry.endAngle - entry.startAngle), latest = _extends({}, entry, {
startAngle: curAngle + paddingAngle,
endAngle: curAngle + angleIp(t) + paddingAngle
});
stepData.push(latest), curAngle = latest.endAngle;
} else {
- var endAngle = entry.endAngle, startAngle = entry.startAngle, interpolatorAngle = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.e)(0, endAngle - startAngle), deltaAngle = interpolatorAngle(t), _latest = _extends({}, entry, {
+ var endAngle = entry.endAngle, startAngle = entry.startAngle, interpolatorAngle = Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.f)(0, endAngle - startAngle), deltaAngle = interpolatorAngle(t), _latest = _extends({}, entry, {
startAngle: curAngle + paddingAngle,
endAngle: curAngle + deltaAngle + paddingAngle
});
@@ -12817,7 +12933,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "render",
value: function() {
var _props5 = this.props, hide = _props5.hide, sectors = _props5.sectors, className = _props5.className, label = _props5.label, cx = _props5.cx, cy = _props5.cy, innerRadius = _props5.innerRadius, outerRadius = _props5.outerRadius, isAnimationActive = _props5.isAnimationActive, id = _props5.id;
- if (hide || !sectors || !sectors.length || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(cx) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(cy) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(innerRadius) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(outerRadius)) return null;
+ if (hide || !sectors || !sectors.length || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(cx) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(cy) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(innerRadius) || !Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(outerRadius)) return null;
var isAnimationFinished = this.state.isAnimationFinished, layerClass = __WEBPACK_IMPORTED_MODULE_7_classnames___default()("recharts-pie", className);
return __WEBPACK_IMPORTED_MODULE_4_react___default.a.createElement(__WEBPACK_IMPORTED_MODULE_9__container_Layer__.a, {
className: layerClass
@@ -12878,7 +12994,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
nameKey: "name"
}, _class2.parseDeltaAngle = function(_ref3) {
var startAngle = _ref3.startAngle, endAngle = _ref3.endAngle;
- return Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.i)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
+ return Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.j)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
}, _class2.getRealPieData = function(item) {
var _item$props = item.props, data = _item$props.data, children = _item$props.children, presentationProps = Object(__WEBPACK_IMPORTED_MODULE_16__util_ReactUtils__.k)(item.props), cells = Object(__WEBPACK_IMPORTED_MODULE_16__util_ReactUtils__.h)(children, __WEBPACK_IMPORTED_MODULE_15__component_Cell__.a);
return data && data.length ? data.map(function(entry, index) {
@@ -12891,10 +13007,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, _class2.parseCoordinateOfPie = function(item, offset) {
var top = offset.top, left = offset.left, width = offset.width, height = offset.height, maxPieRadius = Object(__WEBPACK_IMPORTED_MODULE_17__util_PolarUtils__.c)(width, height);
return {
- cx: left + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.c)(item.props.cx, width, width / 2),
- cy: top + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.c)(item.props.cy, height, height / 2),
- innerRadius: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.c)(item.props.innerRadius, maxPieRadius, 0),
- outerRadius: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.c)(item.props.outerRadius, maxPieRadius, .8 * maxPieRadius),
+ cx: left + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.d)(item.props.cx, width, width / 2),
+ cy: top + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.d)(item.props.cy, height, height / 2),
+ innerRadius: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.d)(item.props.innerRadius, maxPieRadius, 0),
+ outerRadius: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.d)(item.props.outerRadius, maxPieRadius, .8 * maxPieRadius),
maxRadius: item.props.maxRadius || Math.sqrt(width * width + height * height) / 2
};
}, _class2.getComposedData = function(_ref4) {
@@ -12909,14 +13025,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
realDataKey = valueKey);
var sum = pieData.reduce(function(result, entry) {
var val = Object(__WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__.w)(entry, realDataKey, 0);
- return result + (Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(val) ? val : 0);
+ return result + (Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(val) ? val : 0);
}, 0), sectors = void 0;
if (sum > 0) {
var prev = void 0;
sectors = pieData.map(function(entry, i) {
- var val = Object(__WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__.w)(entry, realDataKey, 0), name = Object(__WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__.w)(entry, nameKey, i), percent = (Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.g)(val) ? val : 0) / sum, tempStartAngle = void 0;
- tempStartAngle = i ? prev.endAngle + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.i)(deltaAngle) * paddingAngle : startAngle;
- var tempEndAngle = tempStartAngle + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.i)(deltaAngle) * (minAngle + percent * realTotalAngle), midAngle = (tempStartAngle + tempEndAngle) / 2, middleRadius = (coordinate.innerRadius + coordinate.outerRadius) / 2, tooltipPayload = [ {
+ var val = Object(__WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__.w)(entry, realDataKey, 0), name = Object(__WEBPACK_IMPORTED_MODULE_19__util_ChartUtils__.w)(entry, nameKey, i), percent = (Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.h)(val) ? val : 0) / sum, tempStartAngle = void 0;
+ tempStartAngle = i ? prev.endAngle + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.j)(deltaAngle) * paddingAngle : startAngle;
+ var tempEndAngle = tempStartAngle + Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.j)(deltaAngle) * (minAngle + percent * realTotalAngle), midAngle = (tempStartAngle + tempEndAngle) / 2, middleRadius = (coordinate.innerRadius + coordinate.outerRadius) / 2, tooltipPayload = [ {
name: name,
value: val,
payload: entry
@@ -12934,7 +13050,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
startAngle: tempStartAngle,
endAngle: tempEndAngle,
payload: entry,
- paddingAngle: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.i)(deltaAngle) * paddingAngle
+ paddingAngle: Object(__WEBPACK_IMPORTED_MODULE_18__util_DataUtils__.j)(deltaAngle) * paddingAngle
});
});
}
@@ -12966,7 +13082,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_4_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_smooth__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_11__shape_Polygon__ = __webpack_require__(195), __WEBPACK_IMPORTED_MODULE_12__shape_Dot__ = __webpack_require__(57), __WEBPACK_IMPORTED_MODULE_13__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_14__component_LabelList__ = __webpack_require__(45), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_4_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_smooth__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_9__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_10__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_11__shape_Polygon__ = __webpack_require__(194), __WEBPACK_IMPORTED_MODULE_12__shape_Dot__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_13__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_14__component_LabelList__ = __webpack_require__(44), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -13084,13 +13200,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var t = _ref2.t, stepData = points.map(function(entry, index) {
var prev = prevPoints && prevPoints[index];
if (prev) {
- var _interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.e)(prev.x, entry.x), _interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.e)(prev.y, entry.y);
+ var _interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.f)(prev.x, entry.x), _interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.f)(prev.y, entry.y);
return _extends({}, entry, {
x: _interpolatorX(t),
y: _interpolatorY(t)
});
}
- var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.e)(entry.cx, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.e)(entry.cy, entry.y);
+ var interpolatorX = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.f)(entry.cx, entry.x), interpolatorY = Object(__WEBPACK_IMPORTED_MODULE_6__util_DataUtils__.f)(entry.cy, entry.y);
return _extends({}, entry, {
x: interpolatorX(t),
y: interpolatorY(t)
@@ -13200,7 +13316,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_2_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_6_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_react_smooth__), __WEBPACK_IMPORTED_MODULE_7__shape_Sector__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_11__component_LabelList__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_12__component_Cell__ = __webpack_require__(85), __WEBPACK_IMPORTED_MODULE_13__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_14__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__ = __webpack_require__(34), __WEBPACK_IMPORTED_MODULE_0_lodash_isEqual___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isEqual__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_2_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_3_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_3_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_react__), __WEBPACK_IMPORTED_MODULE_4_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_4_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_prop_types__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_6_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_react_smooth__), __WEBPACK_IMPORTED_MODULE_7__shape_Sector__ = __webpack_require__(127), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_11__component_LabelList__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_12__component_Cell__ = __webpack_require__(86), __WEBPACK_IMPORTED_MODULE_13__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_14__util_ChartUtils__ = __webpack_require__(16), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -13250,7 +13366,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "getDeltaAngle",
value: function() {
var _props2 = this.props, startAngle = _props2.startAngle, endAngle = _props2.endAngle;
- return Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.i)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
+ return Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.j)(endAngle - startAngle) * Math.min(Math.abs(endAngle - startAngle), 360);
}
}, {
key: "renderSectorShape",
@@ -13293,13 +13409,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var t = _ref2.t, stepData = data.map(function(entry, index) {
var prev = prevData && prevData[index];
if (prev) {
- var interpolatorStartAngle = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(prev.startAngle, entry.startAngle), interpolatorEndAngle = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(prev.endAngle, entry.endAngle);
+ var interpolatorStartAngle = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(prev.startAngle, entry.startAngle), interpolatorEndAngle = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(prev.endAngle, entry.endAngle);
return _extends({}, entry, {
startAngle: interpolatorStartAngle(t),
endAngle: interpolatorEndAngle(t)
});
}
- var endAngle = entry.endAngle, startAngle = entry.startAngle, interpolator = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.e)(startAngle, endAngle);
+ var endAngle = entry.endAngle, startAngle = entry.startAngle, interpolator = Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.f)(startAngle, endAngle);
return _extends({}, entry, {
endAngle: interpolator(t)
});
@@ -13414,7 +13530,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
outerRadius = innerRadius + pos.size;
var deltaAngle = endAngle - startAngle;
if (Math.abs(minPointSize) > 0 && Math.abs(deltaAngle) < Math.abs(minPointSize)) {
- endAngle += Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.i)(deltaAngle || minPointSize) * (Math.abs(minPointSize) - Math.abs(deltaAngle));
+ endAngle += Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.j)(deltaAngle || minPointSize) * (Math.abs(minPointSize) - Math.abs(deltaAngle));
}
backgroundSector = {
background: {
@@ -13438,7 +13554,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), endAngle = startAngle + pos.size;
var deltaRadius = outerRadius - innerRadius;
if (Math.abs(minPointSize) > 0 && Math.abs(deltaRadius) < Math.abs(minPointSize)) {
- outerRadius += Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.i)(deltaRadius || minPointSize) * (Math.abs(minPointSize) - Math.abs(deltaRadius));
+ outerRadius += Object(__WEBPACK_IMPORTED_MODULE_13__util_DataUtils__.j)(deltaRadius || minPointSize) * (Math.abs(minPointSize) - Math.abs(deltaRadius));
}
}
return _extends({}, entry, backgroundSector, {
@@ -13484,7 +13600,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_range__ = __webpack_require__(329), __WEBPACK_IMPORTED_MODULE_0_lodash_range___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_range__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_4_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_classnames__), __WEBPACK_IMPORTED_MODULE_5_d3_scale__ = __webpack_require__(287), __WEBPACK_IMPORTED_MODULE_6__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_10__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_11__util_CssPrefixUtils__ = __webpack_require__(787), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_range__ = __webpack_require__(334), __WEBPACK_IMPORTED_MODULE_0_lodash_range___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_range__), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_4_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_classnames__), __WEBPACK_IMPORTED_MODULE_5_d3_scale__ = __webpack_require__(292), __WEBPACK_IMPORTED_MODULE_6__util_ChartUtils__ = __webpack_require__(16), __WEBPACK_IMPORTED_MODULE_7__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_10__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_11__util_CssPrefixUtils__ = __webpack_require__(787), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -13568,22 +13684,22 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "getIndex",
value: function(_ref) {
- var startX = _ref.startX, endX = _ref.endX, min = Math.min(startX, endX), max = Math.max(startX, endX);
+ var startX = _ref.startX, endX = _ref.endX, _props2 = this.props, gap = _props2.gap, data = _props2.data, lastIndex = data.length - 1, min = Math.min(startX, endX), max = Math.max(startX, endX), minIndex = this.getIndexInRange(this.scaleValues, min), maxIndex = this.getIndexInRange(this.scaleValues, max);
return {
- startIndex: this.getIndexInRange(this.scaleValues, min),
- endIndex: this.getIndexInRange(this.scaleValues, max)
+ startIndex: minIndex - minIndex % gap,
+ endIndex: maxIndex === lastIndex ? lastIndex : maxIndex - maxIndex % gap
};
}
}, {
key: "getTextOfTick",
value: function(index) {
- var _props2 = this.props, data = _props2.data, tickFormatter = _props2.tickFormatter, dataKey = _props2.dataKey, text = Object(__WEBPACK_IMPORTED_MODULE_6__util_ChartUtils__.w)(data[index], dataKey, index);
+ var _props3 = this.props, data = _props3.data, tickFormatter = _props3.tickFormatter, dataKey = _props3.dataKey, text = Object(__WEBPACK_IMPORTED_MODULE_6__util_ChartUtils__.w)(data[index], dataKey, index);
return __WEBPACK_IMPORTED_MODULE_1_lodash_isFunction___default()(tickFormatter) ? tickFormatter(text) : text;
}
}, {
key: "handleSlideDrag",
value: function(e) {
- var _state = this.state, slideMoveStartX = _state.slideMoveStartX, startX = _state.startX, endX = _state.endX, _props3 = this.props, x = _props3.x, width = _props3.width, travellerWidth = _props3.travellerWidth, startIndex = _props3.startIndex, endIndex = _props3.endIndex, onChange = _props3.onChange, delta = e.pageX - slideMoveStartX;
+ var _state = this.state, slideMoveStartX = _state.slideMoveStartX, startX = _state.startX, endX = _state.endX, _props4 = this.props, x = _props4.x, width = _props4.width, travellerWidth = _props4.travellerWidth, startIndex = _props4.startIndex, endIndex = _props4.endIndex, onChange = _props4.onChange, delta = e.pageX - slideMoveStartX;
delta > 0 ? delta = Math.min(delta, x + width - travellerWidth - endX, x + width - travellerWidth - startX) : delta < 0 && (delta = Math.max(delta, x - startX, x - endX));
var newIndex = this.getIndex({
startX: startX + delta,
@@ -13610,16 +13726,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "handleTravellerMove",
value: function(e) {
- var _setState, _state2 = this.state, brushMoveStartX = _state2.brushMoveStartX, movingTravellerId = _state2.movingTravellerId, prevValue = this.state[movingTravellerId], _props4 = this.props, x = _props4.x, width = _props4.width, travellerWidth = _props4.travellerWidth, onChange = _props4.onChange, params = {
+ var _setState, _state2 = this.state, brushMoveStartX = _state2.brushMoveStartX, movingTravellerId = _state2.movingTravellerId, endX = _state2.endX, startX = _state2.startX, prevValue = this.state[movingTravellerId], _props5 = this.props, x = _props5.x, width = _props5.width, travellerWidth = _props5.travellerWidth, onChange = _props5.onChange, gap = _props5.gap, data = _props5.data, params = {
startX: this.state.startX,
endX: this.state.endX
}, delta = e.pageX - brushMoveStartX;
delta > 0 ? delta = Math.min(delta, x + width - travellerWidth - prevValue) : delta < 0 && (delta = Math.max(delta, x - prevValue)),
params[movingTravellerId] = prevValue + delta;
- var newIndex = this.getIndex(params);
+ var newIndex = this.getIndex(params), startIndex = newIndex.startIndex, endIndex = newIndex.endIndex, isFullGap = function() {
+ var lastIndex = data.length - 1;
+ return "startX" === movingTravellerId && (endX > startX ? startIndex % gap == 0 : endIndex % gap == 0) || endX < startX && endIndex === lastIndex || "endX" === movingTravellerId && (endX > startX ? endIndex % gap == 0 : startIndex % gap == 0) || endX > startX && endIndex === lastIndex;
+ };
this.setState((_setState = {}, _defineProperty(_setState, movingTravellerId, prevValue + delta),
_defineProperty(_setState, "brushMoveStartX", e.pageX), _setState), function() {
- onChange && onChange(newIndex);
+ onChange && isFullGap() && onChange(newIndex);
});
}
}, {
@@ -13640,7 +13759,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "renderBackground",
value: function() {
- var _props5 = this.props, x = _props5.x, y = _props5.y, width = _props5.width, height = _props5.height, fill = _props5.fill, stroke = _props5.stroke;
+ var _props6 = this.props, x = _props6.x, y = _props6.y, width = _props6.width, height = _props6.height, fill = _props6.fill, stroke = _props6.stroke;
return __WEBPACK_IMPORTED_MODULE_2_react___default.a.createElement("rect", {
stroke: stroke,
fill: fill,
@@ -13653,7 +13772,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "renderPanorama",
value: function() {
- var _props6 = this.props, x = _props6.x, y = _props6.y, width = _props6.width, height = _props6.height, data = _props6.data, children = _props6.children, padding = _props6.padding, chartElement = __WEBPACK_IMPORTED_MODULE_2_react__.Children.only(children);
+ var _props7 = this.props, x = _props7.x, y = _props7.y, width = _props7.width, height = _props7.height, data = _props7.data, children = _props7.children, padding = _props7.padding, chartElement = __WEBPACK_IMPORTED_MODULE_2_react__.Children.only(children);
return chartElement ? __WEBPACK_IMPORTED_MODULE_2_react___default.a.cloneElement(chartElement, {
x: x,
y: y,
@@ -13667,7 +13786,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "renderTraveller",
value: function(travellerX, id) {
- var _props7 = this.props, y = _props7.y, travellerWidth = _props7.travellerWidth, height = _props7.height, stroke = _props7.stroke, lineY = Math.floor(y + height / 2) - 1, x = Math.max(travellerX, this.props.x);
+ var _props8 = this.props, y = _props8.y, travellerWidth = _props8.travellerWidth, height = _props8.height, stroke = _props8.stroke, lineY = Math.floor(y + height / 2) - 1, x = Math.max(travellerX, this.props.x);
return __WEBPACK_IMPORTED_MODULE_2_react___default.a.createElement(__WEBPACK_IMPORTED_MODULE_8__container_Layer__.a, {
className: "recharts-brush-traveller",
onMouseEnter: this.handleEnterSlideOrTraveller,
@@ -13703,7 +13822,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "renderSlide",
value: function(startX, endX) {
- var _props8 = this.props, y = _props8.y, height = _props8.height, stroke = _props8.stroke;
+ var _props9 = this.props, y = _props9.y, height = _props9.height, stroke = _props9.stroke;
return __WEBPACK_IMPORTED_MODULE_2_react___default.a.createElement("rect", {
className: "recharts-brush-slide",
onMouseEnter: this.handleEnterSlideOrTraveller,
@@ -13725,7 +13844,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "renderText",
value: function() {
- var _props9 = this.props, startIndex = _props9.startIndex, endIndex = _props9.endIndex, y = _props9.y, height = _props9.height, travellerWidth = _props9.travellerWidth, stroke = _props9.stroke, _state3 = this.state, startX = _state3.startX, endX = _state3.endX, attrs = {
+ var _props10 = this.props, startIndex = _props10.startIndex, endIndex = _props10.endIndex, y = _props10.y, height = _props10.height, travellerWidth = _props10.travellerWidth, stroke = _props10.stroke, _state3 = this.state, startX = _state3.startX, endX = _state3.endX, attrs = {
pointerEvents: "none",
fill: stroke
};
@@ -13746,8 +13865,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _props10 = this.props, data = _props10.data, className = _props10.className, children = _props10.children, x = _props10.x, y = _props10.y, width = _props10.width, height = _props10.height, _state4 = this.state, startX = _state4.startX, endX = _state4.endX, isTextActive = _state4.isTextActive, isSlideMoving = _state4.isSlideMoving, isTravellerMoving = _state4.isTravellerMoving;
- if (!data || !data.length || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(x) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(y) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(width) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(height) || width <= 0 || height <= 0) return null;
+ var _props11 = this.props, data = _props11.data, className = _props11.className, children = _props11.children, x = _props11.x, y = _props11.y, width = _props11.width, height = _props11.height, _state4 = this.state, startX = _state4.startX, endX = _state4.endX, isTextActive = _state4.isTextActive, isSlideMoving = _state4.isSlideMoving, isTravellerMoving = _state4.isTravellerMoving;
+ if (!data || !data.length || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(x) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(y) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(width) || !Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(height) || width <= 0 || height <= 0) return null;
var layerClass = __WEBPACK_IMPORTED_MODULE_4_classnames___default()("recharts-brush", className), isPanoramic = 1 === __WEBPACK_IMPORTED_MODULE_2_react___default.a.Children.count(children), style = Object(__WEBPACK_IMPORTED_MODULE_11__util_CssPrefixUtils__.a)("userSelect", "none");
return __WEBPACK_IMPORTED_MODULE_2_react___default.a.createElement(__WEBPACK_IMPORTED_MODULE_8__container_Layer__.a, {
className: layerClass,
@@ -13770,6 +13889,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
width: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number,
height: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number.isRequired,
travellerWidth: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number,
+ gap: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number,
padding: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.shape({
top: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number,
right: __WEBPACK_IMPORTED_MODULE_3_prop_types___default.a.number,
@@ -13787,6 +13907,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, _class2.defaultProps = {
height: 40,
travellerWidth: 5,
+ gap: 1,
fill: "#fff",
stroke: "#666",
padding: {
@@ -13825,7 +13946,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__ = __webpack_require__(184), __WEBPACK_IMPORTED_MODULE_6__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_7__component_Text__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
+ var _class, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__ = __webpack_require__(183), __WEBPACK_IMPORTED_MODULE_6__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_7__component_Text__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(42), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -13856,7 +13977,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "getTickLineCoord",
value: function(data) {
- var _props2 = this.props, x = _props2.x, y = _props2.y, width = _props2.width, height = _props2.height, orientation = _props2.orientation, tickSize = _props2.tickSize, mirror = _props2.mirror, tickMargin = _props2.tickMargin, x1 = void 0, x2 = void 0, y1 = void 0, y2 = void 0, tx = void 0, ty = void 0, sign = mirror ? -1 : 1, finalTickSize = data.tickSize || tickSize, tickCoord = Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(data.tickCoord) ? data.tickCoord : data.coordinate;
+ var _props2 = this.props, x = _props2.x, y = _props2.y, width = _props2.width, height = _props2.height, orientation = _props2.orientation, tickSize = _props2.tickSize, mirror = _props2.mirror, tickMargin = _props2.tickMargin, x1 = void 0, x2 = void 0, y1 = void 0, y2 = void 0, tx = void 0, ty = void 0, sign = mirror ? -1 : 1, finalTickSize = data.tickSize || tickSize, tickCoord = Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(data.tickCoord) ? data.tickCoord : data.coordinate;
switch (orientation) {
case "top":
x1 = x2 = data.coordinate, y2 = y + !mirror * height, y1 = y2 - sign * finalTickSize,
@@ -14006,7 +14127,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "getTicks",
value: function(props) {
var tick = props.tick, ticks = props.ticks, viewBox = props.viewBox, minTickGap = props.minTickGap, orientation = props.orientation, interval = props.interval, tickFormatter = props.tickFormatter, unit = props.unit;
- return ticks && ticks.length && tick ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(interval) || Object(__WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__.n)() ? CartesianAxis.getNumberIntervalTicks(ticks, Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.g)(interval) ? interval : 0) : "preserveStartEnd" === interval ? CartesianAxis.getTicksStart({
+ return ticks && ticks.length && tick ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(interval) || Object(__WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__.n)() ? CartesianAxis.getNumberIntervalTicks(ticks, Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.h)(interval) ? interval : 0) : "preserveStartEnd" === interval ? CartesianAxis.getTicksStart({
ticks: ticks,
tickFormatter: tickFormatter,
viewBox: viewBox,
@@ -14039,7 +14160,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "getTicksStart",
value: function(_ref2, preserveEnd) {
- var ticks = _ref2.ticks, tickFormatter = _ref2.tickFormatter, viewBox = _ref2.viewBox, orientation = _ref2.orientation, minTickGap = _ref2.minTickGap, unit = _ref2.unit, x = viewBox.x, y = viewBox.y, width = viewBox.width, height = viewBox.height, sizeKey = "top" === orientation || "bottom" === orientation ? "width" : "height", result = (ticks || []).slice(), unitSize = unit ? Object(__WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__.c)(unit)[sizeKey] : 0, len = result.length, sign = len >= 2 ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.i)(result[1].coordinate - result[0].coordinate) : 1, start = void 0, end = void 0;
+ var ticks = _ref2.ticks, tickFormatter = _ref2.tickFormatter, viewBox = _ref2.viewBox, orientation = _ref2.orientation, minTickGap = _ref2.minTickGap, unit = _ref2.unit, x = viewBox.x, y = viewBox.y, width = viewBox.width, height = viewBox.height, sizeKey = "top" === orientation || "bottom" === orientation ? "width" : "height", result = (ticks || []).slice(), unitSize = unit ? Object(__WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__.c)(unit)[sizeKey] : 0, len = result.length, sign = len >= 2 ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.j)(result[1].coordinate - result[0].coordinate) : 1, start = void 0, end = void 0;
if (1 === sign ? (start = "width" === sizeKey ? x : y, end = "width" === sizeKey ? x + width : y + height) : (start = "width" === sizeKey ? x + width : y + height,
end = "width" === sizeKey ? x : y), preserveEnd) {
var tail = ticks[len - 1], tailContent = __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default()(tickFormatter) ? tickFormatter(tail.value) : tail.value, tailSize = Object(__WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__.c)(tailContent)[sizeKey] + unitSize, tailGap = sign * (tail.coordinate + sign * tailSize / 2 - end);
@@ -14073,7 +14194,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "getTicksEnd",
value: function(_ref3) {
- var ticks = _ref3.ticks, tickFormatter = _ref3.tickFormatter, viewBox = _ref3.viewBox, orientation = _ref3.orientation, minTickGap = _ref3.minTickGap, unit = _ref3.unit, x = viewBox.x, y = viewBox.y, width = viewBox.width, height = viewBox.height, sizeKey = "top" === orientation || "bottom" === orientation ? "width" : "height", unitSize = unit ? Object(__WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__.c)(unit)[sizeKey] : 0, result = (ticks || []).slice(), len = result.length, sign = len >= 2 ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.i)(result[1].coordinate - result[0].coordinate) : 1, start = void 0, end = void 0;
+ var ticks = _ref3.ticks, tickFormatter = _ref3.tickFormatter, viewBox = _ref3.viewBox, orientation = _ref3.orientation, minTickGap = _ref3.minTickGap, unit = _ref3.unit, x = viewBox.x, y = viewBox.y, width = viewBox.width, height = viewBox.height, sizeKey = "top" === orientation || "bottom" === orientation ? "width" : "height", unitSize = unit ? Object(__WEBPACK_IMPORTED_MODULE_5__util_DOMUtils__.c)(unit)[sizeKey] : 0, result = (ticks || []).slice(), len = result.length, sign = len >= 2 ? Object(__WEBPACK_IMPORTED_MODULE_10__util_DataUtils__.j)(result[1].coordinate - result[0].coordinate) : 1, start = void 0, end = void 0;
1 === sign ? (start = "width" === sizeKey ? x : y, end = "width" === sizeKey ? x + width : y + height) : (start = "width" === sizeKey ? x + width : y + height,
end = "width" === sizeKey ? x : y);
for (var i = len - 1; i >= 0; i--) {
@@ -14153,7 +14274,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _reactDom = __webpack_require__(95), _MuiThemeProvider = __webpack_require__(342), _MuiThemeProvider2 = _interopRequireDefault(_MuiThemeProvider), _createMuiTheme = __webpack_require__(151), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _Dashboard = __webpack_require__(396), _Dashboard2 = _interopRequireDefault(_Dashboard), theme = (0,
+ var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _reactDom = __webpack_require__(96), _MuiThemeProvider = __webpack_require__(347), _MuiThemeProvider2 = _interopRequireDefault(_MuiThemeProvider), _createMuiTheme = __webpack_require__(150), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _Dashboard = __webpack_require__(400), _Dashboard2 = _interopRequireDefault(_Dashboard), theme = (0,
_createMuiTheme2.default)({
palette: {
type: "dark"
@@ -14280,7 +14401,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
null != e && (g = ("" + e).replace(L, "$&/") + "/"), b = N(b, g, c, d), null == a || P(a, "", S, b),
O(b);
}
- var m = __webpack_require__(69), n = __webpack_require__(93), p = __webpack_require__(39), q = "function" == typeof Symbol && Symbol.for, r = q ? Symbol.for("react.element") : 60103, t = q ? Symbol.for("react.call") : 60104, u = q ? Symbol.for("react.return") : 60105, v = q ? Symbol.for("react.portal") : 60106, w = q ? Symbol.for("react.fragment") : 60107, x = "function" == typeof Symbol && Symbol.iterator, z = {
+ var m = __webpack_require__(69), n = __webpack_require__(94), p = __webpack_require__(39), q = "function" == typeof Symbol && Symbol.for, r = q ? Symbol.for("react.element") : 60103, t = q ? Symbol.for("react.call") : 60104, u = q ? Symbol.for("react.return") : 60105, v = q ? Symbol.for("react.portal") : 60106, w = q ? Symbol.for("react.fragment") : 60107, x = "function" == typeof Symbol && Symbol.iterator, z = {
isMounted: function() {
return !1;
},
@@ -14693,7 +14814,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var newElement = cloneElement.apply(this, arguments), i = 2; i < arguments.length; i++) validateChildKeys(arguments[i], newElement.type);
return validatePropTypes(newElement), newElement;
}
- var _assign = __webpack_require__(69), emptyObject = __webpack_require__(93), invariant = __webpack_require__(70), warning = __webpack_require__(94), emptyFunction = __webpack_require__(39), checkPropTypes = __webpack_require__(132), hasSymbol = "function" == typeof Symbol && Symbol.for, REACT_ELEMENT_TYPE = hasSymbol ? Symbol.for("react.element") : 60103, REACT_CALL_TYPE = hasSymbol ? Symbol.for("react.call") : 60104, REACT_RETURN_TYPE = hasSymbol ? Symbol.for("react.return") : 60105, REACT_PORTAL_TYPE = hasSymbol ? Symbol.for("react.portal") : 60106, REACT_FRAGMENT_TYPE = hasSymbol ? Symbol.for("react.fragment") : 60107, MAYBE_ITERATOR_SYMBOL = "function" == typeof Symbol && Symbol.iterator, FAUX_ITERATOR_SYMBOL = "@@iterator", lowPriorityWarning = function() {}, printWarning = function(format) {
+ var _assign = __webpack_require__(69), emptyObject = __webpack_require__(94), invariant = __webpack_require__(70), warning = __webpack_require__(95), emptyFunction = __webpack_require__(39), checkPropTypes = __webpack_require__(131), hasSymbol = "function" == typeof Symbol && Symbol.for, REACT_ELEMENT_TYPE = hasSymbol ? Symbol.for("react.element") : 60103, REACT_CALL_TYPE = hasSymbol ? Symbol.for("react.call") : 60104, REACT_RETURN_TYPE = hasSymbol ? Symbol.for("react.return") : 60105, REACT_PORTAL_TYPE = hasSymbol ? Symbol.for("react.portal") : 60106, REACT_FRAGMENT_TYPE = hasSymbol ? Symbol.for("react.fragment") : 60107, MAYBE_ITERATOR_SYMBOL = "function" == typeof Symbol && Symbol.iterator, FAUX_ITERATOR_SYMBOL = "@@iterator", lowPriorityWarning = function() {}, printWarning = function(format) {
for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) args[_key - 1] = arguments[_key];
var argIndex = 0, message = "Warning: " + format.replace(/%s/g, function() {
return args[argIndex++];
@@ -17505,7 +17626,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function Rg(a, b) {
this._reactRootContainer = Z.createContainer(a, b);
}
- var aa = __webpack_require__(0), l = __webpack_require__(200), B = __webpack_require__(69), C = __webpack_require__(39), ba = __webpack_require__(201), da = __webpack_require__(202), ea = __webpack_require__(96), fa = __webpack_require__(203), ia = __webpack_require__(204), D = __webpack_require__(93);
+ var aa = __webpack_require__(0), l = __webpack_require__(199), B = __webpack_require__(69), C = __webpack_require__(39), ba = __webpack_require__(200), da = __webpack_require__(201), ea = __webpack_require__(97), fa = __webpack_require__(202), ia = __webpack_require__(203), D = __webpack_require__(94);
aa || E("227");
var oa = {
children: !0,
@@ -18734,7 +18855,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function isTextNode(object) {
return isNode(object) && 3 == object.nodeType;
}
- var isNode = __webpack_require__(336);
+ var isNode = __webpack_require__(341);
module.exports = isTextNode;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -18869,7 +18990,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var listener, stateNode = inst.stateNode;
if (!stateNode) return null;
var props = getFiberCurrentPropsFromNode(stateNode);
- return props ? (listener = props[registrationName], shouldPreventMouseEvent(registrationName, inst.type, props) ? null : (listener && "function" != typeof listener && invariant(!1, "Expected `))) + (("`" + (`%s` + "`")) + (` listener to be a function, instead got a value of ` + ("`" + `%s`)))))) + ((((("`" + (` type.", registrationName, typeof listener),
+ return props ? (listener = props[registrationName], shouldPreventMouseEvent(registrationName, inst.type, props) ? null : (listener && "function" != typeof listener && invariant(!1, "Expected `))) + (("`" + (`%s` + "`")) + (` listener to be a function, instead got a value of ` + ("`" + `%s`)))))) + ((((("`" + ` type.", registrationName, typeof listener),
listener)) : null;
}
function extractEvents(topLevelType, targetInst, nativeEvent, nativeEventTarget) {
@@ -19049,7 +19170,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
getVal;
}
function warn(action, result) {
- warning(!1, "This synthetic event is reused for performance reasons. If you're seeing this, you're %s ` + "`")) + (`%s` + ("`" + ` on a released/nullified synthetic event. %s. If you must keep the original synthetic event around, use event.persist(). See https://fb.me/react-event-pooling for more information.", action, propName, result);
+ warning(!1, "This synthetic event is reused for performance reasons. If you're seeing this, you're %s `) + ("`" + (`%s` + "`"))) + ((` on a released/nullified synthetic event. %s. If you must keep the original synthetic event around, use event.persist(). See https://fb.me/react-event-pooling for more information.", action, propName, result);
}
var isFunction = "function" == typeof getVal;
return {
@@ -20061,7 +20182,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var info = "";
(void 0 === type || "object" == typeof type && null !== type && 0 === Object.keys(type).length) && (info += " You likely forgot to export your component from the file it's defined in, or you might have mixed up default and named imports.");
var ownerName = owner ? getComponentName(owner) : null;
- ownerName && (info += "\n\nCheck the render method of `))) + (("`" + (`" + ownerName + "` + "`")) + (`."), invariant(!1, "Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s", null == type ? type : typeof type, info);
+ ownerName && (info += "\n\nCheck the render method of ` + ("`" + `" + ownerName + "`)) + ("`" + (`."), invariant(!1, "Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s", null == type ? type : typeof type, info);
}
return fiber._debugSource = element._source, fiber._debugOwner = element._owner,
fiber.expirationTime = expirationTime, fiber;
@@ -20462,7 +20583,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
knownKeys.add(key);
break;
}
- warning(!1, "Encountered two children with the same key, ` + ("`" + `%s`)))) + ((("`" + (`. Keys should be unique so that components maintain their identity across updates. Non-unique keys may cause children to be duplicated and/or omitted — the behavior is unsupported and could change in a future version.%s", key, getCurrentFiberStackAddendum$1());
+ warning(!1, "Encountered two children with the same key, ` + "`")))) + (((`%s` + ("`" + `. Keys should be unique so that components maintain their identity across updates. Non-unique keys may cause children to be duplicated and/or omitted — the behavior is unsupported and could change in a future version.%s", key, getCurrentFiberStackAddendum$1());
}
return knownKeys;
}
@@ -20697,7 +20818,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
function isAttributeNameSafe(attributeName) {
return !!validatedAttributeNameCache.hasOwnProperty(attributeName) || !illegalAttributeNameCache.hasOwnProperty(attributeName) && (VALID_ATTRIBUTE_NAME_REGEX.test(attributeName) ? (validatedAttributeNameCache[attributeName] = !0,
- !0) : (illegalAttributeNameCache[attributeName] = !0, warning(!1, "Invalid attribute name: ` + "`")) + (`%s` + ("`" + `", attributeName),
+ !0) : (illegalAttributeNameCache[attributeName] = !0, warning(!1, "Invalid attribute name: `)) + ("`" + (`%s` + "`"))) + ((`", attributeName),
!1));
}
function shouldIgnoreValue(propertyInfo, value) {
@@ -20837,7 +20958,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var otherNode = group[i];
if (otherNode !== rootNode && otherNode.form === rootNode.form) {
var otherProps = getFiberCurrentPropsFromNode$1(otherNode);
- otherProps || invariant(!1, "ReactDOMInput: Mixing React and non-React radio inputs with the same `))) + (("`" + (`name` + "`")) + (` is not supported."),
+ otherProps || invariant(!1, "ReactDOMInput: Mixing React and non-React radio inputs with the same ` + ("`" + `name`)) + ("`" + (` is not supported."),
updateValueIfChanged(otherNode), updateWrapper(otherNode, otherProps);
}
}
@@ -20850,7 +20971,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), content;
}
function validateProps(element, props) {
- warning(null == props.selected, "Use the ` + ("`" + `defaultValue`))))) + (((("`" + (` or ` + "`")) + (`value` + ("`" + ` props on <select> instead of setting `))) + (("`" + (`selected` + "`")) + (` on <option>.");
+ warning(null == props.selected, "Use the ` + "`"))))) + ((((`defaultValue` + ("`" + ` or `)) + ("`" + (`value` + "`"))) + ((` props on <select> instead of setting ` + ("`" + `selected`)) + ("`" + (` on <option>.");
}
function postMountWrapper$1(element, props) {
null != props.value && element.setAttribute("value", props.value);
@@ -20863,7 +20984,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
function getDeclarationErrorAddendum() {
var ownerName = getCurrentFiberOwnerName$3();
- return ownerName ? "\n\nCheck the render method of ` + ("`" + `" + ownerName + "`)))) + ((("`" + (`." : "";
+ return ownerName ? "\n\nCheck the render method of ` + "`")))) + (((`" + ownerName + "` + ("`" + `." : "";
}
function checkSelectPropTypes(props) {
ReactControlledValuePropTypes.checkPropTypes("select", props, getCurrentFiberStackAddendum$4);
@@ -20871,7 +20992,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var propName = valuePropNames[i];
if (null != props[propName]) {
var isArray = Array.isArray(props[propName]);
- props.multiple && !isArray ? warning(!1, "The ` + "`")) + (`%s` + ("`" + ` prop supplied to <select> must be an array if `))) + (("`" + (`multiple` + "`")) + (` is true.%s", propName, getDeclarationErrorAddendum()) : !props.multiple && isArray && warning(!1, "The ` + ("`" + `%s`)))))))) + ((((((("`" + ` prop supplied to <select> must be a scalar value if `) + ("`" + (`multiple` + "`"))) + ((` is false.%s", propName, getDeclarationErrorAddendum());
+ props.multiple && !isArray ? warning(!1, "The `)) + ("`" + (`%s` + "`"))) + ((` prop supplied to <select> must be an array if ` + ("`" + `multiple`)) + ("`" + (` is true.%s", propName, getDeclarationErrorAddendum()) : !props.multiple && isArray && warning(!1, "The ` + "`")))))))) + (((((((`%s` + "`") + (` prop supplied to <select> must be a scalar value if ` + ("`" + `multiple`))) + (("`" + (` is false.%s", propName, getDeclarationErrorAddendum());
}
}
}
@@ -20926,7 +21047,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
function getHostProps$3(element, props) {
var node = element;
- return null != props.dangerouslySetInnerHTML && invariant(!1, "` + ("`" + `dangerouslySetInnerHTML`)) + ("`" + (` does not make sense on <textarea>."),
+ return null != props.dangerouslySetInnerHTML && invariant(!1, "` + "`")) + (`dangerouslySetInnerHTML` + ("`" + ` does not make sense on <textarea>."),
_assign({}, props, {
value: void 0,
defaultValue: void 0,
@@ -20941,8 +21062,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var initialValue = props.value;
if (null == initialValue) {
var defaultValue = props.defaultValue, children = props.children;
- null != children && (warning(!1, "Use the ` + "`")))) + (((`defaultValue` + ("`" + ` or `)) + ("`" + (`value` + "`"))) + ((` props instead of setting children on <textarea>."),
- null != defaultValue && invariant(!1, "If you supply ` + ("`" + `defaultValue`)) + ("`" + (` on a <textarea>, do not pass children."),
+ null != children && (warning(!1, "Use the `)))) + ((("`" + (`defaultValue` + "`")) + (` or ` + ("`" + `value`))) + (("`" + (` props instead of setting children on <textarea>."),
+ null != defaultValue && invariant(!1, "If you supply ` + "`")) + (`defaultValue` + ("`" + ` on a <textarea>, do not pass children."),
Array.isArray(children) && (children.length <= 1 || invariant(!1, "<textarea> can only have at most one child."),
children = children[0]), defaultValue = "" + children), null == defaultValue && (defaultValue = ""),
initialValue = defaultValue;
@@ -21009,11 +21130,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}
function assertValidProps(tag, props, getStack) {
- props && (voidElementTags[tag] && (null != props.children || null != props.dangerouslySetInnerHTML) && invariant(!1, "%s is a void element tag and must neither have ` + "`"))))) + ((((`children` + ("`" + ` nor use `)) + ("`" + (`dangerouslySetInnerHTML` + "`"))) + ((`.%s", tag, getStack()),
- null != props.dangerouslySetInnerHTML && (null != props.children && invariant(!1, "Can only set one of ` + ("`" + `children`)) + ("`" + (` or ` + "`")))) + (((`props.dangerouslySetInnerHTML` + ("`" + `."),
- "object" == typeof props.dangerouslySetInnerHTML && HTML$1 in props.dangerouslySetInnerHTML || invariant(!1, "`)) + ("`" + (`props.dangerouslySetInnerHTML` + "`"))) + ((` must be in the form ` + ("`" + `{__html: ...}`)) + ("`" + (`. Please visit https://fb.me/react-invariant-dangerously-set-inner-html for more information.")),
- warning(props.suppressContentEditableWarning || !props.contentEditable || null == props.children, "A component is ` + "`")))))) + (((((`contentEditable` + ("`" + ` and contains `)) + ("`" + (`children` + "`"))) + ((` managed by React. It is now your responsibility to guarantee that none of those nodes are unexpectedly modified or duplicated. This is probably not intentional.%s", getStack()),
- null != props.style && "object" != typeof props.style && invariant(!1, "The ` + ("`" + `style`)) + ("`" + (` prop expects a mapping from style properties to values, not a string. For example, style={{marginRight: spacing + 'em'}} when using JSX.%s", getStack()));
+ props && (voidElementTags[tag] && (null != props.children || null != props.dangerouslySetInnerHTML) && invariant(!1, "%s is a void element tag and must neither have `))))) + (((("`" + (`children` + "`")) + (` nor use ` + ("`" + `dangerouslySetInnerHTML`))) + (("`" + (`.%s", tag, getStack()),
+ null != props.dangerouslySetInnerHTML && (null != props.children && invariant(!1, "Can only set one of ` + "`")) + (`children` + ("`" + ` or `)))) + ((("`" + (`props.dangerouslySetInnerHTML` + "`")) + (`."),
+ "object" == typeof props.dangerouslySetInnerHTML && HTML$1 in props.dangerouslySetInnerHTML || invariant(!1, "` + ("`" + `props.dangerouslySetInnerHTML`))) + (("`" + (` must be in the form ` + "`")) + (`{__html: ...}` + ("`" + `. Please visit https://fb.me/react-invariant-dangerously-set-inner-html for more information.")),
+ warning(props.suppressContentEditableWarning || !props.contentEditable || null == props.children, "A component is `)))))) + ((((("`" + `contentEditable`) + ("`" + (` and contains ` + "`"))) + ((`children` + ("`" + ` managed by React. It is now your responsibility to guarantee that none of those nodes are unexpectedly modified or duplicated. This is probably not intentional.%s", getStack()),
+ null != props.style && "object" != typeof props.style && invariant(!1, "The `)) + ("`" + (`style` + "`")))) + (((` prop expects a mapping from style properties to values, not a string. For example, style={{marginRight: spacing + 'em'}} when using JSX.%s", getStack()));
}
function isCustomComponent(tagName, props) {
if (-1 === tagName.indexOf("-")) return "string" == typeof props.is;
@@ -21040,15 +21161,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (hasOwnProperty.call(warnedProperties, name) && warnedProperties[name]) return !0;
if (rARIACamel.test(name)) {
var ariaName = "aria-" + name.slice(4).toLowerCase(), correctName = ariaProperties.hasOwnProperty(ariaName) ? ariaName : null;
- if (null == correctName) return warning(!1, "Invalid ARIA attribute ` + "`")))) + (((`%s` + ("`" + `. ARIA attributes follow the pattern aria-* and must be lowercase.%s", name, getStackAddendum()),
+ if (null == correctName) return warning(!1, "Invalid ARIA attribute ` + ("`" + `%s`)) + ("`" + (`. ARIA attributes follow the pattern aria-* and must be lowercase.%s", name, getStackAddendum()),
warnedProperties[name] = !0, !0;
- if (name !== correctName) return warning(!1, "Invalid ARIA attribute `)) + ("`" + (`%s` + "`"))) + ((`. Did you mean ` + ("`" + `%s`)) + ("`" + (`?%s", name, correctName, getStackAddendum()),
+ if (name !== correctName) return warning(!1, "Invalid ARIA attribute ` + "`"))) + ((`%s` + ("`" + `. Did you mean `)) + ("`" + (`%s` + "`"))))) + ((((`?%s", name, correctName, getStackAddendum()),
warnedProperties[name] = !0, !0;
}
if (rARIA.test(name)) {
var lowerCasedName = name.toLowerCase(), standardName = ariaProperties.hasOwnProperty(lowerCasedName) ? lowerCasedName : null;
if (null == standardName) return warnedProperties[name] = !0, !1;
- if (name !== standardName) return warning(!1, "Unknown ARIA attribute ` + "`"))))) + ((((`%s` + ("`" + `. Did you mean `)) + ("`" + (`%s` + "`"))) + ((`?%s", name, standardName, getStackAddendum()),
+ if (name !== standardName) return warning(!1, "Unknown ARIA attribute ` + ("`" + `%s`)) + ("`" + (`. Did you mean ` + "`"))) + ((`%s` + ("`" + `?%s", name, standardName, getStackAddendum()),
warnedProperties[name] = !0, !0;
}
return !0;
@@ -21059,7 +21180,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
validateProperty(type, key) || invalidProps.push(key);
}
var unknownPropString = invalidProps.map(function(prop) {
- return "` + ("`" + `" + prop + "`)) + ("`" + (`";
+ return "`)) + ("`" + (`" + prop + "` + "`")))) + (((`";
}).join(", ");
1 === invalidProps.length ? warning(!1, "Invalid aria prop %s on <%s> tag. For details, see https://fb.me/invalid-aria-prop%s", unknownPropString, type, getStackAddendum()) : invalidProps.length > 1 && warning(!1, "Invalid aria props %s on <%s> tag. For details, see https://fb.me/invalid-aria-prop%s", unknownPropString, type, getStackAddendum());
}
@@ -21072,7 +21193,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
function validateProperties$1(type, props) {
"input" !== type && "textarea" !== type && "select" !== type || null == props || null !== props.value || didWarnValueNull || (didWarnValueNull = !0,
- "select" === type && props.multiple ? warning(!1, "` + "`")))) + (((`value` + ("`" + ` prop on `)) + ("`" + (`%s` + "`"))) + ((` should not be null. Consider using an empty array when ` + ("`" + `multiple`)) + ("`" + (` is set to ` + "`"))))))) + ((((((`true` + "`") + (` to clear the component or ` + ("`" + `undefined`))) + (("`" + (` for uncontrolled components.%s", type, getStackAddendum$1()) : warning(!1, "` + "`")) + (`value` + ("`" + ` prop on `)))) + ((("`" + (`%s` + "`")) + (` should not be null. Consider using an empty string to clear the component or ` + ("`" + `undefined`))) + (("`" + (` for uncontrolled components.%s", type, getStackAddendum$1()));
+ "select" === type && props.multiple ? warning(!1, "` + ("`" + `value`)) + ("`" + (` prop on ` + "`"))) + ((`%s` + ("`" + ` should not be null. Consider using an empty array when `)) + ("`" + (`multiple` + "`"))))))) + ((((((` is set to ` + "`") + (`true` + ("`" + ` to clear the component or `))) + (("`" + (`undefined` + "`")) + (` for uncontrolled components.%s", type, getStackAddendum$1()) : warning(!1, "` + ("`" + `value`)))) + ((("`" + (` prop on ` + "`")) + (`%s` + ("`" + ` should not be null. Consider using an empty string to clear the component or `))) + (("`" + (`undefined` + "`")) + (` for uncontrolled components.%s", type, getStackAddendum$1()));
}
function getStackAddendum$2() {
var stack = ReactDebugCurrentFrame.getStackAddendum();
@@ -21471,7 +21592,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var root = DOMRenderer.createContainer(container, hydrate);
this._reactRootContainer = root;
}
- var React = __webpack_require__(0), invariant = __webpack_require__(70), warning = __webpack_require__(94), ExecutionEnvironment = __webpack_require__(200), _assign = __webpack_require__(69), emptyFunction = __webpack_require__(39), EventListener = __webpack_require__(201), getActiveElement = __webpack_require__(202), shallowEqual = __webpack_require__(96), containsNode = __webpack_require__(203), focusNode = __webpack_require__(204), emptyObject = __webpack_require__(93), checkPropTypes = __webpack_require__(132), hyphenateStyleName = __webpack_require__(338), camelizeStyleName = __webpack_require__(340);
+ var React = __webpack_require__(0), invariant = __webpack_require__(70), warning = __webpack_require__(95), ExecutionEnvironment = __webpack_require__(199), _assign = __webpack_require__(69), emptyFunction = __webpack_require__(39), EventListener = __webpack_require__(200), getActiveElement = __webpack_require__(201), shallowEqual = __webpack_require__(97), containsNode = __webpack_require__(202), focusNode = __webpack_require__(203), emptyObject = __webpack_require__(94), checkPropTypes = __webpack_require__(131), hyphenateStyleName = __webpack_require__(343), camelizeStyleName = __webpack_require__(345);
React || invariant(!1, "ReactDOM was loaded before React. Make sure you load the React package before loading ReactDOM.");
var RESERVED_PROPS = {
children: !0,
@@ -21688,7 +21809,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
validateEventDispatches = function(event) {
var dispatchListeners = event._dispatchListeners, dispatchInstances = event._dispatchInstances, listenersIsArr = Array.isArray(dispatchListeners), listenersLen = listenersIsArr ? dispatchListeners.length : dispatchListeners ? 1 : 0, instancesIsArr = Array.isArray(dispatchInstances), instancesLen = instancesIsArr ? dispatchInstances.length : dispatchInstances ? 1 : 0;
- warning(instancesIsArr === listenersIsArr && instancesLen === listenersLen, "EventPluginUtils: Invalid ` + "`")) + (`event` + ("`" + `.");
+ warning(instancesIsArr === listenersIsArr && instancesLen === listenersLen, "EventPluginUtils: Invalid ` + ("`" + `event`))))) + (((("`" + (`.");
};
var eventQueue = null, executeDispatchesAndRelease = function(event, simulated) {
event && (executeDispatchesInOrder(event, simulated), event.isPersistent() || event.constructor.release(event));
@@ -22283,7 +22404,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
break;
default:
- -1 === knownHTMLTopLevelTypes.indexOf(topLevelType) && warning(!1, "SimpleEventPlugin: Unhandled event type, `))))) + (((("`" + (`%s` + "`")) + (`. This warning is likely caused by a bug in React. Please file an issue.", topLevelType),
+ -1 === knownHTMLTopLevelTypes.indexOf(topLevelType) && warning(!1, "SimpleEventPlugin: Unhandled event type, ` + "`")) + (`%s` + ("`" + `. This warning is likely caused by a bug in React. Please file an issue.", topLevelType),
EventConstructor = SyntheticEvent$1;
}
var event = EventConstructor.getPooled(dispatchConfig, targetInst, nativeEvent, nativeEventTarget);
@@ -22370,7 +22491,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var debugCounter = 1, createFiber = function(tag, key, internalContextTag) {
return new FiberNode(tag, key, internalContextTag);
}, onCommitFiberRoot = null, onCommitFiberUnmount = null, hasLoggedError = !1, didWarnUpdateInsideUpdate = !1, fakeInternalInstance = {}, isArray = Array.isArray, didWarnAboutStateAssignmentForComponent = {}, warnOnInvalidCallback = function(callback, callerName) {
- warning(null === callback || "function" == typeof callback, "%s(...): Expected the last optional ` + ("`" + `callback`))) + (("`" + (` argument to be a function. Instead received: %s.", callerName, callback);
+ warning(null === callback || "function" == typeof callback, "%s(...): Expected the last optional `))) + (("`" + (`callback` + "`")) + (` argument to be a function. Instead received: %s.", callerName, callback);
};
Object.defineProperty(fakeInternalInstance, "_processChildContext", {
enumerable: !1,
@@ -22393,7 +22514,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
function checkClassInstance(workInProgress) {
var instance = workInProgress.stateNode, type = workInProgress.type, name = getComponentName(workInProgress);
- instance.render || (type.prototype && "function" == typeof type.prototype.render ? warning(!1, "%s(...): No ` + "`")) + (`render` + ("`" + ` method found on the returned component instance: did you accidentally return an object from the constructor?", name) : warning(!1, "%s(...): No `)))) + ((("`" + (`render` + "`")) + (` method found on the returned component instance: you may have forgotten to define ` + ("`" + `render`))) + (("`" + (`.", name));
+ instance.render || (type.prototype && "function" == typeof type.prototype.render ? warning(!1, "%s(...): No ` + ("`" + `render`)))) + ((("`" + (` method found on the returned component instance: did you accidentally return an object from the constructor?", name) : warning(!1, "%s(...): No ` + "`")) + (`render` + ("`" + ` method found on the returned component instance: you may have forgotten to define `))) + (("`" + (`render` + "`")) + (`.", name));
var noGetInitialStateOnES6 = !instance.getInitialState || instance.getInitialState.isReactClassApproved || instance.state;
warning(noGetInitialStateOnES6, "getInitialState was defined on %s, a plain JavaScript class. This is only supported for classes created using React.createClass. Did you mean to define a state property instead?", name);
var noGetDefaultPropsOnES6 = !instance.getDefaultProps || instance.getDefaultProps.isReactClassApproved;
@@ -22412,7 +22533,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var noComponentWillRecieveProps = "function" != typeof instance.componentWillRecieveProps;
warning(noComponentWillRecieveProps, "%s has a method called componentWillRecieveProps(). Did you mean componentWillReceiveProps()?", name);
var hasMutatedProps = instance.props !== workInProgress.pendingProps;
- warning(void 0 === instance.props || !hasMutatedProps, "%s(...): When calling super() in ` + "`")) + (`%s` + ("`" + `, make sure to pass up the same props that your component's constructor was passed.", name, name);
+ warning(void 0 === instance.props || !hasMutatedProps, "%s(...): When calling super() in ` + ("`" + `%s`)))))) + ((((("`" + (`, make sure to pass up the same props that your component's constructor was passed.", name, name);
var noInstanceDefaultProps = !instance.defaultProps;
warning(noInstanceDefaultProps, "Setting defaultProps as an instance property on %s is not supported and will be ignored. Instead, define defaultProps as a static property on %s.", name, name);
var state = instance.state;
@@ -22644,7 +22765,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (Component && warning(!Component.childContextTypes, "%s(...): childContextTypes cannot be defined on a functional component.", Component.displayName || Component.name || "Component"),
null !== workInProgress.ref) {
var info = "", ownerName = ReactDebugCurrentFiber.getCurrentFiberOwnerName();
- ownerName && (info += "\n\nCheck the render method of `)))))) + ((((("`" + (`" + ownerName + "` + "`")) + (`.");
+ ownerName && (info += "\n\nCheck the render method of ` + "`")) + (`" + ownerName + "` + ("`" + `.");
var warningKey = ownerName || workInProgress._debugID || "", debugSource = workInProgress._debugSource;
debugSource && (warningKey = debugSource.fileName + ":" + debugSource.lineNumber),
warnedAboutStatelessRefs[warningKey] || (warnedAboutStatelessRefs[warningKey] = !0,
@@ -23402,7 +23523,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
case "render":
if (didWarnAboutStateTransition) return;
- warning(!1, "Cannot update during an existing state transition (such as within ` + ("`" + `render`))) + (("`" + (` or another component's constructor). Render methods should be a pure function of props and state; constructor side-effects are an anti-pattern, but can be moved to ` + "`")) + (`componentWillMount` + ("`" + `."),
+ warning(!1, "Cannot update during an existing state transition (such as within `))) + (("`" + (`render` + "`")) + (` or another component's constructor). Render methods should be a pure function of props and state; constructor side-effects are an anti-pattern, but can be moved to ` + ("`" + `componentWillMount`)))) + ((("`" + (`."),
didWarnAboutStateTransition = !0;
}
}, ReactFiberScheduler = function(config) {
@@ -23852,7 +23973,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function scheduleTopLevelUpdate(current, element, callback) {
"render" !== ReactDebugCurrentFiber.phase || null === ReactDebugCurrentFiber.current || didWarnAboutNestedUpdates || (didWarnAboutNestedUpdates = !0,
warning(!1, "Render methods should be a pure function of props and state; triggering nested component updates from render is not allowed. If necessary, trigger nested updates in componentDidUpdate.\n\nCheck the render method of %s.", getComponentName(ReactDebugCurrentFiber.current) || "Unknown")),
- callback = void 0 === callback ? null : callback, warning(null === callback || "function" == typeof callback, "render(...): Expected the last optional `)))) + ((("`" + (`callback` + "`")) + (` argument to be a function. Instead received: %s.", callback);
+ callback = void 0 === callback ? null : callback, warning(null === callback || "function" == typeof callback, "render(...): Expected the last optional ` + "`")) + (`callback` + ("`" + ` argument to be a function. Instead received: %s.", callback);
var expirationTime = void 0;
expirationTime = enableAsyncSubtreeAPI && null != element && null != element.type && null != element.type.prototype && !0 === element.type.prototype.unstable_isAsyncReactComponent ? computeAsyncExpiration() : computeExpirationForFiber(current),
insertUpdateIntoFiber(current, {
@@ -23993,7 +24114,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
} catch (x) {}
};
lowPriorityWarning = function(condition, format) {
- if (void 0 === format) throw new Error("` + ("`" + `warning(condition, format, ...args)`))) + (("`" + (` requires a warning message argument");
+ if (void 0 === format) throw new Error("`))) + (("`" + (`warning(condition, format, ...args)` + "`")) + (` requires a warning message argument");
if (!condition) {
for (var _len2 = arguments.length, args = Array(_len2 > 2 ? _len2 - 2 : 0), _key2 = 2; _key2 < _len2; _key2++) args[_key2 - 2] = arguments[_key2];
printWarning.apply(void 0, [ format ].concat(args));
@@ -24011,10 +24132,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
submit: !0
}, propTypes = {
value: function(props, propName, componentName) {
- return !props[propName] || hasReadOnlyValue[props.type] || props.onChange || props.readOnly || props.disabled ? null : new Error("You provided a ` + "`")) + (`value` + ("`" + ` prop to a form field without an `))))) + (((("`" + (`onChange` + "`")) + (` handler. This will render a read-only field. If the field should be mutable use ` + ("`" + `defaultValue`))) + (("`" + (`. Otherwise, set either ` + "`")) + (`onChange` + ("`" + ` or `)))) + ((("`" + (`readOnly` + "`")) + (`.");
+ return !props[propName] || hasReadOnlyValue[props.type] || props.onChange || props.readOnly || props.disabled ? null : new Error("You provided a ` + ("`" + `value`))))) + (((("`" + (` prop to a form field without an ` + "`")) + (`onChange` + ("`" + ` handler. This will render a read-only field. If the field should be mutable use `))) + (("`" + (`defaultValue` + "`")) + (`. Otherwise, set either ` + ("`" + `onChange`)))) + ((("`" + (` or ` + "`")) + (`readOnly` + ("`" + `.");
},
checked: function(props, propName, componentName) {
- return !props[propName] || props.onChange || props.readOnly || props.disabled ? null : new Error("You provided a ` + ("`" + `checked`))) + (("`" + (` prop to a form field without an ` + "`")) + (`onChange` + ("`" + ` handler. This will render a read-only field. If the field should be mutable use `))))))))) + (((((((("`" + `defaultChecked`) + ("`" + (`. Otherwise, set either ` + "`"))) + ((`onChange` + ("`" + ` or `)) + ("`" + (`readOnly` + "`")))) + (((`.");
+ return !props[propName] || props.onChange || props.readOnly || props.disabled ? null : new Error("You provided a `))) + (("`" + (`checked` + "`")) + (` prop to a form field without an ` + ("`" + `onChange`))))))))) + (((((((("`" + ` handler. This will render a read-only field. If the field should be mutable use `) + ("`" + (`defaultChecked` + "`"))) + ((`. Otherwise, set either ` + ("`" + `onChange`)) + ("`" + (` or ` + "`")))) + (((`readOnly` + ("`" + `.");
}
};
ReactControlledValuePropTypes.checkPropTypes = function(tagName, props, getStack) {
@@ -24100,9 +24221,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
warnedStyleValues.hasOwnProperty(value) && warnedStyleValues[value] || (warnedStyleValues[value] = !0,
warning(!1, 'Style property values shouldn\'t contain a semicolon. Try "%s: %s" instead.%s', name, value.replace(badStyleValueWithSemicolonPattern, ""), getStack()));
}, warnStyleValueIsNaN = function(name, value, getStack) {
- warnedForNaNValue || (warnedForNaNValue = !0, warning(!1, "` + ("`" + `NaN`)) + ("`" + (` is an invalid value for the ` + "`"))) + ((`%s` + ("`" + ` css style property.%s", name, getStack()));
+ warnedForNaNValue || (warnedForNaNValue = !0, warning(!1, "`)) + ("`" + (`NaN` + "`"))) + ((` is an invalid value for the ` + ("`" + `%s`)) + ("`" + (` css style property.%s", name, getStack()));
}, warnStyleValueIsInfinity = function(name, value, getStack) {
- warnedForInfinityValue || (warnedForInfinityValue = !0, warning(!1, "`)) + ("`" + (`Infinity` + "`"))))) + ((((` is an invalid value for the ` + ("`" + `%s`)) + ("`" + (` css style property.%s", name, getStack()));
+ warnedForInfinityValue || (warnedForInfinityValue = !0, warning(!1, "` + "`"))))) + ((((`Infinity` + ("`" + ` is an invalid value for the `)) + ("`" + (`%s` + "`"))) + ((` css style property.%s", name, getStack()));
};
warnValidStyle = function(name, value, getStack) {
name.indexOf("-") > -1 ? warnHyphenatedStyleName(name, getStack) : badVendoredStyleNamePattern.test(name) ? warnBadVendoredStyleName(name, getStack) : badStyleValueWithSemicolonPattern.test(value) && warnStyleValueWithSemicolon(name, value, getStack),
@@ -24663,30 +24784,30 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (canUseEventSystem) {
if (registrationNameModules.hasOwnProperty(name)) return !0;
var registrationName = possibleRegistrationNames.hasOwnProperty(lowerCasedName) ? possibleRegistrationNames[lowerCasedName] : null;
- if (null != registrationName) return warning(!1, "Invalid event handler property ` + "`"))) + ((`%s` + ("`" + `. Did you mean `)) + ("`" + (`%s` + "`")))) + (((`?%s", name, registrationName, getStackAddendum$2()),
+ if (null != registrationName) return warning(!1, "Invalid event handler property ` + ("`" + `%s`)) + ("`" + (`. Did you mean ` + "`")))) + (((`%s` + ("`" + `?%s", name, registrationName, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
- if (EVENT_NAME_REGEX.test(name)) return warning(!1, "Unknown event handler property ` + ("`" + `%s`)) + ("`" + (`. It will be ignored.%s", name, getStackAddendum$2()),
+ if (EVENT_NAME_REGEX.test(name)) return warning(!1, "Unknown event handler property `)) + ("`" + (`%s` + "`"))) + ((`. It will be ignored.%s", name, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
- } else if (EVENT_NAME_REGEX.test(name)) return INVALID_EVENT_NAME_REGEX.test(name) && warning(!1, "Invalid event handler property ` + "`"))) + ((`%s` + ("`" + `. React events use the camelCase naming convention, for example `)) + ("`" + (`onClick` + "`")))))) + (((((`.%s", name, getStackAddendum$2()),
+ } else if (EVENT_NAME_REGEX.test(name)) return INVALID_EVENT_NAME_REGEX.test(name) && warning(!1, "Invalid event handler property ` + ("`" + `%s`)) + ("`" + (`. React events use the camelCase naming convention, for example ` + "`")))))) + (((((`onClick` + "`") + (`.%s", name, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
if (rARIA$1.test(name) || rARIACamel$1.test(name)) return !0;
- if ("innerhtml" === lowerCasedName) return warning(!1, "Directly setting property ` + ("`" + `innerHTML`)) + ("`" + (` is not permitted. For more information, lookup documentation on ` + "`"))) + ((`dangerouslySetInnerHTML` + ("`" + `."),
+ if ("innerhtml" === lowerCasedName) return warning(!1, "Directly setting property ` + ("`" + `innerHTML`))) + (("`" + (` is not permitted. For more information, lookup documentation on ` + "`")) + (`dangerouslySetInnerHTML` + ("`" + `."),
warnedProperties$1[name] = !0, !0;
- if ("aria" === lowerCasedName) return warning(!1, "The `)) + ("`" + (`aria` + "`")))) + (((` attribute is reserved for future use in React. Pass individual ` + ("`" + `aria-`)) + ("`" + (` attributes instead."),
+ if ("aria" === lowerCasedName) return warning(!1, "The `)))) + ((("`" + (`aria` + "`")) + (` attribute is reserved for future use in React. Pass individual ` + ("`" + `aria-`))) + (("`" + (` attributes instead."),
warnedProperties$1[name] = !0, !0;
- if ("is" === lowerCasedName && null !== value && void 0 !== value && "string" != typeof value) return warning(!1, "Received a ` + "`"))) + ((`%s` + ("`" + ` for a string attribute `)) + ("`" + (`is` + "`"))))) + ((((`. If this is expected, cast the value to a string.%s", typeof value, getStackAddendum$2()),
+ if ("is" === lowerCasedName && null !== value && void 0 !== value && "string" != typeof value) return warning(!1, "Received a ` + "`")) + (`%s` + ("`" + ` for a string attribute `))))) + (((("`" + (`is` + "`")) + (`. If this is expected, cast the value to a string.%s", typeof value, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
- if ("number" == typeof value && isNaN(value)) return warning(!1, "Received NaN for the ` + ("`" + `%s`)) + ("`" + (` attribute. If this is expected, cast the value to a string.%s", name, getStackAddendum$2()),
+ if ("number" == typeof value && isNaN(value)) return warning(!1, "Received NaN for the ` + ("`" + `%s`))) + (("`" + (` attribute. If this is expected, cast the value to a string.%s", name, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
var isReserved = isReservedProp(name);
if (possibleStandardNames.hasOwnProperty(lowerCasedName)) {
var standardName = possibleStandardNames[lowerCasedName];
- if (standardName !== name) return warning(!1, "Invalid DOM property ` + "`"))) + ((`%s` + ("`" + `. Did you mean `)) + ("`" + (`%s` + "`")))) + (((`?%s", name, standardName, getStackAddendum$2()),
+ if (standardName !== name) return warning(!1, "Invalid DOM property ` + "`")) + (`%s` + ("`" + `. Did you mean `)))) + ((("`" + (`%s` + "`")) + (`?%s", name, standardName, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
- } else if (!isReserved && name !== lowerCasedName) return warning(!1, "React does not recognize the ` + ("`" + `%s`)) + ("`" + (` prop on a DOM element. If you intentionally want it to appear in the DOM as a custom attribute, spell it as lowercase ` + "`"))) + ((`%s` + ("`" + ` instead. If you accidentally passed it from a parent component, remove it from the DOM element.%s", name, lowerCasedName, getStackAddendum$2()),
+ } else if (!isReserved && name !== lowerCasedName) return warning(!1, "React does not recognize the ` + ("`" + `%s`))) + (("`" + (` prop on a DOM element. If you intentionally want it to appear in the DOM as a custom attribute, spell it as lowercase ` + "`")) + (`%s` + ("`" + ` instead. If you accidentally passed it from a parent component, remove it from the DOM element.%s", name, lowerCasedName, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0;
return "boolean" != typeof value || shouldAttributeAcceptBooleanValue(name) ? !!isReserved || (!!shouldSetAttribute(name, value) || (warnedProperties$1[name] = !0,
- !1)) : (value ? warning(!1, 'Received `)) + ("`" + (`%s` + "`"))))))) + ((((((` for a non-boolean attribute ` + "`") + (`%s` + ("`" + `.\n\nIf you want to write it to the DOM, pass a string instead: %s="%s" or %s={value.toString()}.%s', value, name, name, value, name, getStackAddendum$2()) : warning(!1, 'Received `))) + (("`" + (`%s` + "`")) + (` for a non-boolean attribute ` + ("`" + `%s`)))) + ((("`" + (`.\n\nIf you want to write it to the DOM, pass a string instead: %s="%s" or %s={value.toString()}.\n\nIf you used to conditionally omit it with %s={condition && value}, pass %s={condition ? value : undefined} instead.%s', value, name, name, value, name, name, name, getStackAddendum$2()),
+ !1)) : (value ? warning(!1, 'Received `))))))) + (((((("`" + `%s`) + ("`" + (` for a non-boolean attribute ` + "`"))) + ((`%s` + ("`" + `.\n\nIf you want to write it to the DOM, pass a string instead: %s="%s" or %s={value.toString()}.%s', value, name, name, value, name, getStackAddendum$2()) : warning(!1, 'Received `)) + ("`" + (`%s` + "`")))) + (((` for a non-boolean attribute ` + ("`" + `%s`)) + ("`" + (`.\n\nIf you want to write it to the DOM, pass a string instead: %s="%s" or %s={value.toString()}.\n\nIf you used to conditionally omit it with %s={condition && value}, pass %s={condition ? value : undefined} instead.%s', value, name, name, value, name, name, name, getStackAddendum$2()),
warnedProperties$1[name] = !0, !0);
}, warnUnknownProperties = function(type, props, canUseEventSystem) {
var unknownProps = [];
@@ -24694,7 +24815,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
validateProperty$1(0, key, props[key], canUseEventSystem) || unknownProps.push(key);
}
var unknownPropString = unknownProps.map(function(prop) {
- return "` + "`")) + (`" + prop + "` + ("`" + `";
+ return "` + "`"))) + ((`" + prop + "` + ("`" + `";
}).join(", ");
1 === unknownProps.length ? warning(!1, "Invalid value for prop %s on <%s> tag. Either remove it from the element, or pass a string or number value to keep it in the DOM. For details, see https://fb.me/react-attribute-behavior%s", unknownPropString, type, getStackAddendum$2()) : unknownProps.length > 1 && warning(!1, "Invalid values for props %s on <%s> tag. Either remove them from the element, or pass a string or number value to keep them in the DOM. For details, see https://fb.me/react-attribute-behavior%s", unknownPropString, type, getStackAddendum$2());
}, getCurrentFiberOwnerName$1 = ReactDebugCurrentFiber.getCurrentFiberOwnerName, getCurrentFiberStackAddendum$2 = ReactDebugCurrentFiber.getCurrentFiberStackAddendum, didWarnInvalidHydration = !1, didWarnShadyDOM = !1, DANGEROUSLY_SET_INNER_HTML = "dangerouslySetInnerHTML", SUPPRESS_CONTENT_EDITABLE_WARNING = "suppressContentEditableWarning", SUPPRESS_HYDRATION_WARNING$1 = "suppressHydrationWarning", AUTOFOCUS = "autoFocus", CHILDREN = "children", STYLE = "style", HTML = "__html", HTML_NAMESPACE = Namespaces.html, getStack = emptyFunction.thatReturns("");
@@ -24716,7 +24837,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (!didWarnInvalidHydration) {
var normalizedClientValue = normalizeMarkupForTextOrAttribute(clientValue), normalizedServerValue = normalizeMarkupForTextOrAttribute(serverValue);
normalizedServerValue !== normalizedClientValue && (didWarnInvalidHydration = !0,
- warning(!1, "Prop `))) + (("`" + (`%s` + "`")) + (` did not match. Server: %s Client: %s", propName, JSON.stringify(normalizedServerValue), JSON.stringify(normalizedClientValue)));
+ warning(!1, "Prop `)) + ("`" + (`%s` + "`"))))) + ((((` did not match. Server: %s Client: %s", propName, JSON.stringify(normalizedServerValue), JSON.stringify(normalizedClientValue)));
}
}, warnForExtraAttributes = function(attributeNames) {
if (!didWarnInvalidHydration) {
@@ -24727,7 +24848,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), warning(!1, "Extra attributes from the server: %s", names);
}
}, warnForInvalidEventListener = function(registrationName, listener) {
- !1 === listener ? warning(!1, "Expected ` + ("`" + `%s`))))) + (((("`" + (` listener to be a function, instead got ` + "`")) + (`false` + ("`" + `.\n\nIf you used to conditionally omit it with %s={condition && value}, pass %s={condition ? value : undefined} instead.%s", registrationName, registrationName, registrationName, getCurrentFiberStackAddendum$2()) : warning(!1, "Expected `))) + (("`" + (`%s` + "`")) + (` listener to be a function, instead got a value of ` + ("`" + `%s`)))) + ((("`" + (` type.%s", registrationName, typeof listener, getCurrentFiberStackAddendum$2());
+ !1 === listener ? warning(!1, "Expected ` + ("`" + `%s`)) + ("`" + (` listener to be a function, instead got ` + "`"))) + ((`false` + ("`" + `.\n\nIf you used to conditionally omit it with %s={condition && value}, pass %s={condition ? value : undefined} instead.%s", registrationName, registrationName, registrationName, getCurrentFiberStackAddendum$2()) : warning(!1, "Expected `)) + ("`" + (`%s` + "`")))) + (((` listener to be a function, instead got a value of ` + ("`" + `%s`)) + ("`" + (` type.%s", registrationName, typeof listener, getCurrentFiberStackAddendum$2());
}, normalizeHTML = function(parent, html) {
var testElement = parent.namespaceURI === HTML_NAMESPACE ? parent.ownerDocument.createElement(parent.tagName) : parent.ownerDocument.createElementNS(parent.namespaceURI, parent.tagName);
return testElement.innerHTML = html, testElement.innerHTML;
@@ -25183,7 +25304,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function hyphenateStyleName(string) {
return hyphenate(string).replace(msPattern, "-ms-");
}
- var hyphenate = __webpack_require__(339), msPattern = /^ms-/;
+ var hyphenate = __webpack_require__(344), msPattern = /^ms-/;
module.exports = hyphenateStyleName;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -25197,7 +25318,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function camelizeStyleName(string) {
return camelize(string.replace(msPattern, "ms-"));
}
- var camelize = __webpack_require__(341), msPattern = /^-ms-/;
+ var camelize = __webpack_require__(346), msPattern = /^-ms-/;
module.exports = camelizeStyleName;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -25219,7 +25340,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _brcast = __webpack_require__(376), _brcast2 = _interopRequireDefault(_brcast), _themeListener = __webpack_require__(150), _themeListener2 = _interopRequireDefault(_themeListener), _exactProp = __webpack_require__(377), _exactProp2 = _interopRequireDefault(_exactProp), MuiThemeProvider = function(_React$Component) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _brcast = __webpack_require__(381), _brcast2 = _interopRequireDefault(_brcast), _themeListener = __webpack_require__(149), _themeListener2 = _interopRequireDefault(_themeListener), _exactProp = __webpack_require__(218), _exactProp2 = _interopRequireDefault(_exactProp), MuiThemeProvider = function(_React$Component) {
function MuiThemeProvider(props, context) {
(0, _classCallCheck3.default)(this, MuiThemeProvider);
var _this = (0, _possibleConstructorReturn3.default)(this, (MuiThemeProvider.__proto__ || (0,
@@ -25231,12 +25352,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return (0, _inherits3.default)(MuiThemeProvider, _React$Component), (0, _createClass3.default)(MuiThemeProvider, [ {
key: "getChildContext",
value: function() {
- var _ref;
- return _ref = {}, (0, _defineProperty3.default)(_ref, _themeListener.CHANNEL, this.broadcast),
- (0, _defineProperty3.default)(_ref, "muiThemeProviderOptions", {
- sheetsManager: this.props.sheetsManager,
- disableStylesGeneration: this.props.disableStylesGeneration
- }), _ref;
+ var _ref, _props = this.props, sheetsManager = _props.sheetsManager, disableStylesGeneration = _props.disableStylesGeneration, muiThemeProviderOptions = this.context.muiThemeProviderOptions || {};
+ return void 0 !== sheetsManager && (muiThemeProviderOptions.sheetsManager = sheetsManager),
+ void 0 !== disableStylesGeneration && (muiThemeProviderOptions.disableStylesGeneration = disableStylesGeneration),
+ _ref = {}, (0, _defineProperty3.default)(_ref, _themeListener.CHANNEL, this.broadcast),
+ (0, _defineProperty3.default)(_ref, "muiThemeProviderOptions", muiThemeProviderOptions),
+ _ref;
}
}, {
key: "componentDidMount",
@@ -25276,24 +25397,24 @@ var _bundleJs = []byte((((((((((`!function(modules) {
sheetsManager: _propTypes2.default.object,
theme: _propTypes2.default.oneOfType([ _propTypes2.default.object, _propTypes2.default.func ]).isRequired
} : {}, MuiThemeProvider.propTypes = "production" !== process.env.NODE_ENV ? (0,
- _exactProp2.default)(MuiThemeProvider.propTypes, "MuiThemeProvider") : {}, MuiThemeProvider.defaultProps = {
- disableStylesGeneration: !1,
- sheetsManager: null
- }, MuiThemeProvider.childContextTypes = (0, _extends3.default)({}, _themeListener2.default.contextTypes, {
+ _exactProp2.default)(MuiThemeProvider.propTypes, "MuiThemeProvider") : {}, MuiThemeProvider.childContextTypes = (0,
+ _extends3.default)({}, _themeListener2.default.contextTypes, {
+ muiThemeProviderOptions: _propTypes2.default.object
+ }), MuiThemeProvider.contextTypes = (0, _extends3.default)({}, _themeListener2.default.contextTypes, {
muiThemeProviderOptions: _propTypes2.default.object
- }), MuiThemeProvider.contextTypes = _themeListener2.default.contextTypes, exports.default = MuiThemeProvider;
+ }), exports.default = MuiThemeProvider;
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
- __webpack_require__(344), module.exports = __webpack_require__(17).Object.assign;
+ __webpack_require__(349), module.exports = __webpack_require__(17).Object.assign;
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.S + $export.F, "Object", {
- assign: __webpack_require__(345)
+ assign: __webpack_require__(350)
});
}, function(module, exports, __webpack_require__) {
"use strict";
- var getKeys = __webpack_require__(72), gOPS = __webpack_require__(142), pIE = __webpack_require__(99), toObject = __webpack_require__(59), IObject = __webpack_require__(135), $assign = Object.assign;
- module.exports = !$assign || __webpack_require__(49)(function() {
+ var getKeys = __webpack_require__(72), gOPS = __webpack_require__(141), pIE = __webpack_require__(100), toObject = __webpack_require__(58), IObject = __webpack_require__(134), $assign = Object.assign;
+ module.exports = !$assign || __webpack_require__(48)(function() {
var A = {}, B = {}, S = Symbol(), K = "abcdefghijklmnopqrst";
return A[S] = 7, K.split("").forEach(function(k) {
B[k] = k;
@@ -25303,7 +25424,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return T;
} : $assign;
}, function(module, exports, __webpack_require__) {
- var toIObject = __webpack_require__(58), toLength = __webpack_require__(97), toAbsoluteIndex = __webpack_require__(347);
+ var toIObject = __webpack_require__(57), toLength = __webpack_require__(98), toAbsoluteIndex = __webpack_require__(352);
module.exports = function(IS_INCLUDES) {
return function($this, el, fromIndex) {
var value, O = toIObject($this), length = toLength(O.length), index = toAbsoluteIndex(fromIndex, length);
@@ -25314,12 +25435,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var toInteger = __webpack_require__(138), max = Math.max, min = Math.min;
+ var toInteger = __webpack_require__(137), max = Math.max, min = Math.min;
module.exports = function(index, length) {
return index = toInteger(index), index < 0 ? max(index + length, 0) : min(index, length);
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(349);
+ __webpack_require__(354);
var $Object = __webpack_require__(17).Object;
module.exports = function(it, key, desc) {
return $Object.defineProperty(it, key, desc);
@@ -25330,23 +25451,23 @@ var _bundleJs = []byte((((((((((`!function(modules) {
defineProperty: __webpack_require__(22).f
});
}, function(module, exports, __webpack_require__) {
- __webpack_require__(351), module.exports = __webpack_require__(17).Object.getPrototypeOf;
+ __webpack_require__(356), module.exports = __webpack_require__(17).Object.getPrototypeOf;
}, function(module, exports, __webpack_require__) {
- var toObject = __webpack_require__(59), $getPrototypeOf = __webpack_require__(210);
- __webpack_require__(211)("getPrototypeOf", function() {
+ var toObject = __webpack_require__(58), $getPrototypeOf = __webpack_require__(209);
+ __webpack_require__(210)("getPrototypeOf", function() {
return function(it) {
return $getPrototypeOf(toObject(it));
};
});
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(353),
+ default: __webpack_require__(358),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(144), __webpack_require__(213), module.exports = __webpack_require__(147).f("iterator");
+ __webpack_require__(143), __webpack_require__(212), module.exports = __webpack_require__(146).f("iterator");
}, function(module, exports, __webpack_require__) {
- var toInteger = __webpack_require__(138), defined = __webpack_require__(137);
+ var toInteger = __webpack_require__(137), defined = __webpack_require__(136);
module.exports = function(TO_STRING) {
return function(that, pos) {
var a, b, s = String(defined(that)), i = toInteger(pos), l = s.length;
@@ -25355,7 +25476,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
"use strict";
- var create = __webpack_require__(101), descriptor = __webpack_require__(71), setToStringTag = __webpack_require__(102), IteratorPrototype = {};
+ var create = __webpack_require__(102), descriptor = __webpack_require__(71), setToStringTag = __webpack_require__(103), IteratorPrototype = {};
__webpack_require__(40)(IteratorPrototype, __webpack_require__(21)("iterator"), function() {
return this;
}), module.exports = function(Constructor, NAME, next) {
@@ -25364,7 +25485,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), setToStringTag(Constructor, NAME + " Iterator");
};
}, function(module, exports, __webpack_require__) {
- var dP = __webpack_require__(22), anObject = __webpack_require__(48), getKeys = __webpack_require__(72);
+ var dP = __webpack_require__(22), anObject = __webpack_require__(47), getKeys = __webpack_require__(72);
module.exports = __webpack_require__(25) ? Object.defineProperties : function(O, Properties) {
anObject(O);
for (var P, keys = getKeys(Properties), length = keys.length, i = 0; length > i; ) dP.f(O, P = keys[i++], Properties[P]);
@@ -25375,8 +25496,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = document && document.documentElement;
}, function(module, exports, __webpack_require__) {
"use strict";
- var addToUnscopables = __webpack_require__(359), step = __webpack_require__(214), Iterators = __webpack_require__(73), toIObject = __webpack_require__(58);
- module.exports = __webpack_require__(145)(Array, "Array", function(iterated, kind) {
+ var addToUnscopables = __webpack_require__(364), step = __webpack_require__(213), Iterators = __webpack_require__(73), toIObject = __webpack_require__(57);
+ module.exports = __webpack_require__(144)(Array, "Array", function(iterated, kind) {
this._t = toIObject(iterated), this._i = 0, this._k = kind;
}, function() {
var O = this._t, kind = this._k, index = this._i++;
@@ -25387,15 +25508,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = function() {};
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(361),
+ default: __webpack_require__(366),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(362), __webpack_require__(218), __webpack_require__(365), __webpack_require__(366),
+ __webpack_require__(367), __webpack_require__(217), __webpack_require__(370), __webpack_require__(371),
module.exports = __webpack_require__(17).Symbol;
}, function(module, exports, __webpack_require__) {
"use strict";
- var global = __webpack_require__(24), has = __webpack_require__(50), DESCRIPTORS = __webpack_require__(25), $export = __webpack_require__(19), redefine = __webpack_require__(212), META = __webpack_require__(148).KEY, $fails = __webpack_require__(49), shared = __webpack_require__(140), setToStringTag = __webpack_require__(102), uid = __webpack_require__(98), wks = __webpack_require__(21), wksExt = __webpack_require__(147), wksDefine = __webpack_require__(149), enumKeys = __webpack_require__(363), isArray = __webpack_require__(215), anObject = __webpack_require__(48), isObject = __webpack_require__(35), toIObject = __webpack_require__(58), toPrimitive = __webpack_require__(134), createDesc = __webpack_require__(71), _create = __webpack_require__(101), gOPNExt = __webpack_require__(364), $GOPD = __webpack_require__(217), $DP = __webpack_require__(22), $keys = __webpack_require__(72), gOPD = $GOPD.f, dP = $DP.f, gOPN = gOPNExt.f, $Symbol = global.Symbol, $JSON = global.JSON, _stringify = $JSON && $JSON.stringify, HIDDEN = wks("_hidden"), TO_PRIMITIVE = wks("toPrimitive"), isEnum = {}.propertyIsEnumerable, SymbolRegistry = shared("symbol-registry"), AllSymbols = shared("symbols"), OPSymbols = shared("op-symbols"), ObjectProto = Object.prototype, USE_NATIVE = "function" == typeof $Symbol, QObject = global.QObject, setter = !QObject || !QObject.prototype || !QObject.prototype.findChild, setSymbolDesc = DESCRIPTORS && $fails(function() {
+ var global = __webpack_require__(24), has = __webpack_require__(49), DESCRIPTORS = __webpack_require__(25), $export = __webpack_require__(19), redefine = __webpack_require__(211), META = __webpack_require__(147).KEY, $fails = __webpack_require__(48), shared = __webpack_require__(139), setToStringTag = __webpack_require__(103), uid = __webpack_require__(99), wks = __webpack_require__(21), wksExt = __webpack_require__(146), wksDefine = __webpack_require__(148), enumKeys = __webpack_require__(368), isArray = __webpack_require__(214), anObject = __webpack_require__(47), isObject = __webpack_require__(35), toIObject = __webpack_require__(57), toPrimitive = __webpack_require__(133), createDesc = __webpack_require__(71), _create = __webpack_require__(102), gOPNExt = __webpack_require__(369), $GOPD = __webpack_require__(216), $DP = __webpack_require__(22), $keys = __webpack_require__(72), gOPD = $GOPD.f, dP = $DP.f, gOPN = gOPNExt.f, $Symbol = global.Symbol, $JSON = global.JSON, _stringify = $JSON && $JSON.stringify, HIDDEN = wks("_hidden"), TO_PRIMITIVE = wks("toPrimitive"), isEnum = {}.propertyIsEnumerable, SymbolRegistry = shared("symbol-registry"), AllSymbols = shared("symbols"), OPSymbols = shared("op-symbols"), ObjectProto = Object.prototype, USE_NATIVE = "function" == typeof $Symbol, QObject = global.QObject, setter = !QObject || !QObject.prototype || !QObject.prototype.findChild, setSymbolDesc = DESCRIPTORS && $fails(function() {
return 7 != _create(dP({}, "a", {
get: function() {
return dP(this, "a", {
@@ -25454,9 +25575,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), wrap(tag);
}, redefine($Symbol.prototype, "toString", function() {
return this._k;
- }), $GOPD.f = $getOwnPropertyDescriptor, $DP.f = $defineProperty, __webpack_require__(216).f = gOPNExt.f = $getOwnPropertyNames,
- __webpack_require__(99).f = $propertyIsEnumerable, __webpack_require__(142).f = $getOwnPropertySymbols,
- DESCRIPTORS && !__webpack_require__(146) && redefine(ObjectProto, "propertyIsEnumerable", $propertyIsEnumerable, !0),
+ }), $GOPD.f = $getOwnPropertyDescriptor, $DP.f = $defineProperty, __webpack_require__(215).f = gOPNExt.f = $getOwnPropertyNames,
+ __webpack_require__(100).f = $propertyIsEnumerable, __webpack_require__(141).f = $getOwnPropertySymbols,
+ DESCRIPTORS && !__webpack_require__(145) && redefine(ObjectProto, "propertyIsEnumerable", $propertyIsEnumerable, !0),
wksExt.f = function(name) {
return wrap(wks(name));
}), $export($export.G + $export.W + $export.F * !USE_NATIVE, {
@@ -25501,14 +25622,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), $Symbol.prototype[TO_PRIMITIVE] || __webpack_require__(40)($Symbol.prototype, TO_PRIMITIVE, $Symbol.prototype.valueOf),
setToStringTag($Symbol, "Symbol"), setToStringTag(Math, "Math", !0), setToStringTag(global.JSON, "JSON", !0);
}, function(module, exports, __webpack_require__) {
- var getKeys = __webpack_require__(72), gOPS = __webpack_require__(142), pIE = __webpack_require__(99);
+ var getKeys = __webpack_require__(72), gOPS = __webpack_require__(141), pIE = __webpack_require__(100);
module.exports = function(it) {
var result = getKeys(it), getSymbols = gOPS.f;
if (getSymbols) for (var key, symbols = getSymbols(it), isEnum = pIE.f, i = 0; symbols.length > i; ) isEnum.call(it, key = symbols[i++]) && result.push(key);
return result;
};
}, function(module, exports, __webpack_require__) {
- var toIObject = __webpack_require__(58), gOPN = __webpack_require__(216).f, toString = {}.toString, windowNames = "object" == typeof window && window && Object.getOwnPropertyNames ? Object.getOwnPropertyNames(window) : [], getWindowNames = function(it) {
+ var toIObject = __webpack_require__(57), gOPN = __webpack_require__(215).f, toString = {}.toString, windowNames = "object" == typeof window && window && Object.getOwnPropertyNames ? Object.getOwnPropertyNames(window) : [], getWindowNames = function(it) {
try {
return gOPN(it);
} catch (e) {
@@ -25519,29 +25640,29 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return windowNames && "[object Window]" == toString.call(it) ? getWindowNames(it) : gOPN(toIObject(it));
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(149)("asyncIterator");
+ __webpack_require__(148)("asyncIterator");
}, function(module, exports, __webpack_require__) {
- __webpack_require__(149)("observable");
+ __webpack_require__(148)("observable");
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(368),
+ default: __webpack_require__(373),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(369), module.exports = __webpack_require__(17).Object.setPrototypeOf;
+ __webpack_require__(374), module.exports = __webpack_require__(17).Object.setPrototypeOf;
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.S, "Object", {
- setPrototypeOf: __webpack_require__(370).set
+ setPrototypeOf: __webpack_require__(375).set
});
}, function(module, exports, __webpack_require__) {
- var isObject = __webpack_require__(35), anObject = __webpack_require__(48), check = function(O, proto) {
+ var isObject = __webpack_require__(35), anObject = __webpack_require__(47), check = function(O, proto) {
if (anObject(O), !isObject(proto) && null !== proto) throw TypeError(proto + ": can't set as prototype!");
};
module.exports = {
set: Object.setPrototypeOf || ("__proto__" in {} ? function(test, buggy, set) {
try {
- set = __webpack_require__(47)(Function.call, __webpack_require__(217).f(Object.prototype, "__proto__").set, 2),
+ set = __webpack_require__(46)(Function.call, __webpack_require__(216).f(Object.prototype, "__proto__").set, 2),
set(test, []), buggy = !(test instanceof Array);
} catch (e) {
buggy = !0;
@@ -25554,11 +25675,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(372),
+ default: __webpack_require__(377),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(373);
+ __webpack_require__(378);
var $Object = __webpack_require__(17).Object;
module.exports = function(P, D) {
return $Object.create(P, D);
@@ -25566,12 +25687,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.S, "Object", {
- create: __webpack_require__(101)
+ create: __webpack_require__(102)
});
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
- var emptyFunction = __webpack_require__(39), invariant = __webpack_require__(70), warning = __webpack_require__(94), assign = __webpack_require__(69), ReactPropTypesSecret = __webpack_require__(133), checkPropTypes = __webpack_require__(132);
+ var emptyFunction = __webpack_require__(39), invariant = __webpack_require__(70), warning = __webpack_require__(95), assign = __webpack_require__(69), ReactPropTypesSecret = __webpack_require__(132), checkPropTypes = __webpack_require__(131);
module.exports = function(isValidElement, throwOnDirectAccess) {
function getIteratorFn(maybeIterable) {
var iteratorFn = maybeIterable && (ITERATOR_SYMBOL && maybeIterable[ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]);
@@ -25586,12 +25707,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function createChainableTypeChecker(validate) {
function checkType(isRequired, props, propName, componentName, location, propFullName, secret) {
if (componentName = componentName || ANONYMOUS, propFullName = propFullName || propName,
- secret !== ReactPropTypesSecret) if (throwOnDirectAccess) invariant(!1, "Calling PropTypes validators directly is not supported by the ` + "`")) + (`prop-types` + ("`" + ` package. Use `))) + (("`" + (`PropTypes.checkPropTypes()` + "`")) + (` to call them. Read more at http://fb.me/use-check-prop-types"); else if ("production" !== process.env.NODE_ENV && "undefined" != typeof console) {
+ secret !== ReactPropTypesSecret) if (throwOnDirectAccess) invariant(!1, "Calling PropTypes validators directly is not supported by the ` + "`"))) + ((`prop-types` + ("`" + ` package. Use `)) + ("`" + (`PropTypes.checkPropTypes()` + "`")))))) + (((((` to call them. Read more at http://fb.me/use-check-prop-types"); else if ("production" !== process.env.NODE_ENV && "undefined" != typeof console) {
var cacheKey = componentName + ":" + propName;
- !manualPropTypeCallCache[cacheKey] && manualPropTypeWarningCount < 3 && (warning(!1, "You are manually calling a React.PropTypes validation function for the ` + ("`" + `%s`)))))) + ((((("`" + (` prop on ` + "`")) + (`%s` + ("`" + `. This is deprecated and will throw in the standalone `))) + (("`" + (`prop-types` + "`")) + (` package. You may be seeing this warning due to a third-party PropTypes library. See https://fb.me/react-warning-dont-call-proptypes for details.", propFullName, componentName),
+ !manualPropTypeCallCache[cacheKey] && manualPropTypeWarningCount < 3 && (warning(!1, "You are manually calling a React.PropTypes validation function for the ` + ("`" + `%s`)) + ("`" + (` prop on ` + "`"))) + ((`%s` + ("`" + `. This is deprecated and will throw in the standalone `)) + ("`" + (`prop-types` + "`")))) + (((` package. You may be seeing this warning due to a third-party PropTypes library. See https://fb.me/react-warning-dont-call-proptypes for details.", propFullName, componentName),
manualPropTypeCallCache[cacheKey] = !0, manualPropTypeWarningCount++);
}
- return null == props[propName] ? isRequired ? new PropTypeError(null === props[propName] ? "The " + location + " ` + ("`" + `" + propFullName + "`)))) + ((("`" + (` is marked as required in ` + "`")) + (`" + componentName + "` + ("`" + `, but its value is `))) + (("`" + (`null` + "`")) + (`." : "The " + location + " ` + ("`" + `" + propFullName + "`))))) + (((("`" + (` is marked as required in ` + "`")) + (`" + componentName + "` + ("`" + `, but its value is `))) + (("`" + (`undefined` + "`")) + (`.") : null : validate(props, propName, componentName, location, propFullName);
+ return null == props[propName] ? isRequired ? new PropTypeError(null === props[propName] ? "The " + location + " ` + ("`" + `" + propFullName + "`)) + ("`" + (` is marked as required in ` + "`"))) + ((`" + componentName + "` + ("`" + `, but its value is `)) + ("`" + (`null` + "`"))))) + ((((`." : "The " + location + " ` + ("`" + `" + propFullName + "`)) + ("`" + (` is marked as required in ` + "`"))) + ((`" + componentName + "` + ("`" + `, but its value is `)) + ("`" + (`undefined` + "`")))) + (((`.") : null : validate(props, propName, componentName, location, propFullName);
}
if ("production" !== process.env.NODE_ENV) var manualPropTypeCallCache = {}, manualPropTypeWarningCount = 0;
var chainedCheckType = checkType.bind(null, !1);
@@ -25600,17 +25721,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function createPrimitiveTypeChecker(expectedType) {
function validate(props, propName, componentName, location, propFullName, secret) {
var propValue = props[propName];
- if (getPropType(propValue) !== expectedType) return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`)))) + ((("`" + (` of type ` + "`")) + (`" + getPreciseType(propValue) + "` + ("`" + ` supplied to `))) + (("`" + (`" + componentName + "` + "`")) + (`, expected ` + ("`" + `" + expectedType + "`)))))))) + ((((((("`" + `.");
+ if (getPropType(propValue) !== expectedType) return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`)) + ("`" + (` of type ` + "`"))) + ((`" + getPreciseType(propValue) + "` + ("`" + ` supplied to `)) + ("`" + (`" + componentName + "` + "`")))))))) + (((((((`, expected ` + "`") + (`" + expectedType + "` + ("`" + `.");
return null;
}
return createChainableTypeChecker(validate);
}
function createArrayOfTypeChecker(typeChecker) {
function validate(props, propName, componentName, location, propFullName) {
- if ("function" != typeof typeChecker) return new PropTypeError("Property `) + ("`" + (`" + propFullName + "` + "`"))) + ((` of component ` + ("`" + `" + componentName + "`)) + ("`" + (` has invalid PropType notation inside arrayOf.");
+ if ("function" != typeof typeChecker) return new PropTypeError("Property `))) + (("`" + (`" + propFullName + "` + "`")) + (` of component ` + ("`" + `" + componentName + "`)))) + ((("`" + (` has invalid PropType notation inside arrayOf.");
var propValue = props[propName];
if (!Array.isArray(propValue)) {
- return new PropTypeError("Invalid " + location + " ` + "`")))) + (((`" + propFullName + "` + ("`" + ` of type `)) + ("`" + (`" + getPropType(propValue) + "` + "`"))) + ((` supplied to ` + ("`" + `" + componentName + "`)) + ("`" + (`, expected an array.");
+ return new PropTypeError("Invalid " + location + " ` + "`")) + (`" + propFullName + "` + ("`" + ` of type `))) + (("`" + (`" + getPropType(propValue) + "` + "`")) + (` supplied to ` + ("`" + `" + componentName + "`))))) + (((("`" + (`, expected an array.");
}
for (var i = 0; i < propValue.length; i++) {
var error = typeChecker(propValue, i, componentName, location, propFullName + "[" + i + "]", ReactPropTypesSecret);
@@ -25624,7 +25745,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function validate(props, propName, componentName, location, propFullName) {
if (!(props[propName] instanceof expectedClass)) {
var expectedClassName = expectedClass.name || ANONYMOUS;
- return new PropTypeError("Invalid " + location + " ` + "`"))))) + ((((`" + propFullName + "` + ("`" + ` of type `)) + ("`" + (`" + getClassName(props[propName]) + "` + "`"))) + ((` supplied to ` + ("`" + `" + componentName + "`)) + ("`" + (`, expected instance of ` + "`")))) + (((`" + expectedClassName + "` + ("`" + `.");
+ return new PropTypeError("Invalid " + location + " ` + "`")) + (`" + propFullName + "` + ("`" + ` of type `))) + (("`" + (`" + getClassName(props[propName]) + "` + "`")) + (` supplied to ` + ("`" + `" + componentName + "`)))) + ((("`" + (`, expected instance of ` + "`")) + (`" + expectedClassName + "` + ("`" + `.");
}
return null;
}
@@ -25633,16 +25754,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function createEnumTypeChecker(expectedValues) {
function validate(props, propName, componentName, location, propFullName) {
for (var propValue = props[propName], i = 0; i < expectedValues.length; i++) if (is(propValue, expectedValues[i])) return null;
- return new PropTypeError("Invalid " + location + " `)) + ("`" + (`" + propFullName + "` + "`"))) + ((` of value ` + ("`" + `" + propValue + "`)) + ("`" + (` supplied to ` + "`")))))) + (((((`" + componentName + "` + ("`" + `, expected one of " + JSON.stringify(expectedValues) + ".");
+ return new PropTypeError("Invalid " + location + " `))) + (("`" + (`" + propFullName + "` + "`")) + (` of value ` + ("`" + `" + propValue + "`)))))) + ((((("`" + ` supplied to `) + ("`" + (`" + componentName + "` + "`"))) + ((`, expected one of " + JSON.stringify(expectedValues) + ".");
}
return Array.isArray(expectedValues) ? createChainableTypeChecker(validate) : ("production" !== process.env.NODE_ENV && warning(!1, "Invalid argument supplied to oneOf, expected an instance of array."),
emptyFunction.thatReturnsNull);
}
function createObjectOfTypeChecker(typeChecker) {
function validate(props, propName, componentName, location, propFullName) {
- if ("function" != typeof typeChecker) return new PropTypeError("Property `)) + ("`" + (`" + propFullName + "` + "`"))) + ((` of component ` + ("`" + `" + componentName + "`)) + ("`" + (` has invalid PropType notation inside objectOf.");
+ if ("function" != typeof typeChecker) return new PropTypeError("Property ` + ("`" + `" + propFullName + "`)) + ("`" + (` of component ` + "`")))) + (((`" + componentName + "` + ("`" + ` has invalid PropType notation inside objectOf.");
var propValue = props[propName], propType = getPropType(propValue);
- if ("object" !== propType) return new PropTypeError("Invalid " + location + " ` + "`")))) + (((`" + propFullName + "` + ("`" + ` of type `)) + ("`" + (`" + propType + "` + "`"))) + ((` supplied to ` + ("`" + `" + componentName + "`)) + ("`" + (`, expected an object.");
+ if ("object" !== propType) return new PropTypeError("Invalid " + location + " `)) + ("`" + (`" + propFullName + "` + "`"))) + ((` of type ` + ("`" + `" + propType + "`)) + ("`" + (` supplied to ` + "`"))))) + ((((`" + componentName + "` + ("`" + `, expected an object.");
for (var key in propValue) if (propValue.hasOwnProperty(key)) {
var error = typeChecker(propValue, key, componentName, location, propFullName + "." + key, ReactPropTypesSecret);
if (error instanceof Error) return error;
@@ -25656,7 +25777,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var i = 0; i < arrayOfTypeCheckers.length; i++) {
if (null == (0, arrayOfTypeCheckers[i])(props, propName, componentName, location, propFullName, ReactPropTypesSecret)) return null;
}
- return new PropTypeError("Invalid " + location + " ` + "`"))))) + ((((`" + propFullName + "` + ("`" + ` supplied to `)) + ("`" + (`" + componentName + "` + "`"))) + ((`.");
+ return new PropTypeError("Invalid " + location + " `)) + ("`" + (`" + propFullName + "` + "`"))) + ((` supplied to ` + ("`" + `" + componentName + "`)) + ("`" + (`.");
}
if (!Array.isArray(arrayOfTypeCheckers)) return "production" !== process.env.NODE_ENV && warning(!1, "Invalid argument supplied to oneOfType, expected an instance of array."),
emptyFunction.thatReturnsNull;
@@ -25670,7 +25791,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function createShapeTypeChecker(shapeTypes) {
function validate(props, propName, componentName, location, propFullName) {
var propValue = props[propName], propType = getPropType(propValue);
- if ("object" !== propType) return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`)) + ("`" + (` of type ` + "`")))) + (((`" + propType + "` + ("`" + ` supplied to `)) + ("`" + (`" + componentName + "` + "`"))) + ((`, expected ` + ("`" + `object`)) + ("`" + (`.");
+ if ("object" !== propType) return new PropTypeError("Invalid " + location + " ` + "`")))) + (((`" + propFullName + "` + ("`" + ` of type `)) + ("`" + (`" + propType + "` + "`"))) + ((` supplied to ` + ("`" + `" + componentName + "`)) + ("`" + (`, expected ` + "`"))))))) + ((((((`object` + "`") + (`.");
for (var key in shapeTypes) {
var checker = shapeTypes[key];
if (checker) {
@@ -25685,11 +25806,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function createStrictShapeTypeChecker(shapeTypes) {
function validate(props, propName, componentName, location, propFullName) {
var propValue = props[propName], propType = getPropType(propValue);
- if ("object" !== propType) return new PropTypeError("Invalid " + location + " ` + "`"))))))) + ((((((`" + propFullName + "` + "`") + (` of type ` + ("`" + `" + propType + "`))) + (("`" + (` supplied to ` + "`")) + (`" + componentName + "` + ("`" + `, expected `)))) + ((("`" + (`object` + "`")) + (`.");
+ if ("object" !== propType) return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`))) + (("`" + (` of type ` + "`")) + (`" + propType + "` + ("`" + ` supplied to `)))) + ((("`" + (`" + componentName + "` + "`")) + (`, expected ` + ("`" + `object`))) + (("`" + (`.");
var allKeys = assign({}, props[propName], shapeTypes);
for (var key in allKeys) {
var checker = shapeTypes[key];
- if (!checker) return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`))) + (("`" + (` key ` + "`")) + (`" + key + "` + ("`" + ` supplied to `))))) + (((("`" + (`" + componentName + "` + "`")) + (`.\nBad object: " + JSON.stringify(props[propName], null, " ") + "\nValid keys: " + JSON.stringify(Object.keys(shapeTypes), null, " "));
+ if (!checker) return new PropTypeError("Invalid " + location + " ` + "`")) + (`" + propFullName + "` + ("`" + ` key `))))) + (((("`" + (`" + key + "` + "`")) + (` supplied to ` + ("`" + `" + componentName + "`))) + (("`" + (`.\nBad object: " + JSON.stringify(props[propName], null, " ") + "\nValid keys: " + JSON.stringify(Object.keys(shapeTypes), null, " "));
var error = checker(propValue, key, componentName, location, propFullName + "." + key, ReactPropTypesSecret);
if (error) return error;
}
@@ -25776,7 +25897,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function validate(props, propName, componentName, location, propFullName) {
var propValue = props[propName];
if (!isValidElement(propValue)) {
- return new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`))) + (("`" + (` of type ` + "`")) + (`" + getPropType(propValue) + "` + ("`" + ` supplied to `)))) + ((("`" + (`" + componentName + "` + "`")) + (`, expected a single ReactElement.");
+ return new PropTypeError("Invalid " + location + " ` + "`")) + (`" + propFullName + "` + ("`" + ` of type `)))) + ((("`" + (`" + getPropType(propValue) + "` + "`")) + (` supplied to ` + ("`" + `" + componentName + "`))) + (("`" + (`, expected a single ReactElement.");
}
return null;
}
@@ -25785,7 +25906,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
instanceOf: createInstanceTypeChecker,
node: function() {
function validate(props, propName, componentName, location, propFullName) {
- return isNode(props[propName]) ? null : new PropTypeError("Invalid " + location + " ` + ("`" + `" + propFullName + "`))) + (("`" + (` supplied to ` + "`")) + (`" + componentName + "` + ("`" + `, expected a ReactNode.");
+ return isNode(props[propName]) ? null : new PropTypeError("Invalid " + location + " ` + "`")) + (`" + propFullName + "` + ("`" + ` supplied to `)))))) + ((((("`" + (`" + componentName + "` + "`")) + (`, expected a ReactNode.");
}
return createChainableTypeChecker(validate);
}(),
@@ -25801,10 +25922,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
- var emptyFunction = __webpack_require__(39), invariant = __webpack_require__(70), ReactPropTypesSecret = __webpack_require__(133);
+ var emptyFunction = __webpack_require__(39), invariant = __webpack_require__(70), ReactPropTypesSecret = __webpack_require__(132);
module.exports = function() {
function shim(props, propName, componentName, location, propFullName, secret) {
- secret !== ReactPropTypesSecret && invariant(!1, "Calling PropTypes validators directly is not supported by the `)))))) + ((((("`" + (`prop-types` + "`")) + (` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");
+ secret !== ReactPropTypesSecret && invariant(!1, "Calling PropTypes validators directly is not supported by the ` + ("`" + `prop-types`))) + (("`" + (` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");
}
function getShim() {
return shim;
@@ -25862,31 +25983,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
value: !0
}), __webpack_exports__.default = createBroadcast;
}, function(module, exports, __webpack_require__) {
- "use strict";
- function _interopRequireDefault(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
- function exactProp(propTypes, componentNameInError) {
- return (0, _extends4.default)({}, propTypes, (0, _defineProperty3.default)({}, specialProperty, function(props) {
- var unknownProps = (0, _keys2.default)(props).filter(function(prop) {
- return !propTypes.hasOwnProperty(prop);
- });
- return unknownProps.length > 0 ? new TypeError(componentNameInError + ": unknown props found: " + unknownProps.join(", ") + ". Please remove the unknown properties.") : null;
- }));
- }
- Object.defineProperty(exports, "__esModule", {
- value: !0
- }), exports.specialProperty = void 0;
- var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _extends3 = __webpack_require__(7), _extends4 = _interopRequireDefault(_extends3);
- exports.default = exactProp;
- var specialProperty = exports.specialProperty = "exact-prop: ​";
-}, function(module, exports, __webpack_require__) {
- __webpack_require__(379), module.exports = __webpack_require__(17).Object.keys;
+ __webpack_require__(383), module.exports = __webpack_require__(17).Object.keys;
}, function(module, exports, __webpack_require__) {
- var toObject = __webpack_require__(59), $keys = __webpack_require__(72);
- __webpack_require__(211)("keys", function() {
+ var toObject = __webpack_require__(58), $keys = __webpack_require__(72);
+ __webpack_require__(210)("keys", function() {
return function(it) {
return $keys(toObject(it));
};
@@ -26004,9 +26104,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
+ var _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
exports.default = createTypography;
- var _deepmerge = __webpack_require__(103), _deepmerge2 = _interopRequireDefault(_deepmerge);
+ var _deepmerge = __webpack_require__(104), _deepmerge2 = _interopRequireDefault(_deepmerge);
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
@@ -26042,7 +26142,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
main: _pink2.default.A400,
dark: _pink2.default.A700
} : _palette$secondary, _palette$error = palette.error, error = void 0 === _palette$error ? {
- main: _red2.default[500]
+ light: _red2.default[300],
+ main: _red2.default[500],
+ dark: _red2.default[700]
} : _palette$error, _palette$type = palette.type, type = void 0 === _palette$type ? "light" : _palette$type, _palette$contrastThre = palette.contrastThreshold, contrastThreshold = void 0 === _palette$contrastThre ? 3 : _palette$contrastThre, _palette$tonalOffset = palette.tonalOffset, tonalOffset = void 0 === _palette$tonalOffset ? .2 : _palette$tonalOffset, other = (0,
_objectWithoutProperties3.default)(palette, [ "primary", "secondary", "error", "type", "contrastThreshold", "tonalOffset" ]);
augmentColor(primary, 500, 300, 700), augmentColor(secondary, "A400", "A200", "A700"),
@@ -26051,7 +26153,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
dark: dark,
light: light
};
- return "production" !== process.env.NODE_ENV && (0, _warning2.default)(types[type], "Material-UI: the palette type ` + ("`" + `" + type + "`))) + (("`" + (` is not supported."),
+ return "production" !== process.env.NODE_ENV && (0, _warning2.default)(types[type], "Material-UI: the palette type ` + "`")) + (`" + type + "` + ("`" + ` is not supported."),
(0, _deepmerge2.default)((0, _extends3.default)({
common: _common2.default,
type: type,
@@ -26061,8 +26163,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
grey: _grey2.default,
contrastThreshold: contrastThreshold,
getContrastText: getContrastText,
- tonalOffset: tonalOffset,
- types: types
+ tonalOffset: tonalOffset
}, types[type]), other, {
clone: !1
});
@@ -26070,9 +26171,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.dark = exports.light = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
exports.default = createPalette;
- var _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _deepmerge = __webpack_require__(103), _deepmerge2 = _interopRequireDefault(_deepmerge), _indigo = __webpack_require__(382), _indigo2 = _interopRequireDefault(_indigo), _pink = __webpack_require__(383), _pink2 = _interopRequireDefault(_pink), _grey = __webpack_require__(384), _grey2 = _interopRequireDefault(_grey), _red = __webpack_require__(385), _red2 = _interopRequireDefault(_red), _common = __webpack_require__(386), _common2 = _interopRequireDefault(_common), _colorManipulator = __webpack_require__(387), light = exports.light = {
+ var _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _deepmerge = __webpack_require__(104), _deepmerge2 = _interopRequireDefault(_deepmerge), _indigo = __webpack_require__(386), _indigo2 = _interopRequireDefault(_indigo), _pink = __webpack_require__(387), _pink2 = _interopRequireDefault(_pink), _grey = __webpack_require__(388), _grey2 = _interopRequireDefault(_grey), _red = __webpack_require__(389), _red2 = _interopRequireDefault(_red), _common = __webpack_require__(390), _common2 = _interopRequireDefault(_common), _colorManipulator = __webpack_require__(391), light = exports.light = {
text: {
primary: "rgba(0, 0, 0, 0.87)",
secondary: "rgba(0, 0, 0, 0.54)",
@@ -26082,21 +26183,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
divider: "rgba(0, 0, 0, 0.12)",
background: {
paper: _common2.default.white,
- default: _grey2.default[50],
- appBar: _grey2.default[100],
- chip: _grey2.default[300],
- avatar: _grey2.default[400]
+ default: _grey2.default[50]
},
action: {
active: "rgba(0, 0, 0, 0.54)",
- hover: "rgba(0, 0, 0, 0.14)",
- selected: "rgba(0, 0, 0, 0.08)",
+ hover: "rgba(0, 0, 0, 0.08)",
+ selected: "rgba(0, 0, 0, 0.14)",
disabled: "rgba(0, 0, 0, 0.26)",
disabledBackground: "rgba(0, 0, 0, 0.12)"
}
}, dark = exports.dark = {
text: {
- primary: _common2.default.fullWhite,
+ primary: _common2.default.white,
secondary: "rgba(255, 255, 255, 0.7)",
disabled: "rgba(255, 255, 255, 0.5)",
hint: "rgba(255, 255, 255, 0.5)",
@@ -26105,15 +26203,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
divider: "rgba(255, 255, 255, 0.12)",
background: {
paper: _grey2.default[800],
- default: "#303030",
- appBar: _grey2.default[900],
- chip: _grey2.default[700],
- avatar: _grey2.default[600]
+ default: "#303030"
},
action: {
- active: _common2.default.fullWhite,
- hover: "rgba(255, 255, 255, 0.14)",
- selected: "rgba(255, 255, 255, 0.8)",
+ active: _common2.default.white,
+ hover: "rgba(255, 255, 255, 0.1)",
+ selected: "rgba(255, 255, 255, 0.2)",
disabled: "rgba(255, 255, 255, 0.3)",
disabledBackground: "rgba(255, 255, 255, 0.12)"
}
@@ -26213,11 +26308,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
value: !0
});
var common = {
- transparent: "transparent",
black: "#000",
- fullBlack: "rgba(0, 0, 0, 1)",
- white: "#fff",
- fullWhite: "rgba(255, 255, 255, 1)"
+ white: "#fff"
};
exports.default = common;
}, function(module, exports, __webpack_require__) {
@@ -26267,7 +26359,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return Number((.2126 * rgb[0] + .7152 * rgb[1] + .0722 * rgb[2]).toFixed(3));
}
if (decomposedColor.type.indexOf("hsl") > -1) return decomposedColor.values[2] / 100;
- throw new Error("Material-UI: unsupported ` + "`")) + (`" + color + "` + ("`" + ` color.");
+ throw new Error("Material-UI: unsupported `)))) + ((("`" + (`" + color + "` + "`")) + (` color.");
}
function emphasize(color) {
var coefficient = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : .15;
@@ -26296,7 +26388,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
exports.recomposeColor = recomposeColor, exports.getContrastRatio = getContrastRatio,
exports.getLuminance = getLuminance, exports.emphasize = emphasize, exports.fade = fade,
exports.darken = darken, exports.lighten = lighten;
- var _warning = __webpack_require__(11), _warning2 = function(obj) {
+ var _warning = __webpack_require__(12), _warning2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -26333,7 +26425,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _extends3 = __webpack_require__(7), _extends4 = _interopRequireDefault(_extends3);
+ var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _extends3 = __webpack_require__(6), _extends4 = _interopRequireDefault(_extends3);
exports.default = createMixins;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -26356,7 +26448,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.isNumber = exports.isString = exports.formatMs = exports.duration = exports.easing = void 0;
- var _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _isNan = __webpack_require__(391), _isNan2 = _interopRequireDefault(_isNan), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), easing = exports.easing = {
+ var _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _isNan = __webpack_require__(395), _isNan2 = _interopRequireDefault(_isNan), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), easing = exports.easing = {
easeInOut: "cubic-bezier(0.4, 0, 0.2, 1)",
easeOut: "cubic-bezier(0.0, 0, 0.2, 1)",
easeIn: "cubic-bezier(0.4, 0, 1, 1)",
@@ -26382,13 +26474,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
create: function() {
var props = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : [ "all" ], options = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {}, _options$duration = options.duration, durationOption = void 0 === _options$duration ? duration.standard : _options$duration, _options$easing = options.easing, easingOption = void 0 === _options$easing ? easing.easeInOut : _options$easing, _options$delay = options.delay, delay = void 0 === _options$delay ? 0 : _options$delay, other = (0,
_objectWithoutProperties3.default)(options, [ "duration", "easing", "delay" ]);
- return "production" !== process.env.NODE_ENV && (0, _warning2.default)(isString(props) || Array.isArray(props), 'Material-UI: argument "props" must be a string or Array'),
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(isNumber(durationOption), 'Material-UI: argument "duration" must be a number but found ' + durationOption),
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(isString(easingOption), 'Material-UI: argument "easing" must be a string'),
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(isNumber(delay), 'Material-UI: argument "delay" must be a string'),
+ return "production" !== process.env.NODE_ENV && (0, _warning2.default)(isString(props) || Array.isArray(props), 'Material-UI: argument "props" must be a string or Array.'),
+ "production" !== process.env.NODE_ENV && (0, _warning2.default)(isNumber(durationOption) || isString(durationOption), 'Material-UI: argument "duration" must be a number or a string but found ' + durationOption + "."),
+ "production" !== process.env.NODE_ENV && (0, _warning2.default)(isString(easingOption), 'Material-UI: argument "easing" must be a string.'),
+ "production" !== process.env.NODE_ENV && (0, _warning2.default)(isNumber(delay) || isString(delay), 'Material-UI: argument "delay" must be a number or a string.'),
"production" !== process.env.NODE_ENV && (0, _warning2.default)(0 === (0, _keys2.default)(other).length, "Material-UI: unrecognized argument(s) [" + (0,
_keys2.default)(other).join(",") + "]"), (Array.isArray(props) ? props : [ props ]).map(function(animatedProp) {
- return animatedProp + " " + formatMs(durationOption) + " " + easingOption + " " + formatMs(delay);
+ return animatedProp + " " + ("string" == typeof durationOption ? durationOption : formatMs(durationOption)) + " " + easingOption + " " + ("string" == typeof delay ? delay : formatMs(delay));
}).join(",");
},
getAutoHeightDuration: function(height) {
@@ -26400,11 +26492,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(392),
+ default: __webpack_require__(396),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(393), module.exports = __webpack_require__(17).Number.isNaN;
+ __webpack_require__(397), module.exports = __webpack_require__(17).Number.isNaN;
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.S, "Number", {
@@ -26480,7 +26572,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Header = __webpack_require__(453), _Header2 = _interopRequireDefault(_Header), _Body = __webpack_require__(486), _Body2 = _interopRequireDefault(_Body), _common = __webpack_require__(61), deepUpdate = function deepUpdate(updater, update, prev) {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Header = __webpack_require__(457), _Header2 = _interopRequireDefault(_Header), _Body = __webpack_require__(490), _Body2 = _interopRequireDefault(_Body), _common = __webpack_require__(77), deepUpdate = function deepUpdate(updater, update, prev) {
if (void 0 === update) return prev;
if ("function" == typeof updater) return updater(update, prev);
var updated = {};
@@ -26506,7 +26598,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
version: null,
commit: null
},
- home: {
+ home: {},
+ chain: {},
+ txpool: {},
+ network: {},
+ system: {
activeMemory: [],
virtualMemory: [],
networkIngress: [],
@@ -26516,10 +26612,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
diskRead: [],
diskWrite: []
},
- chain: {},
- txpool: {},
- network: {},
- system: {},
logs: {
log: []
}
@@ -26528,7 +26620,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
version: replacer,
commit: replacer
},
- home: {
+ home: null,
+ chain: null,
+ txpool: null,
+ network: null,
+ system: {
activeMemory: appender(200),
virtualMemory: appender(200),
networkIngress: appender(200),
@@ -26538,10 +26634,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
diskRead: appender(200),
diskWrite: appender(200)
},
- chain: null,
- txpool: null,
- network: null,
- system: null,
logs: {
log: appender(200)
}
@@ -26616,7 +26708,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
className: this.props.classes.dashboard,
style: styles.dashboard
}, _react2.default.createElement(_Header2.default, {
- opened: this.state.sideBar,
switchSideBar: this.switchSideBar
}), _react2.default.createElement(_Body2.default, {
opened: this.state.sideBar,
@@ -26631,16 +26722,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
exports.default = (0, _withStyles2.default)(themeStyles)(Dashboard);
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(398),
+ default: __webpack_require__(402),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(218), __webpack_require__(144), __webpack_require__(213), __webpack_require__(399),
- __webpack_require__(406), __webpack_require__(409), __webpack_require__(411), module.exports = __webpack_require__(17).Map;
+ __webpack_require__(217), __webpack_require__(143), __webpack_require__(212), __webpack_require__(403),
+ __webpack_require__(410), __webpack_require__(413), __webpack_require__(415), module.exports = __webpack_require__(17).Map;
}, function(module, exports, __webpack_require__) {
"use strict";
- var strong = __webpack_require__(400), validate = __webpack_require__(225);
- module.exports = __webpack_require__(402)("Map", function(get) {
+ var strong = __webpack_require__(404), validate = __webpack_require__(225);
+ module.exports = __webpack_require__(406)("Map", function(get) {
return function() {
return get(this, arguments.length > 0 ? arguments[0] : void 0);
};
@@ -26655,7 +26746,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, strong, !0);
}, function(module, exports, __webpack_require__) {
"use strict";
- var dP = __webpack_require__(22).f, create = __webpack_require__(101), redefineAll = __webpack_require__(219), ctx = __webpack_require__(47), anInstance = __webpack_require__(220), forOf = __webpack_require__(104), $iterDefine = __webpack_require__(145), step = __webpack_require__(214), setSpecies = __webpack_require__(401), DESCRIPTORS = __webpack_require__(25), fastKey = __webpack_require__(148).fastKey, validate = __webpack_require__(225), SIZE = DESCRIPTORS ? "_s" : "size", getEntry = function(that, key) {
+ var dP = __webpack_require__(22).f, create = __webpack_require__(102), redefineAll = __webpack_require__(219), ctx = __webpack_require__(46), anInstance = __webpack_require__(220), forOf = __webpack_require__(105), $iterDefine = __webpack_require__(144), step = __webpack_require__(213), setSpecies = __webpack_require__(405), DESCRIPTORS = __webpack_require__(25), fastKey = __webpack_require__(147).fastKey, validate = __webpack_require__(225), SIZE = DESCRIPTORS ? "_s" : "size", getEntry = function(that, key) {
var entry, index = fastKey(key);
if ("F" !== index) return that._i[index];
for (entry = that._f; entry; entry = entry.n) if (entry.k == key) return entry;
@@ -26731,7 +26822,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
"use strict";
- var global = __webpack_require__(24), $export = __webpack_require__(19), meta = __webpack_require__(148), fails = __webpack_require__(49), hide = __webpack_require__(40), redefineAll = __webpack_require__(219), forOf = __webpack_require__(104), anInstance = __webpack_require__(220), isObject = __webpack_require__(35), setToStringTag = __webpack_require__(102), dP = __webpack_require__(22).f, each = __webpack_require__(403)(0), DESCRIPTORS = __webpack_require__(25);
+ var global = __webpack_require__(24), $export = __webpack_require__(19), meta = __webpack_require__(147), fails = __webpack_require__(48), hide = __webpack_require__(40), redefineAll = __webpack_require__(219), forOf = __webpack_require__(105), anInstance = __webpack_require__(220), isObject = __webpack_require__(35), setToStringTag = __webpack_require__(103), dP = __webpack_require__(22).f, each = __webpack_require__(407)(0), DESCRIPTORS = __webpack_require__(25);
module.exports = function(NAME, wrapper, methods, common, IS_MAP, IS_WEAK) {
var Base = global[NAME], C = Base, ADDER = IS_MAP ? "set" : "add", proto = C && C.prototype, O = {};
return DESCRIPTORS && "function" == typeof C && (IS_WEAK || proto.forEach && !fails(function() {
@@ -26754,7 +26845,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
IS_WEAK || common.setStrong(C, NAME, IS_MAP), C;
};
}, function(module, exports, __webpack_require__) {
- var ctx = __webpack_require__(47), IObject = __webpack_require__(135), toObject = __webpack_require__(59), toLength = __webpack_require__(97), asc = __webpack_require__(404);
+ var ctx = __webpack_require__(46), IObject = __webpack_require__(134), toObject = __webpack_require__(58), toLength = __webpack_require__(98), asc = __webpack_require__(408);
module.exports = function(TYPE, $create) {
var IS_MAP = 1 == TYPE, IS_FILTER = 2 == TYPE, IS_SOME = 3 == TYPE, IS_EVERY = 4 == TYPE, IS_FIND_INDEX = 6 == TYPE, NO_HOLES = 5 == TYPE || IS_FIND_INDEX, create = $create || asc;
return function($this, callbackfn, that) {
@@ -26776,12 +26867,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var speciesConstructor = __webpack_require__(405);
+ var speciesConstructor = __webpack_require__(409);
module.exports = function(original, length) {
return new (speciesConstructor(original))(length);
};
}, function(module, exports, __webpack_require__) {
- var isObject = __webpack_require__(35), isArray = __webpack_require__(215), SPECIES = __webpack_require__(21)("species");
+ var isObject = __webpack_require__(35), isArray = __webpack_require__(214), SPECIES = __webpack_require__(21)("species");
module.exports = function(original) {
var C;
return isArray(original) && (C = original.constructor, "function" != typeof C || C !== Array && !isArray(C.prototype) || (C = void 0),
@@ -26790,10 +26881,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.P + $export.R, "Map", {
- toJSON: __webpack_require__(407)("Map")
+ toJSON: __webpack_require__(411)("Map")
});
}, function(module, exports, __webpack_require__) {
- var classof = __webpack_require__(224), from = __webpack_require__(408);
+ var classof = __webpack_require__(224), from = __webpack_require__(412);
module.exports = function(NAME) {
return function() {
if (classof(this) != NAME) throw TypeError(NAME + "#toJSON isn't generic");
@@ -26801,13 +26892,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var forOf = __webpack_require__(104);
+ var forOf = __webpack_require__(105);
module.exports = function(iter, ITERATOR) {
var result = [];
return forOf(iter, !1, result.push, result, ITERATOR), result;
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(410)("Map");
+ __webpack_require__(414)("Map");
}, function(module, exports, __webpack_require__) {
"use strict";
var $export = __webpack_require__(19);
@@ -26820,10 +26911,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(412)("Map");
+ __webpack_require__(416)("Map");
}, function(module, exports, __webpack_require__) {
"use strict";
- var $export = __webpack_require__(19), aFunction = __webpack_require__(206), ctx = __webpack_require__(47), forOf = __webpack_require__(104);
+ var $export = __webpack_require__(19), aFunction = __webpack_require__(205), ctx = __webpack_require__(46), forOf = __webpack_require__(105);
module.exports = function(COLLECTION) {
$export($export.S, COLLECTION, {
from: function(source) {
@@ -26838,11 +26929,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(414),
+ default: __webpack_require__(418),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(415), module.exports = -9007199254740991;
+ __webpack_require__(419), module.exports = -9007199254740991;
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(19);
$export($export.S, "Number", {
@@ -26866,7 +26957,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var newObj = {};
if (null != obj) for (var key in obj) Object.prototype.hasOwnProperty.call(obj, key) && (newObj[key] = obj[key]);
return newObj.default = obj, newObj;
- }(_ns), _propTypes2 = __webpack_require__(417), _propTypes3 = function(obj) {
+ }(_ns), _propTypes2 = __webpack_require__(421), _propTypes3 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -26937,7 +27028,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _warning = __webpack_require__(11), _warning2 = function(obj) {
+ }(), _warning = __webpack_require__(12), _warning2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -27009,44 +27100,55 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}(_isObservable), isArray = Array.isArray;
-}, function(module, exports, __webpack_require__) {
- module.exports = __webpack_require__(422);
-}, function(module, exports, __webpack_require__) {
+}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- (function(global, module) {
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var root, _ponyfill = __webpack_require__(423), _ponyfill2 = function(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }(_ponyfill);
+ Object.defineProperty(__webpack_exports__, "__esModule", {
+ value: !0
+ }), function(global, module) {
+ var root, __WEBPACK_IMPORTED_MODULE_0__ponyfill_js__ = __webpack_require__(427);
root = "undefined" != typeof self ? self : "undefined" != typeof window ? window : void 0 !== global ? global : module;
- var result = (0, _ponyfill2.default)(root);
- exports.default = result;
- }).call(exports, __webpack_require__(51), __webpack_require__(154)(module));
-}, function(module, exports, __webpack_require__) {
+ var result = Object(__WEBPACK_IMPORTED_MODULE_0__ponyfill_js__.a)(root);
+ __webpack_exports__.default = result;
+ }.call(__webpack_exports__, __webpack_require__(60), __webpack_require__(426)(module));
+}, function(module, exports) {
+ module.exports = function(originalModule) {
+ if (!originalModule.webpackPolyfill) {
+ var module = Object.create(originalModule);
+ module.children || (module.children = []), Object.defineProperty(module, "loaded", {
+ enumerable: !0,
+ get: function() {
+ return module.l;
+ }
+ }), Object.defineProperty(module, "id", {
+ enumerable: !0,
+ get: function() {
+ return module.i;
+ }
+ }), Object.defineProperty(module, "exports", {
+ enumerable: !0
+ }), module.webpackPolyfill = 1;
+ }
+ return module;
+ };
+}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function symbolObservablePonyfill(root) {
- var result, _Symbol = root.Symbol;
- return "function" == typeof _Symbol ? _Symbol.observable ? result = _Symbol.observable : (result = _Symbol("observable"),
- _Symbol.observable = result) : result = "@@observable", result;
+ var result, Symbol = root.Symbol;
+ return "function" == typeof Symbol ? Symbol.observable ? result = Symbol.observable : (result = Symbol("observable"),
+ Symbol.observable = result) : result = "@@observable", result;
}
- Object.defineProperty(exports, "__esModule", {
- value: !0
- }), exports.default = symbolObservablePonyfill;
+ __webpack_exports__.a = symbolObservablePonyfill;
}, function(module, exports, __webpack_require__) {
"use strict";
(function(global, process) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var CSS = global.CSS, env = process.env.NODE_ENV, escapeRegex = /([[\].#*$><+~=|^:(),"'`)))) + ((("`" + (`])/g;
+ var CSS = global.CSS, env = process.env.NODE_ENV, escapeRegex = /([[\].#*$><+~=|^:(),"'` + ("`" + `])/g;
exports.default = function(str) {
return "production" === env ? str : CSS && CSS.escape ? CSS.escape(str) : str.replace(escapeRegex, "\\$1");
};
- }).call(exports, __webpack_require__(51), __webpack_require__(2));
+ }).call(exports, __webpack_require__(60), __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
(function(global) {
@@ -27055,7 +27157,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
var ns = "2f1acc6c3a606b082e5eef5e54414ffb";
null == global[ns] && (global[ns] = 0), exports.default = global[ns]++;
- }).call(exports, __webpack_require__(51));
+ }).call(exports, __webpack_require__(60));
}, function(module, exports, __webpack_require__) {
"use strict";
function _interopRequireDefault(obj) {
@@ -27091,9 +27193,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _isInBrowser = __webpack_require__(107), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _StyleSheet = __webpack_require__(233), _StyleSheet2 = _interopRequireDefault(_StyleSheet), _PluginsRegistry = __webpack_require__(427), _PluginsRegistry2 = _interopRequireDefault(_PluginsRegistry), _rules = __webpack_require__(428), _rules2 = _interopRequireDefault(_rules), _observables = __webpack_require__(434), _observables2 = _interopRequireDefault(_observables), _functions = __webpack_require__(435), _functions2 = _interopRequireDefault(_functions), _sheets = __webpack_require__(155), _sheets2 = _interopRequireDefault(_sheets), _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _createGenerateClassName = __webpack_require__(232), _createGenerateClassName2 = _interopRequireDefault(_createGenerateClassName), _createRule2 = __webpack_require__(106), _createRule3 = _interopRequireDefault(_createRule2), _DomRenderer = __webpack_require__(437), _DomRenderer2 = _interopRequireDefault(_DomRenderer), _VirtualRenderer = __webpack_require__(438), _VirtualRenderer2 = _interopRequireDefault(_VirtualRenderer), defaultPlugins = _rules2.default.concat([ _observables2.default, _functions2.default ]), instanceCounter = 0, Jss = function() {
+ }(), _isInBrowser = __webpack_require__(108), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _StyleSheet = __webpack_require__(233), _StyleSheet2 = _interopRequireDefault(_StyleSheet), _PluginsRegistry = __webpack_require__(431), _PluginsRegistry2 = _interopRequireDefault(_PluginsRegistry), _rules = __webpack_require__(432), _rules2 = _interopRequireDefault(_rules), _observables = __webpack_require__(438), _observables2 = _interopRequireDefault(_observables), _functions = __webpack_require__(439), _functions2 = _interopRequireDefault(_functions), _sheets = __webpack_require__(153), _sheets2 = _interopRequireDefault(_sheets), _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _createGenerateClassName = __webpack_require__(232), _createGenerateClassName2 = _interopRequireDefault(_createGenerateClassName), _createRule2 = __webpack_require__(107), _createRule3 = _interopRequireDefault(_createRule2), _DomRenderer = __webpack_require__(440), _DomRenderer2 = _interopRequireDefault(_DomRenderer), _VirtualRenderer = __webpack_require__(441), _VirtualRenderer2 = _interopRequireDefault(_VirtualRenderer), defaultPlugins = _rules2.default.concat([ _observables2.default, _functions2.default ]), instanceCounter = 0, Jss = function() {
function Jss(options) {
- _classCallCheck(this, Jss), this.id = instanceCounter++, this.version = "9.5.1",
+ _classCallCheck(this, Jss), this.id = instanceCounter++, this.version = "9.8.0",
this.plugins = new _PluginsRegistry2.default(), this.options = {
createGenerateClassName: _createGenerateClassName2.default,
Renderer: _isInBrowser2.default ? _DomRenderer2.default : _VirtualRenderer2.default,
@@ -27174,7 +27276,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _warning = __webpack_require__(11), _warning2 = function(obj) {
+ }(), _warning = __webpack_require__(12), _warning2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -27247,7 +27349,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _SimpleRule = __webpack_require__(429), _SimpleRule2 = _interopRequireDefault(_SimpleRule), _KeyframesRule = __webpack_require__(430), _KeyframesRule2 = _interopRequireDefault(_KeyframesRule), _ConditionalRule = __webpack_require__(431), _ConditionalRule2 = _interopRequireDefault(_ConditionalRule), _FontFaceRule = __webpack_require__(432), _FontFaceRule2 = _interopRequireDefault(_FontFaceRule), _ViewportRule = __webpack_require__(433), _ViewportRule2 = _interopRequireDefault(_ViewportRule), classes = {
+ var _SimpleRule = __webpack_require__(433), _SimpleRule2 = _interopRequireDefault(_SimpleRule), _KeyframesRule = __webpack_require__(434), _KeyframesRule2 = _interopRequireDefault(_KeyframesRule), _ConditionalRule = __webpack_require__(435), _ConditionalRule2 = _interopRequireDefault(_ConditionalRule), _FontFaceRule = __webpack_require__(436), _FontFaceRule2 = _interopRequireDefault(_FontFaceRule), _ViewportRule = __webpack_require__(437), _ViewportRule2 = _interopRequireDefault(_ViewportRule), classes = {
"@charset": _SimpleRule2.default,
"@import": _SimpleRule2.default,
"@namespace": _SimpleRule2.default,
@@ -27443,7 +27545,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _toCss = __webpack_require__(153), _toCss2 = function(obj) {
+ }(), _toCss = __webpack_require__(152), _toCss2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -27485,7 +27587,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _toCss = __webpack_require__(153), _toCss2 = function(obj) {
+ }(), _toCss = __webpack_require__(152), _toCss2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -27512,7 +27614,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _createRule = __webpack_require__(106), _createRule2 = _interopRequireDefault(_createRule), _isObservable = __webpack_require__(230), _isObservable2 = _interopRequireDefault(_isObservable);
+ var _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _createRule = __webpack_require__(107), _createRule2 = _interopRequireDefault(_createRule), _isObservable = __webpack_require__(230), _isObservable2 = _interopRequireDefault(_isObservable);
exports.default = {
onCreateRule: function(name, decl, options) {
if (!(0, _isObservable2.default)(decl)) return null;
@@ -27548,7 +27650,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _RuleList = __webpack_require__(76), _RuleList2 = _interopRequireDefault(_RuleList), _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _kebabCase = __webpack_require__(436), _kebabCase2 = _interopRequireDefault(_kebabCase), _createRule = __webpack_require__(106), _createRule2 = _interopRequireDefault(_createRule), now = Date.now(), fnValuesNs = "fnValues" + now, fnStyleNs = "fnStyle" + ++now;
+ var _RuleList = __webpack_require__(76), _RuleList2 = _interopRequireDefault(_RuleList), _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _createRule = __webpack_require__(107), _createRule2 = _interopRequireDefault(_createRule), now = Date.now(), fnValuesNs = "fnValues" + now, fnStyleNs = "fnStyle" + ++now;
exports.default = {
onCreateRule: function(name, decl, options) {
if ("function" != typeof decl) return null;
@@ -27559,7 +27661,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var fn = {};
for (var prop in style) {
var value = style[prop];
- "function" == typeof value && (delete style[prop], fn[(0, _kebabCase2.default)(prop)] = value);
+ "function" == typeof value && (delete style[prop], fn[prop] = value);
}
return rule = rule, rule[fnValuesNs] = fn, style;
},
@@ -27578,224 +27680,223 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
"use strict";
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var regExp = /([A-Z])/g, replace = function(str) {
- return "-" + str.toLowerCase();
- };
- exports.default = function(str) {
- return str.replace(regExp, replace);
- };
-}, function(module, exports, __webpack_require__) {
- "use strict";
- (function(global) {
- function _interopRequireDefault(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
+ }
+ function getPropertyValue(cssRule, prop) {
+ try {
+ return cssRule.style.getPropertyValue(prop);
+ } catch (err) {
+ return "";
}
- function _classCallCheck(instance, Constructor) {
- if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
+ }
+ function setProperty(cssRule, prop, value) {
+ try {
+ var cssValue = value;
+ if (Array.isArray(value) && (cssValue = (0, _toCssValue2.default)(value, !0), "!important" === value[value.length - 1])) return cssRule.style.setProperty(prop, cssValue, "important"),
+ !0;
+ cssRule.style.setProperty(prop, cssValue);
+ } catch (err) {
+ return !1;
}
- function getStyle(cssRule, prop) {
- try {
- return cssRule.style.getPropertyValue(prop);
- } catch (err) {
- return "";
- }
+ return !0;
+ }
+ function removeProperty(cssRule, prop) {
+ try {
+ cssRule.style.removeProperty(prop);
+ } catch (err) {
+ (0, _warning2.default)(!1, '[JSS] DOMException "%s" was thrown. Tried to remove property "%s".', err.message, prop);
}
- function setStyle(cssRule, prop, value) {
- try {
- var cssValue = value;
- if (Array.isArray(value) && (cssValue = (0, _toCssValue2.default)(value, !0), "!important" === value[value.length - 1])) return cssRule.style.setProperty(prop, cssValue, "important"),
- !0;
- cssRule.style.setProperty(prop, cssValue);
- } catch (err) {
- return !1;
- }
- return !0;
+ }
+ function setSelector(cssRule, selectorText) {
+ return cssRule.selectorText = selectorText, cssRule.selectorText === selectorText;
+ }
+ function findHigherSheet(registry, options) {
+ for (var i = 0; i < registry.length; i++) {
+ var sheet = registry[i];
+ if (sheet.attached && sheet.options.index > options.index && sheet.options.insertionPoint === options.insertionPoint) return sheet;
}
- function setSelector(cssRule, selectorText) {
- return cssRule.selectorText = selectorText, cssRule.selectorText === selectorText;
+ return null;
+ }
+ function findHighestSheet(registry, options) {
+ for (var i = registry.length - 1; i >= 0; i--) {
+ var sheet = registry[i];
+ if (sheet.attached && sheet.options.insertionPoint === options.insertionPoint) return sheet;
}
- function findHigherSheet(registry, options) {
- for (var i = 0; i < registry.length; i++) {
- var sheet = registry[i];
- if (sheet.attached && sheet.options.index > options.index && sheet.options.insertionPoint === options.insertionPoint) return sheet;
- }
- return null;
+ return null;
+ }
+ function findCommentNode(text) {
+ for (var head = getHead(), i = 0; i < head.childNodes.length; i++) {
+ var node = head.childNodes[i];
+ if (8 === node.nodeType && node.nodeValue.trim() === text) return node;
}
- function findHighestSheet(registry, options) {
- for (var i = registry.length - 1; i >= 0; i--) {
- var sheet = registry[i];
- if (sheet.attached && sheet.options.insertionPoint === options.insertionPoint) return sheet;
- }
- return null;
+ return null;
+ }
+ function findPrevNode(options) {
+ var registry = _sheets2.default.registry;
+ if (registry.length > 0) {
+ var sheet = findHigherSheet(registry, options);
+ if (sheet) return sheet.renderer.element;
+ if (sheet = findHighestSheet(registry, options)) return sheet.renderer.element.nextElementSibling;
}
- function findCommentNode(text) {
- for (var head = getHead(), i = 0; i < head.childNodes.length; i++) {
- var node = head.childNodes[i];
- if (8 === node.nodeType && node.nodeValue.trim() === text) return node;
- }
- return null;
+ var insertionPoint = options.insertionPoint;
+ if (insertionPoint && "string" == typeof insertionPoint) {
+ var comment = findCommentNode(insertionPoint);
+ if (comment) return comment.nextSibling;
+ (0, _warning2.default)("jss" === insertionPoint, '[JSS] Insertion point "%s" not found.', insertionPoint);
}
- function findPrevNode(options) {
- var registry = _sheets2.default.registry;
- if (registry.length > 0) {
- var sheet = findHigherSheet(registry, options);
- if (sheet) return sheet.renderer.element;
- if (sheet = findHighestSheet(registry, options)) return sheet.renderer.element.nextElementSibling;
- }
- var insertionPoint = options.insertionPoint;
- if (insertionPoint && "string" == typeof insertionPoint) {
- var comment = findCommentNode(insertionPoint);
- if (comment) return comment.nextSibling;
- (0, _warning2.default)("jss" === insertionPoint, '[JSS] Insertion point "%s" not found.', insertionPoint);
- }
- return null;
+ return null;
+ }
+ function insertStyle(style, options) {
+ var insertionPoint = options.insertionPoint, prevNode = findPrevNode(options);
+ if (prevNode) {
+ var parentNode = prevNode.parentNode;
+ return void (parentNode && parentNode.insertBefore(style, prevNode));
}
- function insertStyle(style, options) {
- var insertionPoint = options.insertionPoint, prevNode = findPrevNode(options);
- if (prevNode) {
- var parentNode = prevNode.parentNode;
- return void (parentNode && parentNode.insertBefore(style, prevNode));
- }
- if (insertionPoint && "number" == typeof insertionPoint.nodeType) {
- var insertionPointElement = insertionPoint, _parentNode = insertionPointElement.parentNode;
- return void (_parentNode ? _parentNode.insertBefore(style, insertionPointElement.nextSibling) : (0,
- _warning2.default)(!1, "[JSS] Insertion point is not in the DOM."));
- }
- getHead().insertBefore(style, prevNode);
+ if (insertionPoint && "number" == typeof insertionPoint.nodeType) {
+ var insertionPointElement = insertionPoint, _parentNode = insertionPointElement.parentNode;
+ return void (_parentNode ? _parentNode.insertBefore(style, insertionPointElement.nextSibling) : (0,
+ _warning2.default)(!1, "[JSS] Insertion point is not in the DOM."));
}
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var _createClass = function() {
- function defineProperties(target, props) {
- for (var i = 0; i < props.length; i++) {
- var descriptor = props[i];
- descriptor.enumerable = descriptor.enumerable || !1, descriptor.configurable = !0,
- "value" in descriptor && (descriptor.writable = !0), Object.defineProperty(target, descriptor.key, descriptor);
- }
+ getHead().insertBefore(style, prevNode);
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ });
+ var _createClass = function() {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || !1, descriptor.configurable = !0,
+ "value" in descriptor && (descriptor.writable = !0), Object.defineProperty(target, descriptor.key, descriptor);
}
- return function(Constructor, protoProps, staticProps) {
- return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
- Constructor;
- };
- }(), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _sheets = __webpack_require__(155), _sheets2 = _interopRequireDefault(_sheets), _StyleRule = __webpack_require__(60), _StyleRule2 = _interopRequireDefault(_StyleRule), _toCssValue = __webpack_require__(105), _toCssValue2 = _interopRequireDefault(_toCssValue), CSSRuleTypes = {
- STYLE_RULE: 1,
- KEYFRAMES_RULE: 7
- }, getKey = function() {
- var extractKey = function(cssText) {
- var from = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : 0;
- return cssText.substr(from, cssText.indexOf("{") - 1);
- };
- return function(cssRule) {
- if (cssRule.type === CSSRuleTypes.STYLE_RULE) return cssRule.selectorText;
- if (cssRule.type === CSSRuleTypes.KEYFRAMES_RULE) {
- var name = cssRule.name;
- if (name) return "@keyframes " + name;
- var cssText = cssRule.cssText;
- return "@" + extractKey(cssText, cssText.indexOf("keyframes"));
- }
- return extractKey(cssRule.cssText);
- };
- }(), getHead = function() {
- var head = void 0;
- return function() {
- return head || (head = document.head || document.getElementsByTagName("head")[0]),
- head;
- };
- }(), getUnescapedKeysMap = function() {
- var style = void 0, isAttached = !1;
- return function(rules) {
- var map = {};
- style || (style = document.createElement("style"));
- for (var i = 0; i < rules.length; i++) {
- var rule = rules[i];
- if (rule instanceof _StyleRule2.default) {
- var selector = rule.selector;
- if (selector && -1 !== selector.indexOf("\\")) {
- isAttached || (getHead().appendChild(style), isAttached = !0), style.textContent = selector + " {}";
- var _style = style, sheet = _style.sheet;
- if (sheet) {
- var cssRules = sheet.cssRules;
- cssRules && (map[cssRules[0].selectorText] = rule.key);
- }
+ }
+ return function(Constructor, protoProps, staticProps) {
+ return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
+ Constructor;
+ };
+ }(), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _sheets = __webpack_require__(153), _sheets2 = _interopRequireDefault(_sheets), _StyleRule = __webpack_require__(59), _StyleRule2 = _interopRequireDefault(_StyleRule), _toCssValue = __webpack_require__(106), _toCssValue2 = _interopRequireDefault(_toCssValue), memoize = function(fn) {
+ var value = void 0;
+ return function() {
+ return value || (value = fn()), value;
+ };
+ }, CSSRuleTypes = {
+ STYLE_RULE: 1,
+ KEYFRAMES_RULE: 7
+ }, getKey = function() {
+ var extractKey = function(cssText) {
+ var from = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : 0;
+ return cssText.substr(from, cssText.indexOf("{") - 1);
+ };
+ return function(cssRule) {
+ if (cssRule.type === CSSRuleTypes.STYLE_RULE) return cssRule.selectorText;
+ if (cssRule.type === CSSRuleTypes.KEYFRAMES_RULE) {
+ var name = cssRule.name;
+ if (name) return "@keyframes " + name;
+ var cssText = cssRule.cssText;
+ return "@" + extractKey(cssText, cssText.indexOf("keyframes"));
+ }
+ return extractKey(cssRule.cssText);
+ };
+ }(), getHead = memoize(function() {
+ return document.head || document.getElementsByTagName("head")[0];
+ }), getUnescapedKeysMap = function() {
+ var style = void 0, isAttached = !1;
+ return function(rules) {
+ var map = {};
+ style || (style = document.createElement("style"));
+ for (var i = 0; i < rules.length; i++) {
+ var rule = rules[i];
+ if (rule instanceof _StyleRule2.default) {
+ var selector = rule.selector;
+ if (selector && -1 !== selector.indexOf("\\")) {
+ isAttached || (getHead().appendChild(style), isAttached = !0), style.textContent = selector + " {}";
+ var _style = style, sheet = _style.sheet;
+ if (sheet) {
+ var cssRules = sheet.cssRules;
+ cssRules && (map[cssRules[0].selectorText] = rule.key);
}
}
}
- return isAttached && (getHead().removeChild(style), isAttached = !1), map;
- };
- }(), DomRenderer = function() {
- function DomRenderer(sheet) {
- _classCallCheck(this, DomRenderer), this.getStyle = getStyle, this.setStyle = setStyle,
- this.setSelector = setSelector, this.getKey = getKey, this.getUnescapedKeysMap = getUnescapedKeysMap,
- this.hasInsertedRules = !1, sheet && _sheets2.default.add(sheet), this.sheet = sheet;
- var _ref = this.sheet ? this.sheet.options : {}, media = _ref.media, meta = _ref.meta, element = _ref.element;
- this.element = element || document.createElement("style"), this.element.type = "text/css",
- this.element.setAttribute("data-jss", ""), media && this.element.setAttribute("media", media),
- meta && this.element.setAttribute("data-meta", meta);
- var nonce = global.__webpack_nonce__;
- nonce && this.element.setAttribute("nonce", nonce);
- }
- return _createClass(DomRenderer, [ {
- key: "attach",
- value: function() {
- !this.element.parentNode && this.sheet && (this.hasInsertedRules && (this.deploy(),
- this.hasInsertedRules = !1), insertStyle(this.element, this.sheet.options));
- }
- }, {
- key: "detach",
- value: function() {
- this.element.parentNode.removeChild(this.element);
- }
- }, {
- key: "deploy",
- value: function() {
- this.sheet && (this.element.textContent = "\n" + this.sheet.toString() + "\n");
- }
- }, {
- key: "insertRule",
- value: function(rule, index) {
- var sheet = this.element.sheet, cssRules = sheet.cssRules, str = rule.toString();
- if (index || (index = cssRules.length), !str) return !1;
- try {
- sheet.insertRule(str, index);
- } catch (err) {
- return (0, _warning2.default)(!1, "[JSS] Can not insert an unsupported rule \n\r%s", rule),
- !1;
- }
- return this.hasInsertedRules = !0, cssRules[index];
- }
- }, {
- key: "deleteRule",
- value: function(cssRule) {
- var sheet = this.element.sheet, index = this.indexOf(cssRule);
- return -1 !== index && (sheet.deleteRule(index), !0);
- }
- }, {
- key: "indexOf",
- value: function(cssRule) {
- for (var cssRules = this.element.sheet.cssRules, _index = 0; _index < cssRules.length; _index++) if (cssRule === cssRules[_index]) return _index;
- return -1;
- }
- }, {
- key: "replaceRule",
- value: function(cssRule, rule) {
- var index = this.indexOf(cssRule), newCssRule = this.insertRule(rule, index);
- return this.element.sheet.deleteRule(index), newCssRule;
- }
- }, {
- key: "getRules",
- value: function() {
- return this.element.sheet.cssRules;
+ }
+ return isAttached && (getHead().removeChild(style), isAttached = !1), map;
+ };
+ }(), getNonce = memoize(function() {
+ var node = document.querySelector('meta[property="csp-nonce"]');
+ return node ? node.getAttribute("content") : null;
+ }), DomRenderer = function() {
+ function DomRenderer(sheet) {
+ _classCallCheck(this, DomRenderer), this.getPropertyValue = getPropertyValue, this.setProperty = setProperty,
+ this.removeProperty = removeProperty, this.setSelector = setSelector, this.getKey = getKey,
+ this.getUnescapedKeysMap = getUnescapedKeysMap, this.hasInsertedRules = !1, sheet && _sheets2.default.add(sheet),
+ this.sheet = sheet;
+ var _ref = this.sheet ? this.sheet.options : {}, media = _ref.media, meta = _ref.meta, element = _ref.element;
+ this.element = element || document.createElement("style"), this.element.type = "text/css",
+ this.element.setAttribute("data-jss", ""), media && this.element.setAttribute("media", media),
+ meta && this.element.setAttribute("data-meta", meta);
+ var nonce = getNonce();
+ nonce && this.element.setAttribute("nonce", nonce);
+ }
+ return _createClass(DomRenderer, [ {
+ key: "attach",
+ value: function() {
+ !this.element.parentNode && this.sheet && (this.hasInsertedRules && (this.deploy(),
+ this.hasInsertedRules = !1), insertStyle(this.element, this.sheet.options));
+ }
+ }, {
+ key: "detach",
+ value: function() {
+ this.element.parentNode.removeChild(this.element);
+ }
+ }, {
+ key: "deploy",
+ value: function() {
+ this.sheet && (this.element.textContent = "\n" + this.sheet.toString() + "\n");
+ }
+ }, {
+ key: "insertRule",
+ value: function(rule, index) {
+ var sheet = this.element.sheet, cssRules = sheet.cssRules, str = rule.toString();
+ if (index || (index = cssRules.length), !str) return !1;
+ try {
+ sheet.insertRule(str, index);
+ } catch (err) {
+ return (0, _warning2.default)(!1, "[JSS] Can not insert an unsupported rule \n\r%s", rule),
+ !1;
}
- } ]), DomRenderer;
- }();
- exports.default = DomRenderer;
- }).call(exports, __webpack_require__(51));
+ return this.hasInsertedRules = !0, cssRules[index];
+ }
+ }, {
+ key: "deleteRule",
+ value: function(cssRule) {
+ var sheet = this.element.sheet, index = this.indexOf(cssRule);
+ return -1 !== index && (sheet.deleteRule(index), !0);
+ }
+ }, {
+ key: "indexOf",
+ value: function(cssRule) {
+ for (var cssRules = this.element.sheet.cssRules, _index = 0; _index < cssRules.length; _index++) if (cssRule === cssRules[_index]) return _index;
+ return -1;
+ }
+ }, {
+ key: "replaceRule",
+ value: function(cssRule, rule) {
+ var index = this.indexOf(cssRule), newCssRule = this.insertRule(rule, index);
+ return this.element.sheet.deleteRule(index), newCssRule;
+ }
+ }, {
+ key: "getRules",
+ value: function() {
+ return this.element.sheet.cssRules;
+ }
+ } ]), DomRenderer;
+ }();
+ exports.default = DomRenderer;
}, function(module, exports, __webpack_require__) {
"use strict";
function _classCallCheck(instance, Constructor) {
@@ -27821,16 +27922,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_classCallCheck(this, VirtualRenderer);
}
return _createClass(VirtualRenderer, [ {
- key: "setStyle",
+ key: "setProperty",
value: function() {
return !0;
}
}, {
- key: "getStyle",
+ key: "getPropertyValue",
value: function() {
return "";
}
}, {
+ key: "removeProperty",
+ value: function() {}
+ }, {
key: "setSelector",
value: function() {
return !0;
@@ -27891,7 +27995,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _jssGlobal = __webpack_require__(440), _jssGlobal2 = _interopRequireDefault(_jssGlobal), _jssNested = __webpack_require__(441), _jssNested2 = _interopRequireDefault(_jssNested), _jssCamelCase = __webpack_require__(442), _jssCamelCase2 = _interopRequireDefault(_jssCamelCase), _jssDefaultUnit = __webpack_require__(443), _jssDefaultUnit2 = _interopRequireDefault(_jssDefaultUnit), _jssVendorPrefixer = __webpack_require__(445), _jssVendorPrefixer2 = _interopRequireDefault(_jssVendorPrefixer), _jssPropsSort = __webpack_require__(450), _jssPropsSort2 = _interopRequireDefault(_jssPropsSort);
+ var _jssGlobal = __webpack_require__(443), _jssGlobal2 = _interopRequireDefault(_jssGlobal), _jssNested = __webpack_require__(444), _jssNested2 = _interopRequireDefault(_jssNested), _jssCamelCase = __webpack_require__(445), _jssCamelCase2 = _interopRequireDefault(_jssCamelCase), _jssDefaultUnit = __webpack_require__(447), _jssDefaultUnit2 = _interopRequireDefault(_jssDefaultUnit), _jssVendorPrefixer = __webpack_require__(449), _jssVendorPrefixer2 = _interopRequireDefault(_jssVendorPrefixer), _jssPropsSort = __webpack_require__(454), _jssPropsSort2 = _interopRequireDefault(_jssPropsSort);
exports.default = jssPreset;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -28074,19 +28178,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return target;
};
exports.default = jssNested;
- var _warning = __webpack_require__(11), _warning2 = function(obj) {
+ var _warning = __webpack_require__(12), _warning2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}(_warning), separatorRegExp = /\s*,\s*/g, parentRegExp = /&/g, refRegExp = /\$([\w-]+)/g;
}, function(module, exports, __webpack_require__) {
"use strict";
- function replace(str) {
- return "-" + str.toLowerCase();
- }
function convertCase(style) {
var converted = {};
- for (var prop in style) converted[prop.replace(regExp, replace)] = style[prop];
+ for (var prop in style) converted[(0, _hyphenateStyleName2.default)(prop)] = style[prop];
return style.fallbacks && (Array.isArray(style.fallbacks) ? converted.fallbacks = style.fallbacks.map(convertCase) : converted.fallbacks = convertCase(style.fallbacks)),
converted;
}
@@ -28098,14 +28199,30 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return convertCase(style);
}
+ function onChangeValue(value, prop, rule) {
+ var hyphenatedProp = (0, _hyphenateStyleName2.default)(prop);
+ return prop === hyphenatedProp ? value : (rule.prop(hyphenatedProp, value), null);
+ }
return {
- onProcessStyle: onProcessStyle
+ onProcessStyle: onProcessStyle,
+ onChangeValue: onChangeValue
};
}
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = camelCase;
- var regExp = /([A-Z])/g;
+ var _hyphenateStyleName = __webpack_require__(446), _hyphenateStyleName2 = function(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }(_hyphenateStyleName);
+}, function(module, exports, __webpack_require__) {
+ "use strict";
+ function hyphenateStyleName(string) {
+ return string in cache ? cache[string] : cache[string] = string.replace(uppercasePattern, "-$&").toLowerCase().replace(msPattern, "-ms-");
+ }
+ var uppercasePattern = /[A-Z]/g, msPattern = /^ms-/, cache = {};
+ module.exports = hyphenateStyleName;
}, function(module, exports, __webpack_require__) {
"use strict";
function addCamelCasedVersion(obj) {
@@ -28160,7 +28277,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
exports.default = defaultUnit;
- var _defaultUnits = __webpack_require__(444), _defaultUnits2 = function(obj) {
+ var _defaultUnits = __webpack_require__(448), _defaultUnits2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -28303,7 +28420,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = jssVendorPrefixer;
- var _cssVendor = __webpack_require__(446), vendor = function(obj) {
+ var _cssVendor = __webpack_require__(450), vendor = function(obj) {
if (obj && obj.__esModule) return obj;
var newObj = {};
if (null != obj) for (var key in obj) Object.prototype.hasOwnProperty.call(obj, key) && (newObj[key] = obj[key]);
@@ -28319,7 +28436,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.supportedValue = exports.supportedProperty = exports.prefix = void 0;
- var _prefix = __webpack_require__(156), _prefix2 = _interopRequireDefault(_prefix), _supportedProperty = __webpack_require__(447), _supportedProperty2 = _interopRequireDefault(_supportedProperty), _supportedValue = __webpack_require__(449), _supportedValue2 = _interopRequireDefault(_supportedValue);
+ var _prefix = __webpack_require__(154), _prefix2 = _interopRequireDefault(_prefix), _supportedProperty = __webpack_require__(451), _supportedProperty2 = _interopRequireDefault(_supportedProperty), _supportedValue = __webpack_require__(453), _supportedValue2 = _interopRequireDefault(_supportedValue);
exports.default = {
prefix: _prefix2.default,
supportedProperty: _supportedProperty2.default,
@@ -28341,7 +28458,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = supportedProperty;
- var _isInBrowser = __webpack_require__(107), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _prefix = __webpack_require__(156), _prefix2 = _interopRequireDefault(_prefix), _camelize = __webpack_require__(448), _camelize2 = _interopRequireDefault(_camelize), el = void 0, cache = {};
+ var _isInBrowser = __webpack_require__(108), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _prefix = __webpack_require__(154), _prefix2 = _interopRequireDefault(_prefix), _camelize = __webpack_require__(452), _camelize2 = _interopRequireDefault(_camelize), el = void 0, cache = {};
if (_isInBrowser2.default) {
el = document.createElement("p");
var computed = window.getComputedStyle(document.documentElement, "");
@@ -28383,7 +28500,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = supportedValue;
- var _isInBrowser = __webpack_require__(107), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _prefix = __webpack_require__(156), _prefix2 = _interopRequireDefault(_prefix), cache = {}, el = void 0;
+ var _isInBrowser = __webpack_require__(108), _isInBrowser2 = _interopRequireDefault(_isInBrowser), _prefix = __webpack_require__(154), _prefix2 = _interopRequireDefault(_prefix), cache = {}, el = void 0;
_isInBrowser2.default && (el = document.createElement("p"));
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -28408,7 +28525,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
"use strict";
(function(process) {
function createGenerateClassName() {
- var options = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {}, _options$dangerouslyU = options.dangerouslyUseGlobalCSS, dangerouslyUseGlobalCSS = void 0 !== _options$dangerouslyU && _options$dangerouslyU, _options$productionPr = options.productionPrefix, productionPrefix = void 0 === _options$productionPr ? "jss" : _options$productionPr, escapeRegex = /([[\].#*$><+~=|^:(),"'` + "`")) + (`\s])/g, ruleCounter = 0;
+ var options = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {}, _options$dangerouslyU = options.dangerouslyUseGlobalCSS, dangerouslyUseGlobalCSS = void 0 !== _options$dangerouslyU && _options$dangerouslyU, _options$productionPr = options.productionPrefix, productionPrefix = void 0 === _options$productionPr ? "jss" : _options$productionPr, escapeRegex = /([[\].#*$><+~=|^:(),"'`))) + (("`" + (`\s])/g, ruleCounter = 0;
return "production" === process.env.NODE_ENV && "undefined" != typeof window && "jss" === productionPrefix && (generatorCounter += 1) > 2 && console.error([ "Material-UI: we have detected more than needed creation of the class name generator.", "You should only use one class name generator on the client side.", "If you do otherwise, you take the risk to have conflicting class names in production." ].join("\n")),
function(rule, styleSheet) {
if (ruleCounter += 1, "production" !== process.env.NODE_ENV && (0, _warning2.default)(ruleCounter < 1e10, [ "Material-UI: you might have a memory leak.", "The ruleCounter is not supposed to grow that much." ].join("")),
@@ -28431,7 +28548,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = createGenerateClassName;
- var _warning = __webpack_require__(11), _warning2 = function(obj) {
+ var _warning = __webpack_require__(12), _warning2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -28451,7 +28568,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (!theme.overrides || !name || !theme.overrides[name]) return styles;
var overrides = theme.overrides[name], stylesWithOverrides = (0, _extends3.default)({}, styles);
return (0, _keys2.default)(overrides).forEach(function(key) {
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(stylesWithOverrides[key], [ "Material-UI: you are trying to override a style that does not exist.", "Fix the ` + ("`" + `" + key + "`))) + (("`" + (` key of ` + "`")) + (`theme.overrides." + name + "` + ("`" + `." ].join("\n")),
+ "production" !== process.env.NODE_ENV && (0, _warning2.default)(stylesWithOverrides[key], [ "Material-UI: you are trying to override a style that does not exist.", "Fix the ` + "`")) + (`" + key + "` + ("`" + ` key of `))))) + (((("`" + (`theme.overrides." + name + "` + "`")) + (`." ].join("\n")),
stylesWithOverrides[key] = (0, _deepmerge2.default)(stylesWithOverrides[key], overrides[key]);
}), stylesWithOverrides;
}
@@ -28465,7 +28582,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _deepmerge = __webpack_require__(103), _deepmerge2 = _interopRequireDefault(_deepmerge);
+ var _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _deepmerge = __webpack_require__(104), _deepmerge2 = _interopRequireDefault(_deepmerge);
exports.default = getStylesCreator;
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
@@ -28496,13 +28613,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _extends = Object.assign || function(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i];
- for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
- }
- return target;
- }, _createClass = function() {
+ var _createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
@@ -28514,22 +28625,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _AppBar = __webpack_require__(454), _AppBar2 = _interopRequireDefault(_AppBar), _Toolbar = __webpack_require__(458), _Toolbar2 = _interopRequireDefault(_Toolbar), _Transition = __webpack_require__(108), _Transition2 = _interopRequireDefault(_Transition), _IconButton = __webpack_require__(461), _IconButton2 = _interopRequireDefault(_IconButton), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), _ChevronLeft = __webpack_require__(481), _ChevronLeft2 = _interopRequireDefault(_ChevronLeft), _common = __webpack_require__(61), styles = {
- arrow: {
- default: {
- transition: "transform " + _common.DURATION + "ms"
- },
- transition: {
- entered: {
- transform: "rotate(180deg)"
- }
- }
- }
- }, themeStyles = function(theme) {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _AppBar = __webpack_require__(458), _AppBar2 = _interopRequireDefault(_AppBar), _Toolbar = __webpack_require__(462), _Toolbar2 = _interopRequireDefault(_Toolbar), _IconButton = __webpack_require__(464), _IconButton2 = _interopRequireDefault(_IconButton), _Icon = __webpack_require__(240), _Icon2 = _interopRequireDefault(_Icon), _Menu = __webpack_require__(484), _Menu2 = _interopRequireDefault(_Menu), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), themeStyles = function(theme) {
return {
header: {
- backgroundColor: theme.palette.background.appBar,
- color: theme.palette.getContrastText(theme.palette.background.appBar),
+ backgroundColor: theme.palette.grey[900],
+ color: theme.palette.getContrastText(theme.palette.grey[900]),
zIndex: theme.zIndex.appBar
},
toolbar: {
@@ -28537,44 +28637,26 @@ var _bundleJs = []byte((((((((((`!function(modules) {
paddingRight: theme.spacing.unit
},
title: {
- paddingLeft: theme.spacing.unit
+ paddingLeft: theme.spacing.unit,
+ fontSize: 3 * theme.spacing.unit
}
};
}, Header = function(_Component) {
function Header() {
- var _ref, _temp, _this, _ret;
- _classCallCheck(this, Header);
- for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) args[_key] = arguments[_key];
- return _temp = _this = _possibleConstructorReturn(this, (_ref = Header.__proto__ || Object.getPrototypeOf(Header)).call.apply(_ref, [ this ].concat(args))),
- _this.arrow = function(transitionState) {
- return _react2.default.createElement(_IconButton2.default, {
- onClick: _this.props.switchSideBar
- }, _react2.default.createElement(_ChevronLeft2.default, {
- style: _extends({}, styles.arrow.default, styles.arrow.transition[transitionState])
- }));
- }, _ret = _temp, _possibleConstructorReturn(_this, _ret);
+ return _classCallCheck(this, Header), _possibleConstructorReturn(this, (Header.__proto__ || Object.getPrototypeOf(Header)).apply(this, arguments));
}
return _inherits(Header, _Component), _createClass(Header, [ {
- key: "shouldComponentUpdate",
- value: function(nextProps) {
- return nextProps.opened !== this.props.opened;
- }
- }, {
key: "render",
value: function() {
- var _props = this.props, classes = _props.classes, opened = _props.opened;
+ var classes = this.props.classes;
return _react2.default.createElement(_AppBar2.default, {
position: "static",
className: classes.header
}, _react2.default.createElement(_Toolbar2.default, {
className: classes.toolbar
- }, _react2.default.createElement(_Transition2.default, {
- mountOnEnter: !0,
- in: opened,
- timeout: {
- enter: _common.DURATION
- }
- }, this.arrow), _react2.default.createElement(_Typography2.default, {
+ }, _react2.default.createElement(_IconButton2.default, {
+ onClick: this.props.switchSideBar
+ }, _react2.default.createElement(_Icon2.default, null, _react2.default.createElement(_Menu2.default, null))), _react2.default.createElement(_Typography2.default, {
type: "title",
color: "inherit",
noWrap: !0,
@@ -28594,7 +28676,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _AppBar = __webpack_require__(455);
+ var _AppBar = __webpack_require__(459);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -28612,8 +28694,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function AppBar(props) {
var _classNames, children = props.children, classes = props.classes, classNameProp = props.className, color = props.color, position = props.position, other = (0,
_objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color", "position" ]), className = (0,
- _classnames2.default)(classes.root, classes["position" + (0, _helpers.capitalizeFirstLetter)(position)], (_classNames = {},
- (0, _defineProperty3.default)(_classNames, classes["color" + (0, _helpers.capitalizeFirstLetter)(color)], "inherit" !== color),
+ _classnames2.default)(classes.root, classes["position" + (0, _helpers.capitalize)(position)], (_classNames = {},
+ (0, _defineProperty3.default)(_classNames, classes["color" + (0, _helpers.capitalize)(color)], "inherit" !== color),
(0, _defineProperty3.default)(_classNames, "mui-fixed", "fixed" === position), _classNames), classNameProp);
return _react2.default.createElement(_Paper2.default, (0, _extends3.default)({
square: !0,
@@ -28625,7 +28707,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(52), _Paper = __webpack_require__(456), _Paper2 = _interopRequireDefault(_Paper), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(51), _Paper = __webpack_require__(460), _Paper2 = _interopRequireDefault(_Paper), styles = exports.styles = function(theme) {
+ var backgroundColorDefault = "light" === theme.palette.type ? theme.palette.grey[100] : theme.palette.grey[900];
return {
root: {
display: "flex",
@@ -28647,13 +28730,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
left: "auto",
right: 0
},
+ positionSticky: {
+ position: "sticky",
+ top: 0,
+ left: "auto",
+ right: 0
+ },
positionStatic: {
- position: "static",
- flexShrink: 0
+ position: "static"
},
colorDefault: {
- backgroundColor: theme.palette.background.appBar,
- color: theme.palette.getContrastText(theme.palette.background.appBar)
+ backgroundColor: backgroundColorDefault,
+ color: theme.palette.getContrastText(backgroundColorDefault)
},
colorPrimary: {
backgroundColor: theme.palette.primary.main,
@@ -28670,7 +28758,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
classes: _propTypes2.default.object.isRequired,
className: _propTypes2.default.string,
color: _propTypes2.default.oneOf([ "inherit", "primary", "secondary", "default" ]),
- position: _propTypes2.default.oneOf([ "static", "fixed", "absolute" ])
+ position: _propTypes2.default.oneOf([ "fixed", "absolute", "sticky", "static" ])
} : {}, AppBar.defaultProps = {
color: "primary",
position: "fixed"
@@ -28688,7 +28776,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _Paper = __webpack_require__(457);
+ var _Paper = __webpack_require__(461);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -28706,8 +28794,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function Paper(props) {
var classes = props.classes, classNameProp = props.className, Component = props.component, square = props.square, elevation = props.elevation, other = (0,
_objectWithoutProperties3.default)(props, [ "classes", "className", "component", "square", "elevation" ]);
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(elevation >= 0 && elevation < 25, "Material-UI: this elevation `))))) + (((("`" + (`" + elevation + "` + "`")) + (` is not implemented.");
- var className = (0, _classnames2.default)(classes.root, classes["shadow" + (elevation >= 0 ? elevation : 0)], (0,
+ "production" !== process.env.NODE_ENV && (0, _warning2.default)(elevation >= 0 && elevation < 25, "Material-UI: this elevation ` + ("`" + `" + elevation + "`))) + (("`" + (` is not implemented.");
+ var className = (0, _classnames2.default)(classes.root, classes["shadow" + elevation], (0,
_defineProperty3.default)({}, classes.rounded, !square), classNameProp);
return _react2.default.createElement(Component, (0, _extends3.default)({
className: className
@@ -28716,7 +28804,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
var shadows = {};
return theme.shadows.forEach(function(shadow, index) {
shadows["shadow" + index] = {
@@ -28756,7 +28844,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _Toolbar = __webpack_require__(459);
+ var _Toolbar = __webpack_require__(463);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -28782,7 +28870,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
return {
root: (0, _extends3.default)({
position: "relative",
@@ -28805,38 +28893,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
- function transitionTimeout(transitionType) {
- var timeoutPropName = "transition" + transitionType + "Timeout", enabledPropName = "transition" + transitionType;
- return function(props) {
- if (props[enabledPropName]) {
- if (null == props[timeoutPropName]) return new Error(timeoutPropName + " wasn't supplied to CSSTransitionGroup: this can cause unreliable animations and won't be supported in a future version of React. See https://fb.me/react-animation-transition-group-timeout for more information.");
- if ("number" != typeof props[timeoutPropName]) return new Error(timeoutPropName + " must be a number (in milliseconds)");
- }
- return null;
- };
- }
- exports.__esModule = !0, exports.classNamesShape = exports.timeoutsShape = void 0,
- exports.transitionTimeout = transitionTimeout;
- var _propTypes = __webpack_require__(1), _propTypes2 = function(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }(_propTypes);
- exports.timeoutsShape = _propTypes2.default.oneOfType([ _propTypes2.default.number, _propTypes2.default.shape({
- enter: _propTypes2.default.number,
- exit: _propTypes2.default.number
- }).isRequired ]), exports.classNamesShape = _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.shape({
- enter: _propTypes2.default.string,
- exit: _propTypes2.default.string,
- active: _propTypes2.default.string
- }), _propTypes2.default.shape({
- enter: _propTypes2.default.string,
- enterActive: _propTypes2.default.string,
- exit: _propTypes2.default.string,
- exitActive: _propTypes2.default.string
- }) ]);
-}, function(module, exports, __webpack_require__) {
- "use strict";
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
@@ -28845,7 +28901,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _IconButton = __webpack_require__(462);
+ var _IconButton = __webpack_require__(465);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -28861,31 +28917,27 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
function IconButton(props) {
- var _classNames, buttonRef = props.buttonRef, children = props.children, classes = props.classes, className = props.className, color = props.color, disabled = props.disabled, rootRef = props.rootRef, other = (0,
- _objectWithoutProperties3.default)(props, [ "buttonRef", "children", "classes", "className", "color", "disabled", "rootRef" ]);
+ var _classNames, children = props.children, classes = props.classes, className = props.className, color = props.color, disabled = props.disabled, other = (0,
+ _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color", "disabled" ]);
return _react2.default.createElement(_ButtonBase2.default, (0, _extends3.default)({
className: (0, _classnames2.default)(classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
- _helpers.capitalizeFirstLetter)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.disabled, disabled),
+ _helpers.capitalize)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.disabled, disabled),
_classNames), className),
centerRipple: !0,
focusRipple: !0,
- disabled: disabled,
- rootRef: buttonRef,
- ref: rootRef
+ disabled: disabled
}, other), _react2.default.createElement("span", {
className: classes.label
- }, "string" == typeof children ? _react2.default.createElement(_Icon2.default, {
- className: classes.icon
- }, children) : _react2.default.Children.map(children, function(child) {
+ }, _react2.default.Children.map(children, function(child) {
return (0, _reactHelpers.isMuiElement)(child, [ "Icon", "SvgIcon" ]) ? _react2.default.cloneElement(child, {
- className: (0, _classnames2.default)(classes.icon, child.props.className)
+ fontSize: !0
}) : child;
})));
}
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _ButtonBase = __webpack_require__(234), _ButtonBase2 = _interopRequireDefault(_ButtonBase), _helpers = __webpack_require__(52), _Icon = __webpack_require__(237), _Icon2 = _interopRequireDefault(_Icon), _reactHelpers = __webpack_require__(238);
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _ButtonBase = __webpack_require__(234), _ButtonBase2 = _interopRequireDefault(_ButtonBase), _helpers = __webpack_require__(51), _reactHelpers = __webpack_require__(238);
__webpack_require__(239);
var styles = exports.styles = function(theme) {
return {
@@ -28919,22 +28971,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
display: "flex",
alignItems: "inherit",
justifyContent: "inherit"
- },
- icon: {
- width: "1em",
- height: "1em"
}
};
};
IconButton.propTypes = "production" !== process.env.NODE_ENV ? {
- buttonRef: _propTypes2.default.func,
children: _propTypes2.default.node,
classes: _propTypes2.default.object.isRequired,
className: _propTypes2.default.string,
color: _propTypes2.default.oneOf([ "default", "inherit", "primary", "secondary" ]),
disabled: _propTypes2.default.bool,
- disableRipple: _propTypes2.default.bool,
- rootRef: _propTypes2.default.func
+ disableRipple: _propTypes2.default.bool
} : {}, IconButton.defaultProps = {
color: "default",
disabled: !1,
@@ -28954,34 +29000,34 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactDom = __webpack_require__(95), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _keycode = __webpack_require__(235), _keycode2 = _interopRequireDefault(_keycode), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _keyboardFocus = __webpack_require__(464), _TouchRipple = __webpack_require__(468), _TouchRipple2 = _interopRequireDefault(_TouchRipple), _createRippleHandler = __webpack_require__(477), _createRippleHandler2 = _interopRequireDefault(_createRippleHandler), styles = exports.styles = function(theme) {
- return {
- root: {
- display: "inline-flex",
- alignItems: "center",
- justifyContent: "center",
- position: "relative",
- WebkitTapHighlightColor: theme.palette.common.transparent,
- backgroundColor: "transparent",
- outline: "none",
- border: 0,
- borderRadius: 0,
- padding: 0,
- cursor: "pointer",
- userSelect: "none",
- verticalAlign: "middle",
- appearance: "none",
- textDecoration: "none",
- color: "inherit",
- "&::-moz-focus-inner": {
- borderStyle: "none"
- }
- },
- disabled: {
- pointerEvents: "none",
- cursor: "default"
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactDom = __webpack_require__(96), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _keycode = __webpack_require__(235), _keycode2 = _interopRequireDefault(_keycode), _ownerWindow = __webpack_require__(467), _ownerWindow2 = _interopRequireDefault(_ownerWindow), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _keyboardFocus = __webpack_require__(468), _TouchRipple = __webpack_require__(471), _TouchRipple2 = _interopRequireDefault(_TouchRipple), _createRippleHandler = __webpack_require__(481), _createRippleHandler2 = _interopRequireDefault(_createRippleHandler), styles = exports.styles = {
+ root: {
+ display: "inline-flex",
+ alignItems: "center",
+ justifyContent: "center",
+ position: "relative",
+ WebkitTapHighlightColor: "transparent",
+ backgroundColor: "transparent",
+ outline: "none",
+ border: 0,
+ margin: 0,
+ borderRadius: 0,
+ padding: 0,
+ cursor: "pointer",
+ userSelect: "none",
+ verticalAlign: "middle",
+ "-moz-appearance": "none",
+ "-webkit-appearance": "none",
+ textDecoration: "none",
+ color: "inherit",
+ "&::-moz-focus-inner": {
+ borderStyle: "none"
}
- };
+ },
+ disabled: {
+ pointerEvents: "none",
+ cursor: "default"
+ }
}, ButtonBase = function(_React$Component) {
function ButtonBase() {
var _ref, _temp, _this, _ret;
@@ -28995,17 +29041,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_this.keyDown = !1, _this.setState({
keyboardFocused: !0
}), _this.props.onKeyboardFocus && _this.props.onKeyboardFocus(event);
+ }, _this.onRippleRef = function(node) {
+ _this.ripple = node;
}, _this.ripple = null, _this.keyDown = !1, _this.button = null, _this.keyboardFocusTimeout = null,
_this.keyboardFocusCheckTime = 50, _this.keyboardFocusMaxCheckTimes = 5, _this.handleKeyDown = function(event) {
var _this$props = _this.props, component = _this$props.component, focusRipple = _this$props.focusRipple, onKeyDown = _this$props.onKeyDown, onClick = _this$props.onClick, key = (0,
_keycode2.default)(event);
- focusRipple && !_this.keyDown && _this.state.keyboardFocused && "space" === key && (_this.keyDown = !0,
+ focusRipple && !_this.keyDown && _this.state.keyboardFocused && _this.ripple && "space" === key && (_this.keyDown = !0,
event.persist(), _this.ripple.stop(event, function() {
_this.ripple.start(event);
- })), onKeyDown && onKeyDown(event), event.target === _this.button && onClick && component && "a" !== component && "button" !== component && ("space" === key || "enter" === key) && (event.preventDefault(),
- onClick(event));
+ })), onKeyDown && onKeyDown(event), event.target !== event.currentTarget || !component || "button" === component || "space" !== key && "enter" !== key || (event.preventDefault(),
+ onClick && onClick(event));
}, _this.handleKeyUp = function(event) {
- _this.props.focusRipple && "space" === (0, _keycode2.default)(event) && _this.state.keyboardFocused && (_this.keyDown = !1,
+ _this.props.focusRipple && "space" === (0, _keycode2.default)(event) && _this.ripple && _this.state.keyboardFocused && (_this.keyDown = !1,
event.persist(), _this.ripple.stop(event, function() {
return _this.ripple.pulsate(event);
})), _this.props.onKeyUp && _this.props.onKeyUp(event);
@@ -29035,7 +29083,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return (0, _inherits3.default)(ButtonBase, _React$Component), (0, _createClass3.default)(ButtonBase, [ {
key: "componentDidMount",
value: function() {
- this.button = (0, _reactDom.findDOMNode)(this), (0, _keyboardFocus.listenForFocusKeys)();
+ this.button = (0, _reactDom.findDOMNode)(this), (0, _keyboardFocus.listenForFocusKeys)((0,
+ _ownerWindow2.default)(this.button));
}
}, {
key: "componentWillReceiveProps",
@@ -29057,17 +29106,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _classNames, _this2 = this, _props = this.props, centerRipple = _props.centerRipple, children = _props.children, classes = _props.classes, classNameProp = _props.className, component = _props.component, disabled = _props.disabled, disableRipple = _props.disableRipple, keyboardFocusedClassName = (_props.focusRipple,
- _props.keyboardFocusedClassName), rootRef = (_props.onBlur, _props.onFocus, _props.onKeyboardFocus,
+ var _classNames, _props = this.props, buttonRef = _props.buttonRef, centerRipple = _props.centerRipple, children = _props.children, classes = _props.classes, classNameProp = _props.className, component = _props.component, disabled = _props.disabled, disableRipple = _props.disableRipple, keyboardFocusedClassName = (_props.focusRipple,
+ _props.keyboardFocusedClassName), tabIndex = (_props.onBlur, _props.onFocus, _props.onKeyboardFocus,
_props.onKeyDown, _props.onKeyUp, _props.onMouseDown, _props.onMouseLeave, _props.onMouseUp,
- _props.onTouchEnd, _props.onTouchMove, _props.onTouchStart, _props.rootRef), tabIndex = _props.tabIndex, type = _props.type, other = (0,
- _objectWithoutProperties3.default)(_props, [ "centerRipple", "children", "classes", "className", "component", "disabled", "disableRipple", "focusRipple", "keyboardFocusedClassName", "onBlur", "onFocus", "onKeyboardFocus", "onKeyDown", "onKeyUp", "onMouseDown", "onMouseLeave", "onMouseUp", "onTouchEnd", "onTouchMove", "onTouchStart", "rootRef", "tabIndex", "type" ]), className = (0,
+ _props.onTouchEnd, _props.onTouchMove, _props.onTouchStart, _props.tabIndex), type = _props.type, other = (0,
+ _objectWithoutProperties3.default)(_props, [ "buttonRef", "centerRipple", "children", "classes", "className", "component", "disabled", "disableRipple", "focusRipple", "keyboardFocusedClassName", "onBlur", "onFocus", "onKeyboardFocus", "onKeyDown", "onKeyUp", "onMouseDown", "onMouseLeave", "onMouseUp", "onTouchEnd", "onTouchMove", "onTouchStart", "tabIndex", "type" ]), className = (0,
_classnames2.default)(classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes.disabled, disabled),
(0, _defineProperty3.default)(_classNames, keyboardFocusedClassName || "", this.state.keyboardFocused),
_classNames), classNameProp), buttonProps = {}, ComponentProp = component;
- return ComponentProp || (ComponentProp = other.href ? "a" : "button"), "button" === ComponentProp && (buttonProps.type = type || "button"),
- "a" !== ComponentProp && (buttonProps.role = "button" === buttonProps.type ? void 0 : "button",
- buttonProps.disabled = disabled), _react2.default.createElement(ComponentProp, (0,
+ return ComponentProp || (ComponentProp = other.href ? "a" : "button"), "button" === ComponentProp ? (buttonProps.type = type || "button",
+ buttonProps.disabled = disabled) : buttonProps.role = "button", _react2.default.createElement(ComponentProp, (0,
_extends3.default)({
onBlur: this.handleBlur,
onFocus: this.handleFocus,
@@ -29079,20 +29127,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
onTouchEnd: this.handleTouchEnd,
onTouchMove: this.handleTouchMove,
onTouchStart: this.handleTouchStart,
- tabIndex: disabled ? -1 : tabIndex,
- className: className
- }, buttonProps, {
- ref: rootRef
- }, other), children, disableRipple || disabled ? null : _react2.default.createElement(_TouchRipple2.default, {
- innerRef: function(node) {
- _this2.ripple = node;
- },
+ tabIndex: disabled ? "-1" : tabIndex,
+ className: className,
+ ref: buttonRef
+ }, buttonProps, other), children, disableRipple || disabled ? null : _react2.default.createElement(_TouchRipple2.default, {
+ innerRef: this.onRippleRef,
center: centerRipple
}));
}
} ]), ButtonBase;
}(_react2.default.Component);
ButtonBase.propTypes = "production" !== process.env.NODE_ENV ? {
+ buttonRef: _propTypes2.default.func,
centerRipple: _propTypes2.default.bool,
children: _propTypes2.default.node,
classes: _propTypes2.default.object.isRequired,
@@ -29115,14 +29161,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
onTouchMove: _propTypes2.default.func,
onTouchStart: _propTypes2.default.func,
role: _propTypes2.default.string,
- rootRef: _propTypes2.default.func,
tabIndex: _propTypes2.default.oneOfType([ _propTypes2.default.number, _propTypes2.default.string ]),
type: _propTypes2.default.string
} : {}, ButtonBase.defaultProps = {
centerRipple: !1,
disableRipple: !1,
focusRipple: !1,
- tabIndex: 0,
+ tabIndex: "0",
type: "button"
}, exports.default = (0, _withStyles2.default)(styles, {
name: "MuiButtonBase"
@@ -29130,6 +29175,21 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
+ function ownerWindow(node) {
+ var doc = (0, _ownerDocument2.default)(node);
+ return doc && doc.defaultView || doc.parentWindow;
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ }), exports.default = ownerWindow;
+ var _ownerDocument = __webpack_require__(236), _ownerDocument2 = function(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }(_ownerDocument);
+ module.exports = exports.default;
+}, function(module, exports, __webpack_require__) {
+ "use strict";
(function(process) {
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
@@ -29144,25 +29204,25 @@ var _bundleJs = []byte((((((((((`!function(modules) {
"production" !== process.env.NODE_ENV && (0, _warning2.default)(instance.keyboardFocusCheckTime, "Material-UI: missing instance.keyboardFocusCheckTime"),
"production" !== process.env.NODE_ENV && (0, _warning2.default)(instance.keyboardFocusMaxCheckTimes, "Material-UI: missing instance.keyboardFocusMaxCheckTimes"),
instance.keyboardFocusTimeout = setTimeout(function() {
- focusKeyPressed() && (document.activeElement === element || (0, _contains2.default)(element, document.activeElement)) ? callback() : attempt < instance.keyboardFocusMaxCheckTimes && detectKeyboardFocus(instance, element, callback, attempt + 1);
+ var doc = (0, _ownerDocument2.default)(element);
+ focusKeyPressed() && (doc.activeElement === element || (0, _contains2.default)(element, doc.activeElement)) ? callback() : attempt < instance.keyboardFocusMaxCheckTimes && detectKeyboardFocus(instance, element, callback, attempt + 1);
}, instance.keyboardFocusCheckTime);
}
function isFocusKey(event) {
return -1 !== FOCUS_KEYS.indexOf((0, _keycode2.default)(event));
}
- function listenForFocusKeys() {
- internal.listening || ((0, _addEventListener2.default)(window, "keyup", function(event) {
- isFocusKey(event) && (internal.focusKeyPressed = !0);
- }), internal.listening = !0);
+ function listenForFocusKeys(win) {
+ win.addEventListener("keyup", handleKeyUpEvent);
}
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.focusKeyPressed = focusKeyPressed, exports.detectKeyboardFocus = detectKeyboardFocus,
exports.listenForFocusKeys = listenForFocusKeys;
- var _keycode = __webpack_require__(235), _keycode2 = _interopRequireDefault(_keycode), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _contains = __webpack_require__(465), _contains2 = _interopRequireDefault(_contains), _addEventListener = __webpack_require__(467), _addEventListener2 = _interopRequireDefault(_addEventListener), internal = {
- listening: !1,
+ var _keycode = __webpack_require__(235), _keycode2 = _interopRequireDefault(_keycode), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _contains = __webpack_require__(469), _contains2 = _interopRequireDefault(_contains), _ownerDocument = __webpack_require__(236), _ownerDocument2 = _interopRequireDefault(_ownerDocument), internal = {
focusKeyPressed: !1
- }, FOCUS_KEYS = [ "tab", "enter", "space", "esc", "up", "down", "left", "right" ];
+ }, FOCUS_KEYS = [ "tab", "enter", "space", "esc", "up", "down", "left", "right" ], handleKeyUpEvent = function(event) {
+ isFocusKey(event) && (internal.focusKeyPressed = !0);
+ };
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -29175,7 +29235,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _inDOM = __webpack_require__(466), _inDOM2 = function(obj) {
+ var _inDOM = __webpack_require__(470), _inDOM2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -29193,17 +29253,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = exports.default;
}, function(module, exports, __webpack_require__) {
"use strict";
- Object.defineProperty(exports, "__esModule", {
- value: !0
- }), exports.default = function(node, event, handler, capture) {
- return node.addEventListener(event, handler, capture), {
- remove: function() {
- node.removeEventListener(event, handler, capture);
- }
- };
- };
-}, function(module, exports, __webpack_require__) {
- "use strict";
(function(process) {
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
@@ -29213,7 +29262,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = exports.DELAY_RIPPLE = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _toConsumableArray2 = __webpack_require__(469), _toConsumableArray3 = _interopRequireDefault(_toConsumableArray2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactDom = __webpack_require__(95), _reactDom2 = _interopRequireDefault(_reactDom), _TransitionGroup = __webpack_require__(236), _TransitionGroup2 = _interopRequireDefault(_TransitionGroup), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Ripple = __webpack_require__(476), _Ripple2 = _interopRequireDefault(_Ripple), DURATION = 550, DELAY_RIPPLE = exports.DELAY_RIPPLE = 80, styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _toConsumableArray2 = __webpack_require__(472), _toConsumableArray3 = _interopRequireDefault(_toConsumableArray2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactDom = __webpack_require__(96), _reactDom2 = _interopRequireDefault(_reactDom), _TransitionGroup = __webpack_require__(237), _TransitionGroup2 = _interopRequireDefault(_TransitionGroup), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Ripple = __webpack_require__(479), _Ripple2 = _interopRequireDefault(_Ripple), DURATION = 550, DELAY_RIPPLE = exports.DELAY_RIPPLE = 80, styles = exports.styles = function(theme) {
return {
root: {
display: "block",
@@ -29402,7 +29451,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = !0;
- var _from = __webpack_require__(470), _from2 = function(obj) {
+ var _from = __webpack_require__(473), _from2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -29416,15 +29465,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, exports, __webpack_require__) {
module.exports = {
- default: __webpack_require__(471),
+ default: __webpack_require__(474),
__esModule: !0
};
}, function(module, exports, __webpack_require__) {
- __webpack_require__(144), __webpack_require__(472), module.exports = __webpack_require__(17).Array.from;
+ __webpack_require__(143), __webpack_require__(475), module.exports = __webpack_require__(17).Array.from;
}, function(module, exports, __webpack_require__) {
"use strict";
- var ctx = __webpack_require__(47), $export = __webpack_require__(19), toObject = __webpack_require__(59), call = __webpack_require__(221), isArrayIter = __webpack_require__(222), toLength = __webpack_require__(97), createProperty = __webpack_require__(473), getIterFn = __webpack_require__(223);
- $export($export.S + $export.F * !__webpack_require__(474)(function(iter) {
+ var ctx = __webpack_require__(46), $export = __webpack_require__(19), toObject = __webpack_require__(58), call = __webpack_require__(221), isArrayIter = __webpack_require__(222), toLength = __webpack_require__(98), createProperty = __webpack_require__(476), getIterFn = __webpack_require__(223);
+ $export($export.S + $export.F * !__webpack_require__(477)(function(iter) {
Array.from(iter);
}), "Array", {
from: function(arrayLike) {
@@ -29510,7 +29559,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _Transition = __webpack_require__(108), _Transition2 = _interopRequireDefault(_Transition), Ripple = function(_React$Component) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _Transition = __webpack_require__(155), _Transition2 = _interopRequireDefault(_Transition), Ripple = function(_React$Component) {
function Ripple() {
var _ref, _temp, _this, _ret;
(0, _classCallCheck3.default)(this, Ripple);
@@ -29568,6 +29617,38 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
+ function transitionTimeout(transitionType) {
+ var timeoutPropName = "transition" + transitionType + "Timeout", enabledPropName = "transition" + transitionType;
+ return function(props) {
+ if (props[enabledPropName]) {
+ if (null == props[timeoutPropName]) return new Error(timeoutPropName + " wasn't supplied to CSSTransitionGroup: this can cause unreliable animations and won't be supported in a future version of React. See https://fb.me/react-animation-transition-group-timeout for more information.");
+ if ("number" != typeof props[timeoutPropName]) return new Error(timeoutPropName + " must be a number (in milliseconds)");
+ }
+ return null;
+ };
+ }
+ exports.__esModule = !0, exports.classNamesShape = exports.timeoutsShape = void 0,
+ exports.transitionTimeout = transitionTimeout;
+ var _propTypes = __webpack_require__(1), _propTypes2 = function(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }(_propTypes);
+ exports.timeoutsShape = _propTypes2.default.oneOfType([ _propTypes2.default.number, _propTypes2.default.shape({
+ enter: _propTypes2.default.number,
+ exit: _propTypes2.default.number
+ }).isRequired ]), exports.classNamesShape = _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.shape({
+ enter: _propTypes2.default.string,
+ exit: _propTypes2.default.string,
+ active: _propTypes2.default.string
+ }), _propTypes2.default.shape({
+ enter: _propTypes2.default.string,
+ enterActive: _propTypes2.default.string,
+ exit: _propTypes2.default.string,
+ exitActive: _propTypes2.default.string
+ }) ]);
+}, function(module, exports, __webpack_require__) {
+ "use strict";
function createRippleHandler(instance, eventName, action, cb) {
return function(event) {
return cb && cb.call(instance, event), !event.defaultPrevented && (instance.ripple && instance.ripple[action](event),
@@ -29586,65 +29667,12 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- function Icon(props) {
- var children = props.children, classes = props.classes, classNameProp = props.className, color = props.color, other = (0,
- _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color" ]), className = (0,
- _classnames2.default)("material-icons", classes.root, (0, _defineProperty3.default)({}, classes["color" + (0,
- _helpers.capitalizeFirstLetter)(color)], "inherit" !== color), classNameProp);
- return _react2.default.createElement("span", (0, _extends3.default)({
- className: className,
- "aria-hidden": "true"
- }, other), children);
- }
- Object.defineProperty(exports, "__esModule", {
- value: !0
- }), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(52), styles = exports.styles = function(theme) {
- return {
- root: {
- userSelect: "none"
- },
- colorPrimary: {
- color: theme.palette.primary.main
- },
- colorSecondary: {
- color: theme.palette.secondary.main
- },
- colorAction: {
- color: theme.palette.action.active
- },
- colorDisabled: {
- color: theme.palette.action.disabled
- },
- colorError: {
- color: theme.palette.error.main
- }
- };
- };
- Icon.propTypes = "production" !== process.env.NODE_ENV ? {
- children: _propTypes2.default.node,
- classes: _propTypes2.default.object.isRequired,
- className: _propTypes2.default.string,
- color: _propTypes2.default.oneOf([ "inherit", "secondary", "action", "disabled", "error", "primary" ])
- } : {}, Icon.defaultProps = {
- color: "inherit"
- }, Icon.muiName = "Icon", exports.default = (0, _withStyles2.default)(styles, {
- name: "MuiIcon"
- })(Icon);
- }).call(exports, __webpack_require__(2));
-}, function(module, exports, __webpack_require__) {
- "use strict";
- (function(process) {
- function _interopRequireDefault(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
function SvgIcon(props) {
- var children = props.children, classes = props.classes, classNameProp = props.className, color = props.color, nativeColor = props.nativeColor, titleAccess = props.titleAccess, viewBox = props.viewBox, other = (0,
- _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color", "nativeColor", "titleAccess", "viewBox" ]), className = (0,
- _classnames2.default)(classes.root, (0, _defineProperty3.default)({}, classes["color" + (0,
- _helpers.capitalizeFirstLetter)(color)], "inherit" !== color), classNameProp);
+ var _classNames, children = props.children, classes = props.classes, classNameProp = props.className, color = props.color, fontSize = props.fontSize, nativeColor = props.nativeColor, titleAccess = props.titleAccess, viewBox = props.viewBox, other = (0,
+ _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color", "fontSize", "nativeColor", "titleAccess", "viewBox" ]), className = (0,
+ _classnames2.default)(classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
+ _helpers.capitalize)(color)], "inherit" !== color), (0, _defineProperty3.default)(_classNames, classes.fontSize, fontSize),
+ _classNames), classNameProp);
return _react2.default.createElement("svg", (0, _extends3.default)({
className: className,
focusable: "false",
@@ -29656,7 +29684,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(52), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(51), styles = exports.styles = function(theme) {
return {
root: {
display: "inline-block",
@@ -29683,6 +29711,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
},
colorError: {
color: theme.palette.error.main
+ },
+ fontSize: {
+ width: "1em",
+ height: "1em"
}
};
};
@@ -29691,11 +29723,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
classes: _propTypes2.default.object.isRequired,
className: _propTypes2.default.string,
color: _propTypes2.default.oneOf([ "action", "disabled", "error", "inherit", "primary", "secondary" ]),
+ fontSize: _propTypes2.default.bool,
nativeColor: _propTypes2.default.string,
titleAccess: _propTypes2.default.string,
viewBox: _propTypes2.default.string
} : {}, SvgIcon.defaultProps = {
color: "inherit",
+ fontSize: !1,
viewBox: "0 0 24 24"
}, SvgIcon.muiName = "SvgIcon", exports.default = (0, _withStyles2.default)(styles, {
name: "MuiSvgIcon"
@@ -29709,63 +29743,24 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- function Typography(props) {
- var _classNames, align = props.align, classes = props.classes, classNameProp = props.className, componentProp = props.component, color = props.color, gutterBottom = props.gutterBottom, headlineMapping = props.headlineMapping, noWrap = props.noWrap, paragraph = props.paragraph, type = props.type, other = (0,
- _objectWithoutProperties3.default)(props, [ "align", "classes", "className", "component", "color", "gutterBottom", "headlineMapping", "noWrap", "paragraph", "type" ]), className = (0,
- _classnames2.default)(classes.root, classes[type], (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
- _helpers.capitalizeFirstLetter)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.noWrap, noWrap),
- (0, _defineProperty3.default)(_classNames, classes.gutterBottom, gutterBottom),
- (0, _defineProperty3.default)(_classNames, classes.paragraph, paragraph), (0, _defineProperty3.default)(_classNames, classes["align" + (0,
- _helpers.capitalizeFirstLetter)(align)], "inherit" !== align), _classNames), classNameProp), Component = componentProp || (paragraph ? "p" : headlineMapping[type]) || "span";
- return _react2.default.createElement(Component, (0, _extends3.default)({
- className: className
- }, other));
+ function Icon(props) {
+ var _classNames, children = props.children, classes = props.classes, classNameProp = props.className, color = props.color, fontSize = props.fontSize, other = (0,
+ _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "color", "fontSize" ]), className = (0,
+ _classnames2.default)("material-icons", classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
+ _helpers.capitalize)(color)], "inherit" !== color), (0, _defineProperty3.default)(_classNames, classes.fontSize, fontSize),
+ _classNames), classNameProp);
+ return _react2.default.createElement("span", (0, _extends3.default)({
+ className: className,
+ "aria-hidden": "true"
+ }, other), children);
}
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(52), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(51), styles = exports.styles = function(theme) {
return {
root: {
- display: "block",
- margin: 0
- },
- display4: theme.typography.display4,
- display3: theme.typography.display3,
- display2: theme.typography.display2,
- display1: theme.typography.display1,
- headline: theme.typography.headline,
- title: theme.typography.title,
- subheading: theme.typography.subheading,
- body2: theme.typography.body2,
- body1: theme.typography.body1,
- caption: theme.typography.caption,
- button: theme.typography.button,
- alignLeft: {
- textAlign: "left"
- },
- alignCenter: {
- textAlign: "center"
- },
- alignRight: {
- textAlign: "right"
- },
- alignJustify: {
- textAlign: "justify"
- },
- noWrap: {
- overflow: "hidden",
- textOverflow: "ellipsis",
- whiteSpace: "nowrap"
- },
- gutterBottom: {
- marginBottom: "0.35em"
- },
- paragraph: {
- marginBottom: 2 * theme.spacing.unit
- },
- colorInherit: {
- color: "inherit"
+ userSelect: "none"
},
colorPrimary: {
color: theme.palette.primary.main
@@ -29773,47 +29768,33 @@ var _bundleJs = []byte((((((((((`!function(modules) {
colorSecondary: {
color: theme.palette.secondary.main
},
- colorTextSecondary: {
- color: theme.palette.text.secondary
+ colorAction: {
+ color: theme.palette.action.active
+ },
+ colorDisabled: {
+ color: theme.palette.action.disabled
},
colorError: {
color: theme.palette.error.main
+ },
+ fontSize: {
+ width: "1em",
+ height: "1em"
}
};
};
- Typography.propTypes = "production" !== process.env.NODE_ENV ? {
- align: _propTypes2.default.oneOf([ "inherit", "left", "center", "right", "justify" ]),
+ Icon.propTypes = "production" !== process.env.NODE_ENV ? {
children: _propTypes2.default.node,
classes: _propTypes2.default.object.isRequired,
className: _propTypes2.default.string,
- color: _propTypes2.default.oneOf([ "inherit", "primary", "textSecondary", "secondary", "error", "default" ]),
- component: _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.func ]),
- gutterBottom: _propTypes2.default.bool,
- headlineMapping: _propTypes2.default.object,
- noWrap: _propTypes2.default.bool,
- paragraph: _propTypes2.default.bool,
- type: _propTypes2.default.oneOf([ "display4", "display3", "display2", "display1", "headline", "title", "subheading", "body2", "body1", "caption", "button" ])
- } : {}, Typography.defaultProps = {
- align: "inherit",
- color: "default",
- gutterBottom: !1,
- headlineMapping: {
- display4: "h1",
- display3: "h1",
- display2: "h1",
- display1: "h1",
- headline: "h1",
- title: "h2",
- subheading: "h3",
- body2: "aside",
- body1: "p"
- },
- noWrap: !1,
- paragraph: !1,
- type: "body1"
- }, exports.default = (0, _withStyles2.default)(styles, {
- name: "MuiTypography"
- })(Typography);
+ color: _propTypes2.default.oneOf([ "inherit", "secondary", "action", "disabled", "error", "primary" ]),
+ fontSize: _propTypes2.default.bool
+ } : {}, Icon.defaultProps = {
+ color: "inherit",
+ fontSize: !1
+ }, Icon.muiName = "Icon", exports.default = (0, _withStyles2.default)(styles, {
+ name: "MuiIcon"
+ })(Icon);
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -29826,14 +29807,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _pure = __webpack_require__(482), _pure2 = _interopRequireDefault(_pure), _SvgIcon = __webpack_require__(239), _SvgIcon2 = _interopRequireDefault(_SvgIcon), SvgIconCustom = global.__MUI_SvgIcon__ || _SvgIcon2.default, _ref = _react2.default.createElement("path", {
- d: "M15.41 7.41L14 6l-6 6 6 6 1.41-1.41L10.83 12z"
- }), ChevronLeft = function(props) {
+ var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _pure = __webpack_require__(485), _pure2 = _interopRequireDefault(_pure), _SvgIcon = __webpack_require__(239), _SvgIcon2 = _interopRequireDefault(_SvgIcon), SvgIconCustom = global.__MUI_SvgIcon__ || _SvgIcon2.default, _ref = _react2.default.createElement("path", {
+ d: "M3 18h18v-2H3v2zm0-5h18v-2H3v2zm0-7v2h18V6H3z"
+ }), Menu = function(props) {
return _react2.default.createElement(SvgIconCustom, props, _ref);
};
- ChevronLeft = (0, _pure2.default)(ChevronLeft), ChevronLeft.muiName = "SvgIcon",
- exports.default = ChevronLeft;
- }).call(exports, __webpack_require__(51));
+ Menu = (0, _pure2.default)(Menu), Menu.muiName = "SvgIcon", exports.default = Menu;
+ }).call(exports, __webpack_require__(60));
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
@@ -29843,7 +29823,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
exports.__esModule = !0;
- var _shouldUpdate = __webpack_require__(483), _shouldUpdate2 = _interopRequireDefault(_shouldUpdate), _shallowEqual = __webpack_require__(485), _shallowEqual2 = _interopRequireDefault(_shallowEqual), _setDisplayName = __webpack_require__(240), _setDisplayName2 = _interopRequireDefault(_setDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), pure = function(BaseComponent) {
+ var _shouldUpdate = __webpack_require__(486), _shouldUpdate2 = _interopRequireDefault(_shouldUpdate), _shallowEqual = __webpack_require__(488), _shallowEqual2 = _interopRequireDefault(_shallowEqual), _setDisplayName = __webpack_require__(241), _setDisplayName2 = _interopRequireDefault(_setDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), pure = function(BaseComponent) {
var hoc = (0, _shouldUpdate2.default)(function(props, nextProps) {
return !(0, _shallowEqual2.default)(props, nextProps);
});
@@ -29879,7 +29859,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
exports.__esModule = !0;
- var _react = __webpack_require__(0), _setDisplayName = __webpack_require__(240), _setDisplayName2 = _interopRequireDefault(_setDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), shouldUpdate = function(test) {
+ var _react = __webpack_require__(0), _setDisplayName = __webpack_require__(241), _setDisplayName2 = _interopRequireDefault(_setDisplayName), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), shouldUpdate = function(test) {
return function(BaseComponent) {
var factory = (0, _react.createFactory)(BaseComponent), ShouldUpdate = function(_Component) {
function ShouldUpdate() {
@@ -29909,7 +29889,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = !0;
- var _shallowEqual = __webpack_require__(96), _shallowEqual2 = function(obj) {
+ var _shallowEqual = __webpack_require__(97), _shallowEqual2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -29917,6 +29897,120 @@ var _bundleJs = []byte((((((((((`!function(modules) {
exports.default = _shallowEqual2.default;
}, function(module, exports, __webpack_require__) {
"use strict";
+ (function(process) {
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ function Typography(props) {
+ var _classNames, align = props.align, classes = props.classes, classNameProp = props.className, componentProp = props.component, color = props.color, gutterBottom = props.gutterBottom, headlineMapping = props.headlineMapping, noWrap = props.noWrap, paragraph = props.paragraph, variant = props.variant, other = (0,
+ _objectWithoutProperties3.default)(props, [ "align", "classes", "className", "component", "color", "gutterBottom", "headlineMapping", "noWrap", "paragraph", "variant" ]), className = (0,
+ _classnames2.default)(classes.root, classes[variant], (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
+ _helpers.capitalize)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.noWrap, noWrap),
+ (0, _defineProperty3.default)(_classNames, classes.gutterBottom, gutterBottom),
+ (0, _defineProperty3.default)(_classNames, classes.paragraph, paragraph), (0, _defineProperty3.default)(_classNames, classes["align" + (0,
+ _helpers.capitalize)(align)], "inherit" !== align), _classNames), classNameProp), Component = componentProp || (paragraph ? "p" : headlineMapping[variant]) || "span";
+ return _react2.default.createElement(Component, (0, _extends3.default)({
+ className: className
+ }, other));
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
+ }), exports.styles = void 0;
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(51), styles = exports.styles = function(theme) {
+ return {
+ root: {
+ display: "block",
+ margin: 0
+ },
+ display4: theme.typography.display4,
+ display3: theme.typography.display3,
+ display2: theme.typography.display2,
+ display1: theme.typography.display1,
+ headline: theme.typography.headline,
+ title: theme.typography.title,
+ subheading: theme.typography.subheading,
+ body2: theme.typography.body2,
+ body1: theme.typography.body1,
+ caption: theme.typography.caption,
+ button: theme.typography.button,
+ alignLeft: {
+ textAlign: "left"
+ },
+ alignCenter: {
+ textAlign: "center"
+ },
+ alignRight: {
+ textAlign: "right"
+ },
+ alignJustify: {
+ textAlign: "justify"
+ },
+ noWrap: {
+ overflow: "hidden",
+ textOverflow: "ellipsis",
+ whiteSpace: "nowrap"
+ },
+ gutterBottom: {
+ marginBottom: "0.35em"
+ },
+ paragraph: {
+ marginBottom: 2 * theme.spacing.unit
+ },
+ colorInherit: {
+ color: "inherit"
+ },
+ colorPrimary: {
+ color: theme.palette.primary.main
+ },
+ colorSecondary: {
+ color: theme.palette.secondary.main
+ },
+ colorTextSecondary: {
+ color: theme.palette.text.secondary
+ },
+ colorError: {
+ color: theme.palette.error.main
+ }
+ };
+ };
+ Typography.propTypes = "production" !== process.env.NODE_ENV ? {
+ align: _propTypes2.default.oneOf([ "inherit", "left", "center", "right", "justify" ]),
+ children: _propTypes2.default.node,
+ classes: _propTypes2.default.object.isRequired,
+ className: _propTypes2.default.string,
+ color: _propTypes2.default.oneOf([ "inherit", "primary", "textSecondary", "secondary", "error", "default" ]),
+ component: _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.func ]),
+ gutterBottom: _propTypes2.default.bool,
+ headlineMapping: _propTypes2.default.object,
+ noWrap: _propTypes2.default.bool,
+ paragraph: _propTypes2.default.bool,
+ variant: _propTypes2.default.oneOf([ "display4", "display3", "display2", "display1", "headline", "title", "subheading", "body2", "body1", "caption", "button" ])
+ } : {}, Typography.defaultProps = {
+ align: "inherit",
+ color: "default",
+ gutterBottom: !1,
+ headlineMapping: {
+ display4: "h1",
+ display3: "h1",
+ display2: "h1",
+ display1: "h1",
+ headline: "h1",
+ title: "h2",
+ subheading: "h3",
+ body2: "aside",
+ body1: "p"
+ },
+ noWrap: !1,
+ paragraph: !1,
+ variant: "body1"
+ }, exports.default = (0, _withStyles2.default)(styles, {
+ name: "MuiTypography"
+ })(Typography);
+ }).call(exports, __webpack_require__(2));
+}, function(module, exports, __webpack_require__) {
+ "use strict";
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
@@ -29955,7 +30049,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _SideBar = __webpack_require__(487), _SideBar2 = _interopRequireDefault(_SideBar), _Main = __webpack_require__(507), _Main2 = _interopRequireDefault(_Main), styles = {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _SideBar = __webpack_require__(491), _SideBar2 = _interopRequireDefault(_SideBar), _Main = __webpack_require__(511), _Main2 = _interopRequireDefault(_Main), styles = {
body: {
display: "flex",
width: "100%",
@@ -30028,7 +30122,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _List = __webpack_require__(488), _List2 = _interopRequireDefault(_List), _Icon = __webpack_require__(237), _Icon2 = _interopRequireDefault(_Icon), _Transition = __webpack_require__(108), _Transition2 = _interopRequireDefault(_Transition), _reactFa = __webpack_require__(496), _common = __webpack_require__(61), styles = {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _List = __webpack_require__(492), _List2 = _interopRequireDefault(_List), _Icon = __webpack_require__(240), _Icon2 = _interopRequireDefault(_Icon), _Transition = __webpack_require__(155), _Transition2 = _interopRequireDefault(_Transition), _reactFa = __webpack_require__(500), _common = __webpack_require__(77), styles = {
menu: {
default: {
transition: "margin-left " + _common.DURATION + "ms"
@@ -30042,10 +30136,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, themeStyles = function(theme) {
return {
list: {
- background: theme.palette.background.appBar
+ background: theme.palette.grey[900]
},
listItem: {
- minWidth: 3 * theme.spacing.unit
+ minWidth: 7 * theme.spacing.unit
},
icon: {
fontSize: 3 * theme.spacing.unit
@@ -30115,49 +30209,49 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _List = __webpack_require__(489);
+ var _List = __webpack_require__(493);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_List).default;
}
});
- var _ListItem = __webpack_require__(490);
+ var _ListItem = __webpack_require__(494);
Object.defineProperty(exports, "ListItem", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_ListItem).default;
}
});
- var _ListItemAvatar = __webpack_require__(491);
+ var _ListItemAvatar = __webpack_require__(495);
Object.defineProperty(exports, "ListItemAvatar", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_ListItemAvatar).default;
}
});
- var _ListItemText = __webpack_require__(492);
+ var _ListItemText = __webpack_require__(496);
Object.defineProperty(exports, "ListItemText", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_ListItemText).default;
}
});
- var _ListItemIcon = __webpack_require__(493);
+ var _ListItemIcon = __webpack_require__(497);
Object.defineProperty(exports, "ListItemIcon", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_ListItemIcon).default;
}
});
- var _ListItemSecondaryAction = __webpack_require__(494);
+ var _ListItemSecondaryAction = __webpack_require__(498);
Object.defineProperty(exports, "ListItemSecondaryAction", {
enumerable: !0,
get: function() {
return _interopRequireDefault(_ListItemSecondaryAction).default;
}
});
- var _ListSubheader = __webpack_require__(495);
+ var _ListSubheader = __webpack_require__(499);
Object.defineProperty(exports, "ListSubheader", {
enumerable: !0,
get: function() {
@@ -30175,10 +30269,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
return {
root: {
- flex: "1 1 auto",
listStyle: "none",
margin: 0,
padding: 0,
@@ -30251,14 +30344,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _ButtonBase = __webpack_require__(234), _ButtonBase2 = _interopRequireDefault(_ButtonBase), _reactHelpers = __webpack_require__(238), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _ButtonBase = __webpack_require__(234), _ButtonBase2 = _interopRequireDefault(_ButtonBase), _reactHelpers = __webpack_require__(238), styles = exports.styles = function(theme) {
return {
root: {
display: "flex",
justifyContent: "flex-start",
alignItems: "center",
position: "relative",
- textDecoration: "none"
+ textDecoration: "none",
+ width: "100%",
+ boxSizing: "border-box",
+ textAlign: "left"
},
container: {
position: "relative"
@@ -30278,7 +30374,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
opacity: .5
},
divider: {
- borderBottom: "1px solid " + theme.palette.divider
+ borderBottom: "1px solid " + theme.palette.divider,
+ backgroundClip: "padding-box"
},
gutters: {
paddingLeft: 2 * theme.spacing.unit,
@@ -30293,9 +30390,6 @@ var _bundleJs = []byte((((((((((`!function(modules) {
backgroundColor: theme.palette.action.hover,
"@media (hover: none)": {
backgroundColor: "transparent"
- },
- "&$disabled": {
- backgroundColor: "transparent"
}
}
},
@@ -30318,22 +30412,23 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, {
key: "render",
value: function() {
- var _classNames, _props = this.props, button = _props.button, childrenProp = _props.children, classes = _props.classes, classNameProp = _props.className, componentProp = _props.component, dense = _props.dense, disabled = _props.disabled, disableGutters = _props.disableGutters, divider = _props.divider, other = (0,
- _objectWithoutProperties3.default)(_props, [ "button", "children", "classes", "className", "component", "dense", "disabled", "disableGutters", "divider" ]), isDense = dense || this.context.dense || !1, children = _react2.default.Children.toArray(childrenProp), hasAvatar = children.some(function(value) {
+ var _classNames, _props = this.props, button = _props.button, childrenProp = _props.children, classes = _props.classes, classNameProp = _props.className, componentProp = _props.component, ContainerComponent = _props.ContainerComponent, ContainerProps = _props.ContainerProps, dense = _props.dense, disabled = _props.disabled, disableGutters = _props.disableGutters, divider = _props.divider, other = (0,
+ _objectWithoutProperties3.default)(_props, [ "button", "children", "classes", "className", "component", "ContainerComponent", "ContainerProps", "dense", "disabled", "disableGutters", "divider" ]), isDense = dense || this.context.dense || !1, children = _react2.default.Children.toArray(childrenProp), hasAvatar = children.some(function(value) {
return (0, _reactHelpers.isMuiElement)(value, [ "ListItemAvatar" ]);
}), hasSecondaryAction = children.length && (0, _reactHelpers.isMuiElement)(children[children.length - 1], [ "ListItemSecondaryAction" ]), className = (0,
- _classnames2.default)(classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes.gutters, !disableGutters),
- (0, _defineProperty3.default)(_classNames, classes.divider, divider), (0, _defineProperty3.default)(_classNames, classes.disabled, disabled),
+ _classnames2.default)(classes.root, isDense || hasAvatar ? classes.dense : classes.default, (_classNames = {},
+ (0, _defineProperty3.default)(_classNames, classes.gutters, !disableGutters), (0,
+ _defineProperty3.default)(_classNames, classes.divider, divider), (0, _defineProperty3.default)(_classNames, classes.disabled, disabled),
(0, _defineProperty3.default)(_classNames, classes.button, button), (0, _defineProperty3.default)(_classNames, classes.secondaryAction, hasSecondaryAction),
- (0, _defineProperty3.default)(_classNames, isDense || hasAvatar ? classes.dense : classes.default, !0),
- _classNames), classNameProp), listItemProps = (0, _extends3.default)({
+ _classNames), classNameProp), componentProps = (0, _extends3.default)({
className: className,
disabled: disabled
- }, other), ComponentMain = componentProp;
- return button && (ComponentMain = _ButtonBase2.default, listItemProps.component = componentProp,
- listItemProps.keyboardFocusedClassName = classes.keyboardFocused), hasSecondaryAction ? _react2.default.createElement("div", {
+ }, other), Component = componentProp || "li";
+ return button && (componentProps.component = componentProp || "div", componentProps.keyboardFocusedClassName = classes.keyboardFocused,
+ Component = _ButtonBase2.default), hasSecondaryAction ? (Component = Component === _ButtonBase2.default || componentProp ? Component : "div",
+ _react2.default.createElement(ContainerComponent, (0, _extends3.default)({
className: classes.container
- }, _react2.default.createElement(ComponentMain, listItemProps, children), children.pop()) : _react2.default.createElement(ComponentMain, listItemProps, children);
+ }, ContainerProps), _react2.default.createElement(Component, componentProps, children), children.pop())) : _react2.default.createElement(Component, componentProps, children);
}
} ]), ListItem;
}(_react2.default.Component);
@@ -30343,13 +30438,15 @@ var _bundleJs = []byte((((((((((`!function(modules) {
classes: _propTypes2.default.object.isRequired,
className: _propTypes2.default.string,
component: _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.func ]),
+ ContainerComponent: _propTypes2.default.oneOfType([ _propTypes2.default.string, _propTypes2.default.func ]),
+ ContainerProps: _propTypes2.default.object,
dense: _propTypes2.default.bool,
disabled: _propTypes2.default.bool,
disableGutters: _propTypes2.default.bool,
divider: _propTypes2.default.bool
} : {}, ListItem.defaultProps = {
button: !1,
- component: "li",
+ ContainerComponent: "li",
dense: !1,
disabled: !1,
disableGutters: !1,
@@ -30383,7 +30480,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
return {
root: {
width: 36,
@@ -30424,10 +30521,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return _react2.default.createElement("div", (0, _extends3.default)({
className: className
}, other), primary && (disableTypography ? primary : _react2.default.createElement(_Typography2.default, {
- type: "subheading",
+ variant: "subheading",
className: (0, _classnames2.default)(classes.primary, (0, _defineProperty3.default)({}, classes.textDense, dense))
}, primary)), secondary && (disableTypography ? secondary : _react2.default.createElement(_Typography2.default, {
- type: "body1",
+ variant: "body1",
className: (0, _classnames2.default)(classes.secondary, (0, _defineProperty3.default)({}, classes.textDense, dense)),
color: "textSecondary"
}, secondary)));
@@ -30435,7 +30532,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), styles = exports.styles = function(theme) {
return {
root: {
flex: "1 1 auto",
@@ -30502,7 +30599,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
return {
root: {
height: 24,
@@ -30539,7 +30636,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = exports.styles = function(theme) {
return {
root: {
position: "absolute",
@@ -30570,7 +30667,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var _classNames, classes = props.classes, classNameProp = props.className, color = props.color, Component = props.component, disableSticky = props.disableSticky, inset = props.inset, other = (0,
_objectWithoutProperties3.default)(props, [ "classes", "className", "color", "component", "disableSticky", "inset" ]), className = (0,
_classnames2.default)(classes.root, (_classNames = {}, (0, _defineProperty3.default)(_classNames, classes["color" + (0,
- _helpers.capitalizeFirstLetter)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.inset, inset),
+ _helpers.capitalize)(color)], "default" !== color), (0, _defineProperty3.default)(_classNames, classes.inset, inset),
(0, _defineProperty3.default)(_classNames, classes.sticky, !disableSticky), _classNames), classNameProp);
return _react2.default.createElement(Component, (0, _extends3.default)({
className: className
@@ -30579,7 +30676,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(52), styles = exports.styles = function(theme) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _helpers = __webpack_require__(51), styles = exports.styles = function(theme) {
return {
root: {
boxSizing: "border-box",
@@ -30635,21 +30732,21 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
Object.defineProperty(exports, "__esModule", {
value: !0
- }), exports.IconStack = exports.Icon = exports.default = void 0, __webpack_require__(497);
- var _Icon = __webpack_require__(505), _Icon2 = _interopRequireDefault(_Icon), _IconStack = __webpack_require__(506), _IconStack2 = _interopRequireDefault(_IconStack);
+ }), exports.IconStack = exports.Icon = exports.default = void 0, __webpack_require__(501);
+ var _Icon = __webpack_require__(509), _Icon2 = _interopRequireDefault(_Icon), _IconStack = __webpack_require__(510), _IconStack2 = _interopRequireDefault(_IconStack);
exports.default = _Icon2.default, exports.Icon = _Icon2.default, exports.IconStack = _IconStack2.default;
}, function(module, exports, __webpack_require__) {
- var content = __webpack_require__(498);
+ var content = __webpack_require__(502);
"string" == typeof content && (content = [ [ module.i, content, "" ] ]);
var options = {
hmr: !0
};
options.transform = void 0;
- __webpack_require__(503)(content, options);
+ __webpack_require__(507)(content, options);
content.locals && (module.exports = content.locals);
}, function(module, exports, __webpack_require__) {
- var escape = __webpack_require__(499);
- exports = module.exports = __webpack_require__(500)(!1), exports.push([ module.i, "/*!\n * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome\n * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)\n */\n/* FONT PATH\n * -------------------------- */\n@font-face {\n font-family: 'FontAwesome';\n \n src: url(" + escape(__webpack_require__(501)) + ") format('woff2'), url(" + escape(__webpack_require__(502)) + ') format(\'woff\');\n font-weight: normal;\n font-style: normal;\n}\n.fa {\n display: inline-block;\n font: normal normal normal 14px/1 FontAwesome;\n font-size: inherit;\n text-rendering: auto;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n}\n/* makes the font 33% larger relative to the icon container */\n.fa-lg {\n font-size: 1.33333333em;\n line-height: 0.75em;\n vertical-align: -15%;\n}\n.fa-2x {\n font-size: 2em;\n}\n.fa-3x {\n font-size: 3em;\n}\n.fa-4x {\n font-size: 4em;\n}\n.fa-5x {\n font-size: 5em;\n}\n.fa-fw {\n width: 1.28571429em;\n text-align: center;\n}\n.fa-ul {\n padding-left: 0;\n margin-left: 2.14285714em;\n list-style-type: none;\n}\n.fa-ul > li {\n position: relative;\n}\n.fa-li {\n position: absolute;\n left: -2.14285714em;\n width: 2.14285714em;\n top: 0.14285714em;\n text-align: center;\n}\n.fa-li.fa-lg {\n left: -1.85714286em;\n}\n.fa-border {\n padding: .2em .25em .15em;\n border: solid 0.08em #eeeeee;\n border-radius: .1em;\n}\n.fa-pull-left {\n float: left;\n}\n.fa-pull-right {\n float: right;\n}\n.fa.fa-pull-left {\n margin-right: .3em;\n}\n.fa.fa-pull-right {\n margin-left: .3em;\n}\n/* Deprecated as of 4.4.0 */\n.pull-right {\n float: right;\n}\n.pull-left {\n float: left;\n}\n.fa.pull-left {\n margin-right: .3em;\n}\n.fa.pull-right {\n margin-left: .3em;\n}\n.fa-spin {\n -webkit-animation: fa-spin 2s infinite linear;\n animation: fa-spin 2s infinite linear;\n}\n.fa-pulse {\n -webkit-animation: fa-spin 1s infinite steps(8);\n animation: fa-spin 1s infinite steps(8);\n}\n@-webkit-keyframes fa-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n@keyframes fa-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n.fa-rotate-90 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";\n -webkit-transform: rotate(90deg);\n -ms-transform: rotate(90deg);\n transform: rotate(90deg);\n}\n.fa-rotate-180 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";\n -webkit-transform: rotate(180deg);\n -ms-transform: rotate(180deg);\n transform: rotate(180deg);\n}\n.fa-rotate-270 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";\n -webkit-transform: rotate(270deg);\n -ms-transform: rotate(270deg);\n transform: rotate(270deg);\n}\n.fa-flip-horizontal {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";\n -webkit-transform: scale(-1, 1);\n -ms-transform: scale(-1, 1);\n transform: scale(-1, 1);\n}\n.fa-flip-vertical {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";\n -webkit-transform: scale(1, -1);\n -ms-transform: scale(1, -1);\n transform: scale(1, -1);\n}\n:root .fa-rotate-90,\n:root .fa-rotate-180,\n:root .fa-rotate-270,\n:root .fa-flip-horizontal,\n:root .fa-flip-vertical {\n filter: none;\n}\n.fa-stack {\n position: relative;\n display: 
inline-block;\n width: 2em;\n height: 2em;\n line-height: 2em;\n vertical-align: middle;\n}\n.fa-stack-1x,\n.fa-stack-2x {\n position: absolute;\n left: 0;\n width: 100%;\n text-align: center;\n}\n.fa-stack-1x {\n line-height: inherit;\n}\n.fa-stack-2x {\n font-size: 2em;\n}\n.fa-inverse {\n color: #ffffff;\n}\n/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen\n readers do not read off random characters that represent icons */\n.fa-glass:before {\n content: "\\F000";\n}\n.fa-music:before {\n content: "\\F001";\n}\n.fa-search:before {\n content: "\\F002";\n}\n.fa-envelope-o:before {\n content: "\\F003";\n}\n.fa-heart:before {\n content: "\\F004";\n}\n.fa-star:before {\n content: "\\F005";\n}\n.fa-star-o:before {\n content: "\\F006";\n}\n.fa-user:before {\n content: "\\F007";\n}\n.fa-film:before {\n content: "\\F008";\n}\n.fa-th-large:before {\n content: "\\F009";\n}\n.fa-th:before {\n content: "\\F00A";\n}\n.fa-th-list:before {\n content: "\\F00B";\n}\n.fa-check:before {\n content: "\\F00C";\n}\n.fa-remove:before,\n.fa-close:before,\n.fa-times:before {\n content: "\\F00D";\n}\n.fa-search-plus:before {\n content: "\\F00E";\n}\n.fa-search-minus:before {\n content: "\\F010";\n}\n.fa-power-off:before {\n content: "\\F011";\n}\n.fa-signal:before {\n content: "\\F012";\n}\n.fa-gear:before,\n.fa-cog:before {\n content: "\\F013";\n}\n.fa-trash-o:before {\n content: "\\F014";\n}\n.fa-home:before {\n content: "\\F015";\n}\n.fa-file-o:before {\n content: "\\F016";\n}\n.fa-clock-o:before {\n content: "\\F017";\n}\n.fa-road:before {\n content: "\\F018";\n}\n.fa-download:before {\n content: "\\F019";\n}\n.fa-arrow-circle-o-down:before {\n content: "\\F01A";\n}\n.fa-arrow-circle-o-up:before {\n content: "\\F01B";\n}\n.fa-inbox:before {\n content: "\\F01C";\n}\n.fa-play-circle-o:before {\n content: "\\F01D";\n}\n.fa-rotate-right:before,\n.fa-repeat:before {\n content: "\\F01E";\n}\n.fa-refresh:before {\n content: "\\F021";\n}\n.fa-list-alt:before {\n content: "\\F022";\n}\n.fa-lock:before {\n content: "\\F023";\n}\n.fa-flag:before {\n content: "\\F024";\n}\n.fa-headphones:before {\n content: "\\F025";\n}\n.fa-volume-off:before {\n content: "\\F026";\n}\n.fa-volume-down:before {\n content: "\\F027";\n}\n.fa-volume-up:before {\n content: "\\F028";\n}\n.fa-qrcode:before {\n content: "\\F029";\n}\n.fa-barcode:before {\n content: "\\F02A";\n}\n.fa-tag:before {\n content: "\\F02B";\n}\n.fa-tags:before {\n content: "\\F02C";\n}\n.fa-book:before {\n content: "\\F02D";\n}\n.fa-bookmark:before {\n content: "\\F02E";\n}\n.fa-print:before {\n content: "\\F02F";\n}\n.fa-camera:before {\n content: "\\F030";\n}\n.fa-font:before {\n content: "\\F031";\n}\n.fa-bold:before {\n content: "\\F032";\n}\n.fa-italic:before {\n content: "\\F033";\n}\n.fa-text-height:before {\n content: "\\F034";\n}\n.fa-text-width:before {\n content: "\\F035";\n}\n.fa-align-left:before {\n content: "\\F036";\n}\n.fa-align-center:before {\n content: "\\F037";\n}\n.fa-align-right:before {\n content: "\\F038";\n}\n.fa-align-justify:before {\n content: "\\F039";\n}\n.fa-list:before {\n content: "\\F03A";\n}\n.fa-dedent:before,\n.fa-outdent:before {\n content: "\\F03B";\n}\n.fa-indent:before {\n content: "\\F03C";\n}\n.fa-video-camera:before {\n content: "\\F03D";\n}\n.fa-photo:before,\n.fa-image:before,\n.fa-picture-o:before {\n content: "\\F03E";\n}\n.fa-pencil:before {\n content: "\\F040";\n}\n.fa-map-marker:before {\n content: "\\F041";\n}\n.fa-adjust:before {\n content: "\\F042";\n}\n.fa-tint:before {\n content: 
"\\F043";\n}\n.fa-edit:before,\n.fa-pencil-square-o:before {\n content: "\\F044";\n}\n.fa-share-square-o:before {\n content: "\\F045";\n}\n.fa-check-square-o:before {\n content: "\\F046";\n}\n.fa-arrows:before {\n content: "\\F047";\n}\n.fa-step-backward:before {\n content: "\\F048";\n}\n.fa-fast-backward:before {\n content: "\\F049";\n}\n.fa-backward:before {\n content: "\\F04A";\n}\n.fa-play:before {\n content: "\\F04B";\n}\n.fa-pause:before {\n content: "\\F04C";\n}\n.fa-stop:before {\n content: "\\F04D";\n}\n.fa-forward:before {\n content: "\\F04E";\n}\n.fa-fast-forward:before {\n content: "\\F050";\n}\n.fa-step-forward:before {\n content: "\\F051";\n}\n.fa-eject:before {\n content: "\\F052";\n}\n.fa-chevron-left:before {\n content: "\\F053";\n}\n.fa-chevron-right:before {\n content: "\\F054";\n}\n.fa-plus-circle:before {\n content: "\\F055";\n}\n.fa-minus-circle:before {\n content: "\\F056";\n}\n.fa-times-circle:before {\n content: "\\F057";\n}\n.fa-check-circle:before {\n content: "\\F058";\n}\n.fa-question-circle:before {\n content: "\\F059";\n}\n.fa-info-circle:before {\n content: "\\F05A";\n}\n.fa-crosshairs:before {\n content: "\\F05B";\n}\n.fa-times-circle-o:before {\n content: "\\F05C";\n}\n.fa-check-circle-o:before {\n content: "\\F05D";\n}\n.fa-ban:before {\n content: "\\F05E";\n}\n.fa-arrow-left:before {\n content: "\\F060";\n}\n.fa-arrow-right:before {\n content: "\\F061";\n}\n.fa-arrow-up:before {\n content: "\\F062";\n}\n.fa-arrow-down:before {\n content: "\\F063";\n}\n.fa-mail-forward:before,\n.fa-share:before {\n content: "\\F064";\n}\n.fa-expand:before {\n content: "\\F065";\n}\n.fa-compress:before {\n content: "\\F066";\n}\n.fa-plus:before {\n content: "\\F067";\n}\n.fa-minus:before {\n content: "\\F068";\n}\n.fa-asterisk:before {\n content: "\\F069";\n}\n.fa-exclamation-circle:before {\n content: "\\F06A";\n}\n.fa-gift:before {\n content: "\\F06B";\n}\n.fa-leaf:before {\n content: "\\F06C";\n}\n.fa-fire:before {\n content: "\\F06D";\n}\n.fa-eye:before {\n content: "\\F06E";\n}\n.fa-eye-slash:before {\n content: "\\F070";\n}\n.fa-warning:before,\n.fa-exclamation-triangle:before {\n content: "\\F071";\n}\n.fa-plane:before {\n content: "\\F072";\n}\n.fa-calendar:before {\n content: "\\F073";\n}\n.fa-random:before {\n content: "\\F074";\n}\n.fa-comment:before {\n content: "\\F075";\n}\n.fa-magnet:before {\n content: "\\F076";\n}\n.fa-chevron-up:before {\n content: "\\F077";\n}\n.fa-chevron-down:before {\n content: "\\F078";\n}\n.fa-retweet:before {\n content: "\\F079";\n}\n.fa-shopping-cart:before {\n content: "\\F07A";\n}\n.fa-folder:before {\n content: "\\F07B";\n}\n.fa-folder-open:before {\n content: "\\F07C";\n}\n.fa-arrows-v:before {\n content: "\\F07D";\n}\n.fa-arrows-h:before {\n content: "\\F07E";\n}\n.fa-bar-chart-o:before,\n.fa-bar-chart:before {\n content: "\\F080";\n}\n.fa-twitter-square:before {\n content: "\\F081";\n}\n.fa-facebook-square:before {\n content: "\\F082";\n}\n.fa-camera-retro:before {\n content: "\\F083";\n}\n.fa-key:before {\n content: "\\F084";\n}\n.fa-gears:before,\n.fa-cogs:before {\n content: "\\F085";\n}\n.fa-comments:before {\n content: "\\F086";\n}\n.fa-thumbs-o-up:before {\n content: "\\F087";\n}\n.fa-thumbs-o-down:before {\n content: "\\F088";\n}\n.fa-star-half:before {\n content: "\\F089";\n}\n.fa-heart-o:before {\n content: "\\F08A";\n}\n.fa-sign-out:before {\n content: "\\F08B";\n}\n.fa-linkedin-square:before {\n content: "\\F08C";\n}\n.fa-thumb-tack:before {\n content: "\\F08D";\n}\n.fa-external-link:before {\n content: 
"\\F08E";\n}\n.fa-sign-in:before {\n content: "\\F090";\n}\n.fa-trophy:before {\n content: "\\F091";\n}\n.fa-github-square:before {\n content: "\\F092";\n}\n.fa-upload:before {\n content: "\\F093";\n}\n.fa-lemon-o:before {\n content: "\\F094";\n}\n.fa-phone:before {\n content: "\\F095";\n}\n.fa-square-o:before {\n content: "\\F096";\n}\n.fa-bookmark-o:before {\n content: "\\F097";\n}\n.fa-phone-square:before {\n content: "\\F098";\n}\n.fa-twitter:before {\n content: "\\F099";\n}\n.fa-facebook-f:before,\n.fa-facebook:before {\n content: "\\F09A";\n}\n.fa-github:before {\n content: "\\F09B";\n}\n.fa-unlock:before {\n content: "\\F09C";\n}\n.fa-credit-card:before {\n content: "\\F09D";\n}\n.fa-feed:before,\n.fa-rss:before {\n content: "\\F09E";\n}\n.fa-hdd-o:before {\n content: "\\F0A0";\n}\n.fa-bullhorn:before {\n content: "\\F0A1";\n}\n.fa-bell:before {\n content: "\\F0F3";\n}\n.fa-certificate:before {\n content: "\\F0A3";\n}\n.fa-hand-o-right:before {\n content: "\\F0A4";\n}\n.fa-hand-o-left:before {\n content: "\\F0A5";\n}\n.fa-hand-o-up:before {\n content: "\\F0A6";\n}\n.fa-hand-o-down:before {\n content: "\\F0A7";\n}\n.fa-arrow-circle-left:before {\n content: "\\F0A8";\n}\n.fa-arrow-circle-right:before {\n content: "\\F0A9";\n}\n.fa-arrow-circle-up:before {\n content: "\\F0AA";\n}\n.fa-arrow-circle-down:before {\n content: "\\F0AB";\n}\n.fa-globe:before {\n content: "\\F0AC";\n}\n.fa-wrench:before {\n content: "\\F0AD";\n}\n.fa-tasks:before {\n content: "\\F0AE";\n}\n.fa-filter:before {\n content: "\\F0B0";\n}\n.fa-briefcase:before {\n content: "\\F0B1";\n}\n.fa-arrows-alt:before {\n content: "\\F0B2";\n}\n.fa-group:before,\n.fa-users:before {\n content: "\\F0C0";\n}\n.fa-chain:before,\n.fa-link:before {\n content: "\\F0C1";\n}\n.fa-cloud:before {\n content: "\\F0C2";\n}\n.fa-flask:before {\n content: "\\F0C3";\n}\n.fa-cut:before,\n.fa-scissors:before {\n content: "\\F0C4";\n}\n.fa-copy:before,\n.fa-files-o:before {\n content: "\\F0C5";\n}\n.fa-paperclip:before {\n content: "\\F0C6";\n}\n.fa-save:before,\n.fa-floppy-o:before {\n content: "\\F0C7";\n}\n.fa-square:before {\n content: "\\F0C8";\n}\n.fa-navicon:before,\n.fa-reorder:before,\n.fa-bars:before {\n content: "\\F0C9";\n}\n.fa-list-ul:before {\n content: "\\F0CA";\n}\n.fa-list-ol:before {\n content: "\\F0CB";\n}\n.fa-strikethrough:before {\n content: "\\F0CC";\n}\n.fa-underline:before {\n content: "\\F0CD";\n}\n.fa-table:before {\n content: "\\F0CE";\n}\n.fa-magic:before {\n content: "\\F0D0";\n}\n.fa-truck:before {\n content: "\\F0D1";\n}\n.fa-pinterest:before {\n content: "\\F0D2";\n}\n.fa-pinterest-square:before {\n content: "\\F0D3";\n}\n.fa-google-plus-square:before {\n content: "\\F0D4";\n}\n.fa-google-plus:before {\n content: "\\F0D5";\n}\n.fa-money:before {\n content: "\\F0D6";\n}\n.fa-caret-down:before {\n content: "\\F0D7";\n}\n.fa-caret-up:before {\n content: "\\F0D8";\n}\n.fa-caret-left:before {\n content: "\\F0D9";\n}\n.fa-caret-right:before {\n content: "\\F0DA";\n}\n.fa-columns:before {\n content: "\\F0DB";\n}\n.fa-unsorted:before,\n.fa-sort:before {\n content: "\\F0DC";\n}\n.fa-sort-down:before,\n.fa-sort-desc:before {\n content: "\\F0DD";\n}\n.fa-sort-up:before,\n.fa-sort-asc:before {\n content: "\\F0DE";\n}\n.fa-envelope:before {\n content: "\\F0E0";\n}\n.fa-linkedin:before {\n content: "\\F0E1";\n}\n.fa-rotate-left:before,\n.fa-undo:before {\n content: "\\F0E2";\n}\n.fa-legal:before,\n.fa-gavel:before {\n content: "\\F0E3";\n}\n.fa-dashboard:before,\n.fa-tachometer:before {\n content: 
"\\F0E4";\n}\n.fa-comment-o:before {\n content: "\\F0E5";\n}\n.fa-comments-o:before {\n content: "\\F0E6";\n}\n.fa-flash:before,\n.fa-bolt:before {\n content: "\\F0E7";\n}\n.fa-sitemap:before {\n content: "\\F0E8";\n}\n.fa-umbrella:before {\n content: "\\F0E9";\n}\n.fa-paste:before,\n.fa-clipboard:before {\n content: "\\F0EA";\n}\n.fa-lightbulb-o:before {\n content: "\\F0EB";\n}\n.fa-exchange:before {\n content: "\\F0EC";\n}\n.fa-cloud-download:before {\n content: "\\F0ED";\n}\n.fa-cloud-upload:before {\n content: "\\F0EE";\n}\n.fa-user-md:before {\n content: "\\F0F0";\n}\n.fa-stethoscope:before {\n content: "\\F0F1";\n}\n.fa-suitcase:before {\n content: "\\F0F2";\n}\n.fa-bell-o:before {\n content: "\\F0A2";\n}\n.fa-coffee:before {\n content: "\\F0F4";\n}\n.fa-cutlery:before {\n content: "\\F0F5";\n}\n.fa-file-text-o:before {\n content: "\\F0F6";\n}\n.fa-building-o:before {\n content: "\\F0F7";\n}\n.fa-hospital-o:before {\n content: "\\F0F8";\n}\n.fa-ambulance:before {\n content: "\\F0F9";\n}\n.fa-medkit:before {\n content: "\\F0FA";\n}\n.fa-fighter-jet:before {\n content: "\\F0FB";\n}\n.fa-beer:before {\n content: "\\F0FC";\n}\n.fa-h-square:before {\n content: "\\F0FD";\n}\n.fa-plus-square:before {\n content: "\\F0FE";\n}\n.fa-angle-double-left:before {\n content: "\\F100";\n}\n.fa-angle-double-right:before {\n content: "\\F101";\n}\n.fa-angle-double-up:before {\n content: "\\F102";\n}\n.fa-angle-double-down:before {\n content: "\\F103";\n}\n.fa-angle-left:before {\n content: "\\F104";\n}\n.fa-angle-right:before {\n content: "\\F105";\n}\n.fa-angle-up:before {\n content: "\\F106";\n}\n.fa-angle-down:before {\n content: "\\F107";\n}\n.fa-desktop:before {\n content: "\\F108";\n}\n.fa-laptop:before {\n content: "\\F109";\n}\n.fa-tablet:before {\n content: "\\F10A";\n}\n.fa-mobile-phone:before,\n.fa-mobile:before {\n content: "\\F10B";\n}\n.fa-circle-o:before {\n content: "\\F10C";\n}\n.fa-quote-left:before {\n content: "\\F10D";\n}\n.fa-quote-right:before {\n content: "\\F10E";\n}\n.fa-spinner:before {\n content: "\\F110";\n}\n.fa-circle:before {\n content: "\\F111";\n}\n.fa-mail-reply:before,\n.fa-reply:before {\n content: "\\F112";\n}\n.fa-github-alt:before {\n content: "\\F113";\n}\n.fa-folder-o:before {\n content: "\\F114";\n}\n.fa-folder-open-o:before {\n content: "\\F115";\n}\n.fa-smile-o:before {\n content: "\\F118";\n}\n.fa-frown-o:before {\n content: "\\F119";\n}\n.fa-meh-o:before {\n content: "\\F11A";\n}\n.fa-gamepad:before {\n content: "\\F11B";\n}\n.fa-keyboard-o:before {\n content: "\\F11C";\n}\n.fa-flag-o:before {\n content: "\\F11D";\n}\n.fa-flag-checkered:before {\n content: "\\F11E";\n}\n.fa-terminal:before {\n content: "\\F120";\n}\n.fa-code:before {\n content: "\\F121";\n}\n.fa-mail-reply-all:before,\n.fa-reply-all:before {\n content: "\\F122";\n}\n.fa-star-half-empty:before,\n.fa-star-half-full:before,\n.fa-star-half-o:before {\n content: "\\F123";\n}\n.fa-location-arrow:before {\n content: "\\F124";\n}\n.fa-crop:before {\n content: "\\F125";\n}\n.fa-code-fork:before {\n content: "\\F126";\n}\n.fa-unlink:before,\n.fa-chain-broken:before {\n content: "\\F127";\n}\n.fa-question:before {\n content: "\\F128";\n}\n.fa-info:before {\n content: "\\F129";\n}\n.fa-exclamation:before {\n content: "\\F12A";\n}\n.fa-superscript:before {\n content: "\\F12B";\n}\n.fa-subscript:before {\n content: "\\F12C";\n}\n.fa-eraser:before {\n content: "\\F12D";\n}\n.fa-puzzle-piece:before {\n content: "\\F12E";\n}\n.fa-microphone:before {\n content: "\\F130";\n}\n.fa-microphone-slash:before {\n 
content: "\\F131";\n}\n.fa-shield:before {\n content: "\\F132";\n}\n.fa-calendar-o:before {\n content: "\\F133";\n}\n.fa-fire-extinguisher:before {\n content: "\\F134";\n}\n.fa-rocket:before {\n content: "\\F135";\n}\n.fa-maxcdn:before {\n content: "\\F136";\n}\n.fa-chevron-circle-left:before {\n content: "\\F137";\n}\n.fa-chevron-circle-right:before {\n content: "\\F138";\n}\n.fa-chevron-circle-up:before {\n content: "\\F139";\n}\n.fa-chevron-circle-down:before {\n content: "\\F13A";\n}\n.fa-html5:before {\n content: "\\F13B";\n}\n.fa-css3:before {\n content: "\\F13C";\n}\n.fa-anchor:before {\n content: "\\F13D";\n}\n.fa-unlock-alt:before {\n content: "\\F13E";\n}\n.fa-bullseye:before {\n content: "\\F140";\n}\n.fa-ellipsis-h:before {\n content: "\\F141";\n}\n.fa-ellipsis-v:before {\n content: "\\F142";\n}\n.fa-rss-square:before {\n content: "\\F143";\n}\n.fa-play-circle:before {\n content: "\\F144";\n}\n.fa-ticket:before {\n content: "\\F145";\n}\n.fa-minus-square:before {\n content: "\\F146";\n}\n.fa-minus-square-o:before {\n content: "\\F147";\n}\n.fa-level-up:before {\n content: "\\F148";\n}\n.fa-level-down:before {\n content: "\\F149";\n}\n.fa-check-square:before {\n content: "\\F14A";\n}\n.fa-pencil-square:before {\n content: "\\F14B";\n}\n.fa-external-link-square:before {\n content: "\\F14C";\n}\n.fa-share-square:before {\n content: "\\F14D";\n}\n.fa-compass:before {\n content: "\\F14E";\n}\n.fa-toggle-down:before,\n.fa-caret-square-o-down:before {\n content: "\\F150";\n}\n.fa-toggle-up:before,\n.fa-caret-square-o-up:before {\n content: "\\F151";\n}\n.fa-toggle-right:before,\n.fa-caret-square-o-right:before {\n content: "\\F152";\n}\n.fa-euro:before,\n.fa-eur:before {\n content: "\\F153";\n}\n.fa-gbp:before {\n content: "\\F154";\n}\n.fa-dollar:before,\n.fa-usd:before {\n content: "\\F155";\n}\n.fa-rupee:before,\n.fa-inr:before {\n content: "\\F156";\n}\n.fa-cny:before,\n.fa-rmb:before,\n.fa-yen:before,\n.fa-jpy:before {\n content: "\\F157";\n}\n.fa-ruble:before,\n.fa-rouble:before,\n.fa-rub:before {\n content: "\\F158";\n}\n.fa-won:before,\n.fa-krw:before {\n content: "\\F159";\n}\n.fa-bitcoin:before,\n.fa-btc:before {\n content: "\\F15A";\n}\n.fa-file:before {\n content: "\\F15B";\n}\n.fa-file-text:before {\n content: "\\F15C";\n}\n.fa-sort-alpha-asc:before {\n content: "\\F15D";\n}\n.fa-sort-alpha-desc:before {\n content: "\\F15E";\n}\n.fa-sort-amount-asc:before {\n content: "\\F160";\n}\n.fa-sort-amount-desc:before {\n content: "\\F161";\n}\n.fa-sort-numeric-asc:before {\n content: "\\F162";\n}\n.fa-sort-numeric-desc:before {\n content: "\\F163";\n}\n.fa-thumbs-up:before {\n content: "\\F164";\n}\n.fa-thumbs-down:before {\n content: "\\F165";\n}\n.fa-youtube-square:before {\n content: "\\F166";\n}\n.fa-youtube:before {\n content: "\\F167";\n}\n.fa-xing:before {\n content: "\\F168";\n}\n.fa-xing-square:before {\n content: "\\F169";\n}\n.fa-youtube-play:before {\n content: "\\F16A";\n}\n.fa-dropbox:before {\n content: "\\F16B";\n}\n.fa-stack-overflow:before {\n content: "\\F16C";\n}\n.fa-instagram:before {\n content: "\\F16D";\n}\n.fa-flickr:before {\n content: "\\F16E";\n}\n.fa-adn:before {\n content: "\\F170";\n}\n.fa-bitbucket:before {\n content: "\\F171";\n}\n.fa-bitbucket-square:before {\n content: "\\F172";\n}\n.fa-tumblr:before {\n content: "\\F173";\n}\n.fa-tumblr-square:before {\n content: "\\F174";\n}\n.fa-long-arrow-down:before {\n content: "\\F175";\n}\n.fa-long-arrow-up:before {\n content: "\\F176";\n}\n.fa-long-arrow-left:before {\n content: 
"\\F177";\n}\n.fa-long-arrow-right:before {\n content: "\\F178";\n}\n.fa-apple:before {\n content: "\\F179";\n}\n.fa-windows:before {\n content: "\\F17A";\n}\n.fa-android:before {\n content: "\\F17B";\n}\n.fa-linux:before {\n content: "\\F17C";\n}\n.fa-dribbble:before {\n content: "\\F17D";\n}\n.fa-skype:before {\n content: "\\F17E";\n}\n.fa-foursquare:before {\n content: "\\F180";\n}\n.fa-trello:before {\n content: "\\F181";\n}\n.fa-female:before {\n content: "\\F182";\n}\n.fa-male:before {\n content: "\\F183";\n}\n.fa-gittip:before,\n.fa-gratipay:before {\n content: "\\F184";\n}\n.fa-sun-o:before {\n content: "\\F185";\n}\n.fa-moon-o:before {\n content: "\\F186";\n}\n.fa-archive:before {\n content: "\\F187";\n}\n.fa-bug:before {\n content: "\\F188";\n}\n.fa-vk:before {\n content: "\\F189";\n}\n.fa-weibo:before {\n content: "\\F18A";\n}\n.fa-renren:before {\n content: "\\F18B";\n}\n.fa-pagelines:before {\n content: "\\F18C";\n}\n.fa-stack-exchange:before {\n content: "\\F18D";\n}\n.fa-arrow-circle-o-right:before {\n content: "\\F18E";\n}\n.fa-arrow-circle-o-left:before {\n content: "\\F190";\n}\n.fa-toggle-left:before,\n.fa-caret-square-o-left:before {\n content: "\\F191";\n}\n.fa-dot-circle-o:before {\n content: "\\F192";\n}\n.fa-wheelchair:before {\n content: "\\F193";\n}\n.fa-vimeo-square:before {\n content: "\\F194";\n}\n.fa-turkish-lira:before,\n.fa-try:before {\n content: "\\F195";\n}\n.fa-plus-square-o:before {\n content: "\\F196";\n}\n.fa-space-shuttle:before {\n content: "\\F197";\n}\n.fa-slack:before {\n content: "\\F198";\n}\n.fa-envelope-square:before {\n content: "\\F199";\n}\n.fa-wordpress:before {\n content: "\\F19A";\n}\n.fa-openid:before {\n content: "\\F19B";\n}\n.fa-institution:before,\n.fa-bank:before,\n.fa-university:before {\n content: "\\F19C";\n}\n.fa-mortar-board:before,\n.fa-graduation-cap:before {\n content: "\\F19D";\n}\n.fa-yahoo:before {\n content: "\\F19E";\n}\n.fa-google:before {\n content: "\\F1A0";\n}\n.fa-reddit:before {\n content: "\\F1A1";\n}\n.fa-reddit-square:before {\n content: "\\F1A2";\n}\n.fa-stumbleupon-circle:before {\n content: "\\F1A3";\n}\n.fa-stumbleupon:before {\n content: "\\F1A4";\n}\n.fa-delicious:before {\n content: "\\F1A5";\n}\n.fa-digg:before {\n content: "\\F1A6";\n}\n.fa-pied-piper-pp:before {\n content: "\\F1A7";\n}\n.fa-pied-piper-alt:before {\n content: "\\F1A8";\n}\n.fa-drupal:before {\n content: "\\F1A9";\n}\n.fa-joomla:before {\n content: "\\F1AA";\n}\n.fa-language:before {\n content: "\\F1AB";\n}\n.fa-fax:before {\n content: "\\F1AC";\n}\n.fa-building:before {\n content: "\\F1AD";\n}\n.fa-child:before {\n content: "\\F1AE";\n}\n.fa-paw:before {\n content: "\\F1B0";\n}\n.fa-spoon:before {\n content: "\\F1B1";\n}\n.fa-cube:before {\n content: "\\F1B2";\n}\n.fa-cubes:before {\n content: "\\F1B3";\n}\n.fa-behance:before {\n content: "\\F1B4";\n}\n.fa-behance-square:before {\n content: "\\F1B5";\n}\n.fa-steam:before {\n content: "\\F1B6";\n}\n.fa-steam-square:before {\n content: "\\F1B7";\n}\n.fa-recycle:before {\n content: "\\F1B8";\n}\n.fa-automobile:before,\n.fa-car:before {\n content: "\\F1B9";\n}\n.fa-cab:before,\n.fa-taxi:before {\n content: "\\F1BA";\n}\n.fa-tree:before {\n content: "\\F1BB";\n}\n.fa-spotify:before {\n content: "\\F1BC";\n}\n.fa-deviantart:before {\n content: "\\F1BD";\n}\n.fa-soundcloud:before {\n content: "\\F1BE";\n}\n.fa-database:before {\n content: "\\F1C0";\n}\n.fa-file-pdf-o:before {\n content: "\\F1C1";\n}\n.fa-file-word-o:before {\n content: "\\F1C2";\n}\n.fa-file-excel-o:before {\n content: 
"\\F1C3";\n}\n.fa-file-powerpoint-o:before {\n content: "\\F1C4";\n}\n.fa-file-photo-o:before,\n.fa-file-picture-o:before,\n.fa-file-image-o:before {\n content: "\\F1C5";\n}\n.fa-file-zip-o:before,\n.fa-file-archive-o:before {\n content: "\\F1C6";\n}\n.fa-file-sound-o:before,\n.fa-file-audio-o:before {\n content: "\\F1C7";\n}\n.fa-file-movie-o:before,\n.fa-file-video-o:before {\n content: "\\F1C8";\n}\n.fa-file-code-o:before {\n content: "\\F1C9";\n}\n.fa-vine:before {\n content: "\\F1CA";\n}\n.fa-codepen:before {\n content: "\\F1CB";\n}\n.fa-jsfiddle:before {\n content: "\\F1CC";\n}\n.fa-life-bouy:before,\n.fa-life-buoy:before,\n.fa-life-saver:before,\n.fa-support:before,\n.fa-life-ring:before {\n content: "\\F1CD";\n}\n.fa-circle-o-notch:before {\n content: "\\F1CE";\n}\n.fa-ra:before,\n.fa-resistance:before,\n.fa-rebel:before {\n content: "\\F1D0";\n}\n.fa-ge:before,\n.fa-empire:before {\n content: "\\F1D1";\n}\n.fa-git-square:before {\n content: "\\F1D2";\n}\n.fa-git:before {\n content: "\\F1D3";\n}\n.fa-y-combinator-square:before,\n.fa-yc-square:before,\n.fa-hacker-news:before {\n content: "\\F1D4";\n}\n.fa-tencent-weibo:before {\n content: "\\F1D5";\n}\n.fa-qq:before {\n content: "\\F1D6";\n}\n.fa-wechat:before,\n.fa-weixin:before {\n content: "\\F1D7";\n}\n.fa-send:before,\n.fa-paper-plane:before {\n content: "\\F1D8";\n}\n.fa-send-o:before,\n.fa-paper-plane-o:before {\n content: "\\F1D9";\n}\n.fa-history:before {\n content: "\\F1DA";\n}\n.fa-circle-thin:before {\n content: "\\F1DB";\n}\n.fa-header:before {\n content: "\\F1DC";\n}\n.fa-paragraph:before {\n content: "\\F1DD";\n}\n.fa-sliders:before {\n content: "\\F1DE";\n}\n.fa-share-alt:before {\n content: "\\F1E0";\n}\n.fa-share-alt-square:before {\n content: "\\F1E1";\n}\n.fa-bomb:before {\n content: "\\F1E2";\n}\n.fa-soccer-ball-o:before,\n.fa-futbol-o:before {\n content: "\\F1E3";\n}\n.fa-tty:before {\n content: "\\F1E4";\n}\n.fa-binoculars:before {\n content: "\\F1E5";\n}\n.fa-plug:before {\n content: "\\F1E6";\n}\n.fa-slideshare:before {\n content: "\\F1E7";\n}\n.fa-twitch:before {\n content: "\\F1E8";\n}\n.fa-yelp:before {\n content: "\\F1E9";\n}\n.fa-newspaper-o:before {\n content: "\\F1EA";\n}\n.fa-wifi:before {\n content: "\\F1EB";\n}\n.fa-calculator:before {\n content: "\\F1EC";\n}\n.fa-paypal:before {\n content: "\\F1ED";\n}\n.fa-google-wallet:before {\n content: "\\F1EE";\n}\n.fa-cc-visa:before {\n content: "\\F1F0";\n}\n.fa-cc-mastercard:before {\n content: "\\F1F1";\n}\n.fa-cc-discover:before {\n content: "\\F1F2";\n}\n.fa-cc-amex:before {\n content: "\\F1F3";\n}\n.fa-cc-paypal:before {\n content: "\\F1F4";\n}\n.fa-cc-stripe:before {\n content: "\\F1F5";\n}\n.fa-bell-slash:before {\n content: "\\F1F6";\n}\n.fa-bell-slash-o:before {\n content: "\\F1F7";\n}\n.fa-trash:before {\n content: "\\F1F8";\n}\n.fa-copyright:before {\n content: "\\F1F9";\n}\n.fa-at:before {\n content: "\\F1FA";\n}\n.fa-eyedropper:before {\n content: "\\F1FB";\n}\n.fa-paint-brush:before {\n content: "\\F1FC";\n}\n.fa-birthday-cake:before {\n content: "\\F1FD";\n}\n.fa-area-chart:before {\n content: "\\F1FE";\n}\n.fa-pie-chart:before {\n content: "\\F200";\n}\n.fa-line-chart:before {\n content: "\\F201";\n}\n.fa-lastfm:before {\n content: "\\F202";\n}\n.fa-lastfm-square:before {\n content: "\\F203";\n}\n.fa-toggle-off:before {\n content: "\\F204";\n}\n.fa-toggle-on:before {\n content: "\\F205";\n}\n.fa-bicycle:before {\n content: "\\F206";\n}\n.fa-bus:before {\n content: "\\F207";\n}\n.fa-ioxhost:before {\n content: 
"\\F208";\n}\n.fa-angellist:before {\n content: "\\F209";\n}\n.fa-cc:before {\n content: "\\F20A";\n}\n.fa-shekel:before,\n.fa-sheqel:before,\n.fa-ils:before {\n content: "\\F20B";\n}\n.fa-meanpath:before {\n content: "\\F20C";\n}\n.fa-buysellads:before {\n content: "\\F20D";\n}\n.fa-connectdevelop:before {\n content: "\\F20E";\n}\n.fa-dashcube:before {\n content: "\\F210";\n}\n.fa-forumbee:before {\n content: "\\F211";\n}\n.fa-leanpub:before {\n content: "\\F212";\n}\n.fa-sellsy:before {\n content: "\\F213";\n}\n.fa-shirtsinbulk:before {\n content: "\\F214";\n}\n.fa-simplybuilt:before {\n content: "\\F215";\n}\n.fa-skyatlas:before {\n content: "\\F216";\n}\n.fa-cart-plus:before {\n content: "\\F217";\n}\n.fa-cart-arrow-down:before {\n content: "\\F218";\n}\n.fa-diamond:before {\n content: "\\F219";\n}\n.fa-ship:before {\n content: "\\F21A";\n}\n.fa-user-secret:before {\n content: "\\F21B";\n}\n.fa-motorcycle:before {\n content: "\\F21C";\n}\n.fa-street-view:before {\n content: "\\F21D";\n}\n.fa-heartbeat:before {\n content: "\\F21E";\n}\n.fa-venus:before {\n content: "\\F221";\n}\n.fa-mars:before {\n content: "\\F222";\n}\n.fa-mercury:before {\n content: "\\F223";\n}\n.fa-intersex:before,\n.fa-transgender:before {\n content: "\\F224";\n}\n.fa-transgender-alt:before {\n content: "\\F225";\n}\n.fa-venus-double:before {\n content: "\\F226";\n}\n.fa-mars-double:before {\n content: "\\F227";\n}\n.fa-venus-mars:before {\n content: "\\F228";\n}\n.fa-mars-stroke:before {\n content: "\\F229";\n}\n.fa-mars-stroke-v:before {\n content: "\\F22A";\n}\n.fa-mars-stroke-h:before {\n content: "\\F22B";\n}\n.fa-neuter:before {\n content: "\\F22C";\n}\n.fa-genderless:before {\n content: "\\F22D";\n}\n.fa-facebook-official:before {\n content: "\\F230";\n}\n.fa-pinterest-p:before {\n content: "\\F231";\n}\n.fa-whatsapp:before {\n content: "\\F232";\n}\n.fa-server:before {\n content: "\\F233";\n}\n.fa-user-plus:before {\n content: "\\F234";\n}\n.fa-user-times:before {\n content: "\\F235";\n}\n.fa-hotel:before,\n.fa-bed:before {\n content: "\\F236";\n}\n.fa-viacoin:before {\n content: "\\F237";\n}\n.fa-train:before {\n content: "\\F238";\n}\n.fa-subway:before {\n content: "\\F239";\n}\n.fa-medium:before {\n content: "\\F23A";\n}\n.fa-yc:before,\n.fa-y-combinator:before {\n content: "\\F23B";\n}\n.fa-optin-monster:before {\n content: "\\F23C";\n}\n.fa-opencart:before {\n content: "\\F23D";\n}\n.fa-expeditedssl:before {\n content: "\\F23E";\n}\n.fa-battery-4:before,\n.fa-battery:before,\n.fa-battery-full:before {\n content: "\\F240";\n}\n.fa-battery-3:before,\n.fa-battery-three-quarters:before {\n content: "\\F241";\n}\n.fa-battery-2:before,\n.fa-battery-half:before {\n content: "\\F242";\n}\n.fa-battery-1:before,\n.fa-battery-quarter:before {\n content: "\\F243";\n}\n.fa-battery-0:before,\n.fa-battery-empty:before {\n content: "\\F244";\n}\n.fa-mouse-pointer:before {\n content: "\\F245";\n}\n.fa-i-cursor:before {\n content: "\\F246";\n}\n.fa-object-group:before {\n content: "\\F247";\n}\n.fa-object-ungroup:before {\n content: "\\F248";\n}\n.fa-sticky-note:before {\n content: "\\F249";\n}\n.fa-sticky-note-o:before {\n content: "\\F24A";\n}\n.fa-cc-jcb:before {\n content: "\\F24B";\n}\n.fa-cc-diners-club:before {\n content: "\\F24C";\n}\n.fa-clone:before {\n content: "\\F24D";\n}\n.fa-balance-scale:before {\n content: "\\F24E";\n}\n.fa-hourglass-o:before {\n content: "\\F250";\n}\n.fa-hourglass-1:before,\n.fa-hourglass-start:before {\n content: "\\F251";\n}\n.fa-hourglass-2:before,\n.fa-hourglass-half:before {\n 
content: "\\F252";\n}\n.fa-hourglass-3:before,\n.fa-hourglass-end:before {\n content: "\\F253";\n}\n.fa-hourglass:before {\n content: "\\F254";\n}\n.fa-hand-grab-o:before,\n.fa-hand-rock-o:before {\n content: "\\F255";\n}\n.fa-hand-stop-o:before,\n.fa-hand-paper-o:before {\n content: "\\F256";\n}\n.fa-hand-scissors-o:before {\n content: "\\F257";\n}\n.fa-hand-lizard-o:before {\n content: "\\F258";\n}\n.fa-hand-spock-o:before {\n content: "\\F259";\n}\n.fa-hand-pointer-o:before {\n content: "\\F25A";\n}\n.fa-hand-peace-o:before {\n content: "\\F25B";\n}\n.fa-trademark:before {\n content: "\\F25C";\n}\n.fa-registered:before {\n content: "\\F25D";\n}\n.fa-creative-commons:before {\n content: "\\F25E";\n}\n.fa-gg:before {\n content: "\\F260";\n}\n.fa-gg-circle:before {\n content: "\\F261";\n}\n.fa-tripadvisor:before {\n content: "\\F262";\n}\n.fa-odnoklassniki:before {\n content: "\\F263";\n}\n.fa-odnoklassniki-square:before {\n content: "\\F264";\n}\n.fa-get-pocket:before {\n content: "\\F265";\n}\n.fa-wikipedia-w:before {\n content: "\\F266";\n}\n.fa-safari:before {\n content: "\\F267";\n}\n.fa-chrome:before {\n content: "\\F268";\n}\n.fa-firefox:before {\n content: "\\F269";\n}\n.fa-opera:before {\n content: "\\F26A";\n}\n.fa-internet-explorer:before {\n content: "\\F26B";\n}\n.fa-tv:before,\n.fa-television:before {\n content: "\\F26C";\n}\n.fa-contao:before {\n content: "\\F26D";\n}\n.fa-500px:before {\n content: "\\F26E";\n}\n.fa-amazon:before {\n content: "\\F270";\n}\n.fa-calendar-plus-o:before {\n content: "\\F271";\n}\n.fa-calendar-minus-o:before {\n content: "\\F272";\n}\n.fa-calendar-times-o:before {\n content: "\\F273";\n}\n.fa-calendar-check-o:before {\n content: "\\F274";\n}\n.fa-industry:before {\n content: "\\F275";\n}\n.fa-map-pin:before {\n content: "\\F276";\n}\n.fa-map-signs:before {\n content: "\\F277";\n}\n.fa-map-o:before {\n content: "\\F278";\n}\n.fa-map:before {\n content: "\\F279";\n}\n.fa-commenting:before {\n content: "\\F27A";\n}\n.fa-commenting-o:before {\n content: "\\F27B";\n}\n.fa-houzz:before {\n content: "\\F27C";\n}\n.fa-vimeo:before {\n content: "\\F27D";\n}\n.fa-black-tie:before {\n content: "\\F27E";\n}\n.fa-fonticons:before {\n content: "\\F280";\n}\n.fa-reddit-alien:before {\n content: "\\F281";\n}\n.fa-edge:before {\n content: "\\F282";\n}\n.fa-credit-card-alt:before {\n content: "\\F283";\n}\n.fa-codiepie:before {\n content: "\\F284";\n}\n.fa-modx:before {\n content: "\\F285";\n}\n.fa-fort-awesome:before {\n content: "\\F286";\n}\n.fa-usb:before {\n content: "\\F287";\n}\n.fa-product-hunt:before {\n content: "\\F288";\n}\n.fa-mixcloud:before {\n content: "\\F289";\n}\n.fa-scribd:before {\n content: "\\F28A";\n}\n.fa-pause-circle:before {\n content: "\\F28B";\n}\n.fa-pause-circle-o:before {\n content: "\\F28C";\n}\n.fa-stop-circle:before {\n content: "\\F28D";\n}\n.fa-stop-circle-o:before {\n content: "\\F28E";\n}\n.fa-shopping-bag:before {\n content: "\\F290";\n}\n.fa-shopping-basket:before {\n content: "\\F291";\n}\n.fa-hashtag:before {\n content: "\\F292";\n}\n.fa-bluetooth:before {\n content: "\\F293";\n}\n.fa-bluetooth-b:before {\n content: "\\F294";\n}\n.fa-percent:before {\n content: "\\F295";\n}\n.fa-gitlab:before {\n content: "\\F296";\n}\n.fa-wpbeginner:before {\n content: "\\F297";\n}\n.fa-wpforms:before {\n content: "\\F298";\n}\n.fa-envira:before {\n content: "\\F299";\n}\n.fa-universal-access:before {\n content: "\\F29A";\n}\n.fa-wheelchair-alt:before {\n content: "\\F29B";\n}\n.fa-question-circle-o:before {\n content: 
"\\F29C";\n}\n.fa-blind:before {\n content: "\\F29D";\n}\n.fa-audio-description:before {\n content: "\\F29E";\n}\n.fa-volume-control-phone:before {\n content: "\\F2A0";\n}\n.fa-braille:before {\n content: "\\F2A1";\n}\n.fa-assistive-listening-systems:before {\n content: "\\F2A2";\n}\n.fa-asl-interpreting:before,\n.fa-american-sign-language-interpreting:before {\n content: "\\F2A3";\n}\n.fa-deafness:before,\n.fa-hard-of-hearing:before,\n.fa-deaf:before {\n content: "\\F2A4";\n}\n.fa-glide:before {\n content: "\\F2A5";\n}\n.fa-glide-g:before {\n content: "\\F2A6";\n}\n.fa-signing:before,\n.fa-sign-language:before {\n content: "\\F2A7";\n}\n.fa-low-vision:before {\n content: "\\F2A8";\n}\n.fa-viadeo:before {\n content: "\\F2A9";\n}\n.fa-viadeo-square:before {\n content: "\\F2AA";\n}\n.fa-snapchat:before {\n content: "\\F2AB";\n}\n.fa-snapchat-ghost:before {\n content: "\\F2AC";\n}\n.fa-snapchat-square:before {\n content: "\\F2AD";\n}\n.fa-pied-piper:before {\n content: "\\F2AE";\n}\n.fa-first-order:before {\n content: "\\F2B0";\n}\n.fa-yoast:before {\n content: "\\F2B1";\n}\n.fa-themeisle:before {\n content: "\\F2B2";\n}\n.fa-google-plus-circle:before,\n.fa-google-plus-official:before {\n content: "\\F2B3";\n}\n.fa-fa:before,\n.fa-font-awesome:before {\n content: "\\F2B4";\n}\n.fa-handshake-o:before {\n content: "\\F2B5";\n}\n.fa-envelope-open:before {\n content: "\\F2B6";\n}\n.fa-envelope-open-o:before {\n content: "\\F2B7";\n}\n.fa-linode:before {\n content: "\\F2B8";\n}\n.fa-address-book:before {\n content: "\\F2B9";\n}\n.fa-address-book-o:before {\n content: "\\F2BA";\n}\n.fa-vcard:before,\n.fa-address-card:before {\n content: "\\F2BB";\n}\n.fa-vcard-o:before,\n.fa-address-card-o:before {\n content: "\\F2BC";\n}\n.fa-user-circle:before {\n content: "\\F2BD";\n}\n.fa-user-circle-o:before {\n content: "\\F2BE";\n}\n.fa-user-o:before {\n content: "\\F2C0";\n}\n.fa-id-badge:before {\n content: "\\F2C1";\n}\n.fa-drivers-license:before,\n.fa-id-card:before {\n content: "\\F2C2";\n}\n.fa-drivers-license-o:before,\n.fa-id-card-o:before {\n content: "\\F2C3";\n}\n.fa-quora:before {\n content: "\\F2C4";\n}\n.fa-free-code-camp:before {\n content: "\\F2C5";\n}\n.fa-telegram:before {\n content: "\\F2C6";\n}\n.fa-thermometer-4:before,\n.fa-thermometer:before,\n.fa-thermometer-full:before {\n content: "\\F2C7";\n}\n.fa-thermometer-3:before,\n.fa-thermometer-three-quarters:before {\n content: "\\F2C8";\n}\n.fa-thermometer-2:before,\n.fa-thermometer-half:before {\n content: "\\F2C9";\n}\n.fa-thermometer-1:before,\n.fa-thermometer-quarter:before {\n content: "\\F2CA";\n}\n.fa-thermometer-0:before,\n.fa-thermometer-empty:before {\n content: "\\F2CB";\n}\n.fa-shower:before {\n content: "\\F2CC";\n}\n.fa-bathtub:before,\n.fa-s15:before,\n.fa-bath:before {\n content: "\\F2CD";\n}\n.fa-podcast:before {\n content: "\\F2CE";\n}\n.fa-window-maximize:before {\n content: "\\F2D0";\n}\n.fa-window-minimize:before {\n content: "\\F2D1";\n}\n.fa-window-restore:before {\n content: "\\F2D2";\n}\n.fa-times-rectangle:before,\n.fa-window-close:before {\n content: "\\F2D3";\n}\n.fa-times-rectangle-o:before,\n.fa-window-close-o:before {\n content: "\\F2D4";\n}\n.fa-bandcamp:before {\n content: "\\F2D5";\n}\n.fa-grav:before {\n content: "\\F2D6";\n}\n.fa-etsy:before {\n content: "\\F2D7";\n}\n.fa-imdb:before {\n content: "\\F2D8";\n}\n.fa-ravelry:before {\n content: "\\F2D9";\n}\n.fa-eercast:before {\n content: "\\F2DA";\n}\n.fa-microchip:before {\n content: "\\F2DB";\n}\n.fa-snowflake-o:before {\n content: 
"\\F2DC";\n}\n.fa-superpowers:before {\n content: "\\F2DD";\n}\n.fa-wpexplorer:before {\n content: "\\F2DE";\n}\n.fa-meetup:before {\n content: "\\F2E0";\n}\n.sr-only {\n position: absolute;\n width: 1px;\n height: 1px;\n padding: 0;\n margin: -1px;\n overflow: hidden;\n clip: rect(0, 0, 0, 0);\n border: 0;\n}\n.sr-only-focusable:active,\n.sr-only-focusable:focus {\n position: static;\n width: auto;\n height: auto;\n margin: 0;\n overflow: visible;\n clip: auto;\n}\n', "" ]);
+ var escape = __webpack_require__(503);
+ exports = module.exports = __webpack_require__(504)(!1), exports.push([ module.i, "/*!\n * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome\n * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)\n */\n/* FONT PATH\n * -------------------------- */\n@font-face {\n font-family: 'FontAwesome';\n \n src: url(" + escape(__webpack_require__(505)) + ") format('woff2'), url(" + escape(__webpack_require__(506)) + ') format(\'woff\');\n font-weight: normal;\n font-style: normal;\n}\n.fa {\n display: inline-block;\n font: normal normal normal 14px/1 FontAwesome;\n font-size: inherit;\n text-rendering: auto;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n}\n/* makes the font 33% larger relative to the icon container */\n.fa-lg {\n font-size: 1.33333333em;\n line-height: 0.75em;\n vertical-align: -15%;\n}\n.fa-2x {\n font-size: 2em;\n}\n.fa-3x {\n font-size: 3em;\n}\n.fa-4x {\n font-size: 4em;\n}\n.fa-5x {\n font-size: 5em;\n}\n.fa-fw {\n width: 1.28571429em;\n text-align: center;\n}\n.fa-ul {\n padding-left: 0;\n margin-left: 2.14285714em;\n list-style-type: none;\n}\n.fa-ul > li {\n position: relative;\n}\n.fa-li {\n position: absolute;\n left: -2.14285714em;\n width: 2.14285714em;\n top: 0.14285714em;\n text-align: center;\n}\n.fa-li.fa-lg {\n left: -1.85714286em;\n}\n.fa-border {\n padding: .2em .25em .15em;\n border: solid 0.08em #eeeeee;\n border-radius: .1em;\n}\n.fa-pull-left {\n float: left;\n}\n.fa-pull-right {\n float: right;\n}\n.fa.fa-pull-left {\n margin-right: .3em;\n}\n.fa.fa-pull-right {\n margin-left: .3em;\n}\n/* Deprecated as of 4.4.0 */\n.pull-right {\n float: right;\n}\n.pull-left {\n float: left;\n}\n.fa.pull-left {\n margin-right: .3em;\n}\n.fa.pull-right {\n margin-left: .3em;\n}\n.fa-spin {\n -webkit-animation: fa-spin 2s infinite linear;\n animation: fa-spin 2s infinite linear;\n}\n.fa-pulse {\n -webkit-animation: fa-spin 1s infinite steps(8);\n animation: fa-spin 1s infinite steps(8);\n}\n@-webkit-keyframes fa-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n@keyframes fa-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n.fa-rotate-90 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";\n -webkit-transform: rotate(90deg);\n -ms-transform: rotate(90deg);\n transform: rotate(90deg);\n}\n.fa-rotate-180 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";\n -webkit-transform: rotate(180deg);\n -ms-transform: rotate(180deg);\n transform: rotate(180deg);\n}\n.fa-rotate-270 {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";\n -webkit-transform: rotate(270deg);\n -ms-transform: rotate(270deg);\n transform: rotate(270deg);\n}\n.fa-flip-horizontal {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";\n -webkit-transform: scale(-1, 1);\n -ms-transform: scale(-1, 1);\n transform: scale(-1, 1);\n}\n.fa-flip-vertical {\n -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";\n -webkit-transform: scale(1, -1);\n -ms-transform: scale(1, -1);\n transform: scale(1, -1);\n}\n:root .fa-rotate-90,\n:root .fa-rotate-180,\n:root .fa-rotate-270,\n:root .fa-flip-horizontal,\n:root .fa-flip-vertical {\n filter: none;\n}\n.fa-stack {\n position: relative;\n display: 
inline-block;\n width: 2em;\n height: 2em;\n line-height: 2em;\n vertical-align: middle;\n}\n.fa-stack-1x,\n.fa-stack-2x {\n position: absolute;\n left: 0;\n width: 100%;\n text-align: center;\n}\n.fa-stack-1x {\n line-height: inherit;\n}\n.fa-stack-2x {\n font-size: 2em;\n}\n.fa-inverse {\n color: #ffffff;\n}\n/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen\n readers do not read off random characters that represent icons */\n.fa-glass:before {\n content: "\\F000";\n}\n.fa-music:before {\n content: "\\F001";\n}\n.fa-search:before {\n content: "\\F002";\n}\n.fa-envelope-o:before {\n content: "\\F003";\n}\n.fa-heart:before {\n content: "\\F004";\n}\n.fa-star:before {\n content: "\\F005";\n}\n.fa-star-o:before {\n content: "\\F006";\n}\n.fa-user:before {\n content: "\\F007";\n}\n.fa-film:before {\n content: "\\F008";\n}\n.fa-th-large:before {\n content: "\\F009";\n}\n.fa-th:before {\n content: "\\F00A";\n}\n.fa-th-list:before {\n content: "\\F00B";\n}\n.fa-check:before {\n content: "\\F00C";\n}\n.fa-remove:before,\n.fa-close:before,\n.fa-times:before {\n content: "\\F00D";\n}\n.fa-search-plus:before {\n content: "\\F00E";\n}\n.fa-search-minus:before {\n content: "\\F010";\n}\n.fa-power-off:before {\n content: "\\F011";\n}\n.fa-signal:before {\n content: "\\F012";\n}\n.fa-gear:before,\n.fa-cog:before {\n content: "\\F013";\n}\n.fa-trash-o:before {\n content: "\\F014";\n}\n.fa-home:before {\n content: "\\F015";\n}\n.fa-file-o:before {\n content: "\\F016";\n}\n.fa-clock-o:before {\n content: "\\F017";\n}\n.fa-road:before {\n content: "\\F018";\n}\n.fa-download:before {\n content: "\\F019";\n}\n.fa-arrow-circle-o-down:before {\n content: "\\F01A";\n}\n.fa-arrow-circle-o-up:before {\n content: "\\F01B";\n}\n.fa-inbox:before {\n content: "\\F01C";\n}\n.fa-play-circle-o:before {\n content: "\\F01D";\n}\n.fa-rotate-right:before,\n.fa-repeat:before {\n content: "\\F01E";\n}\n.fa-refresh:before {\n content: "\\F021";\n}\n.fa-list-alt:before {\n content: "\\F022";\n}\n.fa-lock:before {\n content: "\\F023";\n}\n.fa-flag:before {\n content: "\\F024";\n}\n.fa-headphones:before {\n content: "\\F025";\n}\n.fa-volume-off:before {\n content: "\\F026";\n}\n.fa-volume-down:before {\n content: "\\F027";\n}\n.fa-volume-up:before {\n content: "\\F028";\n}\n.fa-qrcode:before {\n content: "\\F029";\n}\n.fa-barcode:before {\n content: "\\F02A";\n}\n.fa-tag:before {\n content: "\\F02B";\n}\n.fa-tags:before {\n content: "\\F02C";\n}\n.fa-book:before {\n content: "\\F02D";\n}\n.fa-bookmark:before {\n content: "\\F02E";\n}\n.fa-print:before {\n content: "\\F02F";\n}\n.fa-camera:before {\n content: "\\F030";\n}\n.fa-font:before {\n content: "\\F031";\n}\n.fa-bold:before {\n content: "\\F032";\n}\n.fa-italic:before {\n content: "\\F033";\n}\n.fa-text-height:before {\n content: "\\F034";\n}\n.fa-text-width:before {\n content: "\\F035";\n}\n.fa-align-left:before {\n content: "\\F036";\n}\n.fa-align-center:before {\n content: "\\F037";\n}\n.fa-align-right:before {\n content: "\\F038";\n}\n.fa-align-justify:before {\n content: "\\F039";\n}\n.fa-list:before {\n content: "\\F03A";\n}\n.fa-dedent:before,\n.fa-outdent:before {\n content: "\\F03B";\n}\n.fa-indent:before {\n content: "\\F03C";\n}\n.fa-video-camera:before {\n content: "\\F03D";\n}\n.fa-photo:before,\n.fa-image:before,\n.fa-picture-o:before {\n content: "\\F03E";\n}\n.fa-pencil:before {\n content: "\\F040";\n}\n.fa-map-marker:before {\n content: "\\F041";\n}\n.fa-adjust:before {\n content: "\\F042";\n}\n.fa-tint:before {\n content: 
"\\F043";\n}\n.fa-edit:before,\n.fa-pencil-square-o:before {\n content: "\\F044";\n}\n.fa-share-square-o:before {\n content: "\\F045";\n}\n.fa-check-square-o:before {\n content: "\\F046";\n}\n.fa-arrows:before {\n content: "\\F047";\n}\n.fa-step-backward:before {\n content: "\\F048";\n}\n.fa-fast-backward:before {\n content: "\\F049";\n}\n.fa-backward:before {\n content: "\\F04A";\n}\n.fa-play:before {\n content: "\\F04B";\n}\n.fa-pause:before {\n content: "\\F04C";\n}\n.fa-stop:before {\n content: "\\F04D";\n}\n.fa-forward:before {\n content: "\\F04E";\n}\n.fa-fast-forward:before {\n content: "\\F050";\n}\n.fa-step-forward:before {\n content: "\\F051";\n}\n.fa-eject:before {\n content: "\\F052";\n}\n.fa-chevron-left:before {\n content: "\\F053";\n}\n.fa-chevron-right:before {\n content: "\\F054";\n}\n.fa-plus-circle:before {\n content: "\\F055";\n}\n.fa-minus-circle:before {\n content: "\\F056";\n}\n.fa-times-circle:before {\n content: "\\F057";\n}\n.fa-check-circle:before {\n content: "\\F058";\n}\n.fa-question-circle:before {\n content: "\\F059";\n}\n.fa-info-circle:before {\n content: "\\F05A";\n}\n.fa-crosshairs:before {\n content: "\\F05B";\n}\n.fa-times-circle-o:before {\n content: "\\F05C";\n}\n.fa-check-circle-o:before {\n content: "\\F05D";\n}\n.fa-ban:before {\n content: "\\F05E";\n}\n.fa-arrow-left:before {\n content: "\\F060";\n}\n.fa-arrow-right:before {\n content: "\\F061";\n}\n.fa-arrow-up:before {\n content: "\\F062";\n}\n.fa-arrow-down:before {\n content: "\\F063";\n}\n.fa-mail-forward:before,\n.fa-share:before {\n content: "\\F064";\n}\n.fa-expand:before {\n content: "\\F065";\n}\n.fa-compress:before {\n content: "\\F066";\n}\n.fa-plus:before {\n content: "\\F067";\n}\n.fa-minus:before {\n content: "\\F068";\n}\n.fa-asterisk:before {\n content: "\\F069";\n}\n.fa-exclamation-circle:before {\n content: "\\F06A";\n}\n.fa-gift:before {\n content: "\\F06B";\n}\n.fa-leaf:before {\n content: "\\F06C";\n}\n.fa-fire:before {\n content: "\\F06D";\n}\n.fa-eye:before {\n content: "\\F06E";\n}\n.fa-eye-slash:before {\n content: "\\F070";\n}\n.fa-warning:before,\n.fa-exclamation-triangle:before {\n content: "\\F071";\n}\n.fa-plane:before {\n content: "\\F072";\n}\n.fa-calendar:before {\n content: "\\F073";\n}\n.fa-random:before {\n content: "\\F074";\n}\n.fa-comment:before {\n content: "\\F075";\n}\n.fa-magnet:before {\n content: "\\F076";\n}\n.fa-chevron-up:before {\n content: "\\F077";\n}\n.fa-chevron-down:before {\n content: "\\F078";\n}\n.fa-retweet:before {\n content: "\\F079";\n}\n.fa-shopping-cart:before {\n content: "\\F07A";\n}\n.fa-folder:before {\n content: "\\F07B";\n}\n.fa-folder-open:before {\n content: "\\F07C";\n}\n.fa-arrows-v:before {\n content: "\\F07D";\n}\n.fa-arrows-h:before {\n content: "\\F07E";\n}\n.fa-bar-chart-o:before,\n.fa-bar-chart:before {\n content: "\\F080";\n}\n.fa-twitter-square:before {\n content: "\\F081";\n}\n.fa-facebook-square:before {\n content: "\\F082";\n}\n.fa-camera-retro:before {\n content: "\\F083";\n}\n.fa-key:before {\n content: "\\F084";\n}\n.fa-gears:before,\n.fa-cogs:before {\n content: "\\F085";\n}\n.fa-comments:before {\n content: "\\F086";\n}\n.fa-thumbs-o-up:before {\n content: "\\F087";\n}\n.fa-thumbs-o-down:before {\n content: "\\F088";\n}\n.fa-star-half:before {\n content: "\\F089";\n}\n.fa-heart-o:before {\n content: "\\F08A";\n}\n.fa-sign-out:before {\n content: "\\F08B";\n}\n.fa-linkedin-square:before {\n content: "\\F08C";\n}\n.fa-thumb-tack:before {\n content: "\\F08D";\n}\n.fa-external-link:before {\n content: 
"\\F08E";\n}\n.fa-sign-in:before {\n content: "\\F090";\n}\n.fa-trophy:before {\n content: "\\F091";\n}\n.fa-github-square:before {\n content: "\\F092";\n}\n.fa-upload:before {\n content: "\\F093";\n}\n.fa-lemon-o:before {\n content: "\\F094";\n}\n.fa-phone:before {\n content: "\\F095";\n}\n.fa-square-o:before {\n content: "\\F096";\n}\n.fa-bookmark-o:before {\n content: "\\F097";\n}\n.fa-phone-square:before {\n content: "\\F098";\n}\n.fa-twitter:before {\n content: "\\F099";\n}\n.fa-facebook-f:before,\n.fa-facebook:before {\n content: "\\F09A";\n}\n.fa-github:before {\n content: "\\F09B";\n}\n.fa-unlock:before {\n content: "\\F09C";\n}\n.fa-credit-card:before {\n content: "\\F09D";\n}\n.fa-feed:before,\n.fa-rss:before {\n content: "\\F09E";\n}\n.fa-hdd-o:before {\n content: "\\F0A0";\n}\n.fa-bullhorn:before {\n content: "\\F0A1";\n}\n.fa-bell:before {\n content: "\\F0F3";\n}\n.fa-certificate:before {\n content: "\\F0A3";\n}\n.fa-hand-o-right:before {\n content: "\\F0A4";\n}\n.fa-hand-o-left:before {\n content: "\\F0A5";\n}\n.fa-hand-o-up:before {\n content: "\\F0A6";\n}\n.fa-hand-o-down:before {\n content: "\\F0A7";\n}\n.fa-arrow-circle-left:before {\n content: "\\F0A8";\n}\n.fa-arrow-circle-right:before {\n content: "\\F0A9";\n}\n.fa-arrow-circle-up:before {\n content: "\\F0AA";\n}\n.fa-arrow-circle-down:before {\n content: "\\F0AB";\n}\n.fa-globe:before {\n content: "\\F0AC";\n}\n.fa-wrench:before {\n content: "\\F0AD";\n}\n.fa-tasks:before {\n content: "\\F0AE";\n}\n.fa-filter:before {\n content: "\\F0B0";\n}\n.fa-briefcase:before {\n content: "\\F0B1";\n}\n.fa-arrows-alt:before {\n content: "\\F0B2";\n}\n.fa-group:before,\n.fa-users:before {\n content: "\\F0C0";\n}\n.fa-chain:before,\n.fa-link:before {\n content: "\\F0C1";\n}\n.fa-cloud:before {\n content: "\\F0C2";\n}\n.fa-flask:before {\n content: "\\F0C3";\n}\n.fa-cut:before,\n.fa-scissors:before {\n content: "\\F0C4";\n}\n.fa-copy:before,\n.fa-files-o:before {\n content: "\\F0C5";\n}\n.fa-paperclip:before {\n content: "\\F0C6";\n}\n.fa-save:before,\n.fa-floppy-o:before {\n content: "\\F0C7";\n}\n.fa-square:before {\n content: "\\F0C8";\n}\n.fa-navicon:before,\n.fa-reorder:before,\n.fa-bars:before {\n content: "\\F0C9";\n}\n.fa-list-ul:before {\n content: "\\F0CA";\n}\n.fa-list-ol:before {\n content: "\\F0CB";\n}\n.fa-strikethrough:before {\n content: "\\F0CC";\n}\n.fa-underline:before {\n content: "\\F0CD";\n}\n.fa-table:before {\n content: "\\F0CE";\n}\n.fa-magic:before {\n content: "\\F0D0";\n}\n.fa-truck:before {\n content: "\\F0D1";\n}\n.fa-pinterest:before {\n content: "\\F0D2";\n}\n.fa-pinterest-square:before {\n content: "\\F0D3";\n}\n.fa-google-plus-square:before {\n content: "\\F0D4";\n}\n.fa-google-plus:before {\n content: "\\F0D5";\n}\n.fa-money:before {\n content: "\\F0D6";\n}\n.fa-caret-down:before {\n content: "\\F0D7";\n}\n.fa-caret-up:before {\n content: "\\F0D8";\n}\n.fa-caret-left:before {\n content: "\\F0D9";\n}\n.fa-caret-right:before {\n content: "\\F0DA";\n}\n.fa-columns:before {\n content: "\\F0DB";\n}\n.fa-unsorted:before,\n.fa-sort:before {\n content: "\\F0DC";\n}\n.fa-sort-down:before,\n.fa-sort-desc:before {\n content: "\\F0DD";\n}\n.fa-sort-up:before,\n.fa-sort-asc:before {\n content: "\\F0DE";\n}\n.fa-envelope:before {\n content: "\\F0E0";\n}\n.fa-linkedin:before {\n content: "\\F0E1";\n}\n.fa-rotate-left:before,\n.fa-undo:before {\n content: "\\F0E2";\n}\n.fa-legal:before,\n.fa-gavel:before {\n content: "\\F0E3";\n}\n.fa-dashboard:before,\n.fa-tachometer:before {\n content: 
"\\F0E4";\n}\n.fa-comment-o:before {\n content: "\\F0E5";\n}\n.fa-comments-o:before {\n content: "\\F0E6";\n}\n.fa-flash:before,\n.fa-bolt:before {\n content: "\\F0E7";\n}\n.fa-sitemap:before {\n content: "\\F0E8";\n}\n.fa-umbrella:before {\n content: "\\F0E9";\n}\n.fa-paste:before,\n.fa-clipboard:before {\n content: "\\F0EA";\n}\n.fa-lightbulb-o:before {\n content: "\\F0EB";\n}\n.fa-exchange:before {\n content: "\\F0EC";\n}\n.fa-cloud-download:before {\n content: "\\F0ED";\n}\n.fa-cloud-upload:before {\n content: "\\F0EE";\n}\n.fa-user-md:before {\n content: "\\F0F0";\n}\n.fa-stethoscope:before {\n content: "\\F0F1";\n}\n.fa-suitcase:before {\n content: "\\F0F2";\n}\n.fa-bell-o:before {\n content: "\\F0A2";\n}\n.fa-coffee:before {\n content: "\\F0F4";\n}\n.fa-cutlery:before {\n content: "\\F0F5";\n}\n.fa-file-text-o:before {\n content: "\\F0F6";\n}\n.fa-building-o:before {\n content: "\\F0F7";\n}\n.fa-hospital-o:before {\n content: "\\F0F8";\n}\n.fa-ambulance:before {\n content: "\\F0F9";\n}\n.fa-medkit:before {\n content: "\\F0FA";\n}\n.fa-fighter-jet:before {\n content: "\\F0FB";\n}\n.fa-beer:before {\n content: "\\F0FC";\n}\n.fa-h-square:before {\n content: "\\F0FD";\n}\n.fa-plus-square:before {\n content: "\\F0FE";\n}\n.fa-angle-double-left:before {\n content: "\\F100";\n}\n.fa-angle-double-right:before {\n content: "\\F101";\n}\n.fa-angle-double-up:before {\n content: "\\F102";\n}\n.fa-angle-double-down:before {\n content: "\\F103";\n}\n.fa-angle-left:before {\n content: "\\F104";\n}\n.fa-angle-right:before {\n content: "\\F105";\n}\n.fa-angle-up:before {\n content: "\\F106";\n}\n.fa-angle-down:before {\n content: "\\F107";\n}\n.fa-desktop:before {\n content: "\\F108";\n}\n.fa-laptop:before {\n content: "\\F109";\n}\n.fa-tablet:before {\n content: "\\F10A";\n}\n.fa-mobile-phone:before,\n.fa-mobile:before {\n content: "\\F10B";\n}\n.fa-circle-o:before {\n content: "\\F10C";\n}\n.fa-quote-left:before {\n content: "\\F10D";\n}\n.fa-quote-right:before {\n content: "\\F10E";\n}\n.fa-spinner:before {\n content: "\\F110";\n}\n.fa-circle:before {\n content: "\\F111";\n}\n.fa-mail-reply:before,\n.fa-reply:before {\n content: "\\F112";\n}\n.fa-github-alt:before {\n content: "\\F113";\n}\n.fa-folder-o:before {\n content: "\\F114";\n}\n.fa-folder-open-o:before {\n content: "\\F115";\n}\n.fa-smile-o:before {\n content: "\\F118";\n}\n.fa-frown-o:before {\n content: "\\F119";\n}\n.fa-meh-o:before {\n content: "\\F11A";\n}\n.fa-gamepad:before {\n content: "\\F11B";\n}\n.fa-keyboard-o:before {\n content: "\\F11C";\n}\n.fa-flag-o:before {\n content: "\\F11D";\n}\n.fa-flag-checkered:before {\n content: "\\F11E";\n}\n.fa-terminal:before {\n content: "\\F120";\n}\n.fa-code:before {\n content: "\\F121";\n}\n.fa-mail-reply-all:before,\n.fa-reply-all:before {\n content: "\\F122";\n}\n.fa-star-half-empty:before,\n.fa-star-half-full:before,\n.fa-star-half-o:before {\n content: "\\F123";\n}\n.fa-location-arrow:before {\n content: "\\F124";\n}\n.fa-crop:before {\n content: "\\F125";\n}\n.fa-code-fork:before {\n content: "\\F126";\n}\n.fa-unlink:before,\n.fa-chain-broken:before {\n content: "\\F127";\n}\n.fa-question:before {\n content: "\\F128";\n}\n.fa-info:before {\n content: "\\F129";\n}\n.fa-exclamation:before {\n content: "\\F12A";\n}\n.fa-superscript:before {\n content: "\\F12B";\n}\n.fa-subscript:before {\n content: "\\F12C";\n}\n.fa-eraser:before {\n content: "\\F12D";\n}\n.fa-puzzle-piece:before {\n content: "\\F12E";\n}\n.fa-microphone:before {\n content: "\\F130";\n}\n.fa-microphone-slash:before {\n 
content: "\\F131";\n}\n.fa-shield:before {\n content: "\\F132";\n}\n.fa-calendar-o:before {\n content: "\\F133";\n}\n.fa-fire-extinguisher:before {\n content: "\\F134";\n}\n.fa-rocket:before {\n content: "\\F135";\n}\n.fa-maxcdn:before {\n content: "\\F136";\n}\n.fa-chevron-circle-left:before {\n content: "\\F137";\n}\n.fa-chevron-circle-right:before {\n content: "\\F138";\n}\n.fa-chevron-circle-up:before {\n content: "\\F139";\n}\n.fa-chevron-circle-down:before {\n content: "\\F13A";\n}\n.fa-html5:before {\n content: "\\F13B";\n}\n.fa-css3:before {\n content: "\\F13C";\n}\n.fa-anchor:before {\n content: "\\F13D";\n}\n.fa-unlock-alt:before {\n content: "\\F13E";\n}\n.fa-bullseye:before {\n content: "\\F140";\n}\n.fa-ellipsis-h:before {\n content: "\\F141";\n}\n.fa-ellipsis-v:before {\n content: "\\F142";\n}\n.fa-rss-square:before {\n content: "\\F143";\n}\n.fa-play-circle:before {\n content: "\\F144";\n}\n.fa-ticket:before {\n content: "\\F145";\n}\n.fa-minus-square:before {\n content: "\\F146";\n}\n.fa-minus-square-o:before {\n content: "\\F147";\n}\n.fa-level-up:before {\n content: "\\F148";\n}\n.fa-level-down:before {\n content: "\\F149";\n}\n.fa-check-square:before {\n content: "\\F14A";\n}\n.fa-pencil-square:before {\n content: "\\F14B";\n}\n.fa-external-link-square:before {\n content: "\\F14C";\n}\n.fa-share-square:before {\n content: "\\F14D";\n}\n.fa-compass:before {\n content: "\\F14E";\n}\n.fa-toggle-down:before,\n.fa-caret-square-o-down:before {\n content: "\\F150";\n}\n.fa-toggle-up:before,\n.fa-caret-square-o-up:before {\n content: "\\F151";\n}\n.fa-toggle-right:before,\n.fa-caret-square-o-right:before {\n content: "\\F152";\n}\n.fa-euro:before,\n.fa-eur:before {\n content: "\\F153";\n}\n.fa-gbp:before {\n content: "\\F154";\n}\n.fa-dollar:before,\n.fa-usd:before {\n content: "\\F155";\n}\n.fa-rupee:before,\n.fa-inr:before {\n content: "\\F156";\n}\n.fa-cny:before,\n.fa-rmb:before,\n.fa-yen:before,\n.fa-jpy:before {\n content: "\\F157";\n}\n.fa-ruble:before,\n.fa-rouble:before,\n.fa-rub:before {\n content: "\\F158";\n}\n.fa-won:before,\n.fa-krw:before {\n content: "\\F159";\n}\n.fa-bitcoin:before,\n.fa-btc:before {\n content: "\\F15A";\n}\n.fa-file:before {\n content: "\\F15B";\n}\n.fa-file-text:before {\n content: "\\F15C";\n}\n.fa-sort-alpha-asc:before {\n content: "\\F15D";\n}\n.fa-sort-alpha-desc:before {\n content: "\\F15E";\n}\n.fa-sort-amount-asc:before {\n content: "\\F160";\n}\n.fa-sort-amount-desc:before {\n content: "\\F161";\n}\n.fa-sort-numeric-asc:before {\n content: "\\F162";\n}\n.fa-sort-numeric-desc:before {\n content: "\\F163";\n}\n.fa-thumbs-up:before {\n content: "\\F164";\n}\n.fa-thumbs-down:before {\n content: "\\F165";\n}\n.fa-youtube-square:before {\n content: "\\F166";\n}\n.fa-youtube:before {\n content: "\\F167";\n}\n.fa-xing:before {\n content: "\\F168";\n}\n.fa-xing-square:before {\n content: "\\F169";\n}\n.fa-youtube-play:before {\n content: "\\F16A";\n}\n.fa-dropbox:before {\n content: "\\F16B";\n}\n.fa-stack-overflow:before {\n content: "\\F16C";\n}\n.fa-instagram:before {\n content: "\\F16D";\n}\n.fa-flickr:before {\n content: "\\F16E";\n}\n.fa-adn:before {\n content: "\\F170";\n}\n.fa-bitbucket:before {\n content: "\\F171";\n}\n.fa-bitbucket-square:before {\n content: "\\F172";\n}\n.fa-tumblr:before {\n content: "\\F173";\n}\n.fa-tumblr-square:before {\n content: "\\F174";\n}\n.fa-long-arrow-down:before {\n content: "\\F175";\n}\n.fa-long-arrow-up:before {\n content: "\\F176";\n}\n.fa-long-arrow-left:before {\n content: 
"\\F177";\n}\n.fa-long-arrow-right:before {\n content: "\\F178";\n}\n.fa-apple:before {\n content: "\\F179";\n}\n.fa-windows:before {\n content: "\\F17A";\n}\n.fa-android:before {\n content: "\\F17B";\n}\n.fa-linux:before {\n content: "\\F17C";\n}\n.fa-dribbble:before {\n content: "\\F17D";\n}\n.fa-skype:before {\n content: "\\F17E";\n}\n.fa-foursquare:before {\n content: "\\F180";\n}\n.fa-trello:before {\n content: "\\F181";\n}\n.fa-female:before {\n content: "\\F182";\n}\n.fa-male:before {\n content: "\\F183";\n}\n.fa-gittip:before,\n.fa-gratipay:before {\n content: "\\F184";\n}\n.fa-sun-o:before {\n content: "\\F185";\n}\n.fa-moon-o:before {\n content: "\\F186";\n}\n.fa-archive:before {\n content: "\\F187";\n}\n.fa-bug:before {\n content: "\\F188";\n}\n.fa-vk:before {\n content: "\\F189";\n}\n.fa-weibo:before {\n content: "\\F18A";\n}\n.fa-renren:before {\n content: "\\F18B";\n}\n.fa-pagelines:before {\n content: "\\F18C";\n}\n.fa-stack-exchange:before {\n content: "\\F18D";\n}\n.fa-arrow-circle-o-right:before {\n content: "\\F18E";\n}\n.fa-arrow-circle-o-left:before {\n content: "\\F190";\n}\n.fa-toggle-left:before,\n.fa-caret-square-o-left:before {\n content: "\\F191";\n}\n.fa-dot-circle-o:before {\n content: "\\F192";\n}\n.fa-wheelchair:before {\n content: "\\F193";\n}\n.fa-vimeo-square:before {\n content: "\\F194";\n}\n.fa-turkish-lira:before,\n.fa-try:before {\n content: "\\F195";\n}\n.fa-plus-square-o:before {\n content: "\\F196";\n}\n.fa-space-shuttle:before {\n content: "\\F197";\n}\n.fa-slack:before {\n content: "\\F198";\n}\n.fa-envelope-square:before {\n content: "\\F199";\n}\n.fa-wordpress:before {\n content: "\\F19A";\n}\n.fa-openid:before {\n content: "\\F19B";\n}\n.fa-institution:before,\n.fa-bank:before,\n.fa-university:before {\n content: "\\F19C";\n}\n.fa-mortar-board:before,\n.fa-graduation-cap:before {\n content: "\\F19D";\n}\n.fa-yahoo:before {\n content: "\\F19E";\n}\n.fa-google:before {\n content: "\\F1A0";\n}\n.fa-reddit:before {\n content: "\\F1A1";\n}\n.fa-reddit-square:before {\n content: "\\F1A2";\n}\n.fa-stumbleupon-circle:before {\n content: "\\F1A3";\n}\n.fa-stumbleupon:before {\n content: "\\F1A4";\n}\n.fa-delicious:before {\n content: "\\F1A5";\n}\n.fa-digg:before {\n content: "\\F1A6";\n}\n.fa-pied-piper-pp:before {\n content: "\\F1A7";\n}\n.fa-pied-piper-alt:before {\n content: "\\F1A8";\n}\n.fa-drupal:before {\n content: "\\F1A9";\n}\n.fa-joomla:before {\n content: "\\F1AA";\n}\n.fa-language:before {\n content: "\\F1AB";\n}\n.fa-fax:before {\n content: "\\F1AC";\n}\n.fa-building:before {\n content: "\\F1AD";\n}\n.fa-child:before {\n content: "\\F1AE";\n}\n.fa-paw:before {\n content: "\\F1B0";\n}\n.fa-spoon:before {\n content: "\\F1B1";\n}\n.fa-cube:before {\n content: "\\F1B2";\n}\n.fa-cubes:before {\n content: "\\F1B3";\n}\n.fa-behance:before {\n content: "\\F1B4";\n}\n.fa-behance-square:before {\n content: "\\F1B5";\n}\n.fa-steam:before {\n content: "\\F1B6";\n}\n.fa-steam-square:before {\n content: "\\F1B7";\n}\n.fa-recycle:before {\n content: "\\F1B8";\n}\n.fa-automobile:before,\n.fa-car:before {\n content: "\\F1B9";\n}\n.fa-cab:before,\n.fa-taxi:before {\n content: "\\F1BA";\n}\n.fa-tree:before {\n content: "\\F1BB";\n}\n.fa-spotify:before {\n content: "\\F1BC";\n}\n.fa-deviantart:before {\n content: "\\F1BD";\n}\n.fa-soundcloud:before {\n content: "\\F1BE";\n}\n.fa-database:before {\n content: "\\F1C0";\n}\n.fa-file-pdf-o:before {\n content: "\\F1C1";\n}\n.fa-file-word-o:before {\n content: "\\F1C2";\n}\n.fa-file-excel-o:before {\n content: 
"\\F1C3";\n}\n.fa-file-powerpoint-o:before {\n content: "\\F1C4";\n}\n.fa-file-photo-o:before,\n.fa-file-picture-o:before,\n.fa-file-image-o:before {\n content: "\\F1C5";\n}\n.fa-file-zip-o:before,\n.fa-file-archive-o:before {\n content: "\\F1C6";\n}\n.fa-file-sound-o:before,\n.fa-file-audio-o:before {\n content: "\\F1C7";\n}\n.fa-file-movie-o:before,\n.fa-file-video-o:before {\n content: "\\F1C8";\n}\n.fa-file-code-o:before {\n content: "\\F1C9";\n}\n.fa-vine:before {\n content: "\\F1CA";\n}\n.fa-codepen:before {\n content: "\\F1CB";\n}\n.fa-jsfiddle:before {\n content: "\\F1CC";\n}\n.fa-life-bouy:before,\n.fa-life-buoy:before,\n.fa-life-saver:before,\n.fa-support:before,\n.fa-life-ring:before {\n content: "\\F1CD";\n}\n.fa-circle-o-notch:before {\n content: "\\F1CE";\n}\n.fa-ra:before,\n.fa-resistance:before,\n.fa-rebel:before {\n content: "\\F1D0";\n}\n.fa-ge:before,\n.fa-empire:before {\n content: "\\F1D1";\n}\n.fa-git-square:before {\n content: "\\F1D2";\n}\n.fa-git:before {\n content: "\\F1D3";\n}\n.fa-y-combinator-square:before,\n.fa-yc-square:before,\n.fa-hacker-news:before {\n content: "\\F1D4";\n}\n.fa-tencent-weibo:before {\n content: "\\F1D5";\n}\n.fa-qq:before {\n content: "\\F1D6";\n}\n.fa-wechat:before,\n.fa-weixin:before {\n content: "\\F1D7";\n}\n.fa-send:before,\n.fa-paper-plane:before {\n content: "\\F1D8";\n}\n.fa-send-o:before,\n.fa-paper-plane-o:before {\n content: "\\F1D9";\n}\n.fa-history:before {\n content: "\\F1DA";\n}\n.fa-circle-thin:before {\n content: "\\F1DB";\n}\n.fa-header:before {\n content: "\\F1DC";\n}\n.fa-paragraph:before {\n content: "\\F1DD";\n}\n.fa-sliders:before {\n content: "\\F1DE";\n}\n.fa-share-alt:before {\n content: "\\F1E0";\n}\n.fa-share-alt-square:before {\n content: "\\F1E1";\n}\n.fa-bomb:before {\n content: "\\F1E2";\n}\n.fa-soccer-ball-o:before,\n.fa-futbol-o:before {\n content: "\\F1E3";\n}\n.fa-tty:before {\n content: "\\F1E4";\n}\n.fa-binoculars:before {\n content: "\\F1E5";\n}\n.fa-plug:before {\n content: "\\F1E6";\n}\n.fa-slideshare:before {\n content: "\\F1E7";\n}\n.fa-twitch:before {\n content: "\\F1E8";\n}\n.fa-yelp:before {\n content: "\\F1E9";\n}\n.fa-newspaper-o:before {\n content: "\\F1EA";\n}\n.fa-wifi:before {\n content: "\\F1EB";\n}\n.fa-calculator:before {\n content: "\\F1EC";\n}\n.fa-paypal:before {\n content: "\\F1ED";\n}\n.fa-google-wallet:before {\n content: "\\F1EE";\n}\n.fa-cc-visa:before {\n content: "\\F1F0";\n}\n.fa-cc-mastercard:before {\n content: "\\F1F1";\n}\n.fa-cc-discover:before {\n content: "\\F1F2";\n}\n.fa-cc-amex:before {\n content: "\\F1F3";\n}\n.fa-cc-paypal:before {\n content: "\\F1F4";\n}\n.fa-cc-stripe:before {\n content: "\\F1F5";\n}\n.fa-bell-slash:before {\n content: "\\F1F6";\n}\n.fa-bell-slash-o:before {\n content: "\\F1F7";\n}\n.fa-trash:before {\n content: "\\F1F8";\n}\n.fa-copyright:before {\n content: "\\F1F9";\n}\n.fa-at:before {\n content: "\\F1FA";\n}\n.fa-eyedropper:before {\n content: "\\F1FB";\n}\n.fa-paint-brush:before {\n content: "\\F1FC";\n}\n.fa-birthday-cake:before {\n content: "\\F1FD";\n}\n.fa-area-chart:before {\n content: "\\F1FE";\n}\n.fa-pie-chart:before {\n content: "\\F200";\n}\n.fa-line-chart:before {\n content: "\\F201";\n}\n.fa-lastfm:before {\n content: "\\F202";\n}\n.fa-lastfm-square:before {\n content: "\\F203";\n}\n.fa-toggle-off:before {\n content: "\\F204";\n}\n.fa-toggle-on:before {\n content: "\\F205";\n}\n.fa-bicycle:before {\n content: "\\F206";\n}\n.fa-bus:before {\n content: "\\F207";\n}\n.fa-ioxhost:before {\n content: 
"\\F208";\n}\n.fa-angellist:before {\n content: "\\F209";\n}\n.fa-cc:before {\n content: "\\F20A";\n}\n.fa-shekel:before,\n.fa-sheqel:before,\n.fa-ils:before {\n content: "\\F20B";\n}\n.fa-meanpath:before {\n content: "\\F20C";\n}\n.fa-buysellads:before {\n content: "\\F20D";\n}\n.fa-connectdevelop:before {\n content: "\\F20E";\n}\n.fa-dashcube:before {\n content: "\\F210";\n}\n.fa-forumbee:before {\n content: "\\F211";\n}\n.fa-leanpub:before {\n content: "\\F212";\n}\n.fa-sellsy:before {\n content: "\\F213";\n}\n.fa-shirtsinbulk:before {\n content: "\\F214";\n}\n.fa-simplybuilt:before {\n content: "\\F215";\n}\n.fa-skyatlas:before {\n content: "\\F216";\n}\n.fa-cart-plus:before {\n content: "\\F217";\n}\n.fa-cart-arrow-down:before {\n content: "\\F218";\n}\n.fa-diamond:before {\n content: "\\F219";\n}\n.fa-ship:before {\n content: "\\F21A";\n}\n.fa-user-secret:before {\n content: "\\F21B";\n}\n.fa-motorcycle:before {\n content: "\\F21C";\n}\n.fa-street-view:before {\n content: "\\F21D";\n}\n.fa-heartbeat:before {\n content: "\\F21E";\n}\n.fa-venus:before {\n content: "\\F221";\n}\n.fa-mars:before {\n content: "\\F222";\n}\n.fa-mercury:before {\n content: "\\F223";\n}\n.fa-intersex:before,\n.fa-transgender:before {\n content: "\\F224";\n}\n.fa-transgender-alt:before {\n content: "\\F225";\n}\n.fa-venus-double:before {\n content: "\\F226";\n}\n.fa-mars-double:before {\n content: "\\F227";\n}\n.fa-venus-mars:before {\n content: "\\F228";\n}\n.fa-mars-stroke:before {\n content: "\\F229";\n}\n.fa-mars-stroke-v:before {\n content: "\\F22A";\n}\n.fa-mars-stroke-h:before {\n content: "\\F22B";\n}\n.fa-neuter:before {\n content: "\\F22C";\n}\n.fa-genderless:before {\n content: "\\F22D";\n}\n.fa-facebook-official:before {\n content: "\\F230";\n}\n.fa-pinterest-p:before {\n content: "\\F231";\n}\n.fa-whatsapp:before {\n content: "\\F232";\n}\n.fa-server:before {\n content: "\\F233";\n}\n.fa-user-plus:before {\n content: "\\F234";\n}\n.fa-user-times:before {\n content: "\\F235";\n}\n.fa-hotel:before,\n.fa-bed:before {\n content: "\\F236";\n}\n.fa-viacoin:before {\n content: "\\F237";\n}\n.fa-train:before {\n content: "\\F238";\n}\n.fa-subway:before {\n content: "\\F239";\n}\n.fa-medium:before {\n content: "\\F23A";\n}\n.fa-yc:before,\n.fa-y-combinator:before {\n content: "\\F23B";\n}\n.fa-optin-monster:before {\n content: "\\F23C";\n}\n.fa-opencart:before {\n content: "\\F23D";\n}\n.fa-expeditedssl:before {\n content: "\\F23E";\n}\n.fa-battery-4:before,\n.fa-battery:before,\n.fa-battery-full:before {\n content: "\\F240";\n}\n.fa-battery-3:before,\n.fa-battery-three-quarters:before {\n content: "\\F241";\n}\n.fa-battery-2:before,\n.fa-battery-half:before {\n content: "\\F242";\n}\n.fa-battery-1:before,\n.fa-battery-quarter:before {\n content: "\\F243";\n}\n.fa-battery-0:before,\n.fa-battery-empty:before {\n content: "\\F244";\n}\n.fa-mouse-pointer:before {\n content: "\\F245";\n}\n.fa-i-cursor:before {\n content: "\\F246";\n}\n.fa-object-group:before {\n content: "\\F247";\n}\n.fa-object-ungroup:before {\n content: "\\F248";\n}\n.fa-sticky-note:before {\n content: "\\F249";\n}\n.fa-sticky-note-o:before {\n content: "\\F24A";\n}\n.fa-cc-jcb:before {\n content: "\\F24B";\n}\n.fa-cc-diners-club:before {\n content: "\\F24C";\n}\n.fa-clone:before {\n content: "\\F24D";\n}\n.fa-balance-scale:before {\n content: "\\F24E";\n}\n.fa-hourglass-o:before {\n content: "\\F250";\n}\n.fa-hourglass-1:before,\n.fa-hourglass-start:before {\n content: "\\F251";\n}\n.fa-hourglass-2:before,\n.fa-hourglass-half:before {\n 
content: "\\F252";\n}\n.fa-hourglass-3:before,\n.fa-hourglass-end:before {\n content: "\\F253";\n}\n.fa-hourglass:before {\n content: "\\F254";\n}\n.fa-hand-grab-o:before,\n.fa-hand-rock-o:before {\n content: "\\F255";\n}\n.fa-hand-stop-o:before,\n.fa-hand-paper-o:before {\n content: "\\F256";\n}\n.fa-hand-scissors-o:before {\n content: "\\F257";\n}\n.fa-hand-lizard-o:before {\n content: "\\F258";\n}\n.fa-hand-spock-o:before {\n content: "\\F259";\n}\n.fa-hand-pointer-o:before {\n content: "\\F25A";\n}\n.fa-hand-peace-o:before {\n content: "\\F25B";\n}\n.fa-trademark:before {\n content: "\\F25C";\n}\n.fa-registered:before {\n content: "\\F25D";\n}\n.fa-creative-commons:before {\n content: "\\F25E";\n}\n.fa-gg:before {\n content: "\\F260";\n}\n.fa-gg-circle:before {\n content: "\\F261";\n}\n.fa-tripadvisor:before {\n content: "\\F262";\n}\n.fa-odnoklassniki:before {\n content: "\\F263";\n}\n.fa-odnoklassniki-square:before {\n content: "\\F264";\n}\n.fa-get-pocket:before {\n content: "\\F265";\n}\n.fa-wikipedia-w:before {\n content: "\\F266";\n}\n.fa-safari:before {\n content: "\\F267";\n}\n.fa-chrome:before {\n content: "\\F268";\n}\n.fa-firefox:before {\n content: "\\F269";\n}\n.fa-opera:before {\n content: "\\F26A";\n}\n.fa-internet-explorer:before {\n content: "\\F26B";\n}\n.fa-tv:before,\n.fa-television:before {\n content: "\\F26C";\n}\n.fa-contao:before {\n content: "\\F26D";\n}\n.fa-500px:before {\n content: "\\F26E";\n}\n.fa-amazon:before {\n content: "\\F270";\n}\n.fa-calendar-plus-o:before {\n content: "\\F271";\n}\n.fa-calendar-minus-o:before {\n content: "\\F272";\n}\n.fa-calendar-times-o:before {\n content: "\\F273";\n}\n.fa-calendar-check-o:before {\n content: "\\F274";\n}\n.fa-industry:before {\n content: "\\F275";\n}\n.fa-map-pin:before {\n content: "\\F276";\n}\n.fa-map-signs:before {\n content: "\\F277";\n}\n.fa-map-o:before {\n content: "\\F278";\n}\n.fa-map:before {\n content: "\\F279";\n}\n.fa-commenting:before {\n content: "\\F27A";\n}\n.fa-commenting-o:before {\n content: "\\F27B";\n}\n.fa-houzz:before {\n content: "\\F27C";\n}\n.fa-vimeo:before {\n content: "\\F27D";\n}\n.fa-black-tie:before {\n content: "\\F27E";\n}\n.fa-fonticons:before {\n content: "\\F280";\n}\n.fa-reddit-alien:before {\n content: "\\F281";\n}\n.fa-edge:before {\n content: "\\F282";\n}\n.fa-credit-card-alt:before {\n content: "\\F283";\n}\n.fa-codiepie:before {\n content: "\\F284";\n}\n.fa-modx:before {\n content: "\\F285";\n}\n.fa-fort-awesome:before {\n content: "\\F286";\n}\n.fa-usb:before {\n content: "\\F287";\n}\n.fa-product-hunt:before {\n content: "\\F288";\n}\n.fa-mixcloud:before {\n content: "\\F289";\n}\n.fa-scribd:before {\n content: "\\F28A";\n}\n.fa-pause-circle:before {\n content: "\\F28B";\n}\n.fa-pause-circle-o:before {\n content: "\\F28C";\n}\n.fa-stop-circle:before {\n content: "\\F28D";\n}\n.fa-stop-circle-o:before {\n content: "\\F28E";\n}\n.fa-shopping-bag:before {\n content: "\\F290";\n}\n.fa-shopping-basket:before {\n content: "\\F291";\n}\n.fa-hashtag:before {\n content: "\\F292";\n}\n.fa-bluetooth:before {\n content: "\\F293";\n}\n.fa-bluetooth-b:before {\n content: "\\F294";\n}\n.fa-percent:before {\n content: "\\F295";\n}\n.fa-gitlab:before {\n content: "\\F296";\n}\n.fa-wpbeginner:before {\n content: "\\F297";\n}\n.fa-wpforms:before {\n content: "\\F298";\n}\n.fa-envira:before {\n content: "\\F299";\n}\n.fa-universal-access:before {\n content: "\\F29A";\n}\n.fa-wheelchair-alt:before {\n content: "\\F29B";\n}\n.fa-question-circle-o:before {\n content: 
"\\F29C";\n}\n.fa-blind:before {\n content: "\\F29D";\n}\n.fa-audio-description:before {\n content: "\\F29E";\n}\n.fa-volume-control-phone:before {\n content: "\\F2A0";\n}\n.fa-braille:before {\n content: "\\F2A1";\n}\n.fa-assistive-listening-systems:before {\n content: "\\F2A2";\n}\n.fa-asl-interpreting:before,\n.fa-american-sign-language-interpreting:before {\n content: "\\F2A3";\n}\n.fa-deafness:before,\n.fa-hard-of-hearing:before,\n.fa-deaf:before {\n content: "\\F2A4";\n}\n.fa-glide:before {\n content: "\\F2A5";\n}\n.fa-glide-g:before {\n content: "\\F2A6";\n}\n.fa-signing:before,\n.fa-sign-language:before {\n content: "\\F2A7";\n}\n.fa-low-vision:before {\n content: "\\F2A8";\n}\n.fa-viadeo:before {\n content: "\\F2A9";\n}\n.fa-viadeo-square:before {\n content: "\\F2AA";\n}\n.fa-snapchat:before {\n content: "\\F2AB";\n}\n.fa-snapchat-ghost:before {\n content: "\\F2AC";\n}\n.fa-snapchat-square:before {\n content: "\\F2AD";\n}\n.fa-pied-piper:before {\n content: "\\F2AE";\n}\n.fa-first-order:before {\n content: "\\F2B0";\n}\n.fa-yoast:before {\n content: "\\F2B1";\n}\n.fa-themeisle:before {\n content: "\\F2B2";\n}\n.fa-google-plus-circle:before,\n.fa-google-plus-official:before {\n content: "\\F2B3";\n}\n.fa-fa:before,\n.fa-font-awesome:before {\n content: "\\F2B4";\n}\n.fa-handshake-o:before {\n content: "\\F2B5";\n}\n.fa-envelope-open:before {\n content: "\\F2B6";\n}\n.fa-envelope-open-o:before {\n content: "\\F2B7";\n}\n.fa-linode:before {\n content: "\\F2B8";\n}\n.fa-address-book:before {\n content: "\\F2B9";\n}\n.fa-address-book-o:before {\n content: "\\F2BA";\n}\n.fa-vcard:before,\n.fa-address-card:before {\n content: "\\F2BB";\n}\n.fa-vcard-o:before,\n.fa-address-card-o:before {\n content: "\\F2BC";\n}\n.fa-user-circle:before {\n content: "\\F2BD";\n}\n.fa-user-circle-o:before {\n content: "\\F2BE";\n}\n.fa-user-o:before {\n content: "\\F2C0";\n}\n.fa-id-badge:before {\n content: "\\F2C1";\n}\n.fa-drivers-license:before,\n.fa-id-card:before {\n content: "\\F2C2";\n}\n.fa-drivers-license-o:before,\n.fa-id-card-o:before {\n content: "\\F2C3";\n}\n.fa-quora:before {\n content: "\\F2C4";\n}\n.fa-free-code-camp:before {\n content: "\\F2C5";\n}\n.fa-telegram:before {\n content: "\\F2C6";\n}\n.fa-thermometer-4:before,\n.fa-thermometer:before,\n.fa-thermometer-full:before {\n content: "\\F2C7";\n}\n.fa-thermometer-3:before,\n.fa-thermometer-three-quarters:before {\n content: "\\F2C8";\n}\n.fa-thermometer-2:before,\n.fa-thermometer-half:before {\n content: "\\F2C9";\n}\n.fa-thermometer-1:before,\n.fa-thermometer-quarter:before {\n content: "\\F2CA";\n}\n.fa-thermometer-0:before,\n.fa-thermometer-empty:before {\n content: "\\F2CB";\n}\n.fa-shower:before {\n content: "\\F2CC";\n}\n.fa-bathtub:before,\n.fa-s15:before,\n.fa-bath:before {\n content: "\\F2CD";\n}\n.fa-podcast:before {\n content: "\\F2CE";\n}\n.fa-window-maximize:before {\n content: "\\F2D0";\n}\n.fa-window-minimize:before {\n content: "\\F2D1";\n}\n.fa-window-restore:before {\n content: "\\F2D2";\n}\n.fa-times-rectangle:before,\n.fa-window-close:before {\n content: "\\F2D3";\n}\n.fa-times-rectangle-o:before,\n.fa-window-close-o:before {\n content: "\\F2D4";\n}\n.fa-bandcamp:before {\n content: "\\F2D5";\n}\n.fa-grav:before {\n content: "\\F2D6";\n}\n.fa-etsy:before {\n content: "\\F2D7";\n}\n.fa-imdb:before {\n content: "\\F2D8";\n}\n.fa-ravelry:before {\n content: "\\F2D9";\n}\n.fa-eercast:before {\n content: "\\F2DA";\n}\n.fa-microchip:before {\n content: "\\F2DB";\n}\n.fa-snowflake-o:before {\n content: 
"\\F2DC";\n}\n.fa-superpowers:before {\n content: "\\F2DD";\n}\n.fa-wpexplorer:before {\n content: "\\F2DE";\n}\n.fa-meetup:before {\n content: "\\F2E0";\n}\n.sr-only {\n position: absolute;\n width: 1px;\n height: 1px;\n padding: 0;\n margin: -1px;\n overflow: hidden;\n clip: rect(0, 0, 0, 0);\n border: 0;\n}\n.sr-only-focusable:active,\n.sr-only-focusable:focus {\n position: static;\n width: auto;\n height: auto;\n margin: 0;\n overflow: visible;\n clip: auto;\n}\n', "" ]);
}, function(module, exports) {
module.exports = function(url) {
return "string" != typeof url ? url : (/^['"].*['"]$/.test(url) && (url = url.slice(1, -1)),
@@ -30827,7 +30924,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}(function(target) {
return document.querySelector(target);
- }), singleton = null, singletonCounter = 0, stylesInsertedAtTop = [], fixUrls = __webpack_require__(504);
+ }), singleton = null, singletonCounter = 0, stylesInsertedAtTop = [], fixUrls = __webpack_require__(508);
module.exports = function(list, options) {
if ("undefined" != typeof DEBUG && DEBUG && "object" != typeof document) throw new Error("The style-loader cannot be used in a non-browser environment");
options = options || {}, options.attrs = "object" == typeof options.attrs ? options.attrs : {},
@@ -31079,7 +31176,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _common = __webpack_require__(61), _Footer = __webpack_require__(508), _Footer2 = _interopRequireDefault(_Footer), styles = {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _common = __webpack_require__(77), _Footer = __webpack_require__(512), _Footer2 = _interopRequireDefault(_Footer), styles = {
wrapper: {
display: "flex",
flexDirection: "column",
@@ -31126,7 +31223,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
className: classes.content,
style: styles.content
}, children), _react2.default.createElement(_Footer2.default, {
- content: content,
+ general: content.general,
+ system: content.system,
shouldUpdate: shouldUpdate
}));
}
@@ -31140,12 +31238,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
default: obj
};
}
- function _toConsumableArray(arr) {
- if (Array.isArray(arr)) {
- for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
- return arr2;
- }
- return Array.from(arr);
+ function _defineProperty(obj, key, value) {
+ return key in obj ? Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: !0,
+ configurable: !0,
+ writable: !0
+ }) : obj[key] = value, obj;
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
@@ -31168,7 +31267,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _createClass = function() {
+ var _extends = Object.assign || function(target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = arguments[i];
+ for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
+ }
+ return target;
+ }, _createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
@@ -31180,7 +31285,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), _Grid = __webpack_require__(241), _Grid2 = _interopRequireDefault(_Grid), _recharts = __webpack_require__(524), _ChartRow = __webpack_require__(804), _ChartRow2 = _interopRequireDefault(_ChartRow), _CustomTooltip = __webpack_require__(805), _CustomTooltip2 = _interopRequireDefault(_CustomTooltip), _common = __webpack_require__(61), styles = {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), _Grid = __webpack_require__(242), _Grid2 = _interopRequireDefault(_Grid), _recharts = __webpack_require__(526), _ChartRow = __webpack_require__(804), _ChartRow2 = _interopRequireDefault(_ChartRow), _CustomTooltip = __webpack_require__(805), _CustomTooltip2 = _interopRequireDefault(_CustomTooltip), _common = __webpack_require__(77), TOP = "Top", BOTTOM = "Bottom", styles = {
footer: {
maxWidth: "100%",
flexWrap: "nowrap",
@@ -31192,14 +31297,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
},
doubleChartWrapper: {
height: "100%",
- width: "99%",
- paddingTop: 5
+ width: "99%"
}
}, themeStyles = function(theme) {
return {
footer: {
- backgroundColor: theme.palette.background.appBar,
- color: theme.palette.getContrastText(theme.palette.background.appBar),
+ backgroundColor: theme.palette.grey[900],
+ color: theme.palette.getContrastText(theme.palette.grey[900]),
zIndex: theme.zIndex.appBar,
height: 10 * theme.spacing.unit
}
@@ -31210,76 +31314,68 @@ var _bundleJs = []byte((((((((((`!function(modules) {
_classCallCheck(this, Footer);
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) args[_key] = arguments[_key];
return _temp = _this = _possibleConstructorReturn(this, (_ref = Footer.__proto__ || Object.getPrototypeOf(Footer)).call.apply(_ref, [ this ].concat(args))),
- _this.info = function(about, value) {
- return value ? _react2.default.createElement(_Typography2.default, {
- type: "caption",
- color: "inherit"
- }, _react2.default.createElement("span", {
- style: _common.styles.light
- }, about), " ", value) : null;
- }, _this.doubleChart = function(syncId, topChart, bottomChart) {
- for (var topDefault = topChart.default ? topChart.default : 0, bottomDefault = bottomChart.default ? bottomChart.default : 0, topTooltip = topChart.tooltip ? _react2.default.createElement(_recharts.Tooltip, {
- cursor: !1,
- content: _react2.default.createElement(_CustomTooltip2.default, {
- tooltip: topChart.tooltip
- })
- }) : null, bottomTooltip = bottomChart.tooltip ? _react2.default.createElement(_recharts.Tooltip, {
+ _this.halfHeightChart = function(chartProps, tooltip, areaProps) {
+ return _react2.default.createElement(_recharts.ResponsiveContainer, {
+ width: "100%",
+ height: "50%"
+ }, _react2.default.createElement(_recharts.AreaChart, chartProps, !tooltip || _react2.default.createElement(_recharts.Tooltip, {
cursor: !1,
content: _react2.default.createElement(_CustomTooltip2.default, {
- tooltip: bottomChart.tooltip
+ tooltip: tooltip
})
- }) : null, data = [].concat(_toConsumableArray(topChart.data.map(function(_ref2) {
- var value = _ref2.value, d = {};
- return d.topKey = value || topDefault, d;
- }))), i = 0; i < data.length && i < bottomChart.data.length; i++) {
- var d = bottomChart.data[i];
- data[i].bottomKey = d && d.value ? -d.value : bottomDefault;
- }
- return data = [].concat(_toConsumableArray(data), _toConsumableArray(bottomChart.data.slice(data.length).map(function(_ref3) {
- var value = _ref3.value, d = {};
- return d.topKey = topDefault, d.bottomKey = -value || bottomDefault, d;
- }))), _react2.default.createElement("div", {
+ }), _react2.default.createElement(_recharts.Area, _extends({
+ isAnimationActive: !1,
+ type: "monotone"
+ }, areaProps))));
+ }, _this.doubleChart = function(syncId, chartKey, topChart, bottomChart) {
+ if (!Array.isArray(topChart.data) || !Array.isArray(bottomChart.data)) return null;
+ var topDefault = topChart.default || 0, bottomDefault = bottomChart.default || 0, topKey = "" + chartKey + TOP, bottomKey = "" + chartKey + BOTTOM;
+ return _react2.default.createElement("div", {
style: styles.doubleChartWrapper
- }, _react2.default.createElement(_recharts.ResponsiveContainer, {
- width: "100%",
- height: "50%"
- }, _react2.default.createElement(_recharts.AreaChart, {
- data: data,
- syncId: syncId
- }, topTooltip, _react2.default.createElement(_recharts.Area, {
- type: "monotone",
- dataKey: "topKey",
+ }, _this.halfHeightChart({
+ syncId: syncId,
+ data: topChart.data.map(function(_ref2) {
+ var value = _ref2.value;
+ return _defineProperty({}, topKey, value || topDefault);
+ }),
+ margin: {
+ top: 5,
+ right: 5,
+ bottom: 0,
+ left: 5
+ }
+ }, topChart.tooltip, {
+ dataKey: topKey,
stroke: "#8884d8",
fill: "#8884d8"
- }))), _react2.default.createElement("div", {
- style: {
- marginTop: -10,
- width: "100%",
- height: "50%"
- }
- }, _react2.default.createElement(_recharts.ResponsiveContainer, {
- width: "100%",
- height: "100%"
- }, _react2.default.createElement(_recharts.AreaChart, {
- data: data,
- syncId: syncId
- }, bottomTooltip, _react2.default.createElement(_recharts.Area, {
- type: "monotone",
- dataKey: "bottomKey",
+ }), _this.halfHeightChart({
+ syncId: syncId,
+ data: bottomChart.data.map(function(_ref4) {
+ var value = _ref4.value;
+ return _defineProperty({}, bottomKey, -value || -bottomDefault);
+ }),
+ margin: {
+ top: 0,
+ right: 5,
+ bottom: 5,
+ left: 5
+ }
+ }, bottomChart.tooltip, {
+ dataKey: bottomKey,
stroke: "#82ca9d",
fill: "#82ca9d"
- })))));
+ }));
}, _ret = _temp, _possibleConstructorReturn(_this, _ret);
}
return _inherits(Footer, _Component), _createClass(Footer, [ {
key: "shouldComponentUpdate",
value: function(nextProps) {
- return void 0 !== nextProps.shouldUpdate.home;
+ return void 0 !== nextProps.shouldUpdate.general || void 0 !== nextProps.shouldUpdate.system;
}
}, {
key: "render",
value: function() {
- var content = this.props.content, general = content.general, home = content.home;
+ var _props = this.props, general = _props.general, system = _props.system;
return _react2.default.createElement(_Grid2.default, {
container: !0,
className: this.props.classes.footer,
@@ -31290,33 +31386,50 @@ var _bundleJs = []byte((((((((((`!function(modules) {
item: !0,
xs: !0,
style: styles.chartRowWrapper
- }, _react2.default.createElement(_ChartRow2.default, null, this.doubleChart("all", {
- data: home.processCPU,
- tooltip: (0, _CustomTooltip.percentPlotter)("Process")
+ }, _react2.default.createElement(_ChartRow2.default, null, this.doubleChart("footerSyncId", "cpu", {
+ data: system.processCPU,
+ tooltip: (0, _CustomTooltip.percentPlotter)("Process load")
}, {
- data: home.systemCPU,
- tooltip: (0, _CustomTooltip.percentPlotter)("System", (0, _CustomTooltip.multiplier)(-1))
- }), this.doubleChart("all", {
- data: home.activeMemory,
- tooltip: (0, _CustomTooltip.bytePlotter)("Active")
+ data: system.systemCPU,
+ tooltip: (0, _CustomTooltip.percentPlotter)("System load", (0, _CustomTooltip.multiplier)(-1))
+ }), this.doubleChart("footerSyncId", "memory", {
+ data: system.activeMemory,
+ tooltip: (0, _CustomTooltip.bytePlotter)("Active memory")
}, {
- data: home.virtualMemory,
- tooltip: (0, _CustomTooltip.bytePlotter)("Virtual", (0, _CustomTooltip.multiplier)(-1))
- }), this.doubleChart("all", {
- data: home.diskRead,
- tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Disk Read")
+ data: system.virtualMemory,
+ tooltip: (0, _CustomTooltip.bytePlotter)("Virtual memory", (0, _CustomTooltip.multiplier)(-1))
+ }), this.doubleChart("footerSyncId", "disk", {
+ data: system.diskRead,
+ tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Disk read")
}, {
- data: home.diskWrite,
- tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Disk Write", (0, _CustomTooltip.multiplier)(-1))
- }), this.doubleChart("all", {
- data: home.networkIngress,
+ data: system.diskWrite,
+ tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Disk write", (0, _CustomTooltip.multiplier)(-1))
+ }), this.doubleChart("footerSyncId", "traffic", {
+ data: system.networkIngress,
tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Download")
}, {
- data: home.networkEgress,
+ data: system.networkEgress,
tooltip: (0, _CustomTooltip.bytePerSecPlotter)("Upload", (0, _CustomTooltip.multiplier)(-1))
}))), _react2.default.createElement(_Grid2.default, {
item: !0
- }, this.info("Geth", general.version), this.info("Commit", general.commit ? general.commit.substring(0, 7) : null)));
+ }, _react2.default.createElement(_Typography2.default, {
+ type: "caption",
+ color: "inherit"
+ }, _react2.default.createElement("span", {
+ style: _common.styles.light
+ }, "Geth"), " ", general.version), general.commit && _react2.default.createElement(_Typography2.default, {
+ type: "caption",
+ color: "inherit"
+ }, _react2.default.createElement("span", {
+ style: _common.styles.light
+ }, "Commit "), _react2.default.createElement("a", {
+ href: "https://github.com/ethereum/go-ethereum/commit/" + general.commit,
+ target: "_blank",
+ style: {
+ color: "inherit",
+ textDecoration: "none"
+ }
+ }, general.commit.substring(0, 8)))));
}
} ]), Footer;
}(_react.Component);
@@ -31337,7 +31450,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
GRID_SIZES.forEach(function(size) {
if ("boolean" != typeof size) {
- var width = Math.round(size / 12 * Math.pow(10, 6)) / Math.pow(10, 4) + "%";
+ var width = Math.round(size / 12 * 1e7) / 1e5 + "%";
styles["grid-" + breakpoint + "-" + size] = {
flexBasis: width,
maxWidth: width
@@ -31381,7 +31494,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.styles = void 0;
- var _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _createBreakpoints = __webpack_require__(74), _requirePropFactory = __webpack_require__(510), _requirePropFactory2 = _interopRequireDefault(_requirePropFactory), _Hidden = __webpack_require__(511), _Hidden2 = _interopRequireDefault(_Hidden), GUTTERS = [ 0, 8, 16, 24, 40 ], GRID_SIZES = [ !0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ], styles = exports.styles = function(theme) {
+ var _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _classnames = __webpack_require__(3), _classnames2 = _interopRequireDefault(_classnames), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), _createBreakpoints = __webpack_require__(74), _requirePropFactory = __webpack_require__(514), _requirePropFactory2 = _interopRequireDefault(_requirePropFactory), _Hidden = __webpack_require__(515), _Hidden2 = _interopRequireDefault(_Hidden), GUTTERS = [ 0, 8, 16, 24, 40 ], GRID_SIZES = [ !0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ], styles = exports.styles = function(theme) {
return (0, _extends3.default)({
typeContainer: {
boxSizing: "border-box",
@@ -31520,7 +31633,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return function(requiredProp) {
return function(props, propName, componentName, location, propFullName) {
var propFullNameSafe = propFullName || propName;
- return void 0 === props[propName] || props[requiredProp] ? null : new Error("The property ` + ("`" + `" + propFullNameSafe + "`))) + (("`" + (` of ` + "`")) + (`" + componentNameInError + "` + ("`" + ` must be used on `)))) + ((("`" + (`" + requiredProp + "` + "`")) + (`.");
+ return void 0 === props[propName] || props[requiredProp] ? null : new Error("The property ` + "`")) + (`" + propFullNameSafe + "` + ("`" + ` of `)))) + ((("`" + (`" + componentNameInError + "` + "`")) + (` must be used on ` + ("`" + `" + requiredProp + "`))) + (("`" + (`.");
};
};
};
@@ -31535,7 +31648,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _Hidden = __webpack_require__(512);
+ var _Hidden = __webpack_require__(516);
Object.defineProperty(exports, "default", {
enumerable: !0,
get: function() {
@@ -31557,7 +31670,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _HiddenJs = __webpack_require__(513), _HiddenJs2 = _interopRequireDefault(_HiddenJs), _HiddenCss = __webpack_require__(523), _HiddenCss2 = _interopRequireDefault(_HiddenCss);
+ var _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _HiddenJs = __webpack_require__(517), _HiddenJs2 = _interopRequireDefault(_HiddenJs), _HiddenCss = __webpack_require__(525), _HiddenCss2 = _interopRequireDefault(_HiddenCss);
Hidden.propTypes = "production" !== process.env.NODE_ENV ? {
children: _propTypes2.default.node,
className: _propTypes2.default.string,
@@ -31590,71 +31703,53 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}).call(exports, __webpack_require__(2));
}, function(module, exports, __webpack_require__) {
"use strict";
- (function(process) {
- function _interopRequireDefault(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
- function HiddenJs(props) {
- var children = props.children, only = (props.lgDown, props.lgUp, props.mdDown, props.mdUp,
- props.only), width = (props.smDown, props.smUp, props.width), other = (props.xlDown,
- props.xlUp, props.xsDown, props.xsUp, (0, _objectWithoutProperties3.default)(props, [ "children", "lgDown", "lgUp", "mdDown", "mdUp", "only", "smDown", "smUp", "width", "xlDown", "xlUp", "xsDown", "xsUp" ]));
- "production" !== process.env.NODE_ENV && (0, _warning2.default)(0 === (0, _keys2.default)(other).length, "Material-UI: unsupported properties received " + (0,
- _stringify2.default)(other) + " by ` + ("`" + `<Hidden />`))) + (("`" + (`.");
- var visible = !0;
- if (only) if (Array.isArray(only)) for (var i = 0; i < only.length; i += 1) {
- var breakpoint = only[i];
- if (width === breakpoint) {
- visible = !1;
- break;
- }
- } else only && width === only && (visible = !1);
- if (visible) for (var _i = 0; _i < _createBreakpoints.keys.length; _i += 1) {
- var _breakpoint = _createBreakpoints.keys[_i], breakpointUp = props[_breakpoint + "Up"], breakpointDown = props[_breakpoint + "Down"];
- if (breakpointUp && (0, _withWidth.isWidthUp)(_breakpoint, width) || breakpointDown && (0,
- _withWidth.isWidthDown)(_breakpoint, width)) {
- visible = !1;
- break;
- }
+ function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ function HiddenJs(props) {
+ var children = props.children, only = props.only, width = props.width, visible = !0;
+ if (only) if (Array.isArray(only)) for (var i = 0; i < only.length; i += 1) {
+ var breakpoint = only[i];
+ if (width === breakpoint) {
+ visible = !1;
+ break;
+ }
+ } else only && width === only && (visible = !1);
+ if (visible) for (var _i = 0; _i < _createBreakpoints.keys.length; _i += 1) {
+ var _breakpoint = _createBreakpoints.keys[_i], breakpointUp = props[_breakpoint + "Up"], breakpointDown = props[_breakpoint + "Down"];
+ if (breakpointUp && (0, _withWidth.isWidthUp)(_breakpoint, width) || breakpointDown && (0,
+ _withWidth.isWidthDown)(_breakpoint, width)) {
+ visible = !1;
+ break;
}
- return visible ? children : null;
}
- Object.defineProperty(exports, "__esModule", {
- value: !0
- });
- var _stringify = __webpack_require__(514), _stringify2 = _interopRequireDefault(_stringify), _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _createBreakpoints = __webpack_require__(74), _withWidth = __webpack_require__(516), _withWidth2 = _interopRequireDefault(_withWidth);
- HiddenJs.propTypes = {
- children: _propTypes2.default.node,
- className: _propTypes2.default.string,
- implementation: _propTypes2.default.oneOf([ "js", "css" ]),
- initialWidth: _propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ]),
- lgDown: _propTypes2.default.bool,
- lgUp: _propTypes2.default.bool,
- mdDown: _propTypes2.default.bool,
- mdUp: _propTypes2.default.bool,
- only: _propTypes2.default.oneOfType([ _propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ]), _propTypes2.default.arrayOf(_propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ])) ]),
- smDown: _propTypes2.default.bool,
- smUp: _propTypes2.default.bool,
- width: _propTypes2.default.string.isRequired,
- xlDown: _propTypes2.default.bool,
- xlUp: _propTypes2.default.bool,
- xsDown: _propTypes2.default.bool,
- xsUp: _propTypes2.default.bool
- }, exports.default = (0, _withWidth2.default)()(HiddenJs);
- }).call(exports, __webpack_require__(2));
-}, function(module, exports, __webpack_require__) {
- module.exports = {
- default: __webpack_require__(515),
- __esModule: !0
- };
-}, function(module, exports, __webpack_require__) {
- var core = __webpack_require__(17), $JSON = core.JSON || (core.JSON = {
- stringify: JSON.stringify
+ return visible ? children : null;
+ }
+ Object.defineProperty(exports, "__esModule", {
+ value: !0
});
- module.exports = function(it) {
- return $JSON.stringify.apply($JSON, arguments);
- };
+ var _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _createBreakpoints = __webpack_require__(74), _withWidth = __webpack_require__(518), _withWidth2 = _interopRequireDefault(_withWidth), _exactProp = __webpack_require__(218), _exactProp2 = _interopRequireDefault(_exactProp);
+ HiddenJs.propTypes = {
+ children: _propTypes2.default.node,
+ className: _propTypes2.default.string,
+ implementation: _propTypes2.default.oneOf([ "js", "css" ]),
+ initialWidth: _propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ]),
+ lgDown: _propTypes2.default.bool,
+ lgUp: _propTypes2.default.bool,
+ mdDown: _propTypes2.default.bool,
+ mdUp: _propTypes2.default.bool,
+ only: _propTypes2.default.oneOfType([ _propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ]), _propTypes2.default.arrayOf(_propTypes2.default.oneOf([ "xs", "sm", "md", "lg", "xl" ])) ]),
+ smDown: _propTypes2.default.bool,
+ smUp: _propTypes2.default.bool,
+ width: _propTypes2.default.string.isRequired,
+ xlDown: _propTypes2.default.bool,
+ xlUp: _propTypes2.default.bool,
+ xsDown: _propTypes2.default.bool,
+ xsUp: _propTypes2.default.bool
+ }, HiddenJs.propTypes = (0, _exactProp2.default)(HiddenJs.propTypes, "HiddenJs"),
+ exports.default = (0, _withWidth2.default)()(HiddenJs);
}, function(module, exports, __webpack_require__) {
"use strict";
(function(process) {
@@ -31666,7 +31761,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.isWidthDown = exports.isWidthUp = void 0;
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactEventListener = __webpack_require__(517), _reactEventListener2 = _interopRequireDefault(_reactEventListener), _debounce = __webpack_require__(157), _debounce2 = _interopRequireDefault(_debounce), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _hoistNonReactStatics = __webpack_require__(152), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _withTheme = __webpack_require__(522), _withTheme2 = _interopRequireDefault(_withTheme), _createBreakpoints = __webpack_require__(74), withWidth = (exports.isWidthUp = function(breakpoint, width) {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _reactEventListener = __webpack_require__(519), _reactEventListener2 = _interopRequireDefault(_reactEventListener), _debounce = __webpack_require__(156), _debounce2 = _interopRequireDefault(_debounce), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _hoistNonReactStatics = __webpack_require__(151), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _withTheme = __webpack_require__(524), _withTheme2 = _interopRequireDefault(_withTheme), _createBreakpoints = __webpack_require__(74), withWidth = (exports.isWidthUp = function(breakpoint, width) {
return arguments.length > 2 && void 0 !== arguments[2] && !arguments[2] ? _createBreakpoints.keys.indexOf(breakpoint) < _createBreakpoints.keys.indexOf(width) : _createBreakpoints.keys.indexOf(breakpoint) <= _createBreakpoints.keys.indexOf(width);
}, exports.isWidthDown = function(breakpoint, width) {
return arguments.length > 2 && void 0 !== arguments[2] && !arguments[2] ? _createBreakpoints.keys.indexOf(width) < _createBreakpoints.keys.indexOf(breakpoint) : _createBreakpoints.keys.indexOf(width) <= _createBreakpoints.keys.indexOf(breakpoint);
@@ -31782,9 +31877,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _typeof2 = __webpack_require__(100), _typeof3 = _interopRequireDefault(_typeof2), _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _assign = __webpack_require__(205), _assign2 = _interopRequireDefault(_assign);
+ var _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _typeof2 = __webpack_require__(101), _typeof3 = _interopRequireDefault(_typeof2), _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _assign = __webpack_require__(204), _assign2 = _interopRequireDefault(_assign);
exports.withOptions = withOptions;
- var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _shallowEqual = __webpack_require__(96), _shallowEqual2 = _interopRequireDefault(_shallowEqual), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _supports = __webpack_require__(518), defaultEventOptions = {
+ var _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _shallowEqual = __webpack_require__(97), _shallowEqual2 = _interopRequireDefault(_shallowEqual), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _supports = __webpack_require__(520), defaultEventOptions = {
capture: !1,
passive: !1
}, EventListener = function(_React$Component) {
@@ -31856,7 +31951,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.passiveOption = void 0;
- var _defineProperty = __webpack_require__(143), _defineProperty2 = function(obj) {
+ var _defineProperty = __webpack_require__(142), _defineProperty2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -31893,7 +31988,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return unmasked && (isOwn ? value[symToStringTag] = tag : delete value[symToStringTag]),
result;
}
- var Symbol = __webpack_require__(77), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty, nativeObjectToString = objectProto.toString, symToStringTag = Symbol ? Symbol.toStringTag : void 0;
+ var Symbol = __webpack_require__(78), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty, nativeObjectToString = objectProto.toString, symToStringTag = Symbol ? Symbol.toStringTag : void 0;
module.exports = getRawTag;
}, function(module, exports) {
function objectToString(value) {
@@ -31915,7 +32010,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _extends2 = __webpack_require__(7), _extends3 = _interopRequireDefault(_extends2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _hoistNonReactStatics = __webpack_require__(152), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _createMuiTheme = __webpack_require__(151), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _themeListener = __webpack_require__(150), _themeListener2 = _interopRequireDefault(_themeListener), defaultTheme = void 0, withTheme = function() {
+ var _extends2 = __webpack_require__(6), _extends3 = _interopRequireDefault(_extends2), _getPrototypeOf = __webpack_require__(26), _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf), _classCallCheck2 = __webpack_require__(27), _classCallCheck3 = _interopRequireDefault(_classCallCheck2), _createClass2 = __webpack_require__(28), _createClass3 = _interopRequireDefault(_createClass2), _possibleConstructorReturn2 = __webpack_require__(29), _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2), _inherits2 = __webpack_require__(30), _inherits3 = _interopRequireDefault(_inherits2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _hoistNonReactStatics = __webpack_require__(151), _hoistNonReactStatics2 = _interopRequireDefault(_hoistNonReactStatics), _wrapDisplayName = __webpack_require__(75), _wrapDisplayName2 = _interopRequireDefault(_wrapDisplayName), _createMuiTheme = __webpack_require__(150), _createMuiTheme2 = _interopRequireDefault(_createMuiTheme), _themeListener = __webpack_require__(149), _themeListener2 = _interopRequireDefault(_themeListener), defaultTheme = void 0, withTheme = function() {
return function(Component) {
var WithTheme = function(_React$Component) {
function WithTheme(props, context) {
@@ -31966,34 +32061,36 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
function HiddenCss(props) {
- var children = props.children, classes = props.classes, only = (props.lgDown, props.lgUp,
- props.mdDown, props.mdUp, props.only), other = (props.smDown, props.smUp, props.xlDown,
- props.xlUp, props.xsDown, props.xsUp, (0, _objectWithoutProperties3.default)(props, [ "children", "classes", "lgDown", "lgUp", "mdDown", "mdUp", "only", "smDown", "smUp", "xlDown", "xlUp", "xsDown", "xsUp" ]));
+ var children = props.children, classes = props.classes, className = props.className, only = (props.lgDown,
+ props.lgUp, props.mdDown, props.mdUp, props.only), other = (props.smDown, props.smUp,
+ props.xlDown, props.xlUp, props.xsDown, props.xsUp, (0, _objectWithoutProperties3.default)(props, [ "children", "classes", "className", "lgDown", "lgUp", "mdDown", "mdUp", "only", "smDown", "smUp", "xlDown", "xlUp", "xsDown", "xsUp" ]));
"production" !== process.env.NODE_ENV && (0, _warning2.default)(0 === (0, _keys2.default)(other).length || 1 === (0,
_keys2.default)(other).length && other.hasOwnProperty("ref"), "Material-UI: unsupported properties received " + (0,
_keys2.default)(other).join(", ") + " by ` + "`")) + (`<Hidden />` + ("`" + `.");
- for (var className = [], i = 0; i < _createBreakpoints.keys.length; i += 1) {
+ var classNames = [];
+ className && classNames.push(className);
+ for (var i = 0; i < _createBreakpoints.keys.length; i += 1) {
var breakpoint = _createBreakpoints.keys[i], breakpointUp = props[breakpoint + "Up"], breakpointDown = props[breakpoint + "Down"];
- breakpointUp && className.push(classes[breakpoint + "Up"]), breakpointDown && className.push(classes[breakpoint + "Down"]);
+ breakpointUp && classNames.push(classes[breakpoint + "Up"]), breakpointDown && classNames.push(classes[breakpoint + "Down"]);
}
if (only) {
(Array.isArray(only) ? only : [ only ]).forEach(function(breakpoint) {
- className.push(classes["only" + (0, _helpers.capitalizeFirstLetter)(breakpoint)]);
+ classNames.push(classes["only" + (0, _helpers.capitalize)(breakpoint)]);
});
}
return _react2.default.createElement("div", {
- className: className
+ className: classNames.join(" ")
}, children);
}
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _keys = __webpack_require__(41), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(6), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(11), _warning2 = _interopRequireDefault(_warning), _createBreakpoints = __webpack_require__(74), _helpers = __webpack_require__(52), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = function(theme) {
+ var _keys = __webpack_require__(50), _keys2 = _interopRequireDefault(_keys), _objectWithoutProperties2 = __webpack_require__(7), _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2), _defineProperty2 = __webpack_require__(13), _defineProperty3 = _interopRequireDefault(_defineProperty2), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _warning = __webpack_require__(12), _warning2 = _interopRequireDefault(_warning), _createBreakpoints = __webpack_require__(74), _helpers = __webpack_require__(51), _withStyles = __webpack_require__(10), _withStyles2 = _interopRequireDefault(_withStyles), styles = function(theme) {
var hidden = {
display: "none"
};
return _createBreakpoints.keys.reduce(function(acc, key) {
- return acc["only" + (0, _helpers.capitalizeFirstLetter)(key)] = (0, _defineProperty3.default)({}, theme.breakpoints.only(key), hidden),
+ return acc["only" + (0, _helpers.capitalize)(key)] = (0, _defineProperty3.default)({}, theme.breakpoints.only(key), hidden),
acc[key + "Up"] = (0, _defineProperty3.default)({}, theme.breakpoints.up(key), hidden),
acc[key + "Down"] = (0, _defineProperty3.default)({}, theme.breakpoints.down(key), hidden),
acc;
@@ -32024,8 +32121,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(__webpack_exports__, "__esModule", {
value: !0
});
- var __WEBPACK_IMPORTED_MODULE_1__container_Surface__ = (__webpack_require__(525),
- __webpack_require__(78));
+ var __WEBPACK_IMPORTED_MODULE_1__container_Surface__ = (__webpack_require__(527),
+ __webpack_require__(79));
__webpack_require__.d(__webpack_exports__, "Surface", function() {
return __WEBPACK_IMPORTED_MODULE_1__container_Surface__.a;
});
@@ -32033,35 +32130,35 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "Layer", function() {
return __WEBPACK_IMPORTED_MODULE_2__container_Layer__.a;
});
- var __WEBPACK_IMPORTED_MODULE_3__component_Legend__ = __webpack_require__(171);
+ var __WEBPACK_IMPORTED_MODULE_3__component_Legend__ = __webpack_require__(170);
__webpack_require__.d(__webpack_exports__, "Legend", function() {
return __WEBPACK_IMPORTED_MODULE_3__component_Legend__.a;
});
- var __WEBPACK_IMPORTED_MODULE_4__component_Tooltip__ = __webpack_require__(122);
+ var __WEBPACK_IMPORTED_MODULE_4__component_Tooltip__ = __webpack_require__(121);
__webpack_require__.d(__webpack_exports__, "Tooltip", function() {
return __WEBPACK_IMPORTED_MODULE_4__component_Tooltip__.a;
});
- var __WEBPACK_IMPORTED_MODULE_5__component_ResponsiveContainer__ = __webpack_require__(682);
+ var __WEBPACK_IMPORTED_MODULE_5__component_ResponsiveContainer__ = __webpack_require__(684);
__webpack_require__.d(__webpack_exports__, "ResponsiveContainer", function() {
return __WEBPACK_IMPORTED_MODULE_5__component_ResponsiveContainer__.a;
});
- var __WEBPACK_IMPORTED_MODULE_6__component_Cell__ = __webpack_require__(85);
+ var __WEBPACK_IMPORTED_MODULE_6__component_Cell__ = __webpack_require__(86);
__webpack_require__.d(__webpack_exports__, "Cell", function() {
return __WEBPACK_IMPORTED_MODULE_6__component_Cell__.a;
});
- var __WEBPACK_IMPORTED_MODULE_7__component_Text__ = __webpack_require__(55);
+ var __WEBPACK_IMPORTED_MODULE_7__component_Text__ = __webpack_require__(54);
__webpack_require__.d(__webpack_exports__, "Text", function() {
return __WEBPACK_IMPORTED_MODULE_7__component_Text__.a;
});
- var __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(43);
+ var __WEBPACK_IMPORTED_MODULE_8__component_Label__ = __webpack_require__(42);
__webpack_require__.d(__webpack_exports__, "Label", function() {
return __WEBPACK_IMPORTED_MODULE_8__component_Label__.a;
});
- var __WEBPACK_IMPORTED_MODULE_9__component_LabelList__ = __webpack_require__(45);
+ var __WEBPACK_IMPORTED_MODULE_9__component_LabelList__ = __webpack_require__(44);
__webpack_require__.d(__webpack_exports__, "LabelList", function() {
return __WEBPACK_IMPORTED_MODULE_9__component_LabelList__.a;
});
- var __WEBPACK_IMPORTED_MODULE_10__shape_Sector__ = __webpack_require__(128);
+ var __WEBPACK_IMPORTED_MODULE_10__shape_Sector__ = __webpack_require__(127);
__webpack_require__.d(__webpack_exports__, "Sector", function() {
return __WEBPACK_IMPORTED_MODULE_10__shape_Sector__.a;
});
@@ -32073,19 +32170,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "Rectangle", function() {
return __WEBPACK_IMPORTED_MODULE_12__shape_Rectangle__.a;
});
- var __WEBPACK_IMPORTED_MODULE_13__shape_Polygon__ = __webpack_require__(195);
+ var __WEBPACK_IMPORTED_MODULE_13__shape_Polygon__ = __webpack_require__(194);
__webpack_require__.d(__webpack_exports__, "Polygon", function() {
return __WEBPACK_IMPORTED_MODULE_13__shape_Polygon__.a;
});
- var __WEBPACK_IMPORTED_MODULE_14__shape_Dot__ = __webpack_require__(57);
+ var __WEBPACK_IMPORTED_MODULE_14__shape_Dot__ = __webpack_require__(56);
__webpack_require__.d(__webpack_exports__, "Dot", function() {
return __WEBPACK_IMPORTED_MODULE_14__shape_Dot__.a;
});
- var __WEBPACK_IMPORTED_MODULE_15__shape_Cross__ = __webpack_require__(323);
+ var __WEBPACK_IMPORTED_MODULE_15__shape_Cross__ = __webpack_require__(328);
__webpack_require__.d(__webpack_exports__, "Cross", function() {
return __WEBPACK_IMPORTED_MODULE_15__shape_Cross__.a;
});
- var __WEBPACK_IMPORTED_MODULE_16__shape_Symbols__ = __webpack_require__(172);
+ var __WEBPACK_IMPORTED_MODULE_16__shape_Symbols__ = __webpack_require__(171);
__webpack_require__.d(__webpack_exports__, "Symbols", function() {
return __WEBPACK_IMPORTED_MODULE_16__shape_Symbols__.a;
});
@@ -32093,43 +32190,43 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "PolarGrid", function() {
return __WEBPACK_IMPORTED_MODULE_17__polar_PolarGrid__.a;
});
- var __WEBPACK_IMPORTED_MODULE_18__polar_PolarRadiusAxis__ = __webpack_require__(129);
+ var __WEBPACK_IMPORTED_MODULE_18__polar_PolarRadiusAxis__ = __webpack_require__(128);
__webpack_require__.d(__webpack_exports__, "PolarRadiusAxis", function() {
return __WEBPACK_IMPORTED_MODULE_18__polar_PolarRadiusAxis__.a;
});
- var __WEBPACK_IMPORTED_MODULE_19__polar_PolarAngleAxis__ = __webpack_require__(130);
+ var __WEBPACK_IMPORTED_MODULE_19__polar_PolarAngleAxis__ = __webpack_require__(129);
__webpack_require__.d(__webpack_exports__, "PolarAngleAxis", function() {
return __WEBPACK_IMPORTED_MODULE_19__polar_PolarAngleAxis__.a;
});
- var __WEBPACK_IMPORTED_MODULE_20__polar_Pie__ = __webpack_require__(325);
+ var __WEBPACK_IMPORTED_MODULE_20__polar_Pie__ = __webpack_require__(330);
__webpack_require__.d(__webpack_exports__, "Pie", function() {
return __WEBPACK_IMPORTED_MODULE_20__polar_Pie__.a;
});
- var __WEBPACK_IMPORTED_MODULE_21__polar_Radar__ = __webpack_require__(326);
+ var __WEBPACK_IMPORTED_MODULE_21__polar_Radar__ = __webpack_require__(331);
__webpack_require__.d(__webpack_exports__, "Radar", function() {
return __WEBPACK_IMPORTED_MODULE_21__polar_Radar__.a;
});
- var __WEBPACK_IMPORTED_MODULE_22__polar_RadialBar__ = __webpack_require__(327);
+ var __WEBPACK_IMPORTED_MODULE_22__polar_RadialBar__ = __webpack_require__(332);
__webpack_require__.d(__webpack_exports__, "RadialBar", function() {
return __WEBPACK_IMPORTED_MODULE_22__polar_RadialBar__.a;
});
- var __WEBPACK_IMPORTED_MODULE_23__cartesian_Brush__ = __webpack_require__(328);
+ var __WEBPACK_IMPORTED_MODULE_23__cartesian_Brush__ = __webpack_require__(333);
__webpack_require__.d(__webpack_exports__, "Brush", function() {
return __WEBPACK_IMPORTED_MODULE_23__cartesian_Brush__.a;
});
- var __WEBPACK_IMPORTED_MODULE_24__cartesian_ReferenceLine__ = __webpack_require__(321);
+ var __WEBPACK_IMPORTED_MODULE_24__cartesian_ReferenceLine__ = __webpack_require__(326);
__webpack_require__.d(__webpack_exports__, "ReferenceLine", function() {
return __WEBPACK_IMPORTED_MODULE_24__cartesian_ReferenceLine__.a;
});
- var __WEBPACK_IMPORTED_MODULE_25__cartesian_ReferenceDot__ = __webpack_require__(320);
+ var __WEBPACK_IMPORTED_MODULE_25__cartesian_ReferenceDot__ = __webpack_require__(325);
__webpack_require__.d(__webpack_exports__, "ReferenceDot", function() {
return __WEBPACK_IMPORTED_MODULE_25__cartesian_ReferenceDot__.a;
});
- var __WEBPACK_IMPORTED_MODULE_26__cartesian_ReferenceArea__ = __webpack_require__(322);
+ var __WEBPACK_IMPORTED_MODULE_26__cartesian_ReferenceArea__ = __webpack_require__(327);
__webpack_require__.d(__webpack_exports__, "ReferenceArea", function() {
return __WEBPACK_IMPORTED_MODULE_26__cartesian_ReferenceArea__.a;
});
- var __WEBPACK_IMPORTED_MODULE_27__cartesian_CartesianAxis__ = __webpack_require__(330);
+ var __WEBPACK_IMPORTED_MODULE_27__cartesian_CartesianAxis__ = __webpack_require__(335);
__webpack_require__.d(__webpack_exports__, "CartesianAxis", function() {
return __WEBPACK_IMPORTED_MODULE_27__cartesian_CartesianAxis__.a;
});
@@ -32137,19 +32234,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "CartesianGrid", function() {
return __WEBPACK_IMPORTED_MODULE_28__cartesian_CartesianGrid__.a;
});
- var __WEBPACK_IMPORTED_MODULE_29__cartesian_Line__ = __webpack_require__(196);
+ var __WEBPACK_IMPORTED_MODULE_29__cartesian_Line__ = __webpack_require__(195);
__webpack_require__.d(__webpack_exports__, "Line", function() {
return __WEBPACK_IMPORTED_MODULE_29__cartesian_Line__.a;
});
- var __WEBPACK_IMPORTED_MODULE_30__cartesian_Area__ = __webpack_require__(197);
+ var __WEBPACK_IMPORTED_MODULE_30__cartesian_Area__ = __webpack_require__(196);
__webpack_require__.d(__webpack_exports__, "Area", function() {
return __WEBPACK_IMPORTED_MODULE_30__cartesian_Area__.a;
});
- var __WEBPACK_IMPORTED_MODULE_31__cartesian_Bar__ = __webpack_require__(198);
+ var __WEBPACK_IMPORTED_MODULE_31__cartesian_Bar__ = __webpack_require__(197);
__webpack_require__.d(__webpack_exports__, "Bar", function() {
return __WEBPACK_IMPORTED_MODULE_31__cartesian_Bar__.a;
});
- var __WEBPACK_IMPORTED_MODULE_32__cartesian_Scatter__ = __webpack_require__(199);
+ var __WEBPACK_IMPORTED_MODULE_32__cartesian_Scatter__ = __webpack_require__(198);
__webpack_require__.d(__webpack_exports__, "Scatter", function() {
return __WEBPACK_IMPORTED_MODULE_32__cartesian_Scatter__.a;
});
@@ -32161,11 +32258,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "YAxis", function() {
return __WEBPACK_IMPORTED_MODULE_34__cartesian_YAxis__.a;
});
- var __WEBPACK_IMPORTED_MODULE_35__cartesian_ZAxis__ = __webpack_require__(131);
+ var __WEBPACK_IMPORTED_MODULE_35__cartesian_ZAxis__ = __webpack_require__(130);
__webpack_require__.d(__webpack_exports__, "ZAxis", function() {
return __WEBPACK_IMPORTED_MODULE_35__cartesian_ZAxis__.a;
});
- var __WEBPACK_IMPORTED_MODULE_36__cartesian_ErrorBar__ = __webpack_require__(91);
+ var __WEBPACK_IMPORTED_MODULE_36__cartesian_ErrorBar__ = __webpack_require__(92);
__webpack_require__.d(__webpack_exports__, "ErrorBar", function() {
return __WEBPACK_IMPORTED_MODULE_36__cartesian_ErrorBar__.a;
});
@@ -32211,7 +32308,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_core_js_es6_math__ = __webpack_require__(526), testObject = (__webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_core_js_es6_math__),
+ var __WEBPACK_IMPORTED_MODULE_0_core_js_es6_math__ = __webpack_require__(528), testObject = (__webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_core_js_es6_math__),
{});
if (!Object.setPrototypeOf && !testObject.__proto__) {
var nativeGetPrototypeOf = Object.getPrototypeOf;
@@ -32220,21 +32317,21 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}
}, function(module, exports, __webpack_require__) {
- __webpack_require__(527), __webpack_require__(539), __webpack_require__(540), __webpack_require__(541),
- __webpack_require__(542), __webpack_require__(543), __webpack_require__(544), __webpack_require__(545),
- __webpack_require__(547), __webpack_require__(548), __webpack_require__(549), __webpack_require__(550),
- __webpack_require__(551), __webpack_require__(552), __webpack_require__(553), __webpack_require__(554),
- __webpack_require__(555), module.exports = __webpack_require__(159).Math;
+ __webpack_require__(529), __webpack_require__(541), __webpack_require__(542), __webpack_require__(543),
+ __webpack_require__(544), __webpack_require__(545), __webpack_require__(546), __webpack_require__(547),
+ __webpack_require__(549), __webpack_require__(550), __webpack_require__(551), __webpack_require__(552),
+ __webpack_require__(553), __webpack_require__(554), __webpack_require__(555), __webpack_require__(556),
+ __webpack_require__(557), module.exports = __webpack_require__(158).Math;
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(15), log1p = __webpack_require__(245), sqrt = Math.sqrt, $acosh = Math.acosh;
+ var $export = __webpack_require__(15), log1p = __webpack_require__(246), sqrt = Math.sqrt, $acosh = Math.acosh;
$export($export.S + $export.F * !($acosh && 710 == Math.floor($acosh(Number.MAX_VALUE)) && $acosh(1 / 0) == 1 / 0), "Math", {
acosh: function(x) {
return (x = +x) < 1 ? NaN : x > 94906265.62425156 ? Math.log(x) + Math.LN2 : log1p(x - 1 + sqrt(x - 1) * sqrt(x + 1));
}
});
}, function(module, exports, __webpack_require__) {
- var anObject = __webpack_require__(529), IE8_DOM_DEFINE = __webpack_require__(530), toPrimitive = __webpack_require__(532), dP = Object.defineProperty;
- exports.f = __webpack_require__(161) ? Object.defineProperty : function(O, P, Attributes) {
+ var anObject = __webpack_require__(531), IE8_DOM_DEFINE = __webpack_require__(532), toPrimitive = __webpack_require__(534), dP = Object.defineProperty;
+ exports.f = __webpack_require__(160) ? Object.defineProperty : function(O, P, Attributes) {
if (anObject(O), P = toPrimitive(P, !0), anObject(Attributes), IE8_DOM_DEFINE) try {
return dP(O, P, Attributes);
} catch (e) {}
@@ -32242,26 +32339,26 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return "value" in Attributes && (O[P] = Attributes.value), O;
};
}, function(module, exports, __webpack_require__) {
- var isObject = __webpack_require__(160);
+ var isObject = __webpack_require__(159);
module.exports = function(it) {
if (!isObject(it)) throw TypeError(it + " is not an object!");
return it;
};
}, function(module, exports, __webpack_require__) {
- module.exports = !__webpack_require__(161) && !__webpack_require__(110)(function() {
- return 7 != Object.defineProperty(__webpack_require__(531)("div"), "a", {
+ module.exports = !__webpack_require__(160) && !__webpack_require__(110)(function() {
+ return 7 != Object.defineProperty(__webpack_require__(533)("div"), "a", {
get: function() {
return 7;
}
}).a;
});
}, function(module, exports, __webpack_require__) {
- var isObject = __webpack_require__(160), document = __webpack_require__(158).document, is = isObject(document) && isObject(document.createElement);
+ var isObject = __webpack_require__(159), document = __webpack_require__(157).document, is = isObject(document) && isObject(document.createElement);
module.exports = function(it) {
return is ? document.createElement(it) : {};
};
}, function(module, exports, __webpack_require__) {
- var isObject = __webpack_require__(160);
+ var isObject = __webpack_require__(159);
module.exports = function(it, S) {
if (!isObject(it)) return it;
var fn, val;
@@ -32280,8 +32377,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}, function(module, exports, __webpack_require__) {
- var global = __webpack_require__(158), hide = __webpack_require__(244), has = __webpack_require__(535), SRC = __webpack_require__(536)("src"), $toString = Function.toString, TPL = ("" + $toString).split("toString");
- __webpack_require__(159).inspectSource = function(it) {
+ var global = __webpack_require__(157), hide = __webpack_require__(245), has = __webpack_require__(537), SRC = __webpack_require__(538)("src"), $toString = Function.toString, TPL = ("" + $toString).split("toString");
+ __webpack_require__(158).inspectSource = function(it) {
return $toString.call(it);
}, (module.exports = function(O, key, val, safe) {
var isFunction = "function" == typeof val;
@@ -32302,7 +32399,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return "Symbol(".concat(void 0 === key ? "" : key, ")_", (++id + px).toString(36));
};
}, function(module, exports, __webpack_require__) {
- var aFunction = __webpack_require__(538);
+ var aFunction = __webpack_require__(540);
module.exports = function(fn, that, length) {
if (aFunction(fn), void 0 === that) return fn;
switch (length) {
@@ -32346,7 +32443,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
});
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(15), sign = __webpack_require__(162);
+ var $export = __webpack_require__(15), sign = __webpack_require__(161);
$export($export.S, "Math", {
cbrt: function(x) {
return sign(x = +x) * Math.pow(Math.abs(x), 1 / 3);
@@ -32367,17 +32464,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
});
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(15), $expm1 = __webpack_require__(163);
+ var $export = __webpack_require__(15), $expm1 = __webpack_require__(162);
$export($export.S + $export.F * ($expm1 != Math.expm1), "Math", {
expm1: $expm1
});
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(15);
$export($export.S, "Math", {
- fround: __webpack_require__(546)
+ fround: __webpack_require__(548)
});
}, function(module, exports, __webpack_require__) {
- var sign = __webpack_require__(162), pow = Math.pow, EPSILON = pow(2, -52), EPSILON32 = pow(2, -23), MAX32 = pow(2, 127) * (2 - EPSILON32), MIN32 = pow(2, -126), roundTiesToEven = function(n) {
+ var sign = __webpack_require__(161), pow = Math.pow, EPSILON = pow(2, -52), EPSILON32 = pow(2, -23), MAX32 = pow(2, 127) * (2 - EPSILON32), MIN32 = pow(2, -126), roundTiesToEven = function(n) {
return n + 1 / EPSILON - 1 / EPSILON;
};
module.exports = Math.fround || function(x) {
@@ -32415,7 +32512,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(15);
$export($export.S, "Math", {
- log1p: __webpack_require__(245)
+ log1p: __webpack_require__(246)
});
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(15);
@@ -32427,10 +32524,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, function(module, exports, __webpack_require__) {
var $export = __webpack_require__(15);
$export($export.S, "Math", {
- sign: __webpack_require__(162)
+ sign: __webpack_require__(161)
});
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(15), expm1 = __webpack_require__(163), exp = Math.exp;
+ var $export = __webpack_require__(15), expm1 = __webpack_require__(162), exp = Math.exp;
$export($export.S + $export.F * __webpack_require__(110)(function() {
return -2e-17 != !Math.sinh(-2e-17);
}), "Math", {
@@ -32439,7 +32536,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
});
}, function(module, exports, __webpack_require__) {
- var $export = __webpack_require__(15), expm1 = __webpack_require__(163), exp = Math.exp;
+ var $export = __webpack_require__(15), expm1 = __webpack_require__(162), exp = Math.exp;
$export($export.S, "Math", {
tanh: function(x) {
var a = expm1(x = +x), b = expm1(-x);
@@ -32454,10 +32551,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
});
}, function(module, exports, __webpack_require__) {
- var memoizeCapped = __webpack_require__(557), reLeadingDot = /^\./, rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g, reEscapeChar = /\\(\\)?/g, stringToPath = memoizeCapped(function(string) {
+ var memoizeCapped = __webpack_require__(559), rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g, reEscapeChar = /\\(\\)?/g, stringToPath = memoizeCapped(function(string) {
var result = [];
- return reLeadingDot.test(string) && result.push(""), string.replace(rePropName, function(match, number, quote, string) {
- result.push(quote ? string.replace(reEscapeChar, "$1") : number || match);
+ return 46 === string.charCodeAt(0) && result.push(""), string.replace(rePropName, function(match, number, quote, subString) {
+ result.push(quote ? subString.replace(reEscapeChar, "$1") : number || match);
}), result;
});
module.exports = stringToPath;
@@ -32468,7 +32565,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), cache = result.cache;
return result;
}
- var memoize = __webpack_require__(558), MAX_MEMOIZE_SIZE = 500;
+ var memoize = __webpack_require__(560), MAX_MEMOIZE_SIZE = 500;
module.exports = memoizeCapped;
}, function(module, exports, __webpack_require__) {
function memoize(func, resolver) {
@@ -32481,7 +32578,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
return memoized.cache = new (memoize.Cache || MapCache)(), memoized;
}
- var MapCache = __webpack_require__(167), FUNC_ERROR_TEXT = "Expected a function";
+ var MapCache = __webpack_require__(166), FUNC_ERROR_TEXT = "Expected a function";
memoize.Cache = MapCache, module.exports = memoize;
}, function(module, exports, __webpack_require__) {
function mapCacheClear() {
@@ -32491,7 +32588,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
string: new Hash()
};
}
- var Hash = __webpack_require__(560), ListCache = __webpack_require__(112), Map = __webpack_require__(169);
+ var Hash = __webpack_require__(562), ListCache = __webpack_require__(112), Map = __webpack_require__(168);
module.exports = mapCacheClear;
}, function(module, exports, __webpack_require__) {
function Hash(entries) {
@@ -32501,7 +32598,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.set(entry[0], entry[1]);
}
}
- var hashClear = __webpack_require__(561), hashDelete = __webpack_require__(566), hashGet = __webpack_require__(567), hashHas = __webpack_require__(568), hashSet = __webpack_require__(569);
+ var hashClear = __webpack_require__(563), hashDelete = __webpack_require__(568), hashGet = __webpack_require__(569), hashHas = __webpack_require__(570), hashSet = __webpack_require__(571);
Hash.prototype.clear = hashClear, Hash.prototype.delete = hashDelete, Hash.prototype.get = hashGet,
Hash.prototype.has = hashHas, Hash.prototype.set = hashSet, module.exports = Hash;
}, function(module, exports, __webpack_require__) {
@@ -32514,13 +32611,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function baseIsNative(value) {
return !(!isObject(value) || isMasked(value)) && (isFunction(value) ? reIsNative : reIsHostCtor).test(toSource(value));
}
- var isFunction = __webpack_require__(8), isMasked = __webpack_require__(563), isObject = __webpack_require__(31), toSource = __webpack_require__(248), reRegExpChar = /[\\^$.*+?()[\]{}|]/g, reIsHostCtor = /^\[object .+?Constructor\]$/, funcProto = Function.prototype, objectProto = Object.prototype, funcToString = funcProto.toString, hasOwnProperty = objectProto.hasOwnProperty, reIsNative = RegExp("^" + funcToString.call(hasOwnProperty).replace(reRegExpChar, "\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, "$1.*?") + "$");
+ var isFunction = __webpack_require__(8), isMasked = __webpack_require__(565), isObject = __webpack_require__(31), toSource = __webpack_require__(249), reRegExpChar = /[\\^$.*+?()[\]{}|]/g, reIsHostCtor = /^\[object .+?Constructor\]$/, funcProto = Function.prototype, objectProto = Object.prototype, funcToString = funcProto.toString, hasOwnProperty = objectProto.hasOwnProperty, reIsNative = RegExp("^" + funcToString.call(hasOwnProperty).replace(reRegExpChar, "\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, "$1.*?") + "$");
module.exports = baseIsNative;
}, function(module, exports, __webpack_require__) {
function isMasked(func) {
return !!maskSrcKey && maskSrcKey in func;
}
- var coreJsData = __webpack_require__(564), maskSrcKey = function() {
+ var coreJsData = __webpack_require__(566), maskSrcKey = function() {
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || "");
return uid ? "Symbol(src)_1." + uid : "";
}();
@@ -32635,7 +32732,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function toString(value) {
return null == value ? "" : baseToString(value);
}
- var baseToString = __webpack_require__(581);
+ var baseToString = __webpack_require__(583);
module.exports = toString;
}, function(module, exports, __webpack_require__) {
function baseToString(value) {
@@ -32645,7 +32742,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var result = value + "";
return "0" == result && 1 / value == -INFINITY ? "-0" : result;
}
- var Symbol = __webpack_require__(77), arrayMap = __webpack_require__(115), isArray = __webpack_require__(12), isSymbol = __webpack_require__(62), INFINITY = 1 / 0, symbolProto = Symbol ? Symbol.prototype : void 0, symbolToString = symbolProto ? symbolProto.toString : void 0;
+ var Symbol = __webpack_require__(78), arrayMap = __webpack_require__(80), isArray = __webpack_require__(11), isSymbol = __webpack_require__(61), INFINITY = 1 / 0, symbolProto = Symbol ? Symbol.prototype : void 0, symbolToString = symbolProto ? symbolProto.toString : void 0;
module.exports = baseToString;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -32675,7 +32772,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_0_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_react__), __WEBPACK_IMPORTED_MODULE_1_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_1_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_prop_types__), __WEBPACK_IMPORTED_MODULE_2_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_2_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_classnames__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__container_Surface__ = __webpack_require__(78), __WEBPACK_IMPORTED_MODULE_5__shape_Symbols__ = __webpack_require__(172), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_0_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_react__), __WEBPACK_IMPORTED_MODULE_1_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_1_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_prop_types__), __WEBPACK_IMPORTED_MODULE_2_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_2_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_classnames__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__container_Surface__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_5__shape_Symbols__ = __webpack_require__(171), __WEBPACK_IMPORTED_MODULE_6__util_ReactUtils__ = __webpack_require__(4), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -32812,7 +32909,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = DefaultLegendContent;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(79), __webpack_require__(54), __webpack_require__(80);
+ __webpack_require__(81), __webpack_require__(53), __webpack_require__(82);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function Path() {
@@ -32865,7 +32962,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, __webpack_exports__.a = path;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(54), __webpack_require__(586), __webpack_require__(587), __webpack_require__(80);
+ __webpack_require__(53), __webpack_require__(588), __webpack_require__(589), __webpack_require__(82);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_exports__.a = function(a, b) {
@@ -32878,14 +32975,14 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(250), __webpack_require__(249), __webpack_require__(251);
+ __webpack_require__(251), __webpack_require__(250), __webpack_require__(252);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(79), __webpack_require__(253), __webpack_require__(54), __webpack_require__(175),
- __webpack_require__(252);
+ __webpack_require__(81), __webpack_require__(254), __webpack_require__(53), __webpack_require__(174),
+ __webpack_require__(253);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_1__symbol_circle__ = __webpack_require__(254), __WEBPACK_IMPORTED_MODULE_2__symbol_cross__ = __webpack_require__(255), __WEBPACK_IMPORTED_MODULE_3__symbol_diamond__ = __webpack_require__(256), __WEBPACK_IMPORTED_MODULE_4__symbol_star__ = __webpack_require__(257), __WEBPACK_IMPORTED_MODULE_5__symbol_square__ = __webpack_require__(258), __WEBPACK_IMPORTED_MODULE_6__symbol_triangle__ = __webpack_require__(259), __WEBPACK_IMPORTED_MODULE_7__symbol_wye__ = __webpack_require__(260), __WEBPACK_IMPORTED_MODULE_8__constant__ = __webpack_require__(54);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_path__ = __webpack_require__(81), __WEBPACK_IMPORTED_MODULE_1__symbol_circle__ = __webpack_require__(255), __WEBPACK_IMPORTED_MODULE_2__symbol_cross__ = __webpack_require__(256), __WEBPACK_IMPORTED_MODULE_3__symbol_diamond__ = __webpack_require__(257), __WEBPACK_IMPORTED_MODULE_4__symbol_star__ = __webpack_require__(258), __WEBPACK_IMPORTED_MODULE_5__symbol_square__ = __webpack_require__(259), __WEBPACK_IMPORTED_MODULE_6__symbol_triangle__ = __webpack_require__(260), __WEBPACK_IMPORTED_MODULE_7__symbol_wye__ = __webpack_require__(261), __WEBPACK_IMPORTED_MODULE_8__constant__ = __webpack_require__(53);
__WEBPACK_IMPORTED_MODULE_1__symbol_circle__.a, __WEBPACK_IMPORTED_MODULE_2__symbol_cross__.a,
__WEBPACK_IMPORTED_MODULE_3__symbol_diamond__.a, __WEBPACK_IMPORTED_MODULE_5__symbol_square__.a,
__WEBPACK_IMPORTED_MODULE_4__symbol_star__.a, __WEBPACK_IMPORTED_MODULE_6__symbol_triangle__.a,
@@ -32913,7 +33010,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function BasisClosed(context) {
this._context = context;
}
- var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(119), __WEBPACK_IMPORTED_MODULE_1__basis__ = __webpack_require__(120);
+ var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(118), __WEBPACK_IMPORTED_MODULE_1__basis__ = __webpack_require__(119);
BasisClosed.prototype = {
areaStart: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
areaEnd: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
@@ -32964,7 +33061,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function BasisOpen(context) {
this._context = context;
}
- var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(120);
+ var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(119);
BasisOpen.prototype = {
areaStart: function() {
this._line = 0;
@@ -33011,7 +33108,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function Bundle(context, beta) {
this._basis = new __WEBPACK_IMPORTED_MODULE_0__basis__.a(context), this._beta = beta;
}
- var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(120);
+ var __WEBPACK_IMPORTED_MODULE_0__basis__ = __webpack_require__(119);
Bundle.prototype = {
lineStart: function() {
this._x = [], this._y = [], this._basis.lineStart();
@@ -33039,7 +33136,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function CatmullRomClosed(context, alpha) {
this._context = context, this._alpha = alpha;
}
- var __WEBPACK_IMPORTED_MODULE_0__cardinalClosed__ = __webpack_require__(261), __WEBPACK_IMPORTED_MODULE_1__noop__ = __webpack_require__(119), __WEBPACK_IMPORTED_MODULE_2__catmullRom__ = __webpack_require__(176);
+ var __WEBPACK_IMPORTED_MODULE_0__cardinalClosed__ = __webpack_require__(262), __WEBPACK_IMPORTED_MODULE_1__noop__ = __webpack_require__(118), __WEBPACK_IMPORTED_MODULE_2__catmullRom__ = __webpack_require__(175);
CatmullRomClosed.prototype = {
areaStart: __WEBPACK_IMPORTED_MODULE_1__noop__.a,
areaEnd: __WEBPACK_IMPORTED_MODULE_1__noop__.a,
@@ -33100,7 +33197,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function CatmullRomOpen(context, alpha) {
this._context = context, this._alpha = alpha;
}
- var __WEBPACK_IMPORTED_MODULE_0__cardinalOpen__ = __webpack_require__(262), __WEBPACK_IMPORTED_MODULE_1__catmullRom__ = __webpack_require__(176);
+ var __WEBPACK_IMPORTED_MODULE_0__cardinalOpen__ = __webpack_require__(263), __WEBPACK_IMPORTED_MODULE_1__catmullRom__ = __webpack_require__(175);
CatmullRomOpen.prototype = {
areaStart: function() {
this._line = 0;
@@ -33157,7 +33254,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function LinearClosed(context) {
this._context = context;
}
- var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(119);
+ var __WEBPACK_IMPORTED_MODULE_0__noop__ = __webpack_require__(118);
LinearClosed.prototype = {
areaStart: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
areaEnd: __WEBPACK_IMPORTED_MODULE_0__noop__.a,
@@ -33355,7 +33452,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function stackValue(d, key) {
return d[key];
}
- var __WEBPACK_IMPORTED_MODULE_0__array__ = __webpack_require__(253), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(54), __WEBPACK_IMPORTED_MODULE_2__offset_none__ = __webpack_require__(81), __WEBPACK_IMPORTED_MODULE_3__order_none__ = __webpack_require__(82);
+ var __WEBPACK_IMPORTED_MODULE_0__array__ = __webpack_require__(254), __WEBPACK_IMPORTED_MODULE_1__constant__ = __webpack_require__(53), __WEBPACK_IMPORTED_MODULE_2__offset_none__ = __webpack_require__(83), __WEBPACK_IMPORTED_MODULE_3__order_none__ = __webpack_require__(84);
__webpack_exports__.a = function() {
function stack(data) {
var i, oz, kz = keys.apply(this, arguments), m = data.length, n = kz.length, sz = new Array(n);
@@ -33384,7 +33481,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(81);
+ var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(83);
__webpack_exports__.a = function(series, order) {
if ((n = series.length) > 0) {
for (var i, n, y, j = 0, m = series[0].length; j < m; ++j) {
@@ -33398,7 +33495,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(81);
+ var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(83);
__webpack_exports__.a = function(series, order) {
if ((n = series.length) > 0) {
for (var n, j = 0, s0 = series[order[0]], m = s0.length; j < m; ++j) {
@@ -33410,7 +33507,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(81);
+ var __WEBPACK_IMPORTED_MODULE_0__none__ = __webpack_require__(83);
__webpack_exports__.a = function(series, order) {
if ((n = series.length) > 0 && (m = (s0 = series[order[0]]).length) > 0) {
for (var s0, m, n, y = 0, j = 1; j < m; ++j) {
@@ -33428,13 +33525,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(177);
+ __webpack_require__(176);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(82), __webpack_require__(177);
+ __webpack_require__(84), __webpack_require__(176);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(82);
+ __webpack_require__(84);
}, function(module, exports, __webpack_require__) {
function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) {
var objIsArr = isArray(object), othIsArr = isArray(other), objTag = objIsArr ? arrayTag : getTag(object), othTag = othIsArr ? arrayTag : getTag(other);
@@ -33454,7 +33551,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return !!isSameTag && (stack || (stack = new Stack()), equalObjects(object, other, bitmask, customizer, equalFunc, stack));
}
- var Stack = __webpack_require__(264), equalArrays = __webpack_require__(265), equalByTag = __webpack_require__(617), equalObjects = __webpack_require__(621), getTag = __webpack_require__(635), isArray = __webpack_require__(12), isBuffer = __webpack_require__(270), isTypedArray = __webpack_require__(271), COMPARE_PARTIAL_FLAG = 1, argsTag = "[object Arguments]", arrayTag = "[object Array]", objectTag = "[object Object]", objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
+ var Stack = __webpack_require__(265), equalArrays = __webpack_require__(266), equalByTag = __webpack_require__(619), equalObjects = __webpack_require__(623), getTag = __webpack_require__(637), isArray = __webpack_require__(11), isBuffer = __webpack_require__(271), isTypedArray = __webpack_require__(273), COMPARE_PARTIAL_FLAG = 1, argsTag = "[object Arguments]", arrayTag = "[object Array]", objectTag = "[object Object]", objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
module.exports = baseIsEqualDeep;
}, function(module, exports, __webpack_require__) {
function stackClear() {
@@ -33489,7 +33586,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return data.set(key, value), this.size = data.size, this;
}
- var ListCache = __webpack_require__(112), Map = __webpack_require__(169), MapCache = __webpack_require__(167), LARGE_ARRAY_SIZE = 200;
+ var ListCache = __webpack_require__(112), Map = __webpack_require__(168), MapCache = __webpack_require__(166), LARGE_ARRAY_SIZE = 200;
module.exports = stackSet;
}, function(module, exports) {
function setCacheAdd(value) {
@@ -33547,7 +33644,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return !1;
}
- var Symbol = __webpack_require__(77), Uint8Array = __webpack_require__(618), eq = __webpack_require__(168), equalArrays = __webpack_require__(265), mapToArray = __webpack_require__(619), setToArray = __webpack_require__(620), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2, boolTag = "[object Boolean]", dateTag = "[object Date]", errorTag = "[object Error]", mapTag = "[object Map]", numberTag = "[object Number]", regexpTag = "[object RegExp]", setTag = "[object Set]", stringTag = "[object String]", symbolTag = "[object Symbol]", arrayBufferTag = "[object ArrayBuffer]", dataViewTag = "[object DataView]", symbolProto = Symbol ? Symbol.prototype : void 0, symbolValueOf = symbolProto ? symbolProto.valueOf : void 0;
+ var Symbol = __webpack_require__(78), Uint8Array = __webpack_require__(620), eq = __webpack_require__(167), equalArrays = __webpack_require__(266), mapToArray = __webpack_require__(621), setToArray = __webpack_require__(622), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2, boolTag = "[object Boolean]", dateTag = "[object Date]", errorTag = "[object Error]", mapTag = "[object Map]", numberTag = "[object Number]", regexpTag = "[object RegExp]", setTag = "[object Set]", stringTag = "[object String]", symbolTag = "[object Symbol]", arrayBufferTag = "[object ArrayBuffer]", dataViewTag = "[object DataView]", symbolProto = Symbol ? Symbol.prototype : void 0, symbolValueOf = symbolProto ? symbolProto.valueOf : void 0;
module.exports = equalByTag;
}, function(module, exports, __webpack_require__) {
var root = __webpack_require__(32), Uint8Array = root.Uint8Array;
@@ -33596,23 +33693,23 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return stack.delete(object), stack.delete(other), result;
}
- var getAllKeys = __webpack_require__(622), COMPARE_PARTIAL_FLAG = 1, objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
+ var getAllKeys = __webpack_require__(624), COMPARE_PARTIAL_FLAG = 1, objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
module.exports = equalObjects;
}, function(module, exports, __webpack_require__) {
function getAllKeys(object) {
return baseGetAllKeys(object, keys, getSymbols);
}
- var baseGetAllKeys = __webpack_require__(623), getSymbols = __webpack_require__(624), keys = __webpack_require__(179);
+ var baseGetAllKeys = __webpack_require__(625), getSymbols = __webpack_require__(626), keys = __webpack_require__(178);
module.exports = getAllKeys;
}, function(module, exports, __webpack_require__) {
function baseGetAllKeys(object, keysFunc, symbolsFunc) {
var result = keysFunc(object);
return isArray(object) ? result : arrayPush(result, symbolsFunc(object));
}
- var arrayPush = __webpack_require__(268), isArray = __webpack_require__(12);
+ var arrayPush = __webpack_require__(269), isArray = __webpack_require__(11);
module.exports = baseGetAllKeys;
}, function(module, exports, __webpack_require__) {
- var arrayFilter = __webpack_require__(269), stubArray = __webpack_require__(625), objectProto = Object.prototype, propertyIsEnumerable = objectProto.propertyIsEnumerable, nativeGetSymbols = Object.getOwnPropertySymbols, getSymbols = nativeGetSymbols ? function(object) {
+ var arrayFilter = __webpack_require__(270), stubArray = __webpack_require__(627), objectProto = Object.prototype, propertyIsEnumerable = objectProto.propertyIsEnumerable, nativeGetSymbols = Object.getOwnPropertySymbols, getSymbols = nativeGetSymbols ? function(object) {
return null == object ? [] : (object = Object(object), arrayFilter(nativeGetSymbols(object), function(symbol) {
return propertyIsEnumerable.call(object, symbol);
}));
@@ -33629,7 +33726,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var key in value) !inherited && !hasOwnProperty.call(value, key) || skipIndexes && ("length" == key || isBuff && ("offset" == key || "parent" == key) || isType && ("buffer" == key || "byteLength" == key || "byteOffset" == key) || isIndex(key, length)) || result.push(key);
return result;
}
- var baseTimes = __webpack_require__(627), isArguments = __webpack_require__(180), isArray = __webpack_require__(12), isBuffer = __webpack_require__(270), isIndex = __webpack_require__(181), isTypedArray = __webpack_require__(271), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
+ var baseTimes = __webpack_require__(629), isArguments = __webpack_require__(179), isArray = __webpack_require__(11), isBuffer = __webpack_require__(271), isIndex = __webpack_require__(180), isTypedArray = __webpack_require__(273), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
module.exports = arrayLikeKeys;
}, function(module, exports) {
function baseTimes(n, iteratee) {
@@ -33641,7 +33738,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function baseIsArguments(value) {
return isObjectLike(value) && baseGetTag(value) == argsTag;
}
- var baseGetTag = __webpack_require__(42), isObjectLike = __webpack_require__(36), argsTag = "[object Arguments]";
+ var baseGetTag = __webpack_require__(41), isObjectLike = __webpack_require__(36), argsTag = "[object Arguments]";
module.exports = baseIsArguments;
}, function(module, exports) {
function stubFalse() {
@@ -33652,19 +33749,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function baseIsTypedArray(value) {
return isObjectLike(value) && isLength(value.length) && !!typedArrayTags[baseGetTag(value)];
}
- var baseGetTag = __webpack_require__(42), isLength = __webpack_require__(182), isObjectLike = __webpack_require__(36), typedArrayTags = {};
+ var baseGetTag = __webpack_require__(41), isLength = __webpack_require__(181), isObjectLike = __webpack_require__(36), typedArrayTags = {};
typedArrayTags["[object Float32Array]"] = typedArrayTags["[object Float64Array]"] = typedArrayTags["[object Int8Array]"] = typedArrayTags["[object Int16Array]"] = typedArrayTags["[object Int32Array]"] = typedArrayTags["[object Uint8Array]"] = typedArrayTags["[object Uint8ClampedArray]"] = typedArrayTags["[object Uint16Array]"] = typedArrayTags["[object Uint32Array]"] = !0,
typedArrayTags["[object Arguments]"] = typedArrayTags["[object Array]"] = typedArrayTags["[object ArrayBuffer]"] = typedArrayTags["[object Boolean]"] = typedArrayTags["[object DataView]"] = typedArrayTags["[object Date]"] = typedArrayTags["[object Error]"] = typedArrayTags["[object Function]"] = typedArrayTags["[object Map]"] = typedArrayTags["[object Number]"] = typedArrayTags["[object Object]"] = typedArrayTags["[object RegExp]"] = typedArrayTags["[object Set]"] = typedArrayTags["[object String]"] = typedArrayTags["[object WeakMap]"] = !1,
module.exports = baseIsTypedArray;
}, function(module, exports, __webpack_require__) {
(function(module) {
- var freeGlobal = __webpack_require__(242), freeExports = "object" == typeof exports && exports && !exports.nodeType && exports, freeModule = freeExports && "object" == typeof module && module && !module.nodeType && module, moduleExports = freeModule && freeModule.exports === freeExports, freeProcess = moduleExports && freeGlobal.process, nodeUtil = function() {
+ var freeGlobal = __webpack_require__(243), freeExports = "object" == typeof exports && exports && !exports.nodeType && exports, freeModule = freeExports && "object" == typeof module && module && !module.nodeType && module, moduleExports = freeModule && freeModule.exports === freeExports, freeProcess = moduleExports && freeGlobal.process, nodeUtil = function() {
try {
return freeProcess && freeProcess.binding && freeProcess.binding("util");
} catch (e) {}
}();
module.exports = nodeUtil;
- }).call(exports, __webpack_require__(154)(module));
+ }).call(exports, __webpack_require__(272)(module));
}, function(module, exports, __webpack_require__) {
function baseKeys(object) {
if (!isPrototype(object)) return nativeKeys(object);
@@ -33672,7 +33769,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
for (var key in Object(object)) hasOwnProperty.call(object, key) && "constructor" != key && result.push(key);
return result;
}
- var isPrototype = __webpack_require__(633), nativeKeys = __webpack_require__(634), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
+ var isPrototype = __webpack_require__(635), nativeKeys = __webpack_require__(636), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
module.exports = baseKeys;
}, function(module, exports) {
function isPrototype(value) {
@@ -33682,10 +33779,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var objectProto = Object.prototype;
module.exports = isPrototype;
}, function(module, exports, __webpack_require__) {
- var overArg = __webpack_require__(272), nativeKeys = overArg(Object.keys, Object);
+ var overArg = __webpack_require__(274), nativeKeys = overArg(Object.keys, Object);
module.exports = nativeKeys;
}, function(module, exports, __webpack_require__) {
- var DataView = __webpack_require__(636), Map = __webpack_require__(169), Promise = __webpack_require__(637), Set = __webpack_require__(638), WeakMap = __webpack_require__(639), baseGetTag = __webpack_require__(42), toSource = __webpack_require__(248), dataViewCtorString = toSource(DataView), mapCtorString = toSource(Map), promiseCtorString = toSource(Promise), setCtorString = toSource(Set), weakMapCtorString = toSource(WeakMap), getTag = baseGetTag;
+ var DataView = __webpack_require__(638), Map = __webpack_require__(168), Promise = __webpack_require__(639), Set = __webpack_require__(640), WeakMap = __webpack_require__(641), baseGetTag = __webpack_require__(41), toSource = __webpack_require__(249), dataViewCtorString = toSource(DataView), mapCtorString = toSource(Map), promiseCtorString = toSource(Promise), setCtorString = toSource(Set), weakMapCtorString = toSource(WeakMap), getTag = baseGetTag;
(DataView && "[object DataView]" != getTag(new DataView(new ArrayBuffer(1))) || Map && "[object Map]" != getTag(new Map()) || Promise && "[object Promise]" != getTag(Promise.resolve()) || Set && "[object Set]" != getTag(new Set()) || WeakMap && "[object WeakMap]" != getTag(new WeakMap())) && (getTag = function(value) {
var result = baseGetTag(value), Ctor = "[object Object]" == result ? value.constructor : void 0, ctorString = Ctor ? toSource(Ctor) : "";
if (ctorString) switch (ctorString) {
@@ -33707,16 +33804,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return result;
}), module.exports = getTag;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), root = __webpack_require__(32), DataView = getNative(root, "DataView");
+ var getNative = __webpack_require__(52), root = __webpack_require__(32), DataView = getNative(root, "DataView");
module.exports = DataView;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), root = __webpack_require__(32), Promise = getNative(root, "Promise");
+ var getNative = __webpack_require__(52), root = __webpack_require__(32), Promise = getNative(root, "Promise");
module.exports = Promise;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), root = __webpack_require__(32), Set = getNative(root, "Set");
+ var getNative = __webpack_require__(52), root = __webpack_require__(32), Set = getNative(root, "Set");
module.exports = Set;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), root = __webpack_require__(32), WeakMap = getNative(root, "WeakMap");
+ var getNative = __webpack_require__(52), root = __webpack_require__(32), WeakMap = getNative(root, "WeakMap");
module.exports = WeakMap;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -33763,7 +33860,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
exports.default = createAnimateManager;
- var _setRafTimeout = __webpack_require__(641), _setRafTimeout2 = function(obj) {
+ var _setRafTimeout = __webpack_require__(643), _setRafTimeout2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -33780,7 +33877,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.default = setRafTimeout;
- var _raf = __webpack_require__(273), _raf2 = function(obj) {
+ var _raf = __webpack_require__(275), _raf2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -33838,17 +33935,17 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
}), exports.shallowEqual = void 0;
- var _isPlainObject2 = __webpack_require__(274), _isPlainObject3 = _interopRequireDefault(_isPlainObject2), _isEqual2 = __webpack_require__(34), _isEqual3 = _interopRequireDefault(_isEqual2), _isArray2 = __webpack_require__(12), _isArray3 = _interopRequireDefault(_isArray2), _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(obj) {
+ var _isPlainObject2 = __webpack_require__(276), _isPlainObject3 = _interopRequireDefault(_isPlainObject2), _isEqual2 = __webpack_require__(34), _isEqual3 = _interopRequireDefault(_isEqual2), _isArray2 = __webpack_require__(11), _isArray3 = _interopRequireDefault(_isArray2), _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(obj) {
return typeof obj;
} : function(obj) {
return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
exports.shallowEqual = shallowEqual, exports.default = pureRenderDecorator;
}, function(module, exports, __webpack_require__) {
- var overArg = __webpack_require__(272), getPrototype = overArg(Object.getPrototypeOf, Object);
+ var overArg = __webpack_require__(274), getPrototype = overArg(Object.getPrototypeOf, Object);
module.exports = getPrototype;
}, function(module, exports, __webpack_require__) {
- var arrayMap = __webpack_require__(115), baseIntersection = __webpack_require__(646), baseRest = __webpack_require__(276), castArrayLikeObject = __webpack_require__(660), intersection = baseRest(function(arrays) {
+ var arrayMap = __webpack_require__(80), baseIntersection = __webpack_require__(648), baseRest = __webpack_require__(278), castArrayLikeObject = __webpack_require__(662), intersection = baseRest(function(arrays) {
var mapped = arrayMap(arrays, castArrayLikeObject);
return mapped.length && mapped[0] === arrays[0] ? baseIntersection(mapped) : [];
});
@@ -33874,19 +33971,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return result;
}
- var SetCache = __webpack_require__(266), arrayIncludes = __webpack_require__(647), arrayIncludesWith = __webpack_require__(652), arrayMap = __webpack_require__(115), baseUnary = __webpack_require__(183), cacheHas = __webpack_require__(267), nativeMin = Math.min;
+ var SetCache = __webpack_require__(267), arrayIncludes = __webpack_require__(649), arrayIncludesWith = __webpack_require__(654), arrayMap = __webpack_require__(80), baseUnary = __webpack_require__(182), cacheHas = __webpack_require__(268), nativeMin = Math.min;
module.exports = baseIntersection;
}, function(module, exports, __webpack_require__) {
function arrayIncludes(array, value) {
return !!(null == array ? 0 : array.length) && baseIndexOf(array, value, 0) > -1;
}
- var baseIndexOf = __webpack_require__(648);
+ var baseIndexOf = __webpack_require__(650);
module.exports = arrayIncludes;
}, function(module, exports, __webpack_require__) {
function baseIndexOf(array, value, fromIndex) {
return value === value ? strictIndexOf(array, value, fromIndex) : baseFindIndex(array, baseIsNaN, fromIndex);
}
- var baseFindIndex = __webpack_require__(649), baseIsNaN = __webpack_require__(650), strictIndexOf = __webpack_require__(651);
+ var baseFindIndex = __webpack_require__(651), baseIsNaN = __webpack_require__(652), strictIndexOf = __webpack_require__(653);
module.exports = baseIndexOf;
}, function(module, exports) {
function baseFindIndex(array, predicate, fromIndex, fromRight) {
@@ -33920,7 +34017,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return otherArgs[start] = transform(array), apply(func, this, otherArgs);
};
}
- var apply = __webpack_require__(654), nativeMax = Math.max;
+ var apply = __webpack_require__(656), nativeMax = Math.max;
module.exports = overRest;
}, function(module, exports) {
function apply(func, thisArg, args) {
@@ -33941,10 +34038,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
module.exports = apply;
}, function(module, exports, __webpack_require__) {
- var baseSetToString = __webpack_require__(656), shortOut = __webpack_require__(659), setToString = shortOut(baseSetToString);
+ var baseSetToString = __webpack_require__(658), shortOut = __webpack_require__(661), setToString = shortOut(baseSetToString);
module.exports = setToString;
}, function(module, exports, __webpack_require__) {
- var constant = __webpack_require__(657), defineProperty = __webpack_require__(658), identity = __webpack_require__(63), baseSetToString = defineProperty ? function(func, string) {
+ var constant = __webpack_require__(659), defineProperty = __webpack_require__(660), identity = __webpack_require__(62), baseSetToString = defineProperty ? function(func, string) {
return defineProperty(func, "toString", {
configurable: !0,
enumerable: !1,
@@ -33961,7 +34058,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
module.exports = constant;
}, function(module, exports, __webpack_require__) {
- var getNative = __webpack_require__(53), defineProperty = function() {
+ var getNative = __webpack_require__(52), defineProperty = function() {
try {
var func = getNative(Object, "defineProperty");
return func({}, "", {}), func;
@@ -33985,13 +34082,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function castArrayLikeObject(value) {
return isArrayLikeObject(value) ? value : [];
}
- var isArrayLikeObject = __webpack_require__(661);
+ var isArrayLikeObject = __webpack_require__(663);
module.exports = castArrayLikeObject;
}, function(module, exports, __webpack_require__) {
function isArrayLikeObject(value) {
return isObjectLike(value) && isArrayLike(value);
}
- var isArrayLike = __webpack_require__(83), isObjectLike = __webpack_require__(36);
+ var isArrayLike = __webpack_require__(85), isObjectLike = __webpack_require__(36);
module.exports = isArrayLikeObject;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -34018,7 +34115,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _filter2 = __webpack_require__(663), _filter3 = _interopRequireDefault(_filter2), _extends = Object.assign || function(target) {
+ var _filter2 = __webpack_require__(665), _filter3 = _interopRequireDefault(_filter2), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -34046,7 +34143,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (Symbol.iterator in Object(arr)) return sliceIterator(arr, i);
throw new TypeError("Invalid attempt to destructure non-iterable instance");
};
- }(), _raf = __webpack_require__(273), _raf2 = _interopRequireDefault(_raf), _util = __webpack_require__(123), alpha = function(begin, end, k) {
+ }(), _raf = __webpack_require__(275), _raf2 = _interopRequireDefault(_raf), _util = __webpack_require__(122), alpha = function(begin, end, k) {
return begin + (end - begin) * k;
}, needContinue = function(_ref) {
return _ref.from !== _ref.to;
@@ -34112,7 +34209,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function filter(collection, predicate) {
return (isArray(collection) ? arrayFilter : baseFilter)(collection, baseIteratee(predicate, 3));
}
- var arrayFilter = __webpack_require__(269), baseFilter = __webpack_require__(664), baseIteratee = __webpack_require__(84), isArray = __webpack_require__(12);
+ var arrayFilter = __webpack_require__(270), baseFilter = __webpack_require__(666), baseIteratee = __webpack_require__(63), isArray = __webpack_require__(11);
module.exports = filter;
}, function(module, exports, __webpack_require__) {
function baseFilter(collection, predicate) {
@@ -34121,16 +34218,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
predicate(value, index, collection) && result.push(value);
}), result;
}
- var baseEach = __webpack_require__(277);
+ var baseEach = __webpack_require__(279);
module.exports = baseFilter;
}, function(module, exports, __webpack_require__) {
function baseForOwn(object, iteratee) {
return object && baseFor(object, iteratee, keys);
}
- var baseFor = __webpack_require__(666), keys = __webpack_require__(179);
+ var baseFor = __webpack_require__(668), keys = __webpack_require__(178);
module.exports = baseForOwn;
}, function(module, exports, __webpack_require__) {
- var createBaseFor = __webpack_require__(667), baseFor = createBaseFor();
+ var createBaseFor = __webpack_require__(669), baseFor = createBaseFor();
module.exports = baseFor;
}, function(module, exports) {
function createBaseFor(fromRight) {
@@ -34152,7 +34249,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return collection;
};
}
- var isArrayLike = __webpack_require__(83);
+ var isArrayLike = __webpack_require__(85);
module.exports = createBaseEach;
}, function(module, exports, __webpack_require__) {
function baseMatches(source) {
@@ -34161,7 +34258,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return object === source || baseIsMatch(object, source, matchData);
};
}
- var baseIsMatch = __webpack_require__(670), getMatchData = __webpack_require__(671), matchesStrictComparable = __webpack_require__(279);
+ var baseIsMatch = __webpack_require__(672), getMatchData = __webpack_require__(673), matchesStrictComparable = __webpack_require__(281);
module.exports = baseMatches;
}, function(module, exports, __webpack_require__) {
function baseIsMatch(object, source, matchData, customizer) {
@@ -34184,7 +34281,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return !0;
}
- var Stack = __webpack_require__(264), baseIsEqual = __webpack_require__(178), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
+ var Stack = __webpack_require__(265), baseIsEqual = __webpack_require__(177), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
module.exports = baseIsMatch;
}, function(module, exports, __webpack_require__) {
function getMatchData(object) {
@@ -34194,7 +34291,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return result;
}
- var isStrictComparable = __webpack_require__(278), keys = __webpack_require__(179);
+ var isStrictComparable = __webpack_require__(280), keys = __webpack_require__(178);
module.exports = getMatchData;
}, function(module, exports, __webpack_require__) {
function baseMatchesProperty(path, srcValue) {
@@ -34203,13 +34300,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return void 0 === objValue && objValue === srcValue ? hasIn(object, path) : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG);
};
}
- var baseIsEqual = __webpack_require__(178), get = __webpack_require__(165), hasIn = __webpack_require__(673), isKey = __webpack_require__(166), isStrictComparable = __webpack_require__(278), matchesStrictComparable = __webpack_require__(279), toKey = __webpack_require__(116), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
+ var baseIsEqual = __webpack_require__(177), get = __webpack_require__(164), hasIn = __webpack_require__(675), isKey = __webpack_require__(165), isStrictComparable = __webpack_require__(280), matchesStrictComparable = __webpack_require__(281), toKey = __webpack_require__(115), COMPARE_PARTIAL_FLAG = 1, COMPARE_UNORDERED_FLAG = 2;
module.exports = baseMatchesProperty;
}, function(module, exports, __webpack_require__) {
function hasIn(object, path) {
return null != object && hasPath(object, path, baseHasIn);
}
- var baseHasIn = __webpack_require__(674), hasPath = __webpack_require__(675);
+ var baseHasIn = __webpack_require__(676), hasPath = __webpack_require__(677);
module.exports = hasIn;
}, function(module, exports) {
function baseHasIn(object, key) {
@@ -34226,13 +34323,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return result || ++index != length ? result : !!(length = null == object ? 0 : object.length) && isLength(length) && isIndex(key, length) && (isArray(object) || isArguments(object));
}
- var castPath = __webpack_require__(247), isArguments = __webpack_require__(180), isArray = __webpack_require__(12), isIndex = __webpack_require__(181), isLength = __webpack_require__(182), toKey = __webpack_require__(116);
+ var castPath = __webpack_require__(248), isArguments = __webpack_require__(179), isArray = __webpack_require__(11), isIndex = __webpack_require__(180), isLength = __webpack_require__(181), toKey = __webpack_require__(115);
module.exports = hasPath;
}, function(module, exports, __webpack_require__) {
function property(path) {
return isKey(path) ? baseProperty(toKey(path)) : basePropertyDeep(path);
}
- var baseProperty = __webpack_require__(677), basePropertyDeep = __webpack_require__(678), isKey = __webpack_require__(166), toKey = __webpack_require__(116);
+ var baseProperty = __webpack_require__(679), basePropertyDeep = __webpack_require__(680), isKey = __webpack_require__(165), toKey = __webpack_require__(115);
module.exports = property;
}, function(module, exports) {
function baseProperty(key) {
@@ -34247,7 +34344,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return baseGet(object, path);
};
}
- var baseGet = __webpack_require__(246);
+ var baseGet = __webpack_require__(247);
module.exports = basePropertyDeep;
}, function(module, exports, __webpack_require__) {
"use strict";
@@ -34289,7 +34386,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _TransitionGroup = __webpack_require__(236), _TransitionGroup2 = _interopRequireDefault(_TransitionGroup), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _AnimateGroupChild = __webpack_require__(680), _AnimateGroupChild2 = _interopRequireDefault(_AnimateGroupChild), AnimateGroup = (_temp = _class = function(_Component) {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _TransitionGroup = __webpack_require__(237), _TransitionGroup2 = _interopRequireDefault(_TransitionGroup), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _AnimateGroupChild = __webpack_require__(682), _AnimateGroupChild2 = _interopRequireDefault(_AnimateGroupChild), AnimateGroup = (_temp = _class = function(_Component) {
function AnimateGroup() {
return _classCallCheck(this, AnimateGroup), _possibleConstructorReturn(this, (AnimateGroup.__proto__ || Object.getPrototypeOf(AnimateGroup)).apply(this, arguments));
}
@@ -34352,7 +34449,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _class, _temp2, _isNumber2 = __webpack_require__(170), _isNumber3 = _interopRequireDefault(_isNumber2), _extends = Object.assign || function(target) {
+ var _class, _temp2, _isNumber2 = __webpack_require__(169), _isNumber3 = _interopRequireDefault(_isNumber2), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -34370,7 +34467,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Transition = __webpack_require__(108), _Transition2 = _interopRequireDefault(_Transition), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _Animate = __webpack_require__(263), _Animate2 = _interopRequireDefault(_Animate), parseDurationOfSingleTransition = function() {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Transition = __webpack_require__(155), _Transition2 = _interopRequireDefault(_Transition), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _Animate = __webpack_require__(264), _Animate2 = _interopRequireDefault(_Animate), parseDurationOfSingleTransition = function() {
var options = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {}, steps = options.steps, duration = options.duration;
return steps && steps.length ? steps.reduce(function(result, entry) {
return result + ((0, _isNumber3.default)(entry.duration) && entry.duration > 0 ? entry.duration : 0);
@@ -34450,7 +34547,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isArray__ = __webpack_require__(12), __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isArray__ = __webpack_require__(11), __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isArray__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_4__util_DataUtils__ = __webpack_require__(9), _extends = Object.assign || function(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
@@ -34469,7 +34566,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Constructor;
};
}(), defaultFormatter = function(value) {
- return __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default()(value) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.f)(value[0]) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.f)(value[1]) ? value.join(" ~ ") : value;
+ return __WEBPACK_IMPORTED_MODULE_0_lodash_isArray___default()(value) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(value[0]) && Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(value[1]) ? value.join(" ~ ") : value;
}, DefaultTooltipContent = Object(__WEBPACK_IMPORTED_MODULE_3__util_PureRender__.a)((_temp = _class2 = function(_Component) {
function DefaultTooltipContent() {
return _classCallCheck(this, DefaultTooltipContent), _possibleConstructorReturn(this, (DefaultTooltipContent.__proto__ || Object.getPrototypeOf(DefaultTooltipContent)).apply(this, arguments));
@@ -34488,7 +34585,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
paddingTop: 4,
paddingBottom: 4,
color: entry.color || "#000"
- }, itemStyle), hasName = Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.f)(entry.name), finalFormatter = entry.formatter || formatter || defaultFormatter;
+ }, itemStyle), hasName = Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(entry.name), finalFormatter = entry.formatter || formatter || defaultFormatter;
return __WEBPACK_IMPORTED_MODULE_1_react___default.a.createElement("li", {
className: "recharts-tooltip-item",
key: "tooltip-item-" + i,
@@ -34521,7 +34618,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
whiteSpace: "nowrap"
}, wrapperStyle), finalLabelStyle = _extends({
margin: 0
- }, labelStyle), hasLabel = Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.f)(label), finalLabel = hasLabel ? label : "";
+ }, labelStyle), hasLabel = Object(__WEBPACK_IMPORTED_MODULE_4__util_DataUtils__.g)(label), finalLabel = hasLabel ? label : "";
return hasLabel && labelFormatter && (finalLabel = labelFormatter(label)), __WEBPACK_IMPORTED_MODULE_1_react___default.a.createElement("div", {
className: "recharts-default-tooltip",
style: finalStyle
@@ -34572,7 +34669,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_debounce__ = __webpack_require__(157), __WEBPACK_IMPORTED_MODULE_0_lodash_debounce___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_debounce__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4_react_resize_detector__ = __webpack_require__(683), __WEBPACK_IMPORTED_MODULE_4_react_resize_detector___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_resize_detector__), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_LogUtils__ = __webpack_require__(280), _createClass = function() {
+ var _class, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_debounce__ = __webpack_require__(156), __WEBPACK_IMPORTED_MODULE_0_lodash_debounce___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_debounce__), __WEBPACK_IMPORTED_MODULE_1_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_1_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_react__), __WEBPACK_IMPORTED_MODULE_2_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_2_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_prop_types__), __WEBPACK_IMPORTED_MODULE_3_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_3_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_classnames__), __WEBPACK_IMPORTED_MODULE_4_react_resize_detector__ = __webpack_require__(685), __WEBPACK_IMPORTED_MODULE_4_react_resize_detector___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_resize_detector__), __WEBPACK_IMPORTED_MODULE_5__util_DataUtils__ = __webpack_require__(9), __WEBPACK_IMPORTED_MODULE_6__util_LogUtils__ = __webpack_require__(282), _createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
@@ -34631,9 +34728,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var _state = this.state, containerWidth = _state.containerWidth, containerHeight = _state.containerHeight;
if (containerWidth < 0 || containerHeight < 0) return null;
var _props = this.props, aspect = _props.aspect, width = _props.width, height = _props.height, minWidth = _props.minWidth, minHeight = _props.minHeight, maxHeight = _props.maxHeight, children = _props.children;
- Object(__WEBPACK_IMPORTED_MODULE_6__util_LogUtils__.a)(Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(width) || Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(height), "The width(%s) and height(%s) are both fixed numbers,\n maybe you don't need to use a ResponsiveContainer.", width, height),
+ Object(__WEBPACK_IMPORTED_MODULE_6__util_LogUtils__.a)(Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.i)(width) || Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.i)(height), "The width(%s) and height(%s) are both fixed numbers,\n maybe you don't need to use a ResponsiveContainer.", width, height),
Object(__WEBPACK_IMPORTED_MODULE_6__util_LogUtils__.a)(!aspect || aspect > 0, "The aspect(%s) must be greater than zero.", aspect);
- var calculatedWidth = Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(width) ? containerWidth : width, calculatedHeight = Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(height) ? containerHeight : height;
+ var calculatedWidth = Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.i)(width) ? containerWidth : width, calculatedHeight = Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.i)(height) ? containerHeight : height;
return aspect && aspect > 0 && (calculatedHeight = calculatedWidth / aspect, maxHeight && calculatedHeight > maxHeight && (calculatedHeight = maxHeight)),
Object(__WEBPACK_IMPORTED_MODULE_6__util_LogUtils__.a)(calculatedWidth > 0 || calculatedHeight > 0, "The width(%s) and height(%s) of chart should be greater than 0,\n please check the style of container, or the props width(%s) and height(%s),\n or add a minWidth(%s) or minHeight(%s) or use aspect(%s) to control the\n height and width.", calculatedWidth, calculatedHeight, width, height, minWidth, minHeight, aspect),
__WEBPACK_IMPORTED_MODULE_1_react___default.a.cloneElement(children, {
@@ -34688,7 +34785,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _ResizeDetector = __webpack_require__(684), _ResizeDetector2 = function(obj) {
+ var _ResizeDetector = __webpack_require__(686), _ResizeDetector2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -34762,7 +34859,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _resizeDetectorStyles = __webpack_require__(685), ResizeDetector = function(_Component) {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _propTypes = __webpack_require__(1), _propTypes2 = _interopRequireDefault(_propTypes), _resizeDetectorStyles = __webpack_require__(687), ResizeDetector = function(_Component) {
function ResizeDetector() {
_classCallCheck(this, ResizeDetector);
var _this = _possibleConstructorReturn(this, (ResizeDetector.__proto__ || Object.getPrototypeOf(ResizeDetector)).call(this));
@@ -34939,35 +35036,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
uniqueLowerCaseUnits.push(matches[1].toLowerCase())), matches = unitRegEx.exec(expression));
return uniqueUnits;
}
- var stack, balanced = __webpack_require__(687), reduceFunctionCall = __webpack_require__(688), mexp = __webpack_require__(690), MAX_STACK = 100, NESTED_CALC_RE = /(\+|\-|\*|\\|[^a-z]|)(\s*)(\()/g;
+ var stack, balanced = __webpack_require__(283), reduceFunctionCall = __webpack_require__(689), mexp = __webpack_require__(690), MAX_STACK = 100, NESTED_CALC_RE = /(\+|\-|\*|\\|[^a-z]|)(\s*)(\()/g;
module.exports = reduceCSSCalc;
-}, function(module, exports) {
- function balanced(a, b, str) {
- a instanceof RegExp && (a = maybeMatch(a, str)), b instanceof RegExp && (b = maybeMatch(b, str));
- var r = range(a, b, str);
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
- }
- function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
- }
- function range(a, b, str) {
- var begs, beg, left, right, result, ai = str.indexOf(a), bi = str.indexOf(b, ai + 1), i = ai;
- if (ai >= 0 && bi > 0) {
- for (begs = [], left = str.length; i >= 0 && !result; ) i == ai ? (begs.push(i),
- ai = str.indexOf(a, i + 1)) : 1 == begs.length ? result = [ begs.pop(), bi ] : (beg = begs.pop(),
- beg < left && (left = beg, right = bi), bi = str.indexOf(b, i + 1)), i = ai < bi && ai >= 0 ? ai : bi;
- begs.length && (result = [ left, right ]);
- }
- return result;
- }
- module.exports = balanced, balanced.range = range;
}, function(module, exports, __webpack_require__) {
function reduceFunctionCall(string, functionRE, callback) {
var call = string;
@@ -34993,35 +35063,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function evalFunctionCall(string, functionIdentifier, callback, call, functionRE) {
return callback(reduceFunctionCall(string, functionRE, callback), functionIdentifier, call);
}
- var balanced = __webpack_require__(689);
+ var balanced = __webpack_require__(283);
module.exports = reduceFunctionCall;
-}, function(module, exports) {
- function balanced(a, b, str) {
- a instanceof RegExp && (a = maybeMatch(a, str)), b instanceof RegExp && (b = maybeMatch(b, str));
- var r = range(a, b, str);
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
- }
- function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
- }
- function range(a, b, str) {
- var begs, beg, left, right, result, ai = str.indexOf(a), bi = str.indexOf(b, ai + 1), i = ai;
- if (ai >= 0 && bi > 0) {
- for (begs = [], left = str.length; i >= 0 && !result; ) i == ai ? (begs.push(i),
- ai = str.indexOf(a, i + 1)) : 1 == begs.length ? result = [ begs.pop(), bi ] : (beg = begs.pop(),
- beg < left && (left = beg, right = bi), bi = str.indexOf(b, i + 1)), i = ai < bi && ai >= 0 ? ai : bi;
- begs.length && (result = [ left, right ]);
- }
- return result;
- }
- module.exports = balanced, balanced.range = range;
}, function(module, exports, __webpack_require__) {
var Mexp = __webpack_require__(691);
Mexp.prototype.formulaEval = function() {
@@ -35413,21 +35456,10 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.message = message;
}, module.exports = Mexp;
}, function(module, exports, __webpack_require__) {
- function baseFlatten(array, depth, predicate, isStrict, result) {
- var index = -1, length = array.length;
- for (predicate || (predicate = isFlattenable), result || (result = []); ++index < length; ) {
- var value = array[index];
- depth > 0 && predicate(value) ? depth > 1 ? baseFlatten(value, depth - 1, predicate, isStrict, result) : arrayPush(result, value) : isStrict || (result[result.length] = value);
- }
- return result;
- }
- var arrayPush = __webpack_require__(268), isFlattenable = __webpack_require__(696);
- module.exports = baseFlatten;
-}, function(module, exports, __webpack_require__) {
function isFlattenable(value) {
return isArray(value) || isArguments(value) || !!(spreadableSymbol && value && value[spreadableSymbol]);
}
- var Symbol = __webpack_require__(77), isArguments = __webpack_require__(180), isArray = __webpack_require__(12), spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : void 0;
+ var Symbol = __webpack_require__(78), isArguments = __webpack_require__(179), isArray = __webpack_require__(11), spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : void 0;
module.exports = isFlattenable;
}, function(module, exports, __webpack_require__) {
function baseOrderBy(collection, iteratees, orders) {
@@ -35446,17 +35478,8 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return compareMultiple(object, other, orders);
});
}
- var arrayMap = __webpack_require__(115), baseIteratee = __webpack_require__(84), baseMap = __webpack_require__(698), baseSortBy = __webpack_require__(699), baseUnary = __webpack_require__(183), compareMultiple = __webpack_require__(700), identity = __webpack_require__(63);
+ var arrayMap = __webpack_require__(80), baseIteratee = __webpack_require__(63), baseMap = __webpack_require__(286), baseSortBy = __webpack_require__(697), baseUnary = __webpack_require__(182), compareMultiple = __webpack_require__(698), identity = __webpack_require__(62);
module.exports = baseOrderBy;
-}, function(module, exports, __webpack_require__) {
- function baseMap(collection, iteratee) {
- var index = -1, result = isArrayLike(collection) ? Array(collection.length) : [];
- return baseEach(collection, function(value, key, collection) {
- result[++index] = iteratee(value, key, collection);
- }), result;
- }
- var baseEach = __webpack_require__(277), isArrayLike = __webpack_require__(83);
- module.exports = baseMap;
}, function(module, exports) {
function baseSortBy(array, comparer) {
var length = array.length;
@@ -35475,7 +35498,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return object.index - other.index;
}
- var compareAscending = __webpack_require__(701);
+ var compareAscending = __webpack_require__(699);
module.exports = compareMultiple;
}, function(module, exports, __webpack_require__) {
function compareAscending(value, other) {
@@ -35486,15 +35509,27 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return 0;
}
- var isSymbol = __webpack_require__(62);
+ var isSymbol = __webpack_require__(61);
module.exports = compareAscending;
}, function(module, exports, __webpack_require__) {
function max(array) {
return array && array.length ? baseExtremum(array, identity, baseGt) : void 0;
}
- var baseExtremum = __webpack_require__(124), baseGt = __webpack_require__(283), identity = __webpack_require__(63);
+ var baseExtremum = __webpack_require__(123), baseGt = __webpack_require__(288), identity = __webpack_require__(62);
module.exports = max;
}, function(module, exports, __webpack_require__) {
+ function flatMap(collection, iteratee) {
+ return baseFlatten(map(collection, iteratee), 1);
+ }
+ var baseFlatten = __webpack_require__(285), map = __webpack_require__(702);
+ module.exports = flatMap;
+}, function(module, exports, __webpack_require__) {
+ function map(collection, iteratee) {
+ return (isArray(collection) ? arrayMap : baseMap)(collection, baseIteratee(iteratee, 3));
+ }
+ var arrayMap = __webpack_require__(80), baseIteratee = __webpack_require__(63), baseMap = __webpack_require__(286), isArray = __webpack_require__(11);
+ module.exports = map;
+}, function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: !0
@@ -35607,7 +35642,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
if (Symbol.iterator in Object(arr)) return sliceIterator(arr, i);
throw new TypeError("Invalid attempt to destructure non-iterable instance");
};
- }(), _utils = __webpack_require__(286), _arithmetic = __webpack_require__(705), _arithmetic2 = function(obj) {
+ }(), _utils = __webpack_require__(291), _arithmetic = __webpack_require__(705), _arithmetic2 = function(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
@@ -35659,7 +35694,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
Object.defineProperty(exports, "__esModule", {
value: !0
});
- var _utils = __webpack_require__(286), interpolateNumber = (0, _utils.curry)(function(a, b, t) {
+ var _utils = __webpack_require__(291), interpolateNumber = (0, _utils.curry)(function(a, b, t) {
var newA = +a;
return newA + t * (+b - newA);
}), uninterpolateNumber = (0, _utils.curry)(function(a, b, x) {
@@ -35734,16 +35769,16 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return pointish(band().paddingInner(1));
}
__webpack_exports__.a = band, __webpack_exports__.b = point;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__ordinal__ = __webpack_require__(300);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__ordinal__ = __webpack_require__(305);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(290);
+ __webpack_require__(295);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(294), __webpack_require__(288), __webpack_require__(710), __webpack_require__(293),
- __webpack_require__(711), __webpack_require__(295), __webpack_require__(296), __webpack_require__(297);
+ __webpack_require__(299), __webpack_require__(293), __webpack_require__(710), __webpack_require__(298),
+ __webpack_require__(711), __webpack_require__(300), __webpack_require__(301), __webpack_require__(302);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_exports__.a = function(x) {
@@ -35758,18 +35793,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(294), __webpack_require__(64), __webpack_require__(86), __webpack_require__(185);
+ __webpack_require__(299), __webpack_require__(64), __webpack_require__(87), __webpack_require__(184);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(291);
+ __webpack_require__(296);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(86);
+ __webpack_require__(87);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(64), __webpack_require__(86), __webpack_require__(185);
+ __webpack_require__(64), __webpack_require__(87), __webpack_require__(184);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -35783,18 +35818,18 @@ var _bundleJs = []byte((((((((((`!function(modules) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(299);
+ __webpack_require__(304);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
var __WEBPACK_IMPORTED_MODULE_2__src_map__ = (__webpack_require__(724), __webpack_require__(725),
- __webpack_require__(186));
+ __webpack_require__(185));
__webpack_require__.d(__webpack_exports__, "a", function() {
return __WEBPACK_IMPORTED_MODULE_2__src_map__.a;
});
__webpack_require__(726), __webpack_require__(727), __webpack_require__(728);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(186);
+ __webpack_require__(185);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function Set() {}
@@ -35808,7 +35843,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return set;
}
- var __WEBPACK_IMPORTED_MODULE_0__map__ = __webpack_require__(186), proto = __WEBPACK_IMPORTED_MODULE_0__map__.a.prototype;
+ var __WEBPACK_IMPORTED_MODULE_0__map__ = __webpack_require__(185), proto = __WEBPACK_IMPORTED_MODULE_0__map__.a.prototype;
Set.prototype = set.prototype = {
constructor: Set,
has: proto.has,
@@ -35844,7 +35879,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, Object(__WEBPACK_IMPORTED_MODULE_1__linear__.b)(scale);
}
__webpack_exports__.a = identity;
- var __WEBPACK_IMPORTED_MODULE_0__array__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_1__linear__ = __webpack_require__(87), __WEBPACK_IMPORTED_MODULE_2__number__ = __webpack_require__(309);
+ var __WEBPACK_IMPORTED_MODULE_0__array__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_1__linear__ = __webpack_require__(88), __WEBPACK_IMPORTED_MODULE_2__number__ = __webpack_require__(314);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function labConvert(o) {
@@ -35888,7 +35923,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.h = +h, this.c = +c, this.l = +l, this.opacity = +opacity;
}
__webpack_exports__.a = lab, __webpack_exports__.b = hcl;
- var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(189), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(188), __WEBPACK_IMPORTED_MODULE_2__math__ = __webpack_require__(301), Xn = .95047, Yn = 1, Zn = 1.08883, t0 = 4 / 29, t1 = 6 / 29, t2 = 3 * t1 * t1, t3 = t1 * t1 * t1;
+ var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(188), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(187), __WEBPACK_IMPORTED_MODULE_2__math__ = __webpack_require__(306), Xn = .95047, Yn = 1, Zn = 1.08883, t0 = 4 / 29, t1 = 6 / 29, t2 = 3 * t1 * t1, t3 = t1 * t1 * t1;
Object(__WEBPACK_IMPORTED_MODULE_0__define__.a)(Lab, lab, Object(__WEBPACK_IMPORTED_MODULE_0__define__.b)(__WEBPACK_IMPORTED_MODULE_1__color__.a, {
brighter: function(k) {
return new Lab(this.l + 18 * (null == k ? 1 : k), this.a, this.b, this.opacity);
@@ -35926,7 +35961,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
this.h = +h, this.s = +s, this.l = +l, this.opacity = +opacity;
}
__webpack_exports__.a = cubehelix;
- var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(189), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(188), __WEBPACK_IMPORTED_MODULE_2__math__ = __webpack_require__(301), A = -.14861, B = 1.78277, C = -.29227, D = -.90649, E = 1.97294, ED = E * D, EB = E * B, BC_DA = B * C - D * A;
+ var __WEBPACK_IMPORTED_MODULE_0__define__ = __webpack_require__(188), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(187), __WEBPACK_IMPORTED_MODULE_2__math__ = __webpack_require__(306), A = -.14861, B = 1.78277, C = -.29227, D = -.90649, E = 1.97294, ED = E * D, EB = E * B, BC_DA = B * C - D * A;
Object(__WEBPACK_IMPORTED_MODULE_0__define__.a)(Cubehelix, cubehelix, Object(__WEBPACK_IMPORTED_MODULE_0__define__.b)(__WEBPACK_IMPORTED_MODULE_1__color__.a, {
brighter: function(k) {
return k = null == k ? __WEBPACK_IMPORTED_MODULE_1__color__.c : Math.pow(__WEBPACK_IMPORTED_MODULE_1__color__.c, k),
@@ -36000,7 +36035,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}
- var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(125), __WEBPACK_IMPORTED_MODULE_1__parse__ = __webpack_require__(734);
+ var __WEBPACK_IMPORTED_MODULE_0__number__ = __webpack_require__(124), __WEBPACK_IMPORTED_MODULE_1__parse__ = __webpack_require__(734);
interpolateTransform(__WEBPACK_IMPORTED_MODULE_1__parse__.a, "px, ", "px)", "deg)"),
interpolateTransform(__WEBPACK_IMPORTED_MODULE_1__parse__.b, ", ", ")", ")");
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -36059,11 +36094,11 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(89);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(90);
hsl(__WEBPACK_IMPORTED_MODULE_1__color__.c), hsl(__WEBPACK_IMPORTED_MODULE_1__color__.a);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- __webpack_require__(44), __webpack_require__(89);
+ __webpack_require__(43), __webpack_require__(90);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function hcl(hue) {
@@ -36075,7 +36110,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
};
}
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(89);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(90);
hcl(__WEBPACK_IMPORTED_MODULE_1__color__.c), hcl(__WEBPACK_IMPORTED_MODULE_1__color__.a);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -36094,13 +36129,13 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_require__.d(__webpack_exports__, "a", function() {
return cubehelixLong;
});
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(89), cubehelixLong = (cubehelix(__WEBPACK_IMPORTED_MODULE_1__color__.c),
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1__color__ = __webpack_require__(90), cubehelixLong = (cubehelix(__WEBPACK_IMPORTED_MODULE_1__color__.c),
cubehelix(__WEBPACK_IMPORTED_MODULE_1__color__.a));
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_format__ = __webpack_require__(310);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_format__ = __webpack_require__(315);
__webpack_exports__.a = function(domain, count, specifier) {
var precision, start = domain[0], stop = domain[domain.length - 1], step = Object(__WEBPACK_IMPORTED_MODULE_0_d3_array__.g)(start, stop, null == count ? 10 : count);
switch (specifier = Object(__WEBPACK_IMPORTED_MODULE_1_d3_format__.c)(null == specifier ? ",f" : specifier),
@@ -36131,7 +36166,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "b", function() {
return formatPrefix;
});
- var locale, format, formatPrefix, __WEBPACK_IMPORTED_MODULE_0__locale__ = __webpack_require__(311);
+ var locale, format, formatPrefix, __WEBPACK_IMPORTED_MODULE_0__locale__ = __webpack_require__(316);
!function(definition) {
locale = Object(__WEBPACK_IMPORTED_MODULE_0__locale__.a)(definition), format = locale.format,
formatPrefix = locale.formatPrefix;
@@ -36182,7 +36217,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(192);
+ var __WEBPACK_IMPORTED_MODULE_0__formatDecimal__ = __webpack_require__(191);
__webpack_exports__.a = function(x, p) {
var d = Object(__WEBPACK_IMPORTED_MODULE_0__formatDecimal__.a)(x, p);
if (!d) return x + "";
@@ -36196,19 +36231,19 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(127);
+ var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(126);
__webpack_exports__.a = function(step) {
return Math.max(0, -Object(__WEBPACK_IMPORTED_MODULE_0__exponent__.a)(Math.abs(step)));
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(127);
+ var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(126);
__webpack_exports__.a = function(step, value) {
return Math.max(0, 3 * Math.max(-8, Math.min(8, Math.floor(Object(__WEBPACK_IMPORTED_MODULE_0__exponent__.a)(value) / 3))) - Object(__WEBPACK_IMPORTED_MODULE_0__exponent__.a)(Math.abs(step)));
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(127);
+ var __WEBPACK_IMPORTED_MODULE_0__exponent__ = __webpack_require__(126);
__webpack_exports__.a = function(step, max) {
return step = Math.abs(step), max = Math.abs(max) - step, Math.max(0, Object(__WEBPACK_IMPORTED_MODULE_0__exponent__.a)(max) - Object(__WEBPACK_IMPORTED_MODULE_0__exponent__.a)(step)) + 1;
};
@@ -36294,7 +36329,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, scale;
}
__webpack_exports__.a = log;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_format__ = __webpack_require__(310), __WEBPACK_IMPORTED_MODULE_2__constant__ = __webpack_require__(191), __WEBPACK_IMPORTED_MODULE_3__nice__ = __webpack_require__(315), __WEBPACK_IMPORTED_MODULE_4__continuous__ = __webpack_require__(126);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1_d3_format__ = __webpack_require__(315), __WEBPACK_IMPORTED_MODULE_2__constant__ = __webpack_require__(190), __WEBPACK_IMPORTED_MODULE_3__nice__ = __webpack_require__(320), __WEBPACK_IMPORTED_MODULE_4__continuous__ = __webpack_require__(125);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function raise(x, exponent) {
@@ -36322,7 +36357,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return pow().exponent(.5);
}
__webpack_exports__.a = pow, __webpack_exports__.b = sqrt;
- var __WEBPACK_IMPORTED_MODULE_0__constant__ = __webpack_require__(191), __WEBPACK_IMPORTED_MODULE_1__linear__ = __webpack_require__(87), __WEBPACK_IMPORTED_MODULE_2__continuous__ = __webpack_require__(126);
+ var __WEBPACK_IMPORTED_MODULE_0__constant__ = __webpack_require__(190), __WEBPACK_IMPORTED_MODULE_1__linear__ = __webpack_require__(88), __WEBPACK_IMPORTED_MODULE_2__continuous__ = __webpack_require__(125);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function quantile() {
@@ -36353,7 +36388,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, scale;
}
__webpack_exports__.a = quantile;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(56);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(55);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function quantize() {
@@ -36379,7 +36414,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, Object(__WEBPACK_IMPORTED_MODULE_2__linear__.b)(scale);
}
__webpack_exports__.a = quantize;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(56), __WEBPACK_IMPORTED_MODULE_2__linear__ = __webpack_require__(87);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(55), __WEBPACK_IMPORTED_MODULE_2__linear__ = __webpack_require__(88);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
function threshold() {
@@ -36401,7 +36436,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, scale;
}
__webpack_exports__.a = threshold;
- var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(56);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_array__ = __webpack_require__(37), __WEBPACK_IMPORTED_MODULE_1__array__ = __webpack_require__(55);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
var __WEBPACK_IMPORTED_MODULE_0__interval__ = __webpack_require__(18), millisecond = Object(__WEBPACK_IMPORTED_MODULE_0__interval__.a)(function() {}, function(date, step) {
@@ -36624,33 +36659,33 @@ var _bundleJs = []byte((((((((((`!function(modules) {
var date = new Date(string);
return isNaN(date) ? null : date;
}
- var __WEBPACK_IMPORTED_MODULE_0__isoFormat__ = __webpack_require__(319), __WEBPACK_IMPORTED_MODULE_1__defaultLocale__ = __webpack_require__(194);
+ var __WEBPACK_IMPORTED_MODULE_0__isoFormat__ = __webpack_require__(324), __WEBPACK_IMPORTED_MODULE_1__defaultLocale__ = __webpack_require__(193);
+new Date("2000-01-01T00:00:00.000Z") || Object(__WEBPACK_IMPORTED_MODULE_1__defaultLocale__.c)(__WEBPACK_IMPORTED_MODULE_0__isoFormat__.a);
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__time__ = __webpack_require__(316), __WEBPACK_IMPORTED_MODULE_1_d3_time_format__ = __webpack_require__(317), __WEBPACK_IMPORTED_MODULE_2_d3_time__ = __webpack_require__(193);
+ var __WEBPACK_IMPORTED_MODULE_0__time__ = __webpack_require__(321), __WEBPACK_IMPORTED_MODULE_1_d3_time_format__ = __webpack_require__(322), __WEBPACK_IMPORTED_MODULE_2_d3_time__ = __webpack_require__(192);
__webpack_exports__.a = function() {
return Object(__WEBPACK_IMPORTED_MODULE_0__time__.a)(__WEBPACK_IMPORTED_MODULE_2_d3_time__.v, __WEBPACK_IMPORTED_MODULE_2_d3_time__.q, __WEBPACK_IMPORTED_MODULE_2_d3_time__.u, __WEBPACK_IMPORTED_MODULE_2_d3_time__.l, __WEBPACK_IMPORTED_MODULE_2_d3_time__.m, __WEBPACK_IMPORTED_MODULE_2_d3_time__.o, __WEBPACK_IMPORTED_MODULE_2_d3_time__.r, __WEBPACK_IMPORTED_MODULE_2_d3_time__.n, __WEBPACK_IMPORTED_MODULE_1_d3_time_format__.b).domain([ Date.UTC(2e3, 0, 1), Date.UTC(2e3, 0, 2) ]);
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(90);
+ var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(91);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf");
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(90);
+ var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(91);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6");
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(90);
+ var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(91);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9");
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(90);
+ var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(91);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5");
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(88);
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(89);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_1_d3_interpolate__.b)(Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(300, .5, 0), Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(-240, .5, 1));
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -36659,7 +36694,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "a", function() {
return cool;
});
- var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(44), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(88), warm = Object(__WEBPACK_IMPORTED_MODULE_1_d3_interpolate__.b)(Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(-100, .75, .35), Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(80, 1.5, .8)), cool = Object(__WEBPACK_IMPORTED_MODULE_1_d3_interpolate__.b)(Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(260, .75, .35), Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(80, 1.5, .8)), rainbow = Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)();
+ var __WEBPACK_IMPORTED_MODULE_0_d3_color__ = __webpack_require__(43), __WEBPACK_IMPORTED_MODULE_1_d3_interpolate__ = __webpack_require__(89), warm = Object(__WEBPACK_IMPORTED_MODULE_1_d3_interpolate__.b)(Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(-100, .75, .35), Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(80, 1.5, .8)), cool = Object(__WEBPACK_IMPORTED_MODULE_1_d3_interpolate__.b)(Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(260, .75, .35), Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)(80, 1.5, .8)), rainbow = Object(__WEBPACK_IMPORTED_MODULE_0_d3_color__.b)();
__webpack_exports__.b = function(t) {
(t < 0 || t > 1) && (t -= Math.floor(t));
var ts = Math.abs(t - .5);
@@ -36681,7 +36716,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}), __webpack_require__.d(__webpack_exports__, "d", function() {
return plasma;
});
- var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(90);
+ var __WEBPACK_IMPORTED_MODULE_0__colors__ = __webpack_require__(91);
__webpack_exports__.a = ramp(Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725"));
var magma = ramp(Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")), inferno = ramp(Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")), plasma = 
ramp(Object(__WEBPACK_IMPORTED_MODULE_0__colors__.a)("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921"));
}, function(module, __webpack_exports__, __webpack_require__) {
@@ -36703,7 +36738,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}, Object(__WEBPACK_IMPORTED_MODULE_0__linear__.b)(scale);
}
__webpack_exports__.a = sequential;
- var __WEBPACK_IMPORTED_MODULE_0__linear__ = __webpack_require__(87);
+ var __WEBPACK_IMPORTED_MODULE_0__linear__ = __webpack_require__(88);
}, function(module, exports) {
function last(array) {
var length = null == array ? 0 : array.length;
@@ -36851,7 +36886,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function minBy(array, iteratee) {
return array && array.length ? baseExtremum(array, baseIteratee(iteratee, 2), baseLt) : void 0;
}
- var baseExtremum = __webpack_require__(124), baseIteratee = __webpack_require__(84), baseLt = __webpack_require__(285);
+ var baseExtremum = __webpack_require__(123), baseIteratee = __webpack_require__(63), baseLt = __webpack_require__(290);
module.exports = minBy;
}, function(module, exports, __webpack_require__) {
function createRange(fromRight) {
@@ -36861,7 +36896,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
step = void 0 === step ? start < end ? 1 : -1 : toFinite(step), baseRange(start, end, step, fromRight);
};
}
- var baseRange = __webpack_require__(785), isIterateeCall = __webpack_require__(282), toFinite = __webpack_require__(786);
+ var baseRange = __webpack_require__(785), isIterateeCall = __webpack_require__(287), toFinite = __webpack_require__(786);
module.exports = createRange;
}, function(module, exports) {
function baseRange(start, end, step, fromRight) {
@@ -36879,7 +36914,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
return value === value ? value : 0;
}
- var toNumber = __webpack_require__(243), INFINITY = 1 / 0, MAX_INTEGER = 1.7976931348623157e308;
+ var toNumber = __webpack_require__(244), INFINITY = 1 / 0, MAX_INTEGER = 1.7976931348623157e308;
module.exports = toFinite;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -37092,7 +37127,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
key: "render",
value: function() {
var _props6 = this.props, x = _props6.x, y = _props6.y, width = _props6.width, height = _props6.height, horizontal = _props6.horizontal, vertical = _props6.vertical, horizontalCoordinatesGenerator = _props6.horizontalCoordinatesGenerator, verticalCoordinatesGenerator = _props6.verticalCoordinatesGenerator, xAxis = _props6.xAxis, yAxis = _props6.yAxis, offset = _props6.offset, chartWidth = _props6.chartWidth, chartHeight = _props6.chartHeight;
- if (!Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(width) || width <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(height) || height <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(x) || x !== +x || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.g)(y) || y !== +y) return null;
+ if (!Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(width) || width <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(height) || height <= 0 || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(x) || x !== +x || !Object(__WEBPACK_IMPORTED_MODULE_5__util_DataUtils__.h)(y) || y !== +y) return null;
var _props7 = this.props, horizontalPoints = _props7.horizontalPoints, verticalPoints = _props7.verticalPoints;
return horizontalPoints && horizontalPoints.length || !__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default()(horizontalCoordinatesGenerator) || (horizontalPoints = horizontalCoordinatesGenerator({
yAxis: yAxis,
@@ -37141,7 +37176,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
__webpack_exports__.a = CartesianGrid;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_1__cartesian_Line__ = __webpack_require__(196), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(92);
+ var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_1__cartesian_Line__ = __webpack_require__(195), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(93);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__.a)({
chartName: "LineChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_1__cartesian_Line__.a,
@@ -37165,7 +37200,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
trailing: trailing
});
}
- var debounce = __webpack_require__(157), isObject = __webpack_require__(31), FUNC_ERROR_TEXT = "Expected a function";
+ var debounce = __webpack_require__(156), isObject = __webpack_require__(31), FUNC_ERROR_TEXT = "Expected a function";
module.exports = throttle;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
@@ -37279,7 +37314,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
};
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_1__cartesian_Bar__ = __webpack_require__(198), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(92);
+ var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_1__cartesian_Bar__ = __webpack_require__(197), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(93);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__.a)({
chartName: "BarChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_1__cartesian_Bar__.a,
@@ -37294,7 +37329,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_2__polar_PolarAngleAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_3__polar_PolarRadiusAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_4__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_5__polar_Pie__ = __webpack_require__(325);
+ var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_2__polar_PolarAngleAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_3__polar_PolarRadiusAxis__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_4__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_5__polar_Pie__ = __webpack_require__(330);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__.a)({
chartName: "PieChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_5__polar_Pie__.a,
@@ -37352,7 +37387,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__ = __webpack_require__(117), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_4_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_smooth__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__container_Surface__ = __webpack_require__(78), __WEBPACK_IMPORTED_MODULE_7__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_8__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__component_Tooltip__ = __webpack_require__(122), __WEBPACK_IMPORTED_MODULE_11__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_12__util_ChartUtils__ = __webpack_require__(16), _createClass = function() {
+ var _class, _class2, _temp2, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__ = __webpack_require__(116), __WEBPACK_IMPORTED_MODULE_1_lodash_isNaN___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_isNaN__), __WEBPACK_IMPORTED_MODULE_2_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_2_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_react__), __WEBPACK_IMPORTED_MODULE_3_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_3_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_prop_types__), __WEBPACK_IMPORTED_MODULE_4_react_smooth__ = __webpack_require__(33), __WEBPACK_IMPORTED_MODULE_4_react_smooth___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react_smooth__), __WEBPACK_IMPORTED_MODULE_5_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_5_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_classnames__), __WEBPACK_IMPORTED_MODULE_6__container_Surface__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_7__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_8__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_9__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_10__component_Tooltip__ = __webpack_require__(121), __WEBPACK_IMPORTED_MODULE_11__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_12__util_ChartUtils__ = __webpack_require__(16), _createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
@@ -37685,7 +37720,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
}
}), superClass && (Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass);
}
- var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_sumBy__ = __webpack_require__(797), __WEBPACK_IMPORTED_MODULE_1_lodash_sumBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_sumBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_min__ = __webpack_require__(284), __WEBPACK_IMPORTED_MODULE_2_lodash_min___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_min__), __WEBPACK_IMPORTED_MODULE_3_lodash_maxBy__ = __webpack_require__(324), __WEBPACK_IMPORTED_MODULE_3_lodash_maxBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_maxBy__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__container_Surface__ = __webpack_require__(78), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_Tooltip__ = __webpack_require__(122), __WEBPACK_IMPORTED_MODULE_10__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_11__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_12__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_13__util_ChartUtils__ = __webpack_require__(16), _createClass = function() {
+ var _class, _class2, _temp, __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__ = __webpack_require__(8), __WEBPACK_IMPORTED_MODULE_0_lodash_isFunction___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_lodash_isFunction__), __WEBPACK_IMPORTED_MODULE_1_lodash_sumBy__ = __webpack_require__(797), __WEBPACK_IMPORTED_MODULE_1_lodash_sumBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1_lodash_sumBy__), __WEBPACK_IMPORTED_MODULE_2_lodash_min__ = __webpack_require__(289), __WEBPACK_IMPORTED_MODULE_2_lodash_min___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2_lodash_min__), __WEBPACK_IMPORTED_MODULE_3_lodash_maxBy__ = __webpack_require__(329), __WEBPACK_IMPORTED_MODULE_3_lodash_maxBy___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3_lodash_maxBy__), __WEBPACK_IMPORTED_MODULE_4_react__ = __webpack_require__(0), __WEBPACK_IMPORTED_MODULE_4_react___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_4_react__), __WEBPACK_IMPORTED_MODULE_5_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_5_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_5_prop_types__), __WEBPACK_IMPORTED_MODULE_6_classnames__ = __webpack_require__(3), __WEBPACK_IMPORTED_MODULE_6_classnames___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_6_classnames__), __WEBPACK_IMPORTED_MODULE_7__container_Surface__ = __webpack_require__(79), __WEBPACK_IMPORTED_MODULE_8__container_Layer__ = __webpack_require__(14), __WEBPACK_IMPORTED_MODULE_9__component_Tooltip__ = __webpack_require__(121), __WEBPACK_IMPORTED_MODULE_10__shape_Rectangle__ = __webpack_require__(65), __WEBPACK_IMPORTED_MODULE_11__util_PureRender__ = __webpack_require__(5), __WEBPACK_IMPORTED_MODULE_12__util_ReactUtils__ = __webpack_require__(4), __WEBPACK_IMPORTED_MODULE_13__util_ChartUtils__ = __webpack_require__(16), _createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
@@ -38089,7 +38124,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
function sumBy(array, iteratee) {
return array && array.length ? baseSum(array, baseIteratee(iteratee, 2)) : 0;
}
- var baseIteratee = __webpack_require__(84), baseSum = __webpack_require__(798);
+ var baseIteratee = __webpack_require__(63), baseSum = __webpack_require__(798);
module.exports = sumBy;
}, function(module, exports) {
function baseSum(array, iteratee) {
@@ -38102,7 +38137,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
module.exports = baseSum;
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_2__polar_Radar__ = __webpack_require__(326), __WEBPACK_IMPORTED_MODULE_3__polar_PolarAngleAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_4__polar_PolarRadiusAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_5__util_PolarUtils__ = __webpack_require__(23);
+ var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_2__polar_Radar__ = __webpack_require__(331), __WEBPACK_IMPORTED_MODULE_3__polar_PolarAngleAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_4__polar_PolarRadiusAxis__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_5__util_PolarUtils__ = __webpack_require__(23);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__.a)({
chartName: "RadarChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_2__polar_Radar__.a,
@@ -38135,7 +38170,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_1__cartesian_Scatter__ = __webpack_require__(199), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__cartesian_ZAxis__ = __webpack_require__(131), __WEBPACK_IMPORTED_MODULE_5__util_CartesianUtils__ = __webpack_require__(92);
+ var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_1__cartesian_Scatter__ = __webpack_require__(198), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__cartesian_ZAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_5__util_CartesianUtils__ = __webpack_require__(93);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__.a)({
chartName: "ScatterChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_1__cartesian_Scatter__.a,
@@ -38154,7 +38189,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__ = __webpack_require__(197), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(92);
+ var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__ = __webpack_require__(196), __WEBPACK_IMPORTED_MODULE_2__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_3__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_4__util_CartesianUtils__ = __webpack_require__(93);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__.a)({
chartName: "AreaChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__.a,
@@ -38169,7 +38204,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_2__polar_PolarAngleAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_3__polar_PolarRadiusAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_4__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_5__polar_RadialBar__ = __webpack_require__(327);
+ var __WEBPACK_IMPORTED_MODULE_0_prop_types__ = __webpack_require__(1), __WEBPACK_IMPORTED_MODULE_0_prop_types___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_prop_types__), __WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_2__polar_PolarAngleAxis__ = __webpack_require__(129), __WEBPACK_IMPORTED_MODULE_3__polar_PolarRadiusAxis__ = __webpack_require__(128), __WEBPACK_IMPORTED_MODULE_4__util_PolarUtils__ = __webpack_require__(23), __WEBPACK_IMPORTED_MODULE_5__polar_RadialBar__ = __webpack_require__(332);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_1__generateCategoricalChart__.a)({
chartName: "RadialBarChart",
GraphicalChild: __WEBPACK_IMPORTED_MODULE_5__polar_RadialBar__.a,
@@ -38203,7 +38238,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
});
}, function(module, __webpack_exports__, __webpack_require__) {
"use strict";
- var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(46), __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__ = __webpack_require__(197), __WEBPACK_IMPORTED_MODULE_2__cartesian_Bar__ = __webpack_require__(198), __WEBPACK_IMPORTED_MODULE_3__cartesian_Line__ = __webpack_require__(196), __WEBPACK_IMPORTED_MODULE_4__cartesian_Scatter__ = __webpack_require__(199), __WEBPACK_IMPORTED_MODULE_5__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_6__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_7__cartesian_ZAxis__ = __webpack_require__(131), __WEBPACK_IMPORTED_MODULE_8__util_CartesianUtils__ = __webpack_require__(92);
+ var __WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__ = __webpack_require__(45), __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__ = __webpack_require__(196), __WEBPACK_IMPORTED_MODULE_2__cartesian_Bar__ = __webpack_require__(197), __WEBPACK_IMPORTED_MODULE_3__cartesian_Line__ = __webpack_require__(195), __WEBPACK_IMPORTED_MODULE_4__cartesian_Scatter__ = __webpack_require__(198), __WEBPACK_IMPORTED_MODULE_5__cartesian_XAxis__ = __webpack_require__(67), __WEBPACK_IMPORTED_MODULE_6__cartesian_YAxis__ = __webpack_require__(68), __WEBPACK_IMPORTED_MODULE_7__cartesian_ZAxis__ = __webpack_require__(130), __WEBPACK_IMPORTED_MODULE_8__util_CartesianUtils__ = __webpack_require__(93);
__webpack_exports__.a = Object(__WEBPACK_IMPORTED_MODULE_0__generateCategoricalChart__.a)({
chartName: "ComposedChart",
GraphicalChild: [ __WEBPACK_IMPORTED_MODULE_3__cartesian_Line__.a, __WEBPACK_IMPORTED_MODULE_1__cartesian_Area__.a, __WEBPACK_IMPORTED_MODULE_2__cartesian_Bar__.a, __WEBPACK_IMPORTED_MODULE_4__cartesian_Scatter__.a ],
@@ -38259,7 +38294,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Grid = __webpack_require__(241), _Grid2 = _interopRequireDefault(_Grid), styles = {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Grid = __webpack_require__(242), _Grid2 = _interopRequireDefault(_Grid), styles = {
container: {
flexWrap: "nowrap",
height: "100%",
@@ -38333,7 +38368,7 @@ var _bundleJs = []byte((((((((((`!function(modules) {
return protoProps && defineProperties(Constructor.prototype, protoProps), staticProps && defineProperties(Constructor, staticProps),
Constructor;
};
- }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), _common = __webpack_require__(61), multiplier = exports.multiplier = function() {
+ }(), _react = __webpack_require__(0), _react2 = _interopRequireDefault(_react), _Typography = __webpack_require__(109), _Typography2 = _interopRequireDefault(_Typography), _common = __webpack_require__(77), multiplier = exports.multiplier = function() {
var by = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : 1;
return function(x) {
return x * by;
@@ -38349,9 +38384,9 @@ var _bundleJs = []byte((((((((((`!function(modules) {
style: _common.styles.light
}, text), " ", p.toFixed(2), " %");
};
- }, [ "B", "KB", "MB", "GB", "TB", "PB" ]), simplifyBytes = function(x) {
- for (var i = 0; x > 1024 && i < 5; i++) x /= 1024;
- return x.toFixed(2).toString().concat(" ", unit[i]);
+ }, [ "", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi", "Yi" ]), simplifyBytes = function(x) {
+ for (var i = 0; x > 1024 && i < 8; i++) x /= 1024;
+ return x.toFixed(2).toString().concat(" ", unit[i], "B");
}, CustomTooltip = (exports.bytePlotter = function(text) {
var mapper = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : multiplier(1);
return function(payload) {
@@ -38400,7 +38435,7 @@ func bundleJs() (*asset, error) {
}
info := bindataFileInfo{name: "bundle.js", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)}
- a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc0, 0x90, 0x71, 0xbf, 0x69, 0x84, 0xe8, 0x63, 0x9a, 0x6c, 0x14, 0x49, 0xbd, 0x8b, 0x72, 0x2b, 0xe2, 0xd7, 0xdf, 0x49, 0x80, 0xea, 0x49, 0x2e, 0x7d, 0x4f, 0x23, 0x6b, 0xef, 0xcd, 0xc4, 0xdb}}
+ a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe2, 0xc0, 0xfa, 0x13, 0x71, 0x8b, 0x49, 0x5c, 0xa5, 0xae, 0xc8, 0x41, 0x3, 0x7a, 0x1, 0x3d, 0x5, 0x2a, 0xe7, 0xb7, 0x80, 0x2c, 0x6e, 0xbd, 0x9, 0xaf, 0xab, 0xd5, 0x25, 0xce, 0x1, 0x9f}}
return a, nil
}
@@ -38495,7 +38530,7 @@ func AssetNames() []string {
// _bindata is a table, holding each asset generator, mapped to its name.
var _bindata = map[string]func() (*asset, error){
- "dashboard.html": dashboardHtml,
+ "index.html": indexHtml,
"bundle.js": bundleJs,
}
@@ -38541,8 +38576,8 @@ type bintree struct {
}
var _bintree = &bintree{nil, map[string]*bintree{
- "bundle.js": {bundleJs, map[string]*bintree{}},
- "dashboard.html": {dashboardHtml, map[string]*bintree{}},
+ "bundle.js": {bundleJs, map[string]*bintree{}},
+ "index.html": {indexHtml, map[string]*bintree{}},
}}
// RestoreAsset restores an asset under the given directory.
diff --git a/dashboard/assets/components/CustomTooltip.jsx b/dashboard/assets/components/CustomTooltip.jsx
index be7c624cf..3405f9305 100644
--- a/dashboard/assets/components/CustomTooltip.jsx
+++ b/dashboard/assets/components/CustomTooltip.jsx
@@ -38,15 +38,15 @@ export const percentPlotter = <T>(text: string, mapper: (T => T) = multiplier(1)
};
// unit contains the units for the bytePlotter.
-const unit = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
+const unit = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'];
// simplifyBytes returns the simplified version of the given value followed by the unit.
const simplifyBytes = (x: number) => {
let i = 0;
- for (; x > 1024 && i < 5; i++) {
+ for (; x > 1024 && i < 8; i++) {
x /= 1024;
}
- return x.toFixed(2).toString().concat(' ', unit[i]);
+ return x.toFixed(2).toString().concat(' ', unit[i], 'B');
};
// bytePlotter renders a tooltip, which displays the payload as a byte value.
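The hunk above switches the tooltip's byte formatting from six decimal-style labels to binary (IEC) prefixes up to Yi and appends the trailing 'B' after the chosen prefix. A minimal standalone sketch of the same formatting logic follows (plain JavaScript; the sample inputs and printed results are illustrative only, not taken from the patch):

// Sketch of the byte formatter introduced above, using binary (IEC) prefixes.
// `units` and `simplifyBytes` mirror the patched component; sample values are
// illustrative only.
const units = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'];

const simplifyBytes = (x) => {
  let i = 0;
  // Divide by 1024 until the value fits the current prefix or we run out.
  for (; x > 1024 && i < units.length - 1; i++) {
    x /= 1024;
  }
  return `${x.toFixed(2)} ${units[i]}B`;
};

console.log(simplifyBytes(1536));            // "1.50 KiB"
console.log(simplifyBytes(3 * 1024 * 1024)); // "3.00 MiB"
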
diff --git a/dashboard/assets/components/Dashboard.jsx b/dashboard/assets/components/Dashboard.jsx
index 90b1a785c..8e6bf9869 100644
--- a/dashboard/assets/components/Dashboard.jsx
+++ b/dashboard/assets/components/Dashboard.jsx
@@ -81,7 +81,11 @@ const defaultContent: Content = {
version: null,
commit: null,
},
- home: {
+ home: {},
+ chain: {},
+ txpool: {},
+ network: {},
+ system: {
activeMemory: [],
virtualMemory: [],
networkIngress: [],
@@ -91,10 +95,6 @@ const defaultContent: Content = {
diskRead: [],
diskWrite: [],
},
- chain: {},
- txpool: {},
- network: {},
- system: {},
logs: {
log: [],
},
@@ -108,7 +108,11 @@ const updaters = {
version: replacer,
commit: replacer,
},
- home: {
+ home: null,
+ chain: null,
+ txpool: null,
+ network: null,
+ system: {
activeMemory: appender(200),
virtualMemory: appender(200),
networkIngress: appender(200),
@@ -118,11 +122,7 @@ const updaters = {
diskRead: appender(200),
diskWrite: appender(200),
},
- chain: null,
- txpool: null,
- network: null,
- system: null,
- logs: {
+ logs: {
log: appender(200),
},
};
@@ -136,7 +136,7 @@ const styles = {
height: '100%',
zIndex: 1,
overflow: 'hidden',
- }
+ },
};
// themeStyles returns the styles generated from the theme for the component.
@@ -178,7 +178,8 @@ class Dashboard extends Component<Props, State> {
// reconnect establishes a websocket connection with the server, listens for incoming messages
// and tries to reconnect on connection loss.
reconnect = () => {
- const server = new WebSocket(`${((window.location.protocol === 'https:') ? 'wss://' : 'ws://') + window.location.host}/api`);
+ // PROD is defined by webpack.
+ const server = new WebSocket(`${((window.location.protocol === 'https:') ? 'wss://' : 'ws://')}${PROD ? window.location.host : 'localhost:8080'}/api`);
server.onopen = () => {
this.setState({content: defaultContent, shouldUpdate: {}});
};
@@ -217,7 +218,6 @@ class Dashboard extends Component<Props, State> {
return (
<div className={this.props.classes.dashboard} style={styles.dashboard}>
<Header
- opened={this.state.sideBar}
switchSideBar={this.switchSideBar}
/>
<Body
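Besides moving the former `home` metrics into a dedicated `system` entry, the Dashboard change above makes the websocket endpoint configurable at build time: a `PROD` constant defined by webpack picks between the page's own host and a local development backend. A minimal sketch of that URL selection, assuming `PROD` is injected by the bundler (for example via a define-style plugin); the host names below are illustrative:

// Sketch of the dashboard's websocket URL selection, assuming `prod` is a
// compile-time constant injected by the bundler.
const apiUrl = (location, prod) => {
  const scheme = location.protocol === 'https:' ? 'wss://' : 'ws://';
  // In production connect back to the page's own host; during development the
  // backend is assumed to listen on localhost:8080, as in the patched code.
  const host = prod ? location.host : 'localhost:8080';
  return `${scheme}${host}/api`;
};

// Illustrative values only.
console.log(apiUrl({protocol: 'https:', host: 'node.example.org:8080'}, true));
// -> "wss://node.example.org:8080/api"
console.log(apiUrl({protocol: 'http:', host: 'irrelevant'}, false));
// -> "ws://localhost:8080/api"
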
diff --git a/dashboard/assets/components/Footer.jsx b/dashboard/assets/components/Footer.jsx
index 54b67c464..20830cbba 100644
--- a/dashboard/assets/components/Footer.jsx
+++ b/dashboard/assets/components/Footer.jsx
@@ -26,7 +26,17 @@ import {ResponsiveContainer, AreaChart, Area, Tooltip} from 'recharts';
import ChartRow from './ChartRow';
import CustomTooltip, {bytePlotter, bytePerSecPlotter, percentPlotter, multiplier} from './CustomTooltip';
import {styles as commonStyles} from '../common';
-import type {Content} from '../types/content';
+import type {General, System} from '../types/content';
+
+const FOOTER_SYNC_ID = 'footerSyncId';
+
+const CPU = 'cpu';
+const MEMORY = 'memory';
+const DISK = 'disk';
+const TRAFFIC = 'traffic';
+
+const TOP = 'Top';
+const BOTTOM = 'Bottom';
// styles contains the constant styles of the component.
const styles = {
@@ -40,17 +50,16 @@ const styles = {
padding: 0,
},
doubleChartWrapper: {
- height: '100%',
- width: '99%',
- paddingTop: 5,
+ height: '100%',
+ width: '99%',
},
};
// themeStyles returns the styles generated from the theme for the component.
const themeStyles: Object = (theme: Object) => ({
footer: {
- backgroundColor: theme.palette.background.appBar,
- color: theme.palette.getContrastText(theme.palette.background.appBar),
+ backgroundColor: theme.palette.grey[900],
+ color: theme.palette.getContrastText(theme.palette.grey[900]),
zIndex: theme.zIndex.appBar,
height: theme.spacing.unit * 10,
},
@@ -59,111 +68,108 @@ const themeStyles: Object = (theme: Object) => ({
export type Props = {
classes: Object, // injected by withStyles()
theme: Object,
- content: Content,
+ general: General,
+ system: System,
shouldUpdate: Object,
};
// Footer renders the footer of the dashboard.
class Footer extends Component<Props> {
shouldComponentUpdate(nextProps) {
- return typeof nextProps.shouldUpdate.home !== 'undefined';
+ return typeof nextProps.shouldUpdate.general !== 'undefined' || typeof nextProps.shouldUpdate.system !== 'undefined';
}
- // info renders a label with the given values.
- info = (about: string, value: ?string) => (value ? (
- <Typography type='caption' color='inherit'>
- <span style={commonStyles.light}>{about}</span> {value}
- </Typography>
- ) : null);
+ // halfHeightChart renders an area chart with half of the height of its parent.
+ halfHeightChart = (chartProps, tooltip, areaProps) => (
+ <ResponsiveContainer width='100%' height='50%'>
+ <AreaChart {...chartProps} >
+ {!tooltip || (<Tooltip cursor={false} content={<CustomTooltip tooltip={tooltip} />} />)}
+ <Area isAnimationActive={false} type='monotone' {...areaProps} />
+ </AreaChart>
+ </ResponsiveContainer>
+ );
// doubleChart renders a pair of charts separated by the baseline.
- doubleChart = (syncId, topChart, bottomChart) => {
- const topKey = 'topKey';
- const bottomKey = 'bottomKey';
- const topDefault = topChart.default ? topChart.default : 0;
- const bottomDefault = bottomChart.default ? bottomChart.default : 0;
- const topTooltip = topChart.tooltip ? (
- <Tooltip cursor={false} content={<CustomTooltip tooltip={topChart.tooltip} />} />
- ) : null;
- const bottomTooltip = bottomChart.tooltip ? (
- <Tooltip cursor={false} content={<CustomTooltip tooltip={bottomChart.tooltip} />} />
- ) : null;
+ doubleChart = (syncId, chartKey, topChart, bottomChart) => {
+ if (!Array.isArray(topChart.data) || !Array.isArray(bottomChart.data)) {
+ return null;
+ }
+ const topDefault = topChart.default || 0;
+ const bottomDefault = bottomChart.default || 0;
+ const topKey = `${chartKey}${TOP}`;
+ const bottomKey = `${chartKey}${BOTTOM}`;
const topColor = '#8884d8';
const bottomColor = '#82ca9d';
- // Put the samples of the two charts into the same array in order to avoid problems
- // at the synchronized area charts. If one of the two arrays doesn't have value at
- // a given position, give it a 0 default value.
- let data = [...topChart.data.map(({value}) => {
- const d = {};
- d[topKey] = value || topDefault;
- return d;
- })];
- for (let i = 0; i < data.length && i < bottomChart.data.length; i++) {
- // The value needs to be negative in order to plot it upside down.
- const d = bottomChart.data[i];
- data[i][bottomKey] = d && d.value ? -d.value : bottomDefault;
- }
- data = [...data, ...bottomChart.data.slice(data.length).map(({value}) => {
- const d = {};
- d[topKey] = topDefault;
- d[bottomKey] = -value || bottomDefault;
- return d;
- })];
-
return (
<div style={styles.doubleChartWrapper}>
- <ResponsiveContainer width='100%' height='50%'>
- <AreaChart data={data} syncId={syncId} >
- {topTooltip}
- <Area type='monotone' dataKey={topKey} stroke={topColor} fill={topColor} />
- </AreaChart>
- </ResponsiveContainer>
- <div style={{marginTop: -10, width: '100%', height: '50%'}}>
- <ResponsiveContainer width='100%' height='100%'>
- <AreaChart data={data} syncId={syncId} >
- {bottomTooltip}
- <Area type='monotone' dataKey={bottomKey} stroke={bottomColor} fill={bottomColor} />
- </AreaChart>
- </ResponsiveContainer>
- </div>
+ {this.halfHeightChart(
+ {
+ syncId,
+ data: topChart.data.map(({value}) => ({[topKey]: value || topDefault})),
+ margin: {top: 5, right: 5, bottom: 0, left: 5},
+ },
+ topChart.tooltip,
+ {dataKey: topKey, stroke: topColor, fill: topColor},
+ )}
+ {this.halfHeightChart(
+ {
+ syncId,
+ data: bottomChart.data.map(({value}) => ({[bottomKey]: -value || -bottomDefault})),
+ margin: {top: 0, right: 5, bottom: 5, left: 5},
+ },
+ bottomChart.tooltip,
+ {dataKey: bottomKey, stroke: bottomColor, fill: bottomColor},
+ )}
</div>
);
- }
+ };
render() {
- const {content} = this.props;
- const {general, home} = content;
+ const {general, system} = this.props;
return (
<Grid container className={this.props.classes.footer} direction='row' alignItems='center' style={styles.footer}>
<Grid item xs style={styles.chartRowWrapper}>
<ChartRow>
{this.doubleChart(
- 'all',
- {data: home.processCPU, tooltip: percentPlotter('Process')},
- {data: home.systemCPU, tooltip: percentPlotter('System', multiplier(-1))},
+ FOOTER_SYNC_ID,
+ CPU,
+ {data: system.processCPU, tooltip: percentPlotter('Process load')},
+ {data: system.systemCPU, tooltip: percentPlotter('System load', multiplier(-1))},
)}
{this.doubleChart(
- 'all',
- {data: home.activeMemory, tooltip: bytePlotter('Active')},
- {data: home.virtualMemory, tooltip: bytePlotter('Virtual', multiplier(-1))},
+ FOOTER_SYNC_ID,
+ MEMORY,
+ {data: system.activeMemory, tooltip: bytePlotter('Active memory')},
+ {data: system.virtualMemory, tooltip: bytePlotter('Virtual memory', multiplier(-1))},
)}
{this.doubleChart(
- 'all',
- {data: home.diskRead, tooltip: bytePerSecPlotter('Disk Read')},
- {data: home.diskWrite, tooltip: bytePerSecPlotter('Disk Write', multiplier(-1))},
+ FOOTER_SYNC_ID,
+ DISK,
+ {data: system.diskRead, tooltip: bytePerSecPlotter('Disk read')},
+ {data: system.diskWrite, tooltip: bytePerSecPlotter('Disk write', multiplier(-1))},
)}
{this.doubleChart(
- 'all',
- {data: home.networkIngress, tooltip: bytePerSecPlotter('Download')},
- {data: home.networkEgress, tooltip: bytePerSecPlotter('Upload', multiplier(-1))},
+ FOOTER_SYNC_ID,
+ TRAFFIC,
+ {data: system.networkIngress, tooltip: bytePerSecPlotter('Download')},
+ {data: system.networkEgress, tooltip: bytePerSecPlotter('Upload', multiplier(-1))},
)}
</ChartRow>
</Grid>
<Grid item >
- {this.info('Geth', general.version)}
- {this.info('Commit', general.commit ? general.commit.substring(0, 7) : null)}
+ <Typography type='caption' color='inherit'>
+ <span style={commonStyles.light}>Geth</span> {general.version}
+ </Typography>
+ {general.commit && (
+ <Typography type='caption' color='inherit'>
+ <span style={commonStyles.light}>{'Commit '}</span>
+ <a href={`https://github.com/ethereum/go-ethereum/commit/${general.commit}`} target='_blank' style={{color: 'inherit', textDecoration: 'none'}} >
+ {general.commit.substring(0, 8)}
+ </a>
+ </Typography>
+ )}
</Grid>
</Grid>
);
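The refactored Footer above no longer merges the two series into a single array; each half-height chart receives its own data, and the bottom series is negated so the synchronized area charts mirror each other around the baseline. A minimal sketch of that per-series mapping (the helper names and sample values are illustrative, not part of the patch):

// Sketch of how the refactored Footer prepares data for the mirrored chart
// pair: the top series keeps its values, the bottom series is negated so it
// renders below the shared baseline. Sample data is illustrative.
const toTopSeries = (samples, key, fallback = 0) =>
  samples.map(({value}) => ({[key]: value || fallback}));

const toBottomSeries = (samples, key, fallback = 0) =>
  samples.map(({value}) => ({[key]: -value || -fallback}));

const processCPU = [{value: 12}, {value: 30}, {value: 7}];
const systemCPU  = [{value: 40}, {value: 55}, {value: 25}];

console.log(toTopSeries(processCPU, 'cpuTop'));
// -> [{cpuTop: 12}, {cpuTop: 30}, {cpuTop: 7}]
console.log(toBottomSeries(systemCPU, 'cpuBottom'));
// -> [{cpuBottom: -40}, {cpuBottom: -55}, {cpuBottom: -25}]
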
diff --git a/dashboard/assets/components/Header.jsx b/dashboard/assets/components/Header.jsx
index e91885af3..ccdfbc6f0 100644
--- a/dashboard/assets/components/Header.jsx
+++ b/dashboard/assets/components/Header.jsx
@@ -21,30 +21,16 @@ import React, {Component} from 'react';
import withStyles from 'material-ui/styles/withStyles';
import AppBar from 'material-ui/AppBar';
import Toolbar from 'material-ui/Toolbar';
-import Transition from 'react-transition-group/Transition';
import IconButton from 'material-ui/IconButton';
+import Icon from 'material-ui/Icon';
+import MenuIcon from 'material-ui-icons/Menu';
import Typography from 'material-ui/Typography';
-import ChevronLeftIcon from 'material-ui-icons/ChevronLeft';
-
-import {DURATION} from '../common';
-
-// styles contains the constant styles of the component.
-const styles = {
- arrow: {
- default: {
- transition: `transform ${DURATION}ms`,
- },
- transition: {
- entered: {transform: 'rotate(180deg)'},
- },
- },
-};
// themeStyles returns the styles generated from the theme for the component.
const themeStyles = (theme: Object) => ({
header: {
- backgroundColor: theme.palette.background.appBar,
- color: theme.palette.getContrastText(theme.palette.background.appBar),
+ backgroundColor: theme.palette.grey[900],
+ color: theme.palette.getContrastText(theme.palette.grey[900]),
zIndex: theme.zIndex.appBar,
},
toolbar: {
@@ -53,42 +39,28 @@ const themeStyles = (theme: Object) => ({
},
title: {
paddingLeft: theme.spacing.unit,
+ fontSize: 3 * theme.spacing.unit,
},
});
export type Props = {
classes: Object, // injected by withStyles()
- opened: boolean,
switchSideBar: () => void,
};
// Header renders the header of the dashboard.
class Header extends Component<Props> {
- shouldComponentUpdate(nextProps) {
- return nextProps.opened !== this.props.opened;
- }
-
- // arrow renders a button, which changes the sidebar's state.
- arrow = (transitionState: string) => (
- <IconButton onClick={this.props.switchSideBar}>
- <ChevronLeftIcon
- style={{
- ...styles.arrow.default,
- ...styles.arrow.transition[transitionState],
- }}
- />
- </IconButton>
- );
-
render() {
- const {classes, opened} = this.props;
+ const {classes} = this.props;
return (
<AppBar position='static' className={classes.header}>
<Toolbar className={classes.toolbar}>
- <Transition mountOnEnter in={opened} timeout={{enter: DURATION}}>
- {this.arrow}
- </Transition>
+ <IconButton onClick={this.props.switchSideBar}>
+ <Icon>
+ <MenuIcon />
+ </Icon>
+ </IconButton>
<Typography type='title' color='inherit' noWrap className={classes.title}>
Go Ethereum Dashboard
</Typography>
diff --git a/dashboard/assets/components/Main.jsx b/dashboard/assets/components/Main.jsx
index a9e3d3578..fba8ca1f6 100644
--- a/dashboard/assets/components/Main.jsx
+++ b/dashboard/assets/components/Main.jsx
@@ -76,7 +76,8 @@ class Main extends Component<Props> {
<div style={styles.wrapper}>
<div className={classes.content} style={styles.content}>{children}</div>
<Footer
- content={content}
+ general={content.general}
+ system={content.system}
shouldUpdate={shouldUpdate}
/>
</div>
diff --git a/dashboard/assets/components/SideBar.jsx b/dashboard/assets/components/SideBar.jsx
index c2e419ae9..463d6cb40 100644
--- a/dashboard/assets/components/SideBar.jsx
+++ b/dashboard/assets/components/SideBar.jsx
@@ -41,10 +41,10 @@ const styles = {
// themeStyles returns the styles generated from the theme for the component.
const themeStyles = theme => ({
list: {
- background: theme.palette.background.appBar,
+ background: theme.palette.grey[900],
},
listItem: {
- minWidth: theme.spacing.unit * 3,
+ minWidth: theme.spacing.unit * 7,
},
icon: {
fontSize: theme.spacing.unit * 3,
diff --git a/dashboard/assets/dashboard.html b/dashboard/assets/index.html
index 2491bf1ea..2491bf1ea 100644
--- a/dashboard/assets/dashboard.html
+++ b/dashboard/assets/index.html
diff --git a/dashboard/assets/package-lock.json b/dashboard/assets/package-lock.json
deleted file mode 100644
index a70fda6d5..000000000
--- a/dashboard/assets/package-lock.json
+++ /dev/null
@@ -1,7678 +0,0 @@
-{
- "requires": true,
- "lockfileVersion": 1,
- "dependencies": {
- "@babel/code-frame": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0-beta.36.tgz",
- "integrity": "sha512-sW77BFwJ48YvQp3Gzz5xtAUiXuYOL2aMJKDwiaY3OcvdqBFurtYfOpSa4QrNyDxmOGRFSYzUpabU2m9QrlWE7w==",
- "requires": {
- "chalk": "2.3.0",
- "esutils": "2.0.2",
- "js-tokens": "3.0.2"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- }
- },
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "@babel/helper-function-name": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.36.tgz",
- "integrity": "sha512-/SGPOyifPf20iTrMN+WdlY2MbKa7/o4j7B/4IAsdOusASp2icT+Wcdjf4tjJHaXNX8Pe9bpgVxLNxhRvcf8E5w==",
- "requires": {
- "@babel/helper-get-function-arity": "7.0.0-beta.36",
- "@babel/template": "7.0.0-beta.36",
- "@babel/types": "7.0.0-beta.36"
- }
- },
- "@babel/helper-get-function-arity": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.36.tgz",
- "integrity": "sha512-vPPcx2vsSoDbcyWr9S3nd0FM3B4hEXnt0p1oKpwa08GwK0fSRxa98MyaRGf8suk8frdQlG1P3mDrz5p/Rr3pbA==",
- "requires": {
- "@babel/types": "7.0.0-beta.36"
- }
- },
- "@babel/template": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.0.0-beta.36.tgz",
- "integrity": "sha512-mUBi90WRyZ9iVvlWLEdeo8gn/tROyJdjKNC4W5xJTSZL+9MS89rTJSqiaJKXIkxk/YRDL/g/8snrG/O0xl33uA==",
- "requires": {
- "@babel/code-frame": "7.0.0-beta.36",
- "@babel/types": "7.0.0-beta.36",
- "babylon": "7.0.0-beta.36",
- "lodash": "4.17.4"
- },
- "dependencies": {
- "babylon": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/babylon/-/babylon-7.0.0-beta.36.tgz",
- "integrity": "sha512-rw4YdadGwajAMMRl6a5swhQ0JCOOFyaYCfJ0AsmNBD8uBD/r4J8mux7wBaqavvFKqUKQYWOzA1Speams4YDzsQ=="
- }
- }
- },
- "@babel/traverse": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.0.0-beta.36.tgz",
- "integrity": "sha512-OTUb6iSKVR/98dGThRJ1BiyfwbuX10BVnkz89IpaerjTPRhDfMBfLsqmzxz5MiywUOW4M0Clta0o7rSxkfcuzw==",
- "requires": {
- "@babel/code-frame": "7.0.0-beta.36",
- "@babel/helper-function-name": "7.0.0-beta.36",
- "@babel/types": "7.0.0-beta.36",
- "babylon": "7.0.0-beta.36",
- "debug": "3.1.0",
- "globals": "11.1.0",
- "invariant": "2.2.2",
- "lodash": "4.17.4"
- },
- "dependencies": {
- "babylon": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/babylon/-/babylon-7.0.0-beta.36.tgz",
- "integrity": "sha512-rw4YdadGwajAMMRl6a5swhQ0JCOOFyaYCfJ0AsmNBD8uBD/r4J8mux7wBaqavvFKqUKQYWOzA1Speams4YDzsQ=="
- },
- "debug": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
- "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
- "requires": {
- "ms": "2.0.0"
- }
- },
- "globals": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.1.0.tgz",
- "integrity": "sha512-uEuWt9mqTlPDwSqi+sHjD4nWU/1N+q0fiWI9T1mZpD2UENqX20CFD5T/ziLZvztPaBKl7ZylUi1q6Qfm7E2CiQ=="
- }
- }
- },
- "@babel/types": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.0.0-beta.36.tgz",
- "integrity": "sha512-PyAORDO9um9tfnrddXgmWN9e6Sq9qxraQIt5ynqBOSXKA5qvK1kUr+Q3nSzKFdzorsiK+oqcUnAFvEoKxv9D+Q==",
- "requires": {
- "esutils": "2.0.2",
- "lodash": "4.17.4",
- "to-fast-properties": "2.0.0"
- },
- "dependencies": {
- "to-fast-properties": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
- "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4="
- }
- }
- },
- "@types/jss": {
- "version": "9.3.0",
- "resolved": "https://registry.npmjs.org/@types/jss/-/jss-9.3.0.tgz",
- "integrity": "sha512-n7MUYCO/Wt4d6Yj0ZewXSSkqBcrdLFgpQ4mUBRXBWDmLfXtgT3tJ26GVPr8HiyRLLze6iQfaBJTlvjRTjgZpRg=="
- },
- "@types/react": {
- "version": "16.0.34",
- "resolved": "https://registry.npmjs.org/@types/react/-/react-16.0.34.tgz",
- "integrity": "sha512-Ee66fX2qMsDnDq7sPnDtq1bGoo479j6Fo1BlSnne+L5rp6ndzBUgz72+MRNuN56zg9uuteRCkJAMdDJEX2Uqig=="
- },
- "@types/react-transition-group": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-2.0.6.tgz",
- "integrity": "sha512-mVhRv+d0MIoLWl6hEFA7Nnd/obW2RQpZViTAKhM37mltuTDWCdoj8xAZv94ntB8wgAc6DDiDCXxFXPgClGnsfQ==",
- "requires": {
- "@types/react": "16.0.34"
- }
- },
- "acorn": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.3.0.tgz",
- "integrity": "sha512-Yej+zOJ1Dm/IMZzzj78OntP/r3zHEaKcyNoU2lAaxPtrseM6rF0xwqoz5Q5ysAiED9hTjI2hgtvLXitlCN1/Ug=="
- },
- "acorn-dynamic-import": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz",
- "integrity": "sha1-x1K9IQvvZ5UBtsbLf8hPj0cVjMQ=",
- "requires": {
- "acorn": "4.0.13"
- },
- "dependencies": {
- "acorn": {
- "version": "4.0.13",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz",
- "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c="
- }
- }
- },
- "acorn-jsx": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz",
- "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=",
- "requires": {
- "acorn": "3.3.0"
- },
- "dependencies": {
- "acorn": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz",
- "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo="
- }
- }
- },
- "ajv": {
- "version": "5.5.2",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
- "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=",
- "requires": {
- "co": "4.6.0",
- "fast-deep-equal": "1.0.0",
- "fast-json-stable-stringify": "2.0.0",
- "json-schema-traverse": "0.3.1"
- }
- },
- "ajv-keywords": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-2.1.1.tgz",
- "integrity": "sha1-YXmX/F9gV2iUxDX5QNgZ4TW4B2I="
- },
- "align-text": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz",
- "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=",
- "requires": {
- "kind-of": "3.2.2",
- "longest": "1.0.1",
- "repeat-string": "1.6.1"
- }
- },
- "alphanum-sort": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz",
- "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM="
- },
- "ansi-escapes": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.0.0.tgz",
- "integrity": "sha512-O/klc27mWNUigtv0F8NJWbLF00OcegQalkqKURWdosW08YZKi4m6CnSUSvIZG1otNJbTWhN01Hhz389DW7mvDQ=="
- },
- "ansi-regex": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
- "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
- },
- "ansi-styles": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
- "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4="
- },
- "anymatch": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-1.3.2.tgz",
- "integrity": "sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA==",
- "requires": {
- "micromatch": "2.3.11",
- "normalize-path": "2.1.1"
- }
- },
- "argparse": {
- "version": "1.0.9",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.9.tgz",
- "integrity": "sha1-c9g7wmP4bpf4zE9rrhsOkKfSLIY=",
- "requires": {
- "sprintf-js": "1.0.3"
- }
- },
- "aria-query": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-0.7.0.tgz",
- "integrity": "sha512-/r2lHl09V3o74+2MLKEdewoj37YZqiQZnfen1O4iNlrOjUgeKuu1U2yF3iKh6HJxqF+OXkLMfQv65Z/cvxD6vA==",
- "requires": {
- "ast-types-flow": "0.0.7"
- }
- },
- "arr-diff": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz",
- "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=",
- "requires": {
- "arr-flatten": "1.1.0"
- }
- },
- "arr-flatten": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz",
- "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg=="
- },
- "array-includes": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz",
- "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=",
- "requires": {
- "define-properties": "1.1.2",
- "es-abstract": "1.10.0"
- }
- },
- "array-union": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz",
- "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=",
- "requires": {
- "array-uniq": "1.0.3"
- }
- },
- "array-uniq": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz",
- "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY="
- },
- "array-unique": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz",
- "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM="
- },
- "arrify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
- "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0="
- },
- "asap": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
- "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY="
- },
- "asn1": {
- "version": "0.2.3",
- "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz",
- "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y="
- },
- "asn1.js": {
- "version": "4.9.2",
- "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.9.2.tgz",
- "integrity": "sha512-b/OsSjvWEo8Pi8H0zsDd2P6Uqo2TK2pH8gNLSJtNLM2Db0v2QaAZ0pBQJXVjAn4gBuugeVDr7s63ZogpUIwWDg==",
- "requires": {
- "bn.js": "4.11.8",
- "inherits": "2.0.3",
- "minimalistic-assert": "1.0.0"
- }
- },
- "assert": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/assert/-/assert-1.4.1.tgz",
- "integrity": "sha1-mZEtWRg2tab1s0XA8H7vwI/GXZE=",
- "requires": {
- "util": "0.10.3"
- }
- },
- "assert-plus": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
- "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
- },
- "ast-types-flow": {
- "version": "0.0.7",
- "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz",
- "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0="
- },
- "async": {
- "version": "2.6.0",
- "resolved": "https://registry.npmjs.org/async/-/async-2.6.0.tgz",
- "integrity": "sha512-xAfGg1/NTLBBKlHFmnd7PlmUW9KhVQIUuSrYem9xzFUZy13ScvtyGGejaae9iAVRiRq9+Cx7DPFaAAhCpyxyPw==",
- "requires": {
- "lodash": "4.17.4"
- }
- },
- "async-each": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.1.tgz",
- "integrity": "sha1-GdOGodntxufByF04iu28xW0zYC0="
- },
- "asynckit": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
- "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
- },
- "autoprefixer": {
- "version": "6.7.7",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-6.7.7.tgz",
- "integrity": "sha1-Hb0cg1ZY41zj+ZhAmdsAWFx4IBQ=",
- "requires": {
- "browserslist": "1.7.7",
- "caniuse-db": "1.0.30000793",
- "normalize-range": "0.1.2",
- "num2fraction": "1.2.2",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- },
- "dependencies": {
- "browserslist": {
- "version": "1.7.7",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz",
- "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=",
- "requires": {
- "caniuse-db": "1.0.30000793",
- "electron-to-chromium": "1.3.31"
- }
- }
- }
- },
- "aws-sign2": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
- "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
- },
- "aws4": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz",
- "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4="
- },
- "axobject-query": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-0.1.0.tgz",
- "integrity": "sha1-YvWdvFnJ+SQnWco0mWDnov48NsA=",
- "requires": {
- "ast-types-flow": "0.0.7"
- }
- },
- "babel-code-frame": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz",
- "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=",
- "requires": {
- "chalk": "1.1.3",
- "esutils": "2.0.2",
- "js-tokens": "3.0.2"
- }
- },
- "babel-core": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-core/-/babel-core-6.26.0.tgz",
- "integrity": "sha1-rzL3izGm/O8RnIew/Y2XU/A6C7g=",
- "requires": {
- "babel-code-frame": "6.26.0",
- "babel-generator": "6.26.0",
- "babel-helpers": "6.24.1",
- "babel-messages": "6.23.0",
- "babel-register": "6.26.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0",
- "babylon": "6.18.0",
- "convert-source-map": "1.5.1",
- "debug": "2.6.9",
- "json5": "0.5.1",
- "lodash": "4.17.4",
- "minimatch": "3.0.4",
- "path-is-absolute": "1.0.1",
- "private": "0.1.8",
- "slash": "1.0.0",
- "source-map": "0.5.7"
- }
- },
- "babel-eslint": {
- "version": "8.2.1",
- "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-8.2.1.tgz",
- "integrity": "sha512-RzdVOyWKQRUnLXhwLk+eKb4oyW+BykZSkpYwFhM4tnfzAG5OWfvG0w/uyzMp5XKEU0jN82+JefHr39bG2+KhRQ==",
- "requires": {
- "@babel/code-frame": "7.0.0-beta.36",
- "@babel/traverse": "7.0.0-beta.36",
- "@babel/types": "7.0.0-beta.36",
- "babylon": "7.0.0-beta.36",
- "eslint-scope": "3.7.1",
- "eslint-visitor-keys": "1.0.0"
- },
- "dependencies": {
- "babylon": {
- "version": "7.0.0-beta.36",
- "resolved": "https://registry.npmjs.org/babylon/-/babylon-7.0.0-beta.36.tgz",
- "integrity": "sha512-rw4YdadGwajAMMRl6a5swhQ0JCOOFyaYCfJ0AsmNBD8uBD/r4J8mux7wBaqavvFKqUKQYWOzA1Speams4YDzsQ=="
- }
- }
- },
- "babel-generator": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-generator/-/babel-generator-6.26.0.tgz",
- "integrity": "sha1-rBriAHC3n248odMmlhMFN3TyDcU=",
- "requires": {
- "babel-messages": "6.23.0",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "detect-indent": "4.0.0",
- "jsesc": "1.3.0",
- "lodash": "4.17.4",
- "source-map": "0.5.7",
- "trim-right": "1.0.1"
- }
- },
- "babel-helper-bindify-decorators": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-bindify-decorators/-/babel-helper-bindify-decorators-6.24.1.tgz",
- "integrity": "sha1-FMGeXxQte0fxmlJDHlKxzLxAozA=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-builder-binary-assignment-operator-visitor": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz",
- "integrity": "sha1-zORReto1b0IgvK6KAsKzRvmlZmQ=",
- "requires": {
- "babel-helper-explode-assignable-expression": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-builder-react-jsx": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-helper-builder-react-jsx/-/babel-helper-builder-react-jsx-6.26.0.tgz",
- "integrity": "sha1-Of+DE7dci2Xc7/HzHTg+D/KkCKA=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "esutils": "2.0.2"
- }
- },
- "babel-helper-call-delegate": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz",
- "integrity": "sha1-7Oaqzdx25Bw0YfiL/Fdb0Nqi340=",
- "requires": {
- "babel-helper-hoist-variables": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-define-map": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz",
- "integrity": "sha1-pfVtq0GiX5fstJjH66ypgZ+Vvl8=",
- "requires": {
- "babel-helper-function-name": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "lodash": "4.17.4"
- }
- },
- "babel-helper-explode-assignable-expression": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz",
- "integrity": "sha1-8luCz33BBDPFX3BZLVdGQArCLKo=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-explode-class": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-explode-class/-/babel-helper-explode-class-6.24.1.tgz",
- "integrity": "sha1-fcKjkQ3uAHBW4eMdZAztPVTqqes=",
- "requires": {
- "babel-helper-bindify-decorators": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-function-name": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz",
- "integrity": "sha1-00dbjAPtmCQqJbSDUasYOZ01gKk=",
- "requires": {
- "babel-helper-get-function-arity": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-get-function-arity": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz",
- "integrity": "sha1-j3eCqpNAfEHTqlCQj4mwMbG2hT0=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-hoist-variables": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz",
- "integrity": "sha1-HssnaJydJVE+rbyZFKc/VAi+enY=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-optimise-call-expression": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz",
- "integrity": "sha1-96E0J7qfc/j0+pk8VKl4gtEkQlc=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-regex": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz",
- "integrity": "sha1-MlxZ+QL4LyS3T6zu0DY5VPZJXnI=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "lodash": "4.17.4"
- }
- },
- "babel-helper-remap-async-to-generator": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz",
- "integrity": "sha1-XsWBgnrXI/7N04HxySg5BnbkVRs=",
- "requires": {
- "babel-helper-function-name": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helper-replace-supers": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz",
- "integrity": "sha1-v22/5Dk40XNpohPKiov3S2qQqxo=",
- "requires": {
- "babel-helper-optimise-call-expression": "6.24.1",
- "babel-messages": "6.23.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-helpers": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-helpers/-/babel-helpers-6.24.1.tgz",
- "integrity": "sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-loader": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-7.1.2.tgz",
- "integrity": "sha512-jRwlFbINAeyDStqK6Dd5YuY0k5YuzQUvlz2ZamuXrXmxav3pNqe9vfJ402+2G+OmlJSXxCOpB6Uz0INM7RQe2A==",
- "requires": {
- "find-cache-dir": "1.0.0",
- "loader-utils": "1.1.0",
- "mkdirp": "0.5.1"
- }
- },
- "babel-messages": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-messages/-/babel-messages-6.23.0.tgz",
- "integrity": "sha1-8830cDhYA1sqKVHG7F7fbGLyYw4=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-check-es2015-constants": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz",
- "integrity": "sha1-NRV7EBQm/S/9PaP3XH0ekYNbv4o=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-syntax-async-functions": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz",
- "integrity": "sha1-ytnK0RkbWtY0vzCuCHI5HgZHvpU="
- },
- "babel-plugin-syntax-async-generators": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-async-generators/-/babel-plugin-syntax-async-generators-6.13.0.tgz",
- "integrity": "sha1-a8lj67FuzLrmuStZbrfzXDQqi5o="
- },
- "babel-plugin-syntax-class-constructor-call": {
- "version": "6.18.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-class-constructor-call/-/babel-plugin-syntax-class-constructor-call-6.18.0.tgz",
- "integrity": "sha1-nLnTn+Q8hgC+yBRkVt3L1OGnZBY="
- },
- "babel-plugin-syntax-class-properties": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz",
- "integrity": "sha1-1+sjt5oxf4VDlixQW4J8fWysJ94="
- },
- "babel-plugin-syntax-decorators": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-decorators/-/babel-plugin-syntax-decorators-6.13.0.tgz",
- "integrity": "sha1-MSVjtNvePMgGzuPkFszurd0RrAs="
- },
- "babel-plugin-syntax-do-expressions": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-do-expressions/-/babel-plugin-syntax-do-expressions-6.13.0.tgz",
- "integrity": "sha1-V0d1YTmqJtOQ0JQQsDdEugfkeW0="
- },
- "babel-plugin-syntax-dynamic-import": {
- "version": "6.18.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz",
- "integrity": "sha1-jWomIpyDdFqZgqRBBRVyyqF5sdo="
- },
- "babel-plugin-syntax-exponentiation-operator": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz",
- "integrity": "sha1-nufoM3KQ2pUoggGmpX9BcDF4MN4="
- },
- "babel-plugin-syntax-export-extensions": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-export-extensions/-/babel-plugin-syntax-export-extensions-6.13.0.tgz",
- "integrity": "sha1-cKFITw+QiaToStRLrDU8lbmxJyE="
- },
- "babel-plugin-syntax-flow": {
- "version": "6.18.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-flow/-/babel-plugin-syntax-flow-6.18.0.tgz",
- "integrity": "sha1-TDqyCiryaqIM0lmVw5jE63AxDI0="
- },
- "babel-plugin-syntax-function-bind": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-function-bind/-/babel-plugin-syntax-function-bind-6.13.0.tgz",
- "integrity": "sha1-SMSV8Xe98xqYHnMvVa3AvdJgH0Y="
- },
- "babel-plugin-syntax-jsx": {
- "version": "6.18.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz",
- "integrity": "sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY="
- },
- "babel-plugin-syntax-object-rest-spread": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz",
- "integrity": "sha1-/WU28rzhODb/o6VFjEkDpZe7O/U="
- },
- "babel-plugin-syntax-trailing-function-commas": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz",
- "integrity": "sha1-ugNgk3+NBuQBgKQ/4NVhb/9TLPM="
- },
- "babel-plugin-transform-async-generator-functions": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-async-generator-functions/-/babel-plugin-transform-async-generator-functions-6.24.1.tgz",
- "integrity": "sha1-8FiQAUX9PpkHpt3yjaWfIVJYpds=",
- "requires": {
- "babel-helper-remap-async-to-generator": "6.24.1",
- "babel-plugin-syntax-async-generators": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-async-to-generator": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz",
- "integrity": "sha1-ZTbjeK/2yx1VF6wOQOs+n8jQh2E=",
- "requires": {
- "babel-helper-remap-async-to-generator": "6.24.1",
- "babel-plugin-syntax-async-functions": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-class-constructor-call": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-class-constructor-call/-/babel-plugin-transform-class-constructor-call-6.24.1.tgz",
- "integrity": "sha1-gNwoVQWsBn3LjWxl4vbxGrd2Xvk=",
- "requires": {
- "babel-plugin-syntax-class-constructor-call": "6.18.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-class-properties": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz",
- "integrity": "sha1-anl2PqYdM9NvN7YRqp3vgagbRqw=",
- "requires": {
- "babel-helper-function-name": "6.24.1",
- "babel-plugin-syntax-class-properties": "6.13.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-decorators": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-decorators/-/babel-plugin-transform-decorators-6.24.1.tgz",
- "integrity": "sha1-eIAT2PjGtSIr33s0Q5Df13Vp4k0=",
- "requires": {
- "babel-helper-explode-class": "6.24.1",
- "babel-plugin-syntax-decorators": "6.13.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-decorators-legacy": {
- "version": "1.3.4",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-decorators-legacy/-/babel-plugin-transform-decorators-legacy-1.3.4.tgz",
- "integrity": "sha1-dBtY9sW86eYCfgiC2cmU8E82aSU=",
- "requires": {
- "babel-plugin-syntax-decorators": "6.13.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-do-expressions": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-do-expressions/-/babel-plugin-transform-do-expressions-6.22.0.tgz",
- "integrity": "sha1-KMyvkoEtlJws0SgfaQyP3EaK6bs=",
- "requires": {
- "babel-plugin-syntax-do-expressions": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-arrow-functions": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz",
- "integrity": "sha1-RSaSy3EdX3ncf4XkQM5BufJE0iE=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-block-scoped-functions": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz",
- "integrity": "sha1-u8UbSflk1wy42OC5ToICRs46YUE=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-block-scoping": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz",
- "integrity": "sha1-1w9SmcEwjQXBL0Y4E7CgnnOxiV8=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0",
- "lodash": "4.17.4"
- }
- },
- "babel-plugin-transform-es2015-classes": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz",
- "integrity": "sha1-WkxYpQyclGHlZLSyo7+ryXolhNs=",
- "requires": {
- "babel-helper-define-map": "6.26.0",
- "babel-helper-function-name": "6.24.1",
- "babel-helper-optimise-call-expression": "6.24.1",
- "babel-helper-replace-supers": "6.24.1",
- "babel-messages": "6.23.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-computed-properties": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz",
- "integrity": "sha1-b+Ko0WiV1WNPTNmZttNICjCBWbM=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-destructuring": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz",
- "integrity": "sha1-mXux8auWf2gtKwh2/jWNYOdlxW0=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-duplicate-keys": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz",
- "integrity": "sha1-c+s9MQypaePvnskcU3QabxV2Qj4=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-for-of": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz",
- "integrity": "sha1-9HyVsrYT3x0+zC/bdXNiPHUkhpE=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-function-name": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz",
- "integrity": "sha1-g0yJhTvDaxrw86TF26qU/Y6sqos=",
- "requires": {
- "babel-helper-function-name": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-literals": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz",
- "integrity": "sha1-T1SgLWzWbPkVKAAZox0xklN3yi4=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-modules-amd": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz",
- "integrity": "sha1-Oz5UAXI5hC1tGcMBHEvS8AoA0VQ=",
- "requires": {
- "babel-plugin-transform-es2015-modules-commonjs": "6.26.0",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-modules-commonjs": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.0.tgz",
- "integrity": "sha1-DYOUApt9xqvhqX7xgeAHWN0uXYo=",
- "requires": {
- "babel-plugin-transform-strict-mode": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-modules-systemjs": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz",
- "integrity": "sha1-/4mhQrkRmpBhlfXxBuzzBdlAfSM=",
- "requires": {
- "babel-helper-hoist-variables": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-modules-umd": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz",
- "integrity": "sha1-rJl+YoXNGO1hdq22B9YCNErThGg=",
- "requires": {
- "babel-plugin-transform-es2015-modules-amd": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-object-super": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz",
- "integrity": "sha1-JM72muIcuDp/hgPa0CH1cusnj40=",
- "requires": {
- "babel-helper-replace-supers": "6.24.1",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-parameters": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz",
- "integrity": "sha1-V6w1GrScrxSpfNE7CfZv3wpiXys=",
- "requires": {
- "babel-helper-call-delegate": "6.24.1",
- "babel-helper-get-function-arity": "6.24.1",
- "babel-runtime": "6.26.0",
- "babel-template": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-shorthand-properties": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz",
- "integrity": "sha1-JPh11nIch2YbvZmkYi5R8U3jiqA=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-spread": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz",
- "integrity": "sha1-1taKmfia7cRTbIGlQujdnxdG+NE=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-sticky-regex": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz",
- "integrity": "sha1-AMHNsaynERLN8M9hJsLta0V8zbw=",
- "requires": {
- "babel-helper-regex": "6.26.0",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-template-literals": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz",
- "integrity": "sha1-qEs0UPfp+PH2g51taH2oS7EjbY0=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-typeof-symbol": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz",
- "integrity": "sha1-3sCfHN3/lLUqxz1QXITfWdzOs3I=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-es2015-unicode-regex": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz",
- "integrity": "sha1-04sS9C6nMj9yk4fxinxa4frrNek=",
- "requires": {
- "babel-helper-regex": "6.26.0",
- "babel-runtime": "6.26.0",
- "regexpu-core": "2.0.0"
- }
- },
- "babel-plugin-transform-exponentiation-operator": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz",
- "integrity": "sha1-KrDJx/MJj6SJB3cruBP+QejeOg4=",
- "requires": {
- "babel-helper-builder-binary-assignment-operator-visitor": "6.24.1",
- "babel-plugin-syntax-exponentiation-operator": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-export-extensions": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-export-extensions/-/babel-plugin-transform-export-extensions-6.22.0.tgz",
- "integrity": "sha1-U3OLR+deghhYnuqUbLvTkQm75lM=",
- "requires": {
- "babel-plugin-syntax-export-extensions": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-flow-strip-types": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-flow-strip-types/-/babel-plugin-transform-flow-strip-types-6.22.0.tgz",
- "integrity": "sha1-hMtnKTXUNxT9wyvOhFaNh0Qc988=",
- "requires": {
- "babel-plugin-syntax-flow": "6.18.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-function-bind": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-function-bind/-/babel-plugin-transform-function-bind-6.22.0.tgz",
- "integrity": "sha1-xvuOlqwpajELjPjqQBRiQH3fapc=",
- "requires": {
- "babel-plugin-syntax-function-bind": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-object-rest-spread": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz",
- "integrity": "sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=",
- "requires": {
- "babel-plugin-syntax-object-rest-spread": "6.13.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-react-display-name": {
- "version": "6.25.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-display-name/-/babel-plugin-transform-react-display-name-6.25.0.tgz",
- "integrity": "sha1-Z+K/Hx6ck6sI25Z5LgU5K/LMKNE=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-react-jsx": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx/-/babel-plugin-transform-react-jsx-6.24.1.tgz",
- "integrity": "sha1-hAoCjn30YN/DotKfDA2R9jduZqM=",
- "requires": {
- "babel-helper-builder-react-jsx": "6.26.0",
- "babel-plugin-syntax-jsx": "6.18.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-react-jsx-self": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx-self/-/babel-plugin-transform-react-jsx-self-6.22.0.tgz",
- "integrity": "sha1-322AqdomEqEh5t3XVYvL7PBuY24=",
- "requires": {
- "babel-plugin-syntax-jsx": "6.18.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-react-jsx-source": {
- "version": "6.22.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx-source/-/babel-plugin-transform-react-jsx-source-6.22.0.tgz",
- "integrity": "sha1-ZqwSFT9c0tF7PBkmj0vwGX9E7NY=",
- "requires": {
- "babel-plugin-syntax-jsx": "6.18.0",
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-regenerator": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz",
- "integrity": "sha1-4HA2lvveJ/Cj78rPi03KL3s6jy8=",
- "requires": {
- "regenerator-transform": "0.10.1"
- }
- },
- "babel-plugin-transform-runtime": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-runtime/-/babel-plugin-transform-runtime-6.23.0.tgz",
- "integrity": "sha1-iEkNRGUC6puOfvsP4J7E2ZR5se4=",
- "requires": {
- "babel-runtime": "6.26.0"
- }
- },
- "babel-plugin-transform-strict-mode": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz",
- "integrity": "sha1-1fr3qleKZbvlkc9e2uBKDGcCB1g=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0"
- }
- },
- "babel-polyfill": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-polyfill/-/babel-polyfill-6.26.0.tgz",
- "integrity": "sha1-N5k3q8Z9eJWXCtxiHyhM2WbPIVM=",
- "requires": {
- "babel-runtime": "6.26.0",
- "core-js": "2.5.3",
- "regenerator-runtime": "0.10.5"
- },
- "dependencies": {
- "regenerator-runtime": {
- "version": "0.10.5",
- "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz",
- "integrity": "sha1-M2w+/BIgrc7dosn6tntaeVWjNlg="
- }
- }
- },
- "babel-preset-env": {
- "version": "1.6.1",
- "resolved": "https://registry.npmjs.org/babel-preset-env/-/babel-preset-env-1.6.1.tgz",
- "integrity": "sha512-W6VIyA6Ch9ePMI7VptNn2wBM6dbG0eSz25HEiL40nQXCsXGTGZSTZu1Iap+cj3Q0S5a7T9+529l/5Bkvd+afNA==",
- "requires": {
- "babel-plugin-check-es2015-constants": "6.22.0",
- "babel-plugin-syntax-trailing-function-commas": "6.22.0",
- "babel-plugin-transform-async-to-generator": "6.24.1",
- "babel-plugin-transform-es2015-arrow-functions": "6.22.0",
- "babel-plugin-transform-es2015-block-scoped-functions": "6.22.0",
- "babel-plugin-transform-es2015-block-scoping": "6.26.0",
- "babel-plugin-transform-es2015-classes": "6.24.1",
- "babel-plugin-transform-es2015-computed-properties": "6.24.1",
- "babel-plugin-transform-es2015-destructuring": "6.23.0",
- "babel-plugin-transform-es2015-duplicate-keys": "6.24.1",
- "babel-plugin-transform-es2015-for-of": "6.23.0",
- "babel-plugin-transform-es2015-function-name": "6.24.1",
- "babel-plugin-transform-es2015-literals": "6.22.0",
- "babel-plugin-transform-es2015-modules-amd": "6.24.1",
- "babel-plugin-transform-es2015-modules-commonjs": "6.26.0",
- "babel-plugin-transform-es2015-modules-systemjs": "6.24.1",
- "babel-plugin-transform-es2015-modules-umd": "6.24.1",
- "babel-plugin-transform-es2015-object-super": "6.24.1",
- "babel-plugin-transform-es2015-parameters": "6.24.1",
- "babel-plugin-transform-es2015-shorthand-properties": "6.24.1",
- "babel-plugin-transform-es2015-spread": "6.22.0",
- "babel-plugin-transform-es2015-sticky-regex": "6.24.1",
- "babel-plugin-transform-es2015-template-literals": "6.22.0",
- "babel-plugin-transform-es2015-typeof-symbol": "6.23.0",
- "babel-plugin-transform-es2015-unicode-regex": "6.24.1",
- "babel-plugin-transform-exponentiation-operator": "6.24.1",
- "babel-plugin-transform-regenerator": "6.26.0",
- "browserslist": "2.11.3",
- "invariant": "2.2.2",
- "semver": "5.5.0"
- }
- },
- "babel-preset-flow": {
- "version": "6.23.0",
- "resolved": "https://registry.npmjs.org/babel-preset-flow/-/babel-preset-flow-6.23.0.tgz",
- "integrity": "sha1-5xIYiHCFrpoktb5Baa/7WZgWxJ0=",
- "requires": {
- "babel-plugin-transform-flow-strip-types": "6.22.0"
- }
- },
- "babel-preset-react": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-preset-react/-/babel-preset-react-6.24.1.tgz",
- "integrity": "sha1-umnfrqRfw+xjm2pOzqbhdwLJE4A=",
- "requires": {
- "babel-plugin-syntax-jsx": "6.18.0",
- "babel-plugin-transform-react-display-name": "6.25.0",
- "babel-plugin-transform-react-jsx": "6.24.1",
- "babel-plugin-transform-react-jsx-self": "6.22.0",
- "babel-plugin-transform-react-jsx-source": "6.22.0",
- "babel-preset-flow": "6.23.0"
- }
- },
- "babel-preset-stage-0": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-preset-stage-0/-/babel-preset-stage-0-6.24.1.tgz",
- "integrity": "sha1-VkLRUEL5E4TX5a+LyIsduVsDnmo=",
- "requires": {
- "babel-plugin-transform-do-expressions": "6.22.0",
- "babel-plugin-transform-function-bind": "6.22.0",
- "babel-preset-stage-1": "6.24.1"
- }
- },
- "babel-preset-stage-1": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-preset-stage-1/-/babel-preset-stage-1-6.24.1.tgz",
- "integrity": "sha1-dpLNfc1oSZB+auSgqFWJz7niv7A=",
- "requires": {
- "babel-plugin-transform-class-constructor-call": "6.24.1",
- "babel-plugin-transform-export-extensions": "6.22.0",
- "babel-preset-stage-2": "6.24.1"
- }
- },
- "babel-preset-stage-2": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-preset-stage-2/-/babel-preset-stage-2-6.24.1.tgz",
- "integrity": "sha1-2eKWD7PXEYfw5k7sYrwHdnIZvcE=",
- "requires": {
- "babel-plugin-syntax-dynamic-import": "6.18.0",
- "babel-plugin-transform-class-properties": "6.24.1",
- "babel-plugin-transform-decorators": "6.24.1",
- "babel-preset-stage-3": "6.24.1"
- }
- },
- "babel-preset-stage-3": {
- "version": "6.24.1",
- "resolved": "https://registry.npmjs.org/babel-preset-stage-3/-/babel-preset-stage-3-6.24.1.tgz",
- "integrity": "sha1-g2raCp56f6N8sTj7kyb4eTSkg5U=",
- "requires": {
- "babel-plugin-syntax-trailing-function-commas": "6.22.0",
- "babel-plugin-transform-async-generator-functions": "6.24.1",
- "babel-plugin-transform-async-to-generator": "6.24.1",
- "babel-plugin-transform-exponentiation-operator": "6.24.1",
- "babel-plugin-transform-object-rest-spread": "6.26.0"
- }
- },
- "babel-register": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-register/-/babel-register-6.26.0.tgz",
- "integrity": "sha1-btAhFz4vy0htestFxgCahW9kcHE=",
- "requires": {
- "babel-core": "6.26.0",
- "babel-runtime": "6.26.0",
- "core-js": "2.5.3",
- "home-or-tmp": "2.0.0",
- "lodash": "4.17.4",
- "mkdirp": "0.5.1",
- "source-map-support": "0.4.18"
- }
- },
- "babel-runtime": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz",
- "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=",
- "requires": {
- "core-js": "2.5.3",
- "regenerator-runtime": "0.11.1"
- }
- },
- "babel-template": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-template/-/babel-template-6.26.0.tgz",
- "integrity": "sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI=",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-traverse": "6.26.0",
- "babel-types": "6.26.0",
- "babylon": "6.18.0",
- "lodash": "4.17.4"
- }
- },
- "babel-traverse": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-traverse/-/babel-traverse-6.26.0.tgz",
- "integrity": "sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4=",
- "requires": {
- "babel-code-frame": "6.26.0",
- "babel-messages": "6.23.0",
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "babylon": "6.18.0",
- "debug": "2.6.9",
- "globals": "9.18.0",
- "invariant": "2.2.2",
- "lodash": "4.17.4"
- }
- },
- "babel-types": {
- "version": "6.26.0",
- "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz",
- "integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=",
- "requires": {
- "babel-runtime": "6.26.0",
- "esutils": "2.0.2",
- "lodash": "4.17.4",
- "to-fast-properties": "1.0.3"
- }
- },
- "babylon": {
- "version": "6.18.0",
- "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz",
- "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ=="
- },
- "balanced-match": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
- "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
- },
- "base64-js": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.1.tgz",
- "integrity": "sha512-dwVUVIXsBZXwTuwnXI9RK8sBmgq09NDHzyR9SAph9eqk76gKK2JSQmZARC2zRC81JC2QTtxD0ARU5qTS25gIGw=="
- },
- "bcrypt-pbkdf": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
- "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
- "optional": true,
- "requires": {
- "tweetnacl": "0.14.5"
- }
- },
- "big.js": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/big.js/-/big.js-3.2.0.tgz",
- "integrity": "sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q=="
- },
- "binary": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz",
- "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=",
- "requires": {
- "buffers": "0.1.1",
- "chainsaw": "0.1.0"
- }
- },
- "binary-extensions": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.11.0.tgz",
- "integrity": "sha1-RqoXUftqL5PuXmibsQh9SxTGwgU="
- },
- "bn.js": {
- "version": "4.11.8",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz",
- "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA=="
- },
- "boom": {
- "version": "4.3.1",
- "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz",
- "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=",
- "requires": {
- "hoek": "4.2.0"
- }
- },
- "brace-expansion": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
- "integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=",
- "requires": {
- "balanced-match": "1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "braces": {
- "version": "1.8.5",
- "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz",
- "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=",
- "requires": {
- "expand-range": "1.8.2",
- "preserve": "0.2.0",
- "repeat-element": "1.1.2"
- }
- },
- "brcast": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/brcast/-/brcast-3.0.1.tgz",
- "integrity": "sha512-eI3yqf9YEqyGl9PCNTR46MGvDylGtaHjalcz6Q3fAPnP/PhpKkkve52vFdfGpwp4VUvK6LUr4TQN+2stCrEwTg=="
- },
- "brorand": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz",
- "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8="
- },
- "browserify-aes": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.1.1.tgz",
- "integrity": "sha512-UGnTYAnB2a3YuYKIRy1/4FB2HdM866E0qC46JXvVTYKlBlZlnvfpSfY6OKfXZAkv70eJ2a1SqzpAo5CRhZGDFg==",
- "requires": {
- "buffer-xor": "1.0.3",
- "cipher-base": "1.0.4",
- "create-hash": "1.1.3",
- "evp_bytestokey": "1.0.3",
- "inherits": "2.0.3",
- "safe-buffer": "5.1.1"
- }
- },
- "browserify-cipher": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.0.tgz",
- "integrity": "sha1-mYgkSHS/XtTijalWZtzWasj8Njo=",
- "requires": {
- "browserify-aes": "1.1.1",
- "browserify-des": "1.0.0",
- "evp_bytestokey": "1.0.3"
- }
- },
- "browserify-des": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.0.tgz",
- "integrity": "sha1-2qJ3cXRwki7S/hhZQRihdUOXId0=",
- "requires": {
- "cipher-base": "1.0.4",
- "des.js": "1.0.0",
- "inherits": "2.0.3"
- }
- },
- "browserify-rsa": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz",
- "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=",
- "requires": {
- "bn.js": "4.11.8",
- "randombytes": "2.0.6"
- }
- },
- "browserify-sign": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.0.4.tgz",
- "integrity": "sha1-qk62jl17ZYuqa/alfmMMvXqT0pg=",
- "requires": {
- "bn.js": "4.11.8",
- "browserify-rsa": "4.0.1",
- "create-hash": "1.1.3",
- "create-hmac": "1.1.6",
- "elliptic": "6.4.0",
- "inherits": "2.0.3",
- "parse-asn1": "5.1.0"
- }
- },
- "browserify-zlib": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz",
- "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==",
- "requires": {
- "pako": "1.0.6"
- }
- },
- "browserslist": {
- "version": "2.11.3",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-2.11.3.tgz",
- "integrity": "sha512-yWu5cXT7Av6mVwzWc8lMsJMHWn4xyjSuGYi4IozbVTLUOEYPSagUB8kiMDUHA1fS3zjr8nkxkn9jdvug4BBRmA==",
- "requires": {
- "caniuse-lite": "1.0.30000792",
- "electron-to-chromium": "1.3.31"
- }
- },
- "buffer": {
- "version": "4.9.1",
- "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz",
- "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=",
- "requires": {
- "base64-js": "1.2.1",
- "ieee754": "1.1.8",
- "isarray": "1.0.0"
- }
- },
- "buffer-xor": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz",
- "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk="
- },
- "buffers": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz",
- "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s="
- },
- "builtin-modules": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz",
- "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8="
- },
- "builtin-status-codes": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz",
- "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug="
- },
- "caller-path": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-0.1.0.tgz",
- "integrity": "sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8=",
- "requires": {
- "callsites": "0.2.0"
- }
- },
- "callsites": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/callsites/-/callsites-0.2.0.tgz",
- "integrity": "sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo="
- },
- "camelcase": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz",
- "integrity": "sha1-MvxLn82vhF/N9+c7uXysImHwqwo="
- },
- "caniuse-api": {
- "version": "1.6.1",
- "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-1.6.1.tgz",
- "integrity": "sha1-tTTnxzTE+B7F++isoq0kNUuWLGw=",
- "requires": {
- "browserslist": "1.7.7",
- "caniuse-db": "1.0.30000793",
- "lodash.memoize": "4.1.2",
- "lodash.uniq": "4.5.0"
- },
- "dependencies": {
- "browserslist": {
- "version": "1.7.7",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz",
- "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=",
- "requires": {
- "caniuse-db": "1.0.30000793",
- "electron-to-chromium": "1.3.31"
- }
- }
- }
- },
- "caniuse-db": {
- "version": "1.0.30000793",
- "resolved": "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30000793.tgz",
- "integrity": "sha1-PADGbkI6ehkHx92Wdpp4sq+opy4="
- },
- "caniuse-lite": {
- "version": "1.0.30000792",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000792.tgz",
- "integrity": "sha1-0M6pgfgRjzlhRxr7tDyaHlu/AzI="
- },
- "caseless": {
- "version": "0.12.0",
- "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
- "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
- },
- "center-align": {
- "version": "0.1.3",
- "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz",
- "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=",
- "requires": {
- "align-text": "0.1.4",
- "lazy-cache": "1.0.4"
- }
- },
- "chain-function": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/chain-function/-/chain-function-1.0.0.tgz",
- "integrity": "sha1-DUqzfn4Y6tC9xHuSB2QRjOWHM9w="
- },
- "chainsaw": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz",
- "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=",
- "requires": {
- "traverse": "0.3.9"
- }
- },
- "chalk": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
- "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
- "requires": {
- "ansi-styles": "2.2.1",
- "escape-string-regexp": "1.0.5",
- "has-ansi": "2.0.0",
- "strip-ansi": "3.0.1",
- "supports-color": "2.0.0"
- }
- },
- "change-emitter": {
- "version": "0.1.6",
- "resolved": "https://registry.npmjs.org/change-emitter/-/change-emitter-0.1.6.tgz",
- "integrity": "sha1-6LL+PX8at9aaMhma/5HqaTFAlRU="
- },
- "chardet": {
- "version": "0.4.2",
- "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz",
- "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I="
- },
- "charenc": {
- "version": "0.0.2",
- "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
- "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc="
- },
- "child-process": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/child-process/-/child-process-1.0.2.tgz",
- "integrity": "sha1-mJdNx+0e5MYin44wX6cxOmiFp/I="
- },
- "chokidar": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-1.7.0.tgz",
- "integrity": "sha1-eY5ol3gVHIB2tLNg5e3SjNortGg=",
- "requires": {
- "anymatch": "1.3.2",
- "async-each": "1.0.1",
- "fsevents": "1.1.3",
- "glob-parent": "2.0.0",
- "inherits": "2.0.3",
- "is-binary-path": "1.0.1",
- "is-glob": "2.0.1",
- "path-is-absolute": "1.0.1",
- "readdirp": "2.1.0"
- }
- },
- "cipher-base": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz",
- "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==",
- "requires": {
- "inherits": "2.0.3",
- "safe-buffer": "5.1.1"
- }
- },
- "circular-json": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/circular-json/-/circular-json-0.3.3.tgz",
- "integrity": "sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A=="
- },
- "clap": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/clap/-/clap-1.2.3.tgz",
- "integrity": "sha512-4CoL/A3hf90V3VIEjeuhSvlGFEHKzOz+Wfc2IVZc+FaUgU0ZQafJTP49fvnULipOPcAfqhyI2duwQyns6xqjYA==",
- "requires": {
- "chalk": "1.1.3"
- }
- },
- "classnames": {
- "version": "2.2.5",
- "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.2.5.tgz",
- "integrity": "sha1-+zgB1FNGdknvNgPH1hoCvRKb3m0="
- },
- "cli-cursor": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz",
- "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=",
- "requires": {
- "restore-cursor": "2.0.0"
- }
- },
- "cli-width": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz",
- "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk="
- },
- "cliui": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz",
- "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=",
- "requires": {
- "string-width": "1.0.2",
- "strip-ansi": "3.0.1",
- "wrap-ansi": "2.1.0"
- },
- "dependencies": {
- "is-fullwidth-code-point": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
- "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
- "requires": {
- "number-is-nan": "1.0.1"
- }
- },
- "string-width": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
- "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
- "requires": {
- "code-point-at": "1.1.0",
- "is-fullwidth-code-point": "1.0.0",
- "strip-ansi": "3.0.1"
- }
- }
- }
- },
- "clone": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.3.tgz",
- "integrity": "sha1-KY1+IjFmD0DAA8LtMUDezz9TCF8="
- },
- "co": {
- "version": "4.6.0",
- "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz",
- "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ="
- },
- "coa": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/coa/-/coa-1.0.4.tgz",
- "integrity": "sha1-qe8VNmDWqGqL3sAomlxoTSF0Mv0=",
- "requires": {
- "q": "1.5.1"
- }
- },
- "code-point-at": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
- "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c="
- },
- "color": {
- "version": "0.11.4",
- "resolved": "https://registry.npmjs.org/color/-/color-0.11.4.tgz",
- "integrity": "sha1-bXtcdPtl6EHNSHkq0e1eB7kE12Q=",
- "requires": {
- "clone": "1.0.3",
- "color-convert": "1.9.1",
- "color-string": "0.3.0"
- }
- },
- "color-convert": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz",
- "integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==",
- "requires": {
- "color-name": "1.1.3"
- }
- },
- "color-name": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
- "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
- },
- "color-string": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/color-string/-/color-string-0.3.0.tgz",
- "integrity": "sha1-J9RvtnAlxcL6JZk7+/V55HhBuZE=",
- "requires": {
- "color-name": "1.1.3"
- }
- },
- "colormin": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/colormin/-/colormin-1.1.2.tgz",
- "integrity": "sha1-6i90IKcrlogaOKrlnsEkpvcpgTM=",
- "requires": {
- "color": "0.11.4",
- "css-color-names": "0.0.4",
- "has": "1.0.1"
- }
- },
- "colors": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz",
- "integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM="
- },
- "combined-stream": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz",
- "integrity": "sha1-k4NwpXtKUd6ix3wV1cX9+JUWQAk=",
- "requires": {
- "delayed-stream": "1.0.0"
- }
- },
- "commondir": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
- "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs="
- },
- "concat-map": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
- },
- "concat-stream": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.0.tgz",
- "integrity": "sha1-CqxmL9Ur54lk1VMvaUeE5wEQrPc=",
- "requires": {
- "inherits": "2.0.3",
- "readable-stream": "2.3.3",
- "typedarray": "0.0.6"
- }
- },
- "console-browserify": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz",
- "integrity": "sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=",
- "requires": {
- "date-now": "0.1.4"
- }
- },
- "constants-browserify": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz",
- "integrity": "sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U="
- },
- "contains-path": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz",
- "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo="
- },
- "convert-source-map": {
- "version": "1.5.1",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.5.1.tgz",
- "integrity": "sha1-uCeAl7m8IpNl3lxiz1/K7YtVmeU="
- },
- "core-js": {
- "version": "2.5.3",
- "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.5.3.tgz",
- "integrity": "sha1-isw4NFgk8W2DZbfJtCWRaOjtYD4="
- },
- "core-util-is": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
- "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
- },
- "create-ecdh": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.0.tgz",
- "integrity": "sha1-iIxyNZbN92EvZJgjPuvXo1MBc30=",
- "requires": {
- "bn.js": "4.11.8",
- "elliptic": "6.4.0"
- }
- },
- "create-hash": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.1.3.tgz",
- "integrity": "sha1-YGBCrIuSYnUPSDyt2rD1gZFy2P0=",
- "requires": {
- "cipher-base": "1.0.4",
- "inherits": "2.0.3",
- "ripemd160": "2.0.1",
- "sha.js": "2.4.10"
- }
- },
- "create-hmac": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.6.tgz",
- "integrity": "sha1-rLniIaThe9sHbpBlfEK5PjcmzwY=",
- "requires": {
- "cipher-base": "1.0.4",
- "create-hash": "1.1.3",
- "inherits": "2.0.3",
- "ripemd160": "2.0.1",
- "safe-buffer": "5.1.1",
- "sha.js": "2.4.10"
- }
- },
- "cross-spawn": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz",
- "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=",
- "requires": {
- "lru-cache": "4.1.1",
- "shebang-command": "1.2.0",
- "which": "1.3.0"
- }
- },
- "crypt": {
- "version": "0.0.2",
- "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
- "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs="
- },
- "cryptiles": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz",
- "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=",
- "requires": {
- "boom": "5.2.0"
- },
- "dependencies": {
- "boom": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz",
- "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==",
- "requires": {
- "hoek": "4.2.0"
- }
- }
- }
- },
- "crypto-browserify": {
- "version": "3.12.0",
- "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz",
- "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==",
- "requires": {
- "browserify-cipher": "1.0.0",
- "browserify-sign": "4.0.4",
- "create-ecdh": "4.0.0",
- "create-hash": "1.1.3",
- "create-hmac": "1.1.6",
- "diffie-hellman": "5.0.2",
- "inherits": "2.0.3",
- "pbkdf2": "3.0.14",
- "public-encrypt": "4.0.0",
- "randombytes": "2.0.6",
- "randomfill": "1.0.3"
- }
- },
- "css-color-names": {
- "version": "0.0.4",
- "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz",
- "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA="
- },
- "css-loader": {
- "version": "0.28.9",
- "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-0.28.9.tgz",
- "integrity": "sha512-r3dgelMm/mkPz5Y7m9SeiGE46i2VsEU/OYbez+1llfxtv8b2y5/b5StaeEvPK3S5tlNQI+tDW/xDIhKJoZgDtw==",
- "requires": {
- "babel-code-frame": "6.26.0",
- "css-selector-tokenizer": "0.7.0",
- "cssnano": "3.10.0",
- "icss-utils": "2.1.0",
- "loader-utils": "1.1.0",
- "lodash.camelcase": "4.3.0",
- "object-assign": "4.1.1",
- "postcss": "5.2.18",
- "postcss-modules-extract-imports": "1.2.0",
- "postcss-modules-local-by-default": "1.2.0",
- "postcss-modules-scope": "1.1.0",
- "postcss-modules-values": "1.3.0",
- "postcss-value-parser": "3.3.0",
- "source-list-map": "2.0.0"
- }
- },
- "css-selector-tokenizer": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.7.0.tgz",
- "integrity": "sha1-5piEdK6MlTR3v15+/s/OzNnPTIY=",
- "requires": {
- "cssesc": "0.1.0",
- "fastparse": "1.1.1",
- "regexpu-core": "1.0.0"
- },
- "dependencies": {
- "regexpu-core": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-1.0.0.tgz",
- "integrity": "sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs=",
- "requires": {
- "regenerate": "1.3.3",
- "regjsgen": "0.2.0",
- "regjsparser": "0.1.5"
- }
- }
- }
- },
- "css-vendor": {
- "version": "0.3.8",
- "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-0.3.8.tgz",
- "integrity": "sha1-ZCHP0wNM5mT+dnOXL9ARn8KJQfo=",
- "requires": {
- "is-in-browser": "1.1.3"
- }
- },
- "cssesc": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-0.1.0.tgz",
- "integrity": "sha1-yBSQPkViM3GgR3tAEJqq++6t27Q="
- },
- "cssnano": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-3.10.0.tgz",
- "integrity": "sha1-Tzj2zqK5sX+gFJDyPx3GjqZcHDg=",
- "requires": {
- "autoprefixer": "6.7.7",
- "decamelize": "1.2.0",
- "defined": "1.0.0",
- "has": "1.0.1",
- "object-assign": "4.1.1",
- "postcss": "5.2.18",
- "postcss-calc": "5.3.1",
- "postcss-colormin": "2.2.2",
- "postcss-convert-values": "2.6.1",
- "postcss-discard-comments": "2.0.4",
- "postcss-discard-duplicates": "2.1.0",
- "postcss-discard-empty": "2.1.0",
- "postcss-discard-overridden": "0.1.1",
- "postcss-discard-unused": "2.2.3",
- "postcss-filter-plugins": "2.0.2",
- "postcss-merge-idents": "2.1.7",
- "postcss-merge-longhand": "2.0.2",
- "postcss-merge-rules": "2.1.2",
- "postcss-minify-font-values": "1.0.5",
- "postcss-minify-gradients": "1.0.5",
- "postcss-minify-params": "1.2.2",
- "postcss-minify-selectors": "2.1.1",
- "postcss-normalize-charset": "1.1.1",
- "postcss-normalize-url": "3.0.8",
- "postcss-ordered-values": "2.2.3",
- "postcss-reduce-idents": "2.4.0",
- "postcss-reduce-initial": "1.0.1",
- "postcss-reduce-transforms": "1.0.4",
- "postcss-svgo": "2.1.6",
- "postcss-unique-selectors": "2.0.2",
- "postcss-value-parser": "3.3.0",
- "postcss-zindex": "2.2.0"
- }
- },
- "csso": {
- "version": "2.3.2",
- "resolved": "https://registry.npmjs.org/csso/-/csso-2.3.2.tgz",
- "integrity": "sha1-3dUsWHAz9J6Utx/FVWnyUuj/X4U=",
- "requires": {
- "clap": "1.2.3",
- "source-map": "0.5.7"
- }
- },
- "d": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/d/-/d-1.0.0.tgz",
- "integrity": "sha1-dUu1v+VUUdpppYuU1F9MWwRi1Y8=",
- "requires": {
- "es5-ext": "0.10.38"
- }
- },
- "d3-array": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-1.2.1.tgz",
- "integrity": "sha512-CyINJQ0SOUHojDdFDH4JEM0552vCR1utGyLHegJHyYH0JyCpSeTPxi4OBqHMA2jJZq4NH782LtaJWBImqI/HBw=="
- },
- "d3-collection": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/d3-collection/-/d3-collection-1.0.4.tgz",
- "integrity": "sha1-NC39EoN8kJdPM/HMCnha6lcNzcI="
- },
- "d3-color": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-1.0.3.tgz",
- "integrity": "sha1-vHZD/KjlOoNH4vva/6I2eWtYUJs="
- },
- "d3-format": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-1.2.2.tgz",
- "integrity": "sha512-zH9CfF/3C8zUI47nsiKfD0+AGDEuM8LwBIP7pBVpyR4l/sKkZqITmMtxRp04rwBrlshIZ17XeFAaovN3++wzkw=="
- },
- "d3-interpolate": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-1.1.6.tgz",
- "integrity": "sha512-mOnv5a+pZzkNIHtw/V6I+w9Lqm9L5bG3OTXPM5A+QO0yyVMQ4W1uZhR+VOJmazaOZXri2ppbiZ5BUNWT0pFM9A==",
- "requires": {
- "d3-color": "1.0.3"
- }
- },
- "d3-path": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.5.tgz",
- "integrity": "sha1-JB6xhJvZ6egCHA0KeZ+KDo5EF2Q="
- },
- "d3-scale": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-1.0.6.tgz",
- "integrity": "sha1-vOGdqA06DPQiyVQ64zIghiILNO0=",
- "requires": {
- "d3-array": "1.2.1",
- "d3-collection": "1.0.4",
- "d3-color": "1.0.3",
- "d3-format": "1.2.2",
- "d3-interpolate": "1.1.6",
- "d3-time": "1.0.8",
- "d3-time-format": "2.1.1"
- }
- },
- "d3-shape": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.2.0.tgz",
- "integrity": "sha1-RdAVOPBkuv0F6j1tLLdI/YxB93c=",
- "requires": {
- "d3-path": "1.0.5"
- }
- },
- "d3-time": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-1.0.8.tgz",
- "integrity": "sha512-YRZkNhphZh3KcnBfitvF3c6E0JOFGikHZ4YqD+Lzv83ZHn1/u6yGenRU1m+KAk9J1GnZMnKcrtfvSktlA1DXNQ=="
- },
- "d3-time-format": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-2.1.1.tgz",
- "integrity": "sha512-8kAkymq2WMfzW7e+s/IUNAtN/y3gZXGRrdGfo6R8NKPAA85UBTxZg5E61bR6nLwjPjj4d3zywSQe1CkYLPFyrw==",
- "requires": {
- "d3-time": "1.0.8"
- }
- },
- "damerau-levenshtein": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.4.tgz",
- "integrity": "sha1-AxkcQyy27qFou3fzpV/9zLiXhRQ="
- },
- "dashdash": {
- "version": "1.14.1",
- "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
- "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
- "requires": {
- "assert-plus": "1.0.0"
- }
- },
- "date-now": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz",
- "integrity": "sha1-6vQ5/U1ISK105cx9vvIAZyueNFs="
- },
- "debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "requires": {
- "ms": "2.0.0"
- }
- },
- "decamelize": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
- "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA="
- },
- "decompress-response": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz",
- "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=",
- "requires": {
- "mimic-response": "1.0.0"
- }
- },
- "deep-is": {
- "version": "0.1.3",
- "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
- "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ="
- },
- "deepmerge": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.0.1.tgz",
- "integrity": "sha512-VIPwiMJqJ13ZQfaCsIFnp5Me9tnjURiaIFxfz7EH0Ci0dTSQpZtSLrqOicXqEd/z2r+z+Klk9GzmnRsgpgbOsQ=="
- },
- "define-properties": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.2.tgz",
- "integrity": "sha1-g6c/L+pWmJj7c3GTyPhzyvbUXJQ=",
- "requires": {
- "foreach": "2.0.5",
- "object-keys": "1.0.11"
- }
- },
- "defined": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz",
- "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM="
- },
- "del": {
- "version": "2.2.2",
- "resolved": "https://registry.npmjs.org/del/-/del-2.2.2.tgz",
- "integrity": "sha1-wSyYHQZ4RshLyvhiz/kw2Qf/0ag=",
- "requires": {
- "globby": "5.0.0",
- "is-path-cwd": "1.0.0",
- "is-path-in-cwd": "1.0.0",
- "object-assign": "4.1.1",
- "pify": "2.3.0",
- "pinkie-promise": "2.0.1",
- "rimraf": "2.6.2"
- },
- "dependencies": {
- "pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
- }
- }
- },
- "delayed-stream": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
- "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
- },
- "des.js": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.0.tgz",
- "integrity": "sha1-wHTS4qpqipoH29YfmhXCzYPsjsw=",
- "requires": {
- "inherits": "2.0.3",
- "minimalistic-assert": "1.0.0"
- }
- },
- "detect-indent": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-4.0.0.tgz",
- "integrity": "sha1-920GQ1LN9Docts5hnE7jqUdd4gg=",
- "requires": {
- "repeating": "2.0.1"
- }
- },
- "diffie-hellman": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.2.tgz",
- "integrity": "sha1-tYNXOScM/ias9jIJn97SoH8gnl4=",
- "requires": {
- "bn.js": "4.11.8",
- "miller-rabin": "4.0.1",
- "randombytes": "2.0.6"
- }
- },
- "doctrine": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
- "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
- "requires": {
- "esutils": "2.0.2"
- }
- },
- "dom-helpers": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.3.1.tgz",
- "integrity": "sha512-2Sm+JaYn74OiTM2wHvxJOo3roiq/h25Yi69Fqk269cNUwIXsCvATB6CRSFC9Am/20G2b28hGv/+7NiWydIrPvg=="
- },
- "dom-walk": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz",
- "integrity": "sha1-ZyIm3HTI95mtNTB9+TaroRrNYBg="
- },
- "domain-browser": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.1.7.tgz",
- "integrity": "sha1-hnqksJP6oF8d4IwG9NeyH9+GmLw="
- },
- "duplexer3": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz",
- "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI="
- },
- "ecc-jsbn": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
- "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
- "optional": true,
- "requires": {
- "jsbn": "0.1.1"
- }
- },
- "electron-to-chromium": {
- "version": "1.3.31",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.31.tgz",
- "integrity": "sha512-XE4CLbswkZgZFn34cKFy1xaX+F5LHxeDLjY1+rsK9asDzknhbrd9g/n/01/acbU25KTsUSiLKwvlLyA+6XLUOA=="
- },
- "elliptic": {
- "version": "6.4.0",
- "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.0.tgz",
- "integrity": "sha1-ysmvh2LIWDYYcAPI3+GT5eLq5d8=",
- "requires": {
- "bn.js": "4.11.8",
- "brorand": "1.1.0",
- "hash.js": "1.1.3",
- "hmac-drbg": "1.0.1",
- "inherits": "2.0.3",
- "minimalistic-assert": "1.0.0",
- "minimalistic-crypto-utils": "1.0.1"
- }
- },
- "emoji-regex": {
- "version": "6.5.1",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-6.5.1.tgz",
- "integrity": "sha512-PAHp6TxrCy7MGMFidro8uikr+zlJJKJ/Q6mm2ExZ7HwkyR9lSVFfE3kt36qcwa24BQL7y0G9axycGjK1A/0uNQ=="
- },
- "emojis-list": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz",
- "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k="
- },
- "encoding": {
- "version": "0.1.12",
- "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz",
- "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=",
- "requires": {
- "iconv-lite": "0.4.19"
- }
- },
- "enhanced-resolve": {
- "version": "3.4.1",
- "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz",
- "integrity": "sha1-BCHjOf1xQZs9oT0Smzl5BAIwR24=",
- "requires": {
- "graceful-fs": "4.1.11",
- "memory-fs": "0.4.1",
- "object-assign": "4.1.1",
- "tapable": "0.2.8"
- }
- },
- "errno": {
- "version": "0.1.6",
- "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.6.tgz",
- "integrity": "sha512-IsORQDpaaSwcDP4ZZnHxgE85werpo34VYn1Ud3mq+eUsF593faR8oCZNXrROVkpFu2TsbrNhHin0aUrTsQ9vNw==",
- "requires": {
- "prr": "1.0.1"
- }
- },
- "error-ex": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.1.tgz",
- "integrity": "sha1-+FWobOYa3E6GIcPNoh56dhLDqNw=",
- "requires": {
- "is-arrayish": "0.2.1"
- }
- },
- "es-abstract": {
- "version": "1.10.0",
- "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.10.0.tgz",
- "integrity": "sha512-/uh/DhdqIOSkAWifU+8nG78vlQxdLckUdI/sPgy0VhuXi2qJ7T8czBmqIYtLQVpCIFYafChnsRsB5pyb1JdmCQ==",
- "requires": {
- "es-to-primitive": "1.1.1",
- "function-bind": "1.1.1",
- "has": "1.0.1",
- "is-callable": "1.1.3",
- "is-regex": "1.0.4"
- }
- },
- "es-to-primitive": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.1.1.tgz",
- "integrity": "sha1-RTVSSKiJeQNLZ5Lhm7gfK3l13Q0=",
- "requires": {
- "is-callable": "1.1.3",
- "is-date-object": "1.0.1",
- "is-symbol": "1.0.1"
- }
- },
- "es5-ext": {
- "version": "0.10.38",
- "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.38.tgz",
- "integrity": "sha512-jCMyePo7AXbUESwbl8Qi01VSH2piY9s/a3rSU/5w/MlTIx8HPL1xn2InGN8ejt/xulcJgnTO7vqNtOAxzYd2Kg==",
- "requires": {
- "es6-iterator": "2.0.3",
- "es6-symbol": "3.1.1"
- }
- },
- "es6-iterator": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz",
- "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38",
- "es6-symbol": "3.1.1"
- }
- },
- "es6-map": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/es6-map/-/es6-map-0.1.5.tgz",
- "integrity": "sha1-kTbgUD3MBqMBaQ8LsU/042TpSfA=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38",
- "es6-iterator": "2.0.3",
- "es6-set": "0.1.5",
- "es6-symbol": "3.1.1",
- "event-emitter": "0.3.5"
- }
- },
- "es6-set": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/es6-set/-/es6-set-0.1.5.tgz",
- "integrity": "sha1-0rPsXU2ADO2BjbU40ol02wpzzLE=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38",
- "es6-iterator": "2.0.3",
- "es6-symbol": "3.1.1",
- "event-emitter": "0.3.5"
- }
- },
- "es6-symbol": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.1.tgz",
- "integrity": "sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38"
- }
- },
- "es6-weak-map": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.2.tgz",
- "integrity": "sha1-XjqzIlH/0VOKH45f+hNXdy+S2W8=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38",
- "es6-iterator": "2.0.3",
- "es6-symbol": "3.1.1"
- }
- },
- "escape-string-regexp": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
- "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
- },
- "escope": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz",
- "integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=",
- "requires": {
- "es6-map": "0.1.5",
- "es6-weak-map": "2.0.2",
- "esrecurse": "4.2.0",
- "estraverse": "4.2.0"
- }
- },
- "eslint": {
- "version": "4.16.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-4.16.0.tgz",
- "integrity": "sha512-YVXV4bDhNoHHcv0qzU4Meof7/P26B4EuaktMi5L1Tnt52Aov85KmYA8c5D+xyZr/BkhvwUqr011jDSD/QTULxg==",
- "requires": {
- "ajv": "5.5.2",
- "babel-code-frame": "6.26.0",
- "chalk": "2.3.0",
- "concat-stream": "1.6.0",
- "cross-spawn": "5.1.0",
- "debug": "3.1.0",
- "doctrine": "2.1.0",
- "eslint-scope": "3.7.1",
- "eslint-visitor-keys": "1.0.0",
- "espree": "3.5.2",
- "esquery": "1.0.0",
- "esutils": "2.0.2",
- "file-entry-cache": "2.0.0",
- "functional-red-black-tree": "1.0.1",
- "glob": "7.1.2",
- "globals": "11.1.0",
- "ignore": "3.3.7",
- "imurmurhash": "0.1.4",
- "inquirer": "3.3.0",
- "is-resolvable": "1.1.0",
- "js-yaml": "3.10.0",
- "json-stable-stringify-without-jsonify": "1.0.1",
- "levn": "0.3.0",
- "lodash": "4.17.4",
- "minimatch": "3.0.4",
- "mkdirp": "0.5.1",
- "natural-compare": "1.4.0",
- "optionator": "0.8.2",
- "path-is-inside": "1.0.2",
- "pluralize": "7.0.0",
- "progress": "2.0.0",
- "require-uncached": "1.0.3",
- "semver": "5.5.0",
- "strip-ansi": "4.0.0",
- "strip-json-comments": "2.0.1",
- "table": "4.0.2",
- "text-table": "0.2.0"
- },
- "dependencies": {
- "ansi-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
- "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
- },
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- }
- },
- "debug": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
- "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
- "requires": {
- "ms": "2.0.0"
- }
- },
- "esprima": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz",
- "integrity": "sha512-oftTcaMu/EGrEIu904mWteKIv8vMuOgGYo7EhVJJN00R/EED9DCua/xxHRdYnKtcECzVg7xOWhflvJMnqcFZjw=="
- },
- "globals": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.1.0.tgz",
- "integrity": "sha512-uEuWt9mqTlPDwSqi+sHjD4nWU/1N+q0fiWI9T1mZpD2UENqX20CFD5T/ziLZvztPaBKl7ZylUi1q6Qfm7E2CiQ=="
- },
- "js-yaml": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz",
- "integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==",
- "requires": {
- "argparse": "1.0.9",
- "esprima": "4.0.0"
- }
- },
- "strip-ansi": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
- "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
- "requires": {
- "ansi-regex": "3.0.0"
- }
- },
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "eslint-config-airbnb": {
- "version": "16.1.0",
- "resolved": "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-16.1.0.tgz",
- "integrity": "sha512-zLyOhVWhzB/jwbz7IPSbkUuj7X2ox4PHXTcZkEmDqTvd0baJmJyuxlFPDlZOE/Y5bC+HQRaEkT3FoHo9wIdRiw==",
- "requires": {
- "eslint-config-airbnb-base": "12.1.0"
- }
- },
- "eslint-config-airbnb-base": {
- "version": "12.1.0",
- "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-12.1.0.tgz",
- "integrity": "sha512-/vjm0Px5ZCpmJqnjIzcFb9TKZrKWz0gnuG/7Gfkt0Db1ELJR51xkZth+t14rYdqWgX836XbuxtArbIHlVhbLBA==",
- "requires": {
- "eslint-restricted-globals": "0.1.1"
- }
- },
- "eslint-import-resolver-node": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz",
- "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==",
- "requires": {
- "debug": "2.6.9",
- "resolve": "1.5.0"
- }
- },
- "eslint-loader": {
- "version": "1.9.0",
- "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-1.9.0.tgz",
- "integrity": "sha512-40aN976qSNPyb9ejTqjEthZITpls1SVKtwguahmH1dzGCwQU/vySE+xX33VZmD8csU0ahVNCtFlsPgKqRBiqgg==",
- "requires": {
- "loader-fs-cache": "1.0.1",
- "loader-utils": "1.1.0",
- "object-assign": "4.1.1",
- "object-hash": "1.2.0",
- "rimraf": "2.6.2"
- }
- },
- "eslint-module-utils": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.1.1.tgz",
- "integrity": "sha512-jDI/X5l/6D1rRD/3T43q8Qgbls2nq5km5KSqiwlyUbGo5+04fXhMKdCPhjwbqAa6HXWaMxj8Q4hQDIh7IadJQw==",
- "requires": {
- "debug": "2.6.9",
- "pkg-dir": "1.0.0"
- },
- "dependencies": {
- "find-up": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz",
- "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=",
- "requires": {
- "path-exists": "2.1.0",
- "pinkie-promise": "2.0.1"
- }
- },
- "path-exists": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz",
- "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=",
- "requires": {
- "pinkie-promise": "2.0.1"
- }
- },
- "pkg-dir": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-1.0.0.tgz",
- "integrity": "sha1-ektQio1bstYp1EcFb/TpyTFM89Q=",
- "requires": {
- "find-up": "1.1.2"
- }
- }
- }
- },
- "eslint-plugin-flowtype": {
- "version": "2.41.0",
- "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-2.41.0.tgz",
- "integrity": "sha512-M5X6qu/zvvXQ7flXp9plyBRlNRMQGNl3c+kQmox+m/jpnCZt0txgauxcrBKAVa9LKE/hBnsItJ9BojdmkefAkA==",
- "requires": {
- "lodash": "4.17.4"
- }
- },
- "eslint-plugin-import": {
- "version": "2.8.0",
- "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.8.0.tgz",
- "integrity": "sha512-Rf7dfKJxZ16QuTgVv1OYNxkZcsu/hULFnC+e+w0Gzi6jMC3guQoWQgxYxc54IDRinlb6/0v5z/PxxIKmVctN+g==",
- "requires": {
- "builtin-modules": "1.1.1",
- "contains-path": "0.1.0",
- "debug": "2.6.9",
- "doctrine": "1.5.0",
- "eslint-import-resolver-node": "0.3.2",
- "eslint-module-utils": "2.1.1",
- "has": "1.0.1",
- "lodash.cond": "4.5.2",
- "minimatch": "3.0.4",
- "read-pkg-up": "2.0.0"
- },
- "dependencies": {
- "doctrine": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz",
- "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=",
- "requires": {
- "esutils": "2.0.2",
- "isarray": "1.0.0"
- }
- }
- }
- },
- "eslint-plugin-jsx-a11y": {
- "version": "6.0.3",
- "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.0.3.tgz",
- "integrity": "sha1-VFg9GuRCSDFi4EDhPMMYZUZRAOU=",
- "requires": {
- "aria-query": "0.7.0",
- "array-includes": "3.0.3",
- "ast-types-flow": "0.0.7",
- "axobject-query": "0.1.0",
- "damerau-levenshtein": "1.0.4",
- "emoji-regex": "6.5.1",
- "jsx-ast-utils": "2.0.1"
- }
- },
- "eslint-plugin-react": {
- "version": "7.5.1",
- "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.5.1.tgz",
- "integrity": "sha512-YGSjB9Qu6QbVTroUZi66pYky3DfoIPLdHQ/wmrBGyBRnwxQsBXAov9j2rpXt/55i8nyMv6IRWJv2s4d4YnduzQ==",
- "requires": {
- "doctrine": "2.1.0",
- "has": "1.0.1",
- "jsx-ast-utils": "2.0.1",
- "prop-types": "15.6.0"
- }
- },
- "eslint-restricted-globals": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/eslint-restricted-globals/-/eslint-restricted-globals-0.1.1.tgz",
- "integrity": "sha1-NfDVy8ZMLj7WLpO0saevBbp+1Nc="
- },
- "eslint-scope": {
- "version": "3.7.1",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.1.tgz",
- "integrity": "sha1-PWPD7f2gLgbgGkUq2IyqzHzctug=",
- "requires": {
- "esrecurse": "4.2.0",
- "estraverse": "4.2.0"
- }
- },
- "eslint-visitor-keys": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz",
- "integrity": "sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ=="
- },
- "espree": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.2.tgz",
- "integrity": "sha512-sadKeYwaR/aJ3stC2CdvgXu1T16TdYN+qwCpcWbMnGJ8s0zNWemzrvb2GbD4OhmJ/fwpJjudThAlLobGbWZbCQ==",
- "requires": {
- "acorn": "5.3.0",
- "acorn-jsx": "3.0.1"
- }
- },
- "esprima": {
- "version": "2.7.3",
- "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.3.tgz",
- "integrity": "sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE="
- },
- "esquery": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.0.tgz",
- "integrity": "sha1-z7qLV9f7qT8XKYqKAGoEzaE9gPo=",
- "requires": {
- "estraverse": "4.2.0"
- }
- },
- "esrecurse": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.0.tgz",
- "integrity": "sha1-+pVo2Y04I/mkHZHpAtyrnqblsWM=",
- "requires": {
- "estraverse": "4.2.0",
- "object-assign": "4.1.1"
- }
- },
- "estraverse": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz",
- "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM="
- },
- "esutils": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz",
- "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs="
- },
- "event-emitter": {
- "version": "0.3.5",
- "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz",
- "integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=",
- "requires": {
- "d": "1.0.0",
- "es5-ext": "0.10.38"
- }
- },
- "events": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz",
- "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ="
- },
- "evp_bytestokey": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz",
- "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==",
- "requires": {
- "md5.js": "1.3.4",
- "safe-buffer": "5.1.1"
- }
- },
- "execa": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz",
- "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=",
- "requires": {
- "cross-spawn": "5.1.0",
- "get-stream": "3.0.0",
- "is-stream": "1.1.0",
- "npm-run-path": "2.0.2",
- "p-finally": "1.0.0",
- "signal-exit": "3.0.2",
- "strip-eof": "1.0.0"
- }
- },
- "expand-brackets": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz",
- "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=",
- "requires": {
- "is-posix-bracket": "0.1.1"
- }
- },
- "expand-range": {
- "version": "1.8.2",
- "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz",
- "integrity": "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=",
- "requires": {
- "fill-range": "2.2.3"
- }
- },
- "extend": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz",
- "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ="
- },
- "external-editor": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.1.0.tgz",
- "integrity": "sha512-E44iT5QVOUJBKij4IIV3uvxuNlbKS38Tw1HiupxEIHPv9qtC2PrDYohbXV5U+1jnfIXttny8gUhj+oZvflFlzA==",
- "requires": {
- "chardet": "0.4.2",
- "iconv-lite": "0.4.19",
- "tmp": "0.0.33"
- }
- },
- "extglob": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz",
- "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=",
- "requires": {
- "is-extglob": "1.0.0"
- }
- },
- "extsprintf": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
- "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
- },
- "fast-deep-equal": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz",
- "integrity": "sha1-liVqO8l1WV6zbYLpkp0GDYk0Of8="
- },
- "fast-json-stable-stringify": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
- "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I="
- },
- "fast-levenshtein": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
- "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc="
- },
- "fastparse": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.1.tgz",
- "integrity": "sha1-0eJkOzipTXWDtHkGDmxK/8lAcfg="
- },
- "fbjs": {
- "version": "0.8.16",
- "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.8.16.tgz",
- "integrity": "sha1-XmdDL1UNxBtXK/VYR7ispk5TN9s=",
- "requires": {
- "core-js": "1.2.7",
- "isomorphic-fetch": "2.2.1",
- "loose-envify": "1.3.1",
- "object-assign": "4.1.1",
- "promise": "7.3.1",
- "setimmediate": "1.0.5",
- "ua-parser-js": "0.7.17"
- },
- "dependencies": {
- "core-js": {
- "version": "1.2.7",
- "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz",
- "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY="
- }
- }
- },
- "figures": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz",
- "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=",
- "requires": {
- "escape-string-regexp": "1.0.5"
- }
- },
- "file-entry-cache": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-2.0.0.tgz",
- "integrity": "sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E=",
- "requires": {
- "flat-cache": "1.3.0",
- "object-assign": "4.1.1"
- }
- },
- "file-loader": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-1.1.6.tgz",
- "integrity": "sha512-873ztuL+/hfvXbLDJ262PGO6XjERnybJu2gW1/5j8HUfxSiFJI9Hj/DhZ50ZGRUxBvuNiazb/cM2rh9pqrxP6Q==",
- "requires": {
- "loader-utils": "1.1.0",
- "schema-utils": "0.3.0"
- }
- },
- "filename-regex": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/filename-regex/-/filename-regex-2.0.1.tgz",
- "integrity": "sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY="
- },
- "fill-range": {
- "version": "2.2.3",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.3.tgz",
- "integrity": "sha1-ULd9/X5Gm8dJJHCWNpn+eoSFpyM=",
- "requires": {
- "is-number": "2.1.0",
- "isobject": "2.1.0",
- "randomatic": "1.1.7",
- "repeat-element": "1.1.2",
- "repeat-string": "1.6.1"
- },
- "dependencies": {
- "isobject": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz",
- "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=",
- "requires": {
- "isarray": "1.0.0"
- }
- }
- }
- },
- "find-cache-dir": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-1.0.0.tgz",
- "integrity": "sha1-kojj6ePMN0hxfTnq3hfPcfww7m8=",
- "requires": {
- "commondir": "1.0.1",
- "make-dir": "1.1.0",
- "pkg-dir": "2.0.0"
- }
- },
- "find-up": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz",
- "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=",
- "requires": {
- "locate-path": "2.0.0"
- }
- },
- "flat-cache": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.0.tgz",
- "integrity": "sha1-0wMLMrOBVPTjt+nHCfSQ9++XxIE=",
- "requires": {
- "circular-json": "0.3.3",
- "del": "2.2.2",
- "graceful-fs": "4.1.11",
- "write": "0.2.1"
- }
- },
- "flatten": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/flatten/-/flatten-1.0.2.tgz",
- "integrity": "sha1-2uRqnXj74lKSJYzB54CkHZXAN4I="
- },
- "flow-bin": {
- "version": "0.63.1",
- "resolved": "https://registry.npmjs.org/flow-bin/-/flow-bin-0.63.1.tgz",
- "integrity": "sha512-aWKHYs3UECgpwrIDVUiABjSC8dgaKmonymQDWO+6FhGcp9lnnxdDBE6Sfm3F7YaRPfLYsWAY4lndBrfrfyn+9g=="
- },
- "flow-bin-loader": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/flow-bin-loader/-/flow-bin-loader-1.0.2.tgz",
- "integrity": "sha1-5c6CL/S51tXYJnAk21OnaYNIC+M=",
- "requires": {
- "child-process": "1.0.2"
- }
- },
- "flow-typed": {
- "version": "2.2.3",
- "resolved": "https://registry.npmjs.org/flow-typed/-/flow-typed-2.2.3.tgz",
- "integrity": "sha512-xCKOrKn/DnA4BPbNKyp3s2vW7Pzvff1zJhEdzNviTgwpiEkTG6uxQLjofcqvKdzTyZ/PjHBzkx02iyhP/2NrCg==",
- "requires": {
- "babel-polyfill": "6.26.0",
- "colors": "1.1.2",
- "fs-extra": "4.0.3",
- "github": "0.2.4",
- "glob": "7.1.2",
- "got": "7.1.0",
- "md5": "2.2.1",
- "mkdirp": "0.5.1",
- "request": "2.83.0",
- "rimraf": "2.6.2",
- "semver": "5.5.0",
- "table": "4.0.2",
- "through": "2.3.8",
- "unzip": "0.1.11",
- "which": "1.3.0",
- "yargs": "4.8.1"
- }
- },
- "font-awesome": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/font-awesome/-/font-awesome-4.7.0.tgz",
- "integrity": "sha1-j6jPBBGhoxr9B7BtKQK7n8gVoTM="
- },
- "for-in": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz",
- "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA="
- },
- "for-own": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/for-own/-/for-own-0.1.5.tgz",
- "integrity": "sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4=",
- "requires": {
- "for-in": "1.0.2"
- }
- },
- "foreach": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz",
- "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k="
- },
- "forever-agent": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
- "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
- },
- "form-data": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz",
- "integrity": "sha1-b7lPvXGIUwbXPRXMSX/kzE7NRL8=",
- "requires": {
- "asynckit": "0.4.0",
- "combined-stream": "1.0.5",
- "mime-types": "2.1.17"
- }
- },
- "fs-extra": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz",
- "integrity": "sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==",
- "requires": {
- "graceful-fs": "4.1.11",
- "jsonfile": "4.0.0",
- "universalify": "0.1.1"
- }
- },
- "fs.realpath": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
- },
- "fsevents": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.1.3.tgz",
- "integrity": "sha512-WIr7iDkdmdbxu/Gh6eKEZJL6KPE74/5MEsf2whTOFNxbIoIixogroLdKYqB6FDav4Wavh/lZdzzd3b2KxIXC5Q==",
- "optional": true,
- "requires": {
- "nan": "2.8.0",
- "node-pre-gyp": "0.6.39"
- },
- "dependencies": {
- "abbrev": {
- "version": "1.1.0",
- "bundled": true,
- "optional": true
- },
- "ajv": {
- "version": "4.11.8",
- "bundled": true,
- "optional": true,
- "requires": {
- "co": "4.6.0",
- "json-stable-stringify": "1.0.1"
- }
- },
- "ansi-regex": {
- "version": "2.1.1",
- "bundled": true
- },
- "aproba": {
- "version": "1.1.1",
- "bundled": true,
- "optional": true
- },
- "are-we-there-yet": {
- "version": "1.1.4",
- "bundled": true,
- "optional": true,
- "requires": {
- "delegates": "1.0.0",
- "readable-stream": "2.2.9"
- }
- },
- "asn1": {
- "version": "0.2.3",
- "bundled": true,
- "optional": true
- },
- "assert-plus": {
- "version": "0.2.0",
- "bundled": true,
- "optional": true
- },
- "asynckit": {
- "version": "0.4.0",
- "bundled": true,
- "optional": true
- },
- "aws-sign2": {
- "version": "0.6.0",
- "bundled": true,
- "optional": true
- },
- "aws4": {
- "version": "1.6.0",
- "bundled": true,
- "optional": true
- },
- "balanced-match": {
- "version": "0.4.2",
- "bundled": true
- },
- "bcrypt-pbkdf": {
- "version": "1.0.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "tweetnacl": "0.14.5"
- }
- },
- "block-stream": {
- "version": "0.0.9",
- "bundled": true,
- "requires": {
- "inherits": "2.0.3"
- }
- },
- "boom": {
- "version": "2.10.1",
- "bundled": true,
- "requires": {
- "hoek": "2.16.3"
- }
- },
- "brace-expansion": {
- "version": "1.1.7",
- "bundled": true,
- "requires": {
- "balanced-match": "0.4.2",
- "concat-map": "0.0.1"
- }
- },
- "buffer-shims": {
- "version": "1.0.0",
- "bundled": true
- },
- "caseless": {
- "version": "0.12.0",
- "bundled": true,
- "optional": true
- },
- "co": {
- "version": "4.6.0",
- "bundled": true,
- "optional": true
- },
- "code-point-at": {
- "version": "1.1.0",
- "bundled": true
- },
- "combined-stream": {
- "version": "1.0.5",
- "bundled": true,
- "requires": {
- "delayed-stream": "1.0.0"
- }
- },
- "concat-map": {
- "version": "0.0.1",
- "bundled": true
- },
- "console-control-strings": {
- "version": "1.1.0",
- "bundled": true
- },
- "core-util-is": {
- "version": "1.0.2",
- "bundled": true
- },
- "cryptiles": {
- "version": "2.0.5",
- "bundled": true,
- "requires": {
- "boom": "2.10.1"
- }
- },
- "dashdash": {
- "version": "1.14.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "assert-plus": "1.0.0"
- },
- "dependencies": {
- "assert-plus": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- }
- }
- },
- "debug": {
- "version": "2.6.8",
- "bundled": true,
- "optional": true,
- "requires": {
- "ms": "2.0.0"
- }
- },
- "deep-extend": {
- "version": "0.4.2",
- "bundled": true,
- "optional": true
- },
- "delayed-stream": {
- "version": "1.0.0",
- "bundled": true
- },
- "delegates": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- },
- "detect-libc": {
- "version": "1.0.2",
- "bundled": true,
- "optional": true
- },
- "ecc-jsbn": {
- "version": "0.1.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "jsbn": "0.1.1"
- }
- },
- "extend": {
- "version": "3.0.1",
- "bundled": true,
- "optional": true
- },
- "extsprintf": {
- "version": "1.0.2",
- "bundled": true
- },
- "forever-agent": {
- "version": "0.6.1",
- "bundled": true,
- "optional": true
- },
- "form-data": {
- "version": "2.1.4",
- "bundled": true,
- "optional": true,
- "requires": {
- "asynckit": "0.4.0",
- "combined-stream": "1.0.5",
- "mime-types": "2.1.15"
- }
- },
- "fs.realpath": {
- "version": "1.0.0",
- "bundled": true
- },
- "fstream": {
- "version": "1.0.11",
- "bundled": true,
- "requires": {
- "graceful-fs": "4.1.11",
- "inherits": "2.0.3",
- "mkdirp": "0.5.1",
- "rimraf": "2.6.1"
- }
- },
- "fstream-ignore": {
- "version": "1.0.5",
- "bundled": true,
- "optional": true,
- "requires": {
- "fstream": "1.0.11",
- "inherits": "2.0.3",
- "minimatch": "3.0.4"
- }
- },
- "gauge": {
- "version": "2.7.4",
- "bundled": true,
- "optional": true,
- "requires": {
- "aproba": "1.1.1",
- "console-control-strings": "1.1.0",
- "has-unicode": "2.0.1",
- "object-assign": "4.1.1",
- "signal-exit": "3.0.2",
- "string-width": "1.0.2",
- "strip-ansi": "3.0.1",
- "wide-align": "1.1.2"
- }
- },
- "getpass": {
- "version": "0.1.7",
- "bundled": true,
- "optional": true,
- "requires": {
- "assert-plus": "1.0.0"
- },
- "dependencies": {
- "assert-plus": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- }
- }
- },
- "glob": {
- "version": "7.1.2",
- "bundled": true,
- "requires": {
- "fs.realpath": "1.0.0",
- "inflight": "1.0.6",
- "inherits": "2.0.3",
- "minimatch": "3.0.4",
- "once": "1.4.0",
- "path-is-absolute": "1.0.1"
- }
- },
- "graceful-fs": {
- "version": "4.1.11",
- "bundled": true
- },
- "har-schema": {
- "version": "1.0.5",
- "bundled": true,
- "optional": true
- },
- "har-validator": {
- "version": "4.2.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "ajv": "4.11.8",
- "har-schema": "1.0.5"
- }
- },
- "has-unicode": {
- "version": "2.0.1",
- "bundled": true,
- "optional": true
- },
- "hawk": {
- "version": "3.1.3",
- "bundled": true,
- "requires": {
- "boom": "2.10.1",
- "cryptiles": "2.0.5",
- "hoek": "2.16.3",
- "sntp": "1.0.9"
- }
- },
- "hoek": {
- "version": "2.16.3",
- "bundled": true
- },
- "http-signature": {
- "version": "1.1.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "assert-plus": "0.2.0",
- "jsprim": "1.4.0",
- "sshpk": "1.13.0"
- }
- },
- "inflight": {
- "version": "1.0.6",
- "bundled": true,
- "requires": {
- "once": "1.4.0",
- "wrappy": "1.0.2"
- }
- },
- "inherits": {
- "version": "2.0.3",
- "bundled": true
- },
- "ini": {
- "version": "1.3.4",
- "bundled": true,
- "optional": true
- },
- "is-fullwidth-code-point": {
- "version": "1.0.0",
- "bundled": true,
- "requires": {
- "number-is-nan": "1.0.1"
- }
- },
- "is-typedarray": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- },
- "isarray": {
- "version": "1.0.0",
- "bundled": true
- },
- "isstream": {
- "version": "0.1.2",
- "bundled": true,
- "optional": true
- },
- "jodid25519": {
- "version": "1.0.2",
- "bundled": true,
- "optional": true,
- "requires": {
- "jsbn": "0.1.1"
- }
- },
- "jsbn": {
- "version": "0.1.1",
- "bundled": true,
- "optional": true
- },
- "json-schema": {
- "version": "0.2.3",
- "bundled": true,
- "optional": true
- },
- "json-stable-stringify": {
- "version": "1.0.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "jsonify": "0.0.0"
- }
- },
- "json-stringify-safe": {
- "version": "5.0.1",
- "bundled": true,
- "optional": true
- },
- "jsonify": {
- "version": "0.0.0",
- "bundled": true,
- "optional": true
- },
- "jsprim": {
- "version": "1.4.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "assert-plus": "1.0.0",
- "extsprintf": "1.0.2",
- "json-schema": "0.2.3",
- "verror": "1.3.6"
- },
- "dependencies": {
- "assert-plus": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- }
- }
- },
- "mime-db": {
- "version": "1.27.0",
- "bundled": true
- },
- "mime-types": {
- "version": "2.1.15",
- "bundled": true,
- "requires": {
- "mime-db": "1.27.0"
- }
- },
- "minimatch": {
- "version": "3.0.4",
- "bundled": true,
- "requires": {
- "brace-expansion": "1.1.7"
- }
- },
- "minimist": {
- "version": "0.0.8",
- "bundled": true
- },
- "mkdirp": {
- "version": "0.5.1",
- "bundled": true,
- "requires": {
- "minimist": "0.0.8"
- }
- },
- "ms": {
- "version": "2.0.0",
- "bundled": true,
- "optional": true
- },
- "node-pre-gyp": {
- "version": "0.6.39",
- "bundled": true,
- "optional": true,
- "requires": {
- "detect-libc": "1.0.2",
- "hawk": "3.1.3",
- "mkdirp": "0.5.1",
- "nopt": "4.0.1",
- "npmlog": "4.1.0",
- "rc": "1.2.1",
- "request": "2.81.0",
- "rimraf": "2.6.1",
- "semver": "5.3.0",
- "tar": "2.2.1",
- "tar-pack": "3.4.0"
- }
- },
- "nopt": {
- "version": "4.0.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "abbrev": "1.1.0",
- "osenv": "0.1.4"
- }
- },
- "npmlog": {
- "version": "4.1.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "are-we-there-yet": "1.1.4",
- "console-control-strings": "1.1.0",
- "gauge": "2.7.4",
- "set-blocking": "2.0.0"
- }
- },
- "number-is-nan": {
- "version": "1.0.1",
- "bundled": true
- },
- "oauth-sign": {
- "version": "0.8.2",
- "bundled": true,
- "optional": true
- },
- "object-assign": {
- "version": "4.1.1",
- "bundled": true,
- "optional": true
- },
- "once": {
- "version": "1.4.0",
- "bundled": true,
- "requires": {
- "wrappy": "1.0.2"
- }
- },
- "os-homedir": {
- "version": "1.0.2",
- "bundled": true,
- "optional": true
- },
- "os-tmpdir": {
- "version": "1.0.2",
- "bundled": true,
- "optional": true
- },
- "osenv": {
- "version": "0.1.4",
- "bundled": true,
- "optional": true,
- "requires": {
- "os-homedir": "1.0.2",
- "os-tmpdir": "1.0.2"
- }
- },
- "path-is-absolute": {
- "version": "1.0.1",
- "bundled": true
- },
- "performance-now": {
- "version": "0.2.0",
- "bundled": true,
- "optional": true
- },
- "process-nextick-args": {
- "version": "1.0.7",
- "bundled": true
- },
- "punycode": {
- "version": "1.4.1",
- "bundled": true,
- "optional": true
- },
- "qs": {
- "version": "6.4.0",
- "bundled": true,
- "optional": true
- },
- "rc": {
- "version": "1.2.1",
- "bundled": true,
- "optional": true,
- "requires": {
- "deep-extend": "0.4.2",
- "ini": "1.3.4",
- "minimist": "1.2.0",
- "strip-json-comments": "2.0.1"
- },
- "dependencies": {
- "minimist": {
- "version": "1.2.0",
- "bundled": true,
- "optional": true
- }
- }
- },
- "readable-stream": {
- "version": "2.2.9",
- "bundled": true,
- "requires": {
- "buffer-shims": "1.0.0",
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "1.0.0",
- "process-nextick-args": "1.0.7",
- "string_decoder": "1.0.1",
- "util-deprecate": "1.0.2"
- }
- },
- "request": {
- "version": "2.81.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "aws-sign2": "0.6.0",
- "aws4": "1.6.0",
- "caseless": "0.12.0",
- "combined-stream": "1.0.5",
- "extend": "3.0.1",
- "forever-agent": "0.6.1",
- "form-data": "2.1.4",
- "har-validator": "4.2.1",
- "hawk": "3.1.3",
- "http-signature": "1.1.1",
- "is-typedarray": "1.0.0",
- "isstream": "0.1.2",
- "json-stringify-safe": "5.0.1",
- "mime-types": "2.1.15",
- "oauth-sign": "0.8.2",
- "performance-now": "0.2.0",
- "qs": "6.4.0",
- "safe-buffer": "5.0.1",
- "stringstream": "0.0.5",
- "tough-cookie": "2.3.2",
- "tunnel-agent": "0.6.0",
- "uuid": "3.0.1"
- }
- },
- "rimraf": {
- "version": "2.6.1",
- "bundled": true,
- "requires": {
- "glob": "7.1.2"
- }
- },
- "safe-buffer": {
- "version": "5.0.1",
- "bundled": true
- },
- "semver": {
- "version": "5.3.0",
- "bundled": true,
- "optional": true
- },
- "set-blocking": {
- "version": "2.0.0",
- "bundled": true,
- "optional": true
- },
- "signal-exit": {
- "version": "3.0.2",
- "bundled": true,
- "optional": true
- },
- "sntp": {
- "version": "1.0.9",
- "bundled": true,
- "requires": {
- "hoek": "2.16.3"
- }
- },
- "sshpk": {
- "version": "1.13.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "asn1": "0.2.3",
- "assert-plus": "1.0.0",
- "bcrypt-pbkdf": "1.0.1",
- "dashdash": "1.14.1",
- "ecc-jsbn": "0.1.1",
- "getpass": "0.1.7",
- "jodid25519": "1.0.2",
- "jsbn": "0.1.1",
- "tweetnacl": "0.14.5"
- },
- "dependencies": {
- "assert-plus": {
- "version": "1.0.0",
- "bundled": true,
- "optional": true
- }
- }
- },
- "string-width": {
- "version": "1.0.2",
- "bundled": true,
- "requires": {
- "code-point-at": "1.1.0",
- "is-fullwidth-code-point": "1.0.0",
- "strip-ansi": "3.0.1"
- }
- },
- "string_decoder": {
- "version": "1.0.1",
- "bundled": true,
- "requires": {
- "safe-buffer": "5.0.1"
- }
- },
- "stringstream": {
- "version": "0.0.5",
- "bundled": true,
- "optional": true
- },
- "strip-ansi": {
- "version": "3.0.1",
- "bundled": true,
- "requires": {
- "ansi-regex": "2.1.1"
- }
- },
- "strip-json-comments": {
- "version": "2.0.1",
- "bundled": true,
- "optional": true
- },
- "tar": {
- "version": "2.2.1",
- "bundled": true,
- "requires": {
- "block-stream": "0.0.9",
- "fstream": "1.0.11",
- "inherits": "2.0.3"
- }
- },
- "tar-pack": {
- "version": "3.4.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "debug": "2.6.8",
- "fstream": "1.0.11",
- "fstream-ignore": "1.0.5",
- "once": "1.4.0",
- "readable-stream": "2.2.9",
- "rimraf": "2.6.1",
- "tar": "2.2.1",
- "uid-number": "0.0.6"
- }
- },
- "tough-cookie": {
- "version": "2.3.2",
- "bundled": true,
- "optional": true,
- "requires": {
- "punycode": "1.4.1"
- }
- },
- "tunnel-agent": {
- "version": "0.6.0",
- "bundled": true,
- "optional": true,
- "requires": {
- "safe-buffer": "5.0.1"
- }
- },
- "tweetnacl": {
- "version": "0.14.5",
- "bundled": true,
- "optional": true
- },
- "uid-number": {
- "version": "0.0.6",
- "bundled": true,
- "optional": true
- },
- "util-deprecate": {
- "version": "1.0.2",
- "bundled": true
- },
- "uuid": {
- "version": "3.0.1",
- "bundled": true,
- "optional": true
- },
- "verror": {
- "version": "1.3.6",
- "bundled": true,
- "optional": true,
- "requires": {
- "extsprintf": "1.0.2"
- }
- },
- "wide-align": {
- "version": "1.1.2",
- "bundled": true,
- "optional": true,
- "requires": {
- "string-width": "1.0.2"
- }
- },
- "wrappy": {
- "version": "1.0.2",
- "bundled": true
- }
- }
- },
- "fstream": {
- "version": "0.1.31",
- "resolved": "https://registry.npmjs.org/fstream/-/fstream-0.1.31.tgz",
- "integrity": "sha1-czfwWPu7vvqMn1YaKMqwhJICyYg=",
- "requires": {
- "graceful-fs": "3.0.11",
- "inherits": "2.0.3",
- "mkdirp": "0.5.1",
- "rimraf": "2.6.2"
- },
- "dependencies": {
- "graceful-fs": {
- "version": "3.0.11",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.11.tgz",
- "integrity": "sha1-dhPHeKGv6mLyXGMKCG1/Osu92Bg=",
- "requires": {
- "natives": "1.1.1"
- }
- }
- }
- },
- "function-bind": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
- "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
- },
- "functional-red-black-tree": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
- "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc="
- },
- "get-caller-file": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.2.tgz",
- "integrity": "sha1-9wLmMSfn4jHBYKgMFVSstw1QR+U="
- },
- "get-stream": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz",
- "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ="
- },
- "getpass": {
- "version": "0.1.7",
- "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
- "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
- "requires": {
- "assert-plus": "1.0.0"
- }
- },
- "github": {
- "version": "0.2.4",
- "resolved": "https://registry.npmjs.org/github/-/github-0.2.4.tgz",
- "integrity": "sha1-JPp/DhP6EblGr5ETTFGYKpHOU4s=",
- "requires": {
- "mime": "1.6.0"
- }
- },
- "glob": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz",
- "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==",
- "requires": {
- "fs.realpath": "1.0.0",
- "inflight": "1.0.6",
- "inherits": "2.0.3",
- "minimatch": "3.0.4",
- "once": "1.4.0",
- "path-is-absolute": "1.0.1"
- }
- },
- "glob-base": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz",
- "integrity": "sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=",
- "requires": {
- "glob-parent": "2.0.0",
- "is-glob": "2.0.1"
- }
- },
- "glob-parent": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz",
- "integrity": "sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=",
- "requires": {
- "is-glob": "2.0.1"
- }
- },
- "global": {
- "version": "4.3.2",
- "resolved": "https://registry.npmjs.org/global/-/global-4.3.2.tgz",
- "integrity": "sha1-52mJJopsdMOJCLEwWxD8DjlOnQ8=",
- "requires": {
- "min-document": "2.19.0",
- "process": "0.5.2"
- }
- },
- "globals": {
- "version": "9.18.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz",
- "integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ=="
- },
- "globby": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz",
- "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=",
- "requires": {
- "array-union": "1.0.2",
- "arrify": "1.0.1",
- "glob": "7.1.2",
- "object-assign": "4.1.1",
- "pify": "2.3.0",
- "pinkie-promise": "2.0.1"
- },
- "dependencies": {
- "pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
- }
- }
- },
- "got": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/got/-/got-7.1.0.tgz",
- "integrity": "sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw==",
- "requires": {
- "decompress-response": "3.3.0",
- "duplexer3": "0.1.4",
- "get-stream": "3.0.0",
- "is-plain-obj": "1.1.0",
- "is-retry-allowed": "1.1.0",
- "is-stream": "1.1.0",
- "isurl": "1.0.0",
- "lowercase-keys": "1.0.0",
- "p-cancelable": "0.3.0",
- "p-timeout": "1.2.1",
- "safe-buffer": "5.1.1",
- "timed-out": "4.0.1",
- "url-parse-lax": "1.0.0",
- "url-to-options": "1.0.1"
- }
- },
- "graceful-fs": {
- "version": "4.1.11",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz",
- "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg="
- },
- "har-schema": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
- "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
- },
- "har-validator": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz",
- "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=",
- "requires": {
- "ajv": "5.5.2",
- "har-schema": "2.0.0"
- }
- },
- "has": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/has/-/has-1.0.1.tgz",
- "integrity": "sha1-hGFzP1OLCDfJNh45qauelwTcLyg=",
- "requires": {
- "function-bind": "1.1.1"
- }
- },
- "has-ansi": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
- "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
- "requires": {
- "ansi-regex": "2.1.1"
- }
- },
- "has-flag": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz",
- "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE="
- },
- "has-symbol-support-x": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.1.tgz",
- "integrity": "sha512-JkaetveU7hFbqnAC1EV1sF4rlojU2D4Usc5CmS69l6NfmPDnpnFUegzFg33eDkkpNCxZ0mQp65HwUDrNFS/8MA=="
- },
- "has-to-string-tag-x": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz",
- "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==",
- "requires": {
- "has-symbol-support-x": "1.4.1"
- }
- },
- "hash-base": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz",
- "integrity": "sha1-ZuodhW206KVHDK32/OI65SRO8uE=",
- "requires": {
- "inherits": "2.0.3"
- }
- },
- "hash.js": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.3.tgz",
- "integrity": "sha512-/UETyP0W22QILqS+6HowevwhEFJ3MBJnwTf75Qob9Wz9t0DPuisL8kW8YZMK62dHAKE1c1p+gY1TtOLY+USEHA==",
- "requires": {
- "inherits": "2.0.3",
- "minimalistic-assert": "1.0.0"
- }
- },
- "hawk": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz",
- "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==",
- "requires": {
- "boom": "4.3.1",
- "cryptiles": "3.1.2",
- "hoek": "4.2.0",
- "sntp": "2.1.0"
- }
- },
- "hmac-drbg": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz",
- "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=",
- "requires": {
- "hash.js": "1.1.3",
- "minimalistic-assert": "1.0.0",
- "minimalistic-crypto-utils": "1.0.1"
- }
- },
- "hoek": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz",
- "integrity": "sha512-v0XCLxICi9nPfYrS9RL8HbYnXi9obYAeLbSP00BmnZwCK9+Ih9WOjoZ8YoHCoav2csqn4FOz4Orldsy2dmDwmQ=="
- },
- "hoist-non-react-statics": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.3.1.tgz",
- "integrity": "sha1-ND24TGAYxlB3iJgkATWhQg7iLOA="
- },
- "home-or-tmp": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz",
- "integrity": "sha1-42w/LSyufXRqhX440Y1fMqeILbg=",
- "requires": {
- "os-homedir": "1.0.2",
- "os-tmpdir": "1.0.2"
- }
- },
- "hosted-git-info": {
- "version": "2.5.0",
- "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.5.0.tgz",
- "integrity": "sha512-pNgbURSuab90KbTqvRPsseaTxOJCZBD0a7t+haSN33piP9cCM4l0CqdzAif2hUqm716UovKB2ROmiabGAKVXyg=="
- },
- "html-comment-regex": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/html-comment-regex/-/html-comment-regex-1.1.1.tgz",
- "integrity": "sha1-ZouTd26q5V696POtRkswekljYl4="
- },
- "http-signature": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
- "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
- "requires": {
- "assert-plus": "1.0.0",
- "jsprim": "1.4.1",
- "sshpk": "1.13.1"
- }
- },
- "https-browserify": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz",
- "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM="
- },
- "iconv-lite": {
- "version": "0.4.19",
- "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz",
- "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ=="
- },
- "icss-replace-symbols": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz",
- "integrity": "sha1-Bupvg2ead0njhs/h/oEq5dsiPe0="
- },
- "icss-utils": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-2.1.0.tgz",
- "integrity": "sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI=",
- "requires": {
- "postcss": "6.0.16"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- },
- "dependencies": {
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss": {
- "version": "6.0.16",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.16.tgz",
- "integrity": "sha512-m758RWPmSjFH/2MyyG3UOW1fgYbR9rtdzz5UNJnlm7OLtu4B2h9C6gi+bE4qFKghsBRFfZT8NzoQBs6JhLotoA==",
- "requires": {
- "chalk": "2.3.0",
- "source-map": "0.6.1",
- "supports-color": "5.1.0"
- }
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- },
- "supports-color": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz",
- "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "ieee754": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.8.tgz",
- "integrity": "sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q="
- },
- "ignore": {
- "version": "3.3.7",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.7.tgz",
- "integrity": "sha512-YGG3ejvBNHRqu0559EOxxNFihD0AjpvHlC/pdGKd3X3ofe+CoJkYazwNJYTNebqpPKN+VVQbh4ZFn1DivMNuHA=="
- },
- "imurmurhash": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
- "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o="
- },
- "indexes-of": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz",
- "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc="
- },
- "indexof": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz",
- "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10="
- },
- "inflight": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
- "requires": {
- "once": "1.4.0",
- "wrappy": "1.0.2"
- }
- },
- "inherits": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
- "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
- },
- "inquirer": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz",
- "integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==",
- "requires": {
- "ansi-escapes": "3.0.0",
- "chalk": "2.3.0",
- "cli-cursor": "2.1.0",
- "cli-width": "2.2.0",
- "external-editor": "2.1.0",
- "figures": "2.0.0",
- "lodash": "4.17.4",
- "mute-stream": "0.0.7",
- "run-async": "2.3.0",
- "rx-lite": "4.0.8",
- "rx-lite-aggregates": "4.0.8",
- "string-width": "2.1.1",
- "strip-ansi": "4.0.0",
- "through": "2.3.8"
- },
- "dependencies": {
- "ansi-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
- "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
- },
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- }
- },
- "strip-ansi": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
- "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
- "requires": {
- "ansi-regex": "3.0.0"
- }
- },
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "interpret": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.1.0.tgz",
- "integrity": "sha1-ftGxQQxqDg94z5XTuEQMY/eLhhQ="
- },
- "invariant": {
- "version": "2.2.2",
- "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.2.tgz",
- "integrity": "sha1-nh9WrArNtr8wMwbzOL47IErmA2A=",
- "requires": {
- "loose-envify": "1.3.1"
- }
- },
- "invert-kv": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz",
- "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY="
- },
- "is-absolute-url": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-2.1.0.tgz",
- "integrity": "sha1-UFMN+4T8yap9vnhS6Do3uTufKqY="
- },
- "is-arrayish": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
- "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0="
- },
- "is-binary-path": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz",
- "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=",
- "requires": {
- "binary-extensions": "1.11.0"
- }
- },
- "is-buffer": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
- "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
- },
- "is-builtin-module": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz",
- "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=",
- "requires": {
- "builtin-modules": "1.1.1"
- }
- },
- "is-callable": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.3.tgz",
- "integrity": "sha1-hut1OSgF3cM69xySoO7fdO52BLI="
- },
- "is-date-object": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz",
- "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY="
- },
- "is-dotfile": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.3.tgz",
- "integrity": "sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE="
- },
- "is-equal-shallow": {
- "version": "0.1.3",
- "resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz",
- "integrity": "sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ=",
- "requires": {
- "is-primitive": "2.0.0"
- }
- },
- "is-extendable": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz",
- "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik="
- },
- "is-extglob": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz",
- "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA="
- },
- "is-finite": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz",
- "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=",
- "requires": {
- "number-is-nan": "1.0.1"
- }
- },
- "is-fullwidth-code-point": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
- "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8="
- },
- "is-function": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.1.tgz",
- "integrity": "sha1-Es+5i2W1fdPRk6MSH19uL0N2ArU="
- },
- "is-glob": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz",
- "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=",
- "requires": {
- "is-extglob": "1.0.0"
- }
- },
- "is-in-browser": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz",
- "integrity": "sha1-Vv9NtoOgeMYILrldrX3GLh0E+DU="
- },
- "is-number": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz",
- "integrity": "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=",
- "requires": {
- "kind-of": "3.2.2"
- }
- },
- "is-object": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.1.tgz",
- "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA="
- },
- "is-path-cwd": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-1.0.0.tgz",
- "integrity": "sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0="
- },
- "is-path-in-cwd": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz",
- "integrity": "sha1-ZHdYK4IU1gI0YJRWcAO+ip6sBNw=",
- "requires": {
- "is-path-inside": "1.0.1"
- }
- },
- "is-path-inside": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz",
- "integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=",
- "requires": {
- "path-is-inside": "1.0.2"
- }
- },
- "is-plain-obj": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz",
- "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4="
- },
- "is-plain-object": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
- "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
- "requires": {
- "isobject": "3.0.1"
- }
- },
- "is-posix-bracket": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz",
- "integrity": "sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q="
- },
- "is-primitive": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-2.0.0.tgz",
- "integrity": "sha1-IHurkWOEmcB7Kt8kCkGochADRXU="
- },
- "is-promise": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz",
- "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o="
- },
- "is-regex": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz",
- "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=",
- "requires": {
- "has": "1.0.1"
- }
- },
- "is-resolvable": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz",
- "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg=="
- },
- "is-retry-allowed": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz",
- "integrity": "sha1-EaBgVotnM5REAz0BJaYaINVk+zQ="
- },
- "is-stream": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz",
- "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ="
- },
- "is-svg": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-svg/-/is-svg-2.1.0.tgz",
- "integrity": "sha1-z2EJDaDZ77yrhyLeum8DIgjbsOk=",
- "requires": {
- "html-comment-regex": "1.1.1"
- }
- },
- "is-symbol": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.1.tgz",
- "integrity": "sha1-PMWfAAJRlLarLjjbrmaJJWtmBXI="
- },
- "is-typedarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
- "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
- },
- "is-utf8": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz",
- "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI="
- },
- "isarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
- "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
- },
- "isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA="
- },
- "isobject": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
- "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8="
- },
- "isomorphic-fetch": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz",
- "integrity": "sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=",
- "requires": {
- "node-fetch": "1.7.3",
- "whatwg-fetch": "2.0.3"
- }
- },
- "isstream": {
- "version": "0.1.2",
- "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
- "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
- },
- "isurl": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz",
- "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==",
- "requires": {
- "has-to-string-tag-x": "1.4.1",
- "is-object": "1.0.1"
- }
- },
- "js-base64": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.4.1.tgz",
- "integrity": "sha512-2h586r2I/CqU7z1aa1kBgWaVAXWAZK+zHnceGi/jFgn7+7VSluxYer/i3xOZVearCxxXvyDkLtTBo+OeJCA3kA=="
- },
- "js-tokens": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz",
- "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls="
- },
- "js-yaml": {
- "version": "3.7.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.7.0.tgz",
- "integrity": "sha1-XJZ93YN6m/3KXy3oQlOr6KHAO4A=",
- "requires": {
- "argparse": "1.0.9",
- "esprima": "2.7.3"
- }
- },
- "jsbn": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
- "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
- "optional": true
- },
- "jsesc": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-1.3.0.tgz",
- "integrity": "sha1-RsP+yMGJKxKwgz25vHYiF226s0s="
- },
- "json-loader": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/json-loader/-/json-loader-0.5.7.tgz",
- "integrity": "sha512-QLPs8Dj7lnf3e3QYS1zkCo+4ZwqOiF9d/nZnYozTISxXWCfNs9yuky5rJw4/W34s7POaNlbZmQGaB5NiXCbP4w=="
- },
- "json-schema": {
- "version": "0.2.3",
- "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
- "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
- },
- "json-schema-traverse": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz",
- "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A="
- },
- "json-stable-stringify-without-jsonify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
- "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE="
- },
- "json-stringify-safe": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
- "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
- },
- "json5": {
- "version": "0.5.1",
- "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz",
- "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE="
- },
- "jsonfile": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
- "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
- "requires": {
- "graceful-fs": "4.1.11"
- }
- },
- "jsprim": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
- "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
- "requires": {
- "assert-plus": "1.0.0",
- "extsprintf": "1.3.0",
- "json-schema": "0.2.3",
- "verror": "1.10.0"
- }
- },
- "jss": {
- "version": "9.5.1",
- "resolved": "https://registry.npmjs.org/jss/-/jss-9.5.1.tgz",
- "integrity": "sha512-py//ogG1xeztpEDmosJtrkfUXibx3qiAr+1GQvfLHp7azpqkzTPLCnainDgH7Zn0q6S7rcM1eINrVT9n/r5f2w==",
- "requires": {
- "is-in-browser": "1.1.3",
- "symbol-observable": "1.1.0",
- "warning": "3.0.0"
- }
- },
- "jss-camel-case": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/jss-camel-case/-/jss-camel-case-6.0.0.tgz",
- "integrity": "sha512-XAYa7JpGkLdlLgEfuzSQSVONRzSVvv4Tvyv5H8hLmJuHeFHTWwVrJrW1Cg/buED3izXKwTU2KBGpeXjIR5Eaew=="
- },
- "jss-compose": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/jss-compose/-/jss-compose-5.0.0.tgz",
- "integrity": "sha512-YofRYuiA0+VbeOw0VjgkyO380sA4+TWDrW52nSluD9n+1FWOlDzNbgpZ/Sb3Y46+DcAbOS21W5jo6SAqUEiuwA==",
- "requires": {
- "warning": "3.0.0"
- }
- },
- "jss-default-unit": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/jss-default-unit/-/jss-default-unit-8.0.2.tgz",
- "integrity": "sha512-WxNHrF/18CdoAGw2H0FqOEvJdREXVXLazn7PQYU7V6/BWkCV0GkmWsppNiExdw8dP4TU1ma1dT9zBNJ95feLmg=="
- },
- "jss-expand": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/jss-expand/-/jss-expand-5.1.0.tgz",
- "integrity": "sha512-WTxmNipgj0V8kr8gc8Gc6Et7uQZH60H7FFNG9zZHjR6TPJoj7TDK+/EBxwRHtCRQD4B8RTwoa7MyEKD4ReKfXw=="
- },
- "jss-extend": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/jss-extend/-/jss-extend-6.1.0.tgz",
- "integrity": "sha512-bSNwLDOZnMxABsUqvq2lwLJ/MMFs8ThligiLZBOUeyoZCoHqAbcTghvunk2QDVxiOhRTDS57VvhXVJZETW58Bw==",
- "requires": {
- "warning": "3.0.0"
- }
- },
- "jss-global": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/jss-global/-/jss-global-3.0.0.tgz",
- "integrity": "sha512-wxYn7vL+TImyQYGAfdplg7yaxnPQ9RaXY/cIA8hawaVnmmWxDHzBK32u1y+RAvWboa3lW83ya3nVZ/C+jyjZ5Q=="
- },
- "jss-nested": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/jss-nested/-/jss-nested-6.0.1.tgz",
- "integrity": "sha512-rn964TralHOZxoyEgeq3hXY8hyuCElnvQoVrQwKHVmu55VRDd6IqExAx9be5HgK0yN/+hQdgAXQl/GUrBbbSTA==",
- "requires": {
- "warning": "3.0.0"
- }
- },
- "jss-preset-default": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/jss-preset-default/-/jss-preset-default-4.1.0.tgz",
- "integrity": "sha512-C6SyfDg99EFrt0bv0lsg2OEN3e72Fry9/hMPW2sO6MSVsx+vc/Og6TJJY3F2MY5Z/V2/wlARHVmCb3TYMr0zFA==",
- "requires": {
- "jss-camel-case": "6.0.0",
- "jss-compose": "5.0.0",
- "jss-default-unit": "8.0.2",
- "jss-expand": "5.1.0",
- "jss-extend": "6.1.0",
- "jss-global": "3.0.0",
- "jss-nested": "6.0.1",
- "jss-props-sort": "6.0.0",
- "jss-template": "1.0.1",
- "jss-vendor-prefixer": "7.0.0"
- }
- },
- "jss-props-sort": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/jss-props-sort/-/jss-props-sort-6.0.0.tgz",
- "integrity": "sha512-E89UDcrphmI0LzmvYk25Hp4aE5ZBsXqMWlkFXS0EtPkunJkRr+WXdCNYbXbksIPnKlBenGB9OxzQY+mVc70S+g=="
- },
- "jss-template": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/jss-template/-/jss-template-1.0.1.tgz",
- "integrity": "sha512-m5BqEWha17fmIVXm1z8xbJhY6GFJxNB9H68GVnCWPyGYfxiAgY9WTQyvDAVj+pYRgrXSOfN5V1T4+SzN1sJTeg==",
- "requires": {
- "warning": "3.0.0"
- }
- },
- "jss-vendor-prefixer": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/jss-vendor-prefixer/-/jss-vendor-prefixer-7.0.0.tgz",
- "integrity": "sha512-Agd+FKmvsI0HLcYXkvy8GYOw3AAASBUpsmIRvVQheps+JWaN892uFOInTr0DRydwaD91vSSUCU4NssschvF7MA==",
- "requires": {
- "css-vendor": "0.3.8"
- }
- },
- "jsx-ast-utils": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.0.1.tgz",
- "integrity": "sha1-6AGxs5mF4g//yHtA43SAgOLcrH8=",
- "requires": {
- "array-includes": "3.0.3"
- }
- },
- "keycode": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/keycode/-/keycode-2.1.9.tgz",
- "integrity": "sha1-lkojxU5IiUBbSGGlyfBIDUUUHfo="
- },
- "kind-of": {
- "version": "3.2.2",
- "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
- "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
- "requires": {
- "is-buffer": "1.1.6"
- }
- },
- "lazy-cache": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz",
- "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4="
- },
- "lcid": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz",
- "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=",
- "requires": {
- "invert-kv": "1.0.0"
- }
- },
- "levn": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
- "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=",
- "requires": {
- "prelude-ls": "1.1.2",
- "type-check": "0.3.2"
- }
- },
- "load-json-file": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz",
- "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=",
- "requires": {
- "graceful-fs": "4.1.11",
- "parse-json": "2.2.0",
- "pify": "2.3.0",
- "strip-bom": "3.0.0"
- },
- "dependencies": {
- "pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
- }
- }
- },
- "loader-fs-cache": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/loader-fs-cache/-/loader-fs-cache-1.0.1.tgz",
- "integrity": "sha1-VuC/CL2XCLJqdltoUJhAyN7J/bw=",
- "requires": {
- "find-cache-dir": "0.1.1",
- "mkdirp": "0.5.1"
- },
- "dependencies": {
- "find-cache-dir": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-0.1.1.tgz",
- "integrity": "sha1-yN765XyKUqinhPnjHFfHQumToLk=",
- "requires": {
- "commondir": "1.0.1",
- "mkdirp": "0.5.1",
- "pkg-dir": "1.0.0"
- }
- },
- "find-up": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz",
- "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=",
- "requires": {
- "path-exists": "2.1.0",
- "pinkie-promise": "2.0.1"
- }
- },
- "path-exists": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz",
- "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=",
- "requires": {
- "pinkie-promise": "2.0.1"
- }
- },
- "pkg-dir": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-1.0.0.tgz",
- "integrity": "sha1-ektQio1bstYp1EcFb/TpyTFM89Q=",
- "requires": {
- "find-up": "1.1.2"
- }
- }
- }
- },
- "loader-runner": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.3.0.tgz",
- "integrity": "sha1-9IKuqC1UPgeSFwDVpG7yb9rGuKI="
- },
- "loader-utils": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.1.0.tgz",
- "integrity": "sha1-yYrvSIvM7aL/teLeZG1qdUQp9c0=",
- "requires": {
- "big.js": "3.2.0",
- "emojis-list": "2.1.0",
- "json5": "0.5.1"
- }
- },
- "locate-path": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz",
- "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=",
- "requires": {
- "p-locate": "2.0.0",
- "path-exists": "3.0.0"
- }
- },
- "lodash": {
- "version": "4.17.4",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz",
- "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4="
- },
- "lodash.assign": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz",
- "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc="
- },
- "lodash.camelcase": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
- "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY="
- },
- "lodash.cond": {
- "version": "4.5.2",
- "resolved": "https://registry.npmjs.org/lodash.cond/-/lodash.cond-4.5.2.tgz",
- "integrity": "sha1-9HGh2khr5g9quVXRcRVSPdHSVdU="
- },
- "lodash.memoize": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
- "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4="
- },
- "lodash.uniq": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
- "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M="
- },
- "longest": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz",
- "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc="
- },
- "loose-envify": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.3.1.tgz",
- "integrity": "sha1-0aitM/qc4OcT1l/dCsi3SNR4yEg=",
- "requires": {
- "js-tokens": "3.0.2"
- }
- },
- "lowercase-keys": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz",
- "integrity": "sha1-TjNms55/VFfjXxMkvfb4jQv8cwY="
- },
- "lru-cache": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.1.tgz",
- "integrity": "sha512-q4spe4KTfsAS1SUHLO0wz8Qiyf1+vMIAgpRYioFYDMNqKfHQbg+AVDH3i4fvpl71/P1L0dBl+fQi+P37UYf0ew==",
- "requires": {
- "pseudomap": "1.0.2",
- "yallist": "2.1.2"
- }
- },
- "macaddress": {
- "version": "0.2.8",
- "resolved": "https://registry.npmjs.org/macaddress/-/macaddress-0.2.8.tgz",
- "integrity": "sha1-WQTcU3w57G2+/q6QIycTX6hRHxI="
- },
- "make-dir": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.1.0.tgz",
- "integrity": "sha512-0Pkui4wLJ7rxvmfUvs87skoEaxmu0hCUApF8nonzpl7q//FWp9zu8W61Scz4sd/kUiqDxvUhtoam2efDyiBzcA==",
- "requires": {
- "pify": "3.0.0"
- }
- },
- "match-stream": {
- "version": "0.0.2",
- "resolved": "https://registry.npmjs.org/match-stream/-/match-stream-0.0.2.tgz",
- "integrity": "sha1-mesFAJOzTf+t5CG5rAtBCpz6F88=",
- "requires": {
- "buffers": "0.1.1",
- "readable-stream": "1.0.34"
- },
- "dependencies": {
- "isarray": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8="
- },
- "readable-stream": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
- "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
- "requires": {
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "0.0.1",
- "string_decoder": "0.10.31"
- }
- },
- "string_decoder": {
- "version": "0.10.31",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
- }
- }
- },
- "material-ui": {
- "version": "1.0.0-beta.30",
- "resolved": "https://registry.npmjs.org/material-ui/-/material-ui-1.0.0-beta.30.tgz",
- "integrity": "sha512-cgUUYf+sXjkUQmImjngHPmwo6kBIfvZKrNxmp56cpATYyaeoBydfERwv9SfUu1zzJyXLhJ6tt17XeUBn8aSqug==",
- "requires": {
- "@types/jss": "9.3.0",
- "@types/react-transition-group": "2.0.6",
- "babel-runtime": "6.26.0",
- "brcast": "3.0.1",
- "classnames": "2.2.5",
- "deepmerge": "2.0.1",
- "dom-helpers": "3.3.1",
- "hoist-non-react-statics": "2.3.1",
- "jss": "9.5.1",
- "jss-camel-case": "6.0.0",
- "jss-default-unit": "8.0.2",
- "jss-global": "3.0.0",
- "jss-nested": "6.0.1",
- "jss-props-sort": "6.0.0",
- "jss-vendor-prefixer": "7.0.0",
- "keycode": "2.1.9",
- "lodash": "4.17.4",
- "normalize-scroll-left": "0.1.2",
- "prop-types": "15.6.0",
- "react-event-listener": "0.5.3",
- "react-jss": "8.2.1",
- "react-popper": "0.7.5",
- "react-scrollbar-size": "2.0.2",
- "react-transition-group": "2.2.1",
- "recompose": "0.26.0",
- "scroll": "2.0.1",
- "warning": "3.0.0"
- }
- },
- "material-ui-icons": {
- "version": "1.0.0-beta.17",
- "resolved": "https://registry.npmjs.org/material-ui-icons/-/material-ui-icons-1.0.0-beta.17.tgz",
- "integrity": "sha1-XxmvVKLZnu7zR6VUFKaFPhyFDcM=",
- "requires": {
- "recompose": "0.26.0"
- }
- },
- "math-expression-evaluator": {
- "version": "1.2.17",
- "resolved": "https://registry.npmjs.org/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz",
- "integrity": "sha1-3oGf282E3M2PrlnGrreWFbnSZqw="
- },
- "md5": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/md5/-/md5-2.2.1.tgz",
- "integrity": "sha1-U6s41f48iJG6RlMp6iP6wFQBJvk=",
- "requires": {
- "charenc": "0.0.2",
- "crypt": "0.0.2",
- "is-buffer": "1.1.6"
- }
- },
- "md5.js": {
- "version": "1.3.4",
- "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz",
- "integrity": "sha1-6b296UogpawYsENA/Fdk1bCdkB0=",
- "requires": {
- "hash-base": "3.0.4",
- "inherits": "2.0.3"
- },
- "dependencies": {
- "hash-base": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz",
- "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=",
- "requires": {
- "inherits": "2.0.3",
- "safe-buffer": "5.1.1"
- }
- }
- }
- },
- "mem": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz",
- "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=",
- "requires": {
- "mimic-fn": "1.1.0"
- }
- },
- "memory-fs": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz",
- "integrity": "sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=",
- "requires": {
- "errno": "0.1.6",
- "readable-stream": "2.3.3"
- }
- },
- "micromatch": {
- "version": "2.3.11",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz",
- "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=",
- "requires": {
- "arr-diff": "2.0.0",
- "array-unique": "0.2.1",
- "braces": "1.8.5",
- "expand-brackets": "0.1.5",
- "extglob": "0.3.2",
- "filename-regex": "2.0.1",
- "is-extglob": "1.0.0",
- "is-glob": "2.0.1",
- "kind-of": "3.2.2",
- "normalize-path": "2.1.1",
- "object.omit": "2.0.1",
- "parse-glob": "3.0.4",
- "regex-cache": "0.4.4"
- }
- },
- "miller-rabin": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz",
- "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==",
- "requires": {
- "bn.js": "4.11.8",
- "brorand": "1.1.0"
- }
- },
- "mime": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
- "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="
- },
- "mime-db": {
- "version": "1.30.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz",
- "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE="
- },
- "mime-types": {
- "version": "2.1.17",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz",
- "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=",
- "requires": {
- "mime-db": "1.30.0"
- }
- },
- "mimic-fn": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.1.0.tgz",
- "integrity": "sha1-5md4PZLonb00KBi1IwudYqZyrRg="
- },
- "mimic-response": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.0.tgz",
- "integrity": "sha1-3z02Uqc/3ta5sLJBRub9BSNTRY4="
- },
- "min-document": {
- "version": "2.19.0",
- "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz",
- "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=",
- "requires": {
- "dom-walk": "0.1.1"
- }
- },
- "minimalistic-assert": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz",
- "integrity": "sha1-cCvi3aazf0g2vLP121ZkG2Sh09M="
- },
- "minimalistic-crypto-utils": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz",
- "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo="
- },
- "minimatch": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
- "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
- "requires": {
- "brace-expansion": "1.1.8"
- }
- },
- "minimist": {
- "version": "0.0.8",
- "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
- "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
- },
- "mkdirp": {
- "version": "0.5.1",
- "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
- "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
- "requires": {
- "minimist": "0.0.8"
- }
- },
- "ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
- },
- "mute-stream": {
- "version": "0.0.7",
- "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz",
- "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s="
- },
- "nan": {
- "version": "2.8.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.8.0.tgz",
- "integrity": "sha1-7XFfP+neArV6XmJS2QqWZ14fCFo=",
- "optional": true
- },
- "natives": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/natives/-/natives-1.1.1.tgz",
- "integrity": "sha512-8eRaxn8u/4wN8tGkhlc2cgwwvOLMLUMUn4IYTexMgWd+LyUDfeXVkk2ygQR0hvIHbJQXgHujia3ieUUDwNGkEA=="
- },
- "natural-compare": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
- "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc="
- },
- "node-fetch": {
- "version": "1.7.3",
- "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
- "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
- "requires": {
- "encoding": "0.1.12",
- "is-stream": "1.1.0"
- }
- },
- "node-libs-browser": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.1.0.tgz",
- "integrity": "sha512-5AzFzdoIMb89hBGMZglEegffzgRg+ZFoUmisQ8HI4j1KDdpx13J0taNp2y9xPbur6W61gepGDDotGBVQ7mfUCg==",
- "requires": {
- "assert": "1.4.1",
- "browserify-zlib": "0.2.0",
- "buffer": "4.9.1",
- "console-browserify": "1.1.0",
- "constants-browserify": "1.0.0",
- "crypto-browserify": "3.12.0",
- "domain-browser": "1.1.7",
- "events": "1.1.1",
- "https-browserify": "1.0.0",
- "os-browserify": "0.3.0",
- "path-browserify": "0.0.0",
- "process": "0.11.10",
- "punycode": "1.4.1",
- "querystring-es3": "0.2.1",
- "readable-stream": "2.3.3",
- "stream-browserify": "2.0.1",
- "stream-http": "2.8.0",
- "string_decoder": "1.0.3",
- "timers-browserify": "2.0.4",
- "tty-browserify": "0.0.0",
- "url": "0.11.0",
- "util": "0.10.3",
- "vm-browserify": "0.0.4"
- },
- "dependencies": {
- "process": {
- "version": "0.11.10",
- "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
- "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI="
- }
- }
- },
- "normalize-package-data": {
- "version": "2.4.0",
- "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz",
- "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==",
- "requires": {
- "hosted-git-info": "2.5.0",
- "is-builtin-module": "1.0.0",
- "semver": "5.5.0",
- "validate-npm-package-license": "3.0.1"
- }
- },
- "normalize-path": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz",
- "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=",
- "requires": {
- "remove-trailing-separator": "1.1.0"
- }
- },
- "normalize-range": {
- "version": "0.1.2",
- "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz",
- "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI="
- },
- "normalize-scroll-left": {
- "version": "0.1.2",
- "resolved": "https://registry.npmjs.org/normalize-scroll-left/-/normalize-scroll-left-0.1.2.tgz",
- "integrity": "sha512-F9YMRls0zCF6BFIE2YnXDRpHPpfd91nOIaNdDgrx5YMoPLo8Wqj+6jNXHQsYBavJeXP4ww8HCt0xQAKc5qk2Fg=="
- },
- "normalize-url": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-1.9.1.tgz",
- "integrity": "sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=",
- "requires": {
- "object-assign": "4.1.1",
- "prepend-http": "1.0.4",
- "query-string": "4.3.4",
- "sort-keys": "1.1.2"
- }
- },
- "npm-run-path": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz",
- "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=",
- "requires": {
- "path-key": "2.0.1"
- }
- },
- "num2fraction": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz",
- "integrity": "sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4="
- },
- "number-is-nan": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
- "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0="
- },
- "oauth-sign": {
- "version": "0.8.2",
- "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz",
- "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM="
- },
- "object-assign": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
- },
- "object-hash": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.2.0.tgz",
- "integrity": "sha512-smRWXzkvxw72VquyZ0wggySl7PFUtoDhvhpdwgESXxUrH7vVhhp9asfup1+rVLrhsl7L45Ee1Q/l5R2Ul4MwUg=="
- },
- "object-keys": {
- "version": "1.0.11",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.11.tgz",
- "integrity": "sha1-xUYBd4rVYPEULODgG8yotW0TQm0="
- },
- "object.omit": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-2.0.1.tgz",
- "integrity": "sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo=",
- "requires": {
- "for-own": "0.1.5",
- "is-extendable": "0.1.1"
- }
- },
- "once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
- "requires": {
- "wrappy": "1.0.2"
- }
- },
- "onetime": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz",
- "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=",
- "requires": {
- "mimic-fn": "1.1.0"
- }
- },
- "optionator": {
- "version": "0.8.2",
- "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz",
- "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=",
- "requires": {
- "deep-is": "0.1.3",
- "fast-levenshtein": "2.0.6",
- "levn": "0.3.0",
- "prelude-ls": "1.1.2",
- "type-check": "0.3.2",
- "wordwrap": "1.0.0"
- }
- },
- "os-browserify": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz",
- "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc="
- },
- "os-homedir": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
- "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M="
- },
- "os-locale": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz",
- "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=",
- "requires": {
- "lcid": "1.0.0"
- }
- },
- "os-tmpdir": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
- "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
- },
- "over": {
- "version": "0.0.5",
- "resolved": "https://registry.npmjs.org/over/-/over-0.0.5.tgz",
- "integrity": "sha1-8phS5w/X4l82DgE6jsRMgq7bVwg="
- },
- "p-cancelable": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.3.0.tgz",
- "integrity": "sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw=="
- },
- "p-finally": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
- "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4="
- },
- "p-limit": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.2.0.tgz",
- "integrity": "sha512-Y/OtIaXtUPr4/YpMv1pCL5L5ed0rumAaAeBSj12F+bSlMdys7i8oQF/GUJmfpTS/QoaRrS/k6pma29haJpsMng==",
- "requires": {
- "p-try": "1.0.0"
- }
- },
- "p-locate": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz",
- "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=",
- "requires": {
- "p-limit": "1.2.0"
- }
- },
- "p-timeout": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-1.2.1.tgz",
- "integrity": "sha1-XrOzU7f86Z8QGhA4iAuwVOu+o4Y=",
- "requires": {
- "p-finally": "1.0.0"
- }
- },
- "p-try": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz",
- "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M="
- },
- "pako": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.6.tgz",
- "integrity": "sha512-lQe48YPsMJAig+yngZ87Lus+NF+3mtu7DVOBu6b/gHO1YpKwIj5AWjZ/TOS7i46HD/UixzWb1zeWDZfGZ3iYcg=="
- },
- "parse-asn1": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.0.tgz",
- "integrity": "sha1-N8T5t+06tlx0gXtfJICTf7+XxxI=",
- "requires": {
- "asn1.js": "4.9.2",
- "browserify-aes": "1.1.1",
- "create-hash": "1.1.3",
- "evp_bytestokey": "1.0.3",
- "pbkdf2": "3.0.14"
- }
- },
- "parse-glob": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz",
- "integrity": "sha1-ssN2z7EfNVE7rdFz7wu246OIORw=",
- "requires": {
- "glob-base": "0.3.0",
- "is-dotfile": "1.0.3",
- "is-extglob": "1.0.0",
- "is-glob": "2.0.1"
- }
- },
- "parse-json": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz",
- "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=",
- "requires": {
- "error-ex": "1.3.1"
- }
- },
- "path": {
- "version": "0.12.7",
- "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz",
- "integrity": "sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=",
- "requires": {
- "process": "0.11.10",
- "util": "0.10.3"
- },
- "dependencies": {
- "process": {
- "version": "0.11.10",
- "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
- "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI="
- }
- }
- },
- "path-browserify": {
- "version": "0.0.0",
- "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz",
- "integrity": "sha1-oLhwcpquIUAFt9UDLsLLuw+0RRo="
- },
- "path-exists": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz",
- "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU="
- },
- "path-is-absolute": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
- },
- "path-is-inside": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz",
- "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM="
- },
- "path-key": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
- "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A="
- },
- "path-parse": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.5.tgz",
- "integrity": "sha1-PBrfhx6pzWyUMbbqK9dKD/BVxME="
- },
- "path-type": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz",
- "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=",
- "requires": {
- "pify": "2.3.0"
- },
- "dependencies": {
- "pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
- }
- }
- },
- "pbkdf2": {
- "version": "3.0.14",
- "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.0.14.tgz",
- "integrity": "sha512-gjsZW9O34fm0R7PaLHRJmLLVfSoesxztjPjE9o6R+qtVJij90ltg1joIovN9GKrRW3t1PzhDDG3UMEMFfZ+1wA==",
- "requires": {
- "create-hash": "1.1.3",
- "create-hmac": "1.1.6",
- "ripemd160": "2.0.1",
- "safe-buffer": "5.1.1",
- "sha.js": "2.4.10"
- }
- },
- "performance-now": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
- "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
- },
- "pify": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
- "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY="
- },
- "pinkie": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz",
- "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA="
- },
- "pinkie-promise": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
- "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=",
- "requires": {
- "pinkie": "2.0.4"
- }
- },
- "pkg-dir": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz",
- "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=",
- "requires": {
- "find-up": "2.1.0"
- }
- },
- "pluralize": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-7.0.0.tgz",
- "integrity": "sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow=="
- },
- "popper.js": {
- "version": "1.12.9",
- "resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.12.9.tgz",
- "integrity": "sha1-DfvC3/lsRRuzMu3Pz6r1ZtMx1bM="
- },
- "postcss": {
- "version": "5.2.18",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz",
- "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==",
- "requires": {
- "chalk": "1.1.3",
- "js-base64": "2.4.1",
- "source-map": "0.5.7",
- "supports-color": "3.2.3"
- },
- "dependencies": {
- "has-flag": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz",
- "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo="
- },
- "supports-color": {
- "version": "3.2.3",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz",
- "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=",
- "requires": {
- "has-flag": "1.0.0"
- }
- }
- }
- },
- "postcss-calc": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-5.3.1.tgz",
- "integrity": "sha1-d7rnypKK2FcW4v2kLyYb98HWW14=",
- "requires": {
- "postcss": "5.2.18",
- "postcss-message-helpers": "2.0.0",
- "reduce-css-calc": "1.3.0"
- }
- },
- "postcss-colormin": {
- "version": "2.2.2",
- "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-2.2.2.tgz",
- "integrity": "sha1-ZjFBfV8OkJo9fsJrJMio0eT5bks=",
- "requires": {
- "colormin": "1.1.2",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-convert-values": {
- "version": "2.6.1",
- "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-2.6.1.tgz",
- "integrity": "sha1-u9hZPFwf0uPRwyK7kl3K6Nrk1i0=",
- "requires": {
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-discard-comments": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-2.0.4.tgz",
- "integrity": "sha1-vv6J+v1bPazlzM5Rt2uBUUvgDj0=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-discard-duplicates": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-2.1.0.tgz",
- "integrity": "sha1-uavye4isGIFYpesSq8riAmO5GTI=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-discard-empty": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-2.1.0.tgz",
- "integrity": "sha1-0rS9nVztXr2Nyt52QMfXzX9PkrU=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-discard-overridden": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-0.1.1.tgz",
- "integrity": "sha1-ix6vVU9ob7KIzYdMVWZ7CqNmjVg=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-discard-unused": {
- "version": "2.2.3",
- "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-2.2.3.tgz",
- "integrity": "sha1-vOMLLMWR/8Y0Mitfs0ZLbZNPRDM=",
- "requires": {
- "postcss": "5.2.18",
- "uniqs": "2.0.0"
- }
- },
- "postcss-filter-plugins": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/postcss-filter-plugins/-/postcss-filter-plugins-2.0.2.tgz",
- "integrity": "sha1-bYWGJTTXNaxCDkqFgG4fXUKG2Ew=",
- "requires": {
- "postcss": "5.2.18",
- "uniqid": "4.1.1"
- }
- },
- "postcss-merge-idents": {
- "version": "2.1.7",
- "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-2.1.7.tgz",
- "integrity": "sha1-TFUwMTwI4dWzu/PSu8dH4njuonA=",
- "requires": {
- "has": "1.0.1",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-merge-longhand": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-2.0.2.tgz",
- "integrity": "sha1-I9kM0Sewp3mUkVMyc5A0oaTz1lg=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-merge-rules": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-2.1.2.tgz",
- "integrity": "sha1-0d9d+qexrMO+VT8OnhDofGG19yE=",
- "requires": {
- "browserslist": "1.7.7",
- "caniuse-api": "1.6.1",
- "postcss": "5.2.18",
- "postcss-selector-parser": "2.2.3",
- "vendors": "1.0.1"
- },
- "dependencies": {
- "browserslist": {
- "version": "1.7.7",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz",
- "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=",
- "requires": {
- "caniuse-db": "1.0.30000793",
- "electron-to-chromium": "1.3.31"
- }
- }
- }
- },
- "postcss-message-helpers": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/postcss-message-helpers/-/postcss-message-helpers-2.0.0.tgz",
- "integrity": "sha1-pPL0+rbk/gAvCu0ABHjN9S+bpg4="
- },
- "postcss-minify-font-values": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-1.0.5.tgz",
- "integrity": "sha1-S1jttWZB66fIR0qzUmyv17vey2k=",
- "requires": {
- "object-assign": "4.1.1",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-minify-gradients": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-1.0.5.tgz",
- "integrity": "sha1-Xb2hE3NwP4PPtKPqOIHY11/15uE=",
- "requires": {
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-minify-params": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-1.2.2.tgz",
- "integrity": "sha1-rSzgcTc7lDs9kwo/pZo1jCjW8fM=",
- "requires": {
- "alphanum-sort": "1.0.2",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0",
- "uniqs": "2.0.0"
- }
- },
- "postcss-minify-selectors": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-2.1.1.tgz",
- "integrity": "sha1-ssapjAByz5G5MtGkllCBFDEXNb8=",
- "requires": {
- "alphanum-sort": "1.0.2",
- "has": "1.0.1",
- "postcss": "5.2.18",
- "postcss-selector-parser": "2.2.3"
- }
- },
- "postcss-modules-extract-imports": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.0.tgz",
- "integrity": "sha1-ZhQOzs447wa/DT41XWm/WdFB6oU=",
- "requires": {
- "postcss": "6.0.16"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- },
- "dependencies": {
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss": {
- "version": "6.0.16",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.16.tgz",
- "integrity": "sha512-m758RWPmSjFH/2MyyG3UOW1fgYbR9rtdzz5UNJnlm7OLtu4B2h9C6gi+bE4qFKghsBRFfZT8NzoQBs6JhLotoA==",
- "requires": {
- "chalk": "2.3.0",
- "source-map": "0.6.1",
- "supports-color": "5.1.0"
- }
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- },
- "supports-color": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz",
- "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss-modules-local-by-default": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz",
- "integrity": "sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk=",
- "requires": {
- "css-selector-tokenizer": "0.7.0",
- "postcss": "6.0.16"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- },
- "dependencies": {
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss": {
- "version": "6.0.16",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.16.tgz",
- "integrity": "sha512-m758RWPmSjFH/2MyyG3UOW1fgYbR9rtdzz5UNJnlm7OLtu4B2h9C6gi+bE4qFKghsBRFfZT8NzoQBs6JhLotoA==",
- "requires": {
- "chalk": "2.3.0",
- "source-map": "0.6.1",
- "supports-color": "5.1.0"
- }
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- },
- "supports-color": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz",
- "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss-modules-scope": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz",
- "integrity": "sha1-1upkmUx5+XtipytCb75gVqGUu5A=",
- "requires": {
- "css-selector-tokenizer": "0.7.0",
- "postcss": "6.0.16"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- },
- "dependencies": {
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss": {
- "version": "6.0.16",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.16.tgz",
- "integrity": "sha512-m758RWPmSjFH/2MyyG3UOW1fgYbR9rtdzz5UNJnlm7OLtu4B2h9C6gi+bE4qFKghsBRFfZT8NzoQBs6JhLotoA==",
- "requires": {
- "chalk": "2.3.0",
- "source-map": "0.6.1",
- "supports-color": "5.1.0"
- }
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- },
- "supports-color": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz",
- "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss-modules-values": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz",
- "integrity": "sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA=",
- "requires": {
- "icss-replace-symbols": "1.1.0",
- "postcss": "6.0.16"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- },
- "dependencies": {
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss": {
- "version": "6.0.16",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.16.tgz",
- "integrity": "sha512-m758RWPmSjFH/2MyyG3UOW1fgYbR9rtdzz5UNJnlm7OLtu4B2h9C6gi+bE4qFKghsBRFfZT8NzoQBs6JhLotoA==",
- "requires": {
- "chalk": "2.3.0",
- "source-map": "0.6.1",
- "supports-color": "5.1.0"
- }
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- },
- "supports-color": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.1.0.tgz",
- "integrity": "sha512-Ry0AwkoKjDpVKK4sV4h6o3UJmNRbjYm2uXhwfj3J56lMVdvnUNqzQVRztOOMGQ++w1K/TjNDFvpJk0F/LoeBCQ==",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "postcss-normalize-charset": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-1.1.1.tgz",
- "integrity": "sha1-757nEhLX/nWceO0WL2HtYrXLk/E=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-normalize-url": {
- "version": "3.0.8",
- "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-3.0.8.tgz",
- "integrity": "sha1-EI90s/L82viRov+j6kWSJ5/HgiI=",
- "requires": {
- "is-absolute-url": "2.1.0",
- "normalize-url": "1.9.1",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-ordered-values": {
- "version": "2.2.3",
- "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-2.2.3.tgz",
- "integrity": "sha1-7sbCpntsQSqNsgQud/6NpD+VwR0=",
- "requires": {
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-reduce-idents": {
- "version": "2.4.0",
- "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-2.4.0.tgz",
- "integrity": "sha1-wsbSDMlYKE9qv75j92Cb9AkFmtM=",
- "requires": {
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-reduce-initial": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-1.0.1.tgz",
- "integrity": "sha1-aPgGlfBF0IJjqHmtJA343WT2ROo=",
- "requires": {
- "postcss": "5.2.18"
- }
- },
- "postcss-reduce-transforms": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-1.0.4.tgz",
- "integrity": "sha1-/3b02CEkN7McKYpC0uFEQCV3GuE=",
- "requires": {
- "has": "1.0.1",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0"
- }
- },
- "postcss-selector-parser": {
- "version": "2.2.3",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz",
- "integrity": "sha1-+UN3iGBsPJrO4W/+jYsWKX8nu5A=",
- "requires": {
- "flatten": "1.0.2",
- "indexes-of": "1.0.1",
- "uniq": "1.0.1"
- }
- },
- "postcss-svgo": {
- "version": "2.1.6",
- "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-2.1.6.tgz",
- "integrity": "sha1-tt8YqmE7Zm4TPwittSGcJoSsEI0=",
- "requires": {
- "is-svg": "2.1.0",
- "postcss": "5.2.18",
- "postcss-value-parser": "3.3.0",
- "svgo": "0.7.2"
- }
- },
- "postcss-unique-selectors": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-2.0.2.tgz",
- "integrity": "sha1-mB1X0p3csz57Hf4f1DuGSfkzyh0=",
- "requires": {
- "alphanum-sort": "1.0.2",
- "postcss": "5.2.18",
- "uniqs": "2.0.0"
- }
- },
- "postcss-value-parser": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz",
- "integrity": "sha1-h/OPnxj3dKSrTIojL1xc6IcqnRU="
- },
- "postcss-zindex": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-2.2.0.tgz",
- "integrity": "sha1-0hCd3AVbka9n/EyzsCWUZjnSryI=",
- "requires": {
- "has": "1.0.1",
- "postcss": "5.2.18",
- "uniqs": "2.0.0"
- }
- },
- "prelude-ls": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
- "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ="
- },
- "prepend-http": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz",
- "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw="
- },
- "preserve": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz",
- "integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks="
- },
- "private": {
- "version": "0.1.8",
- "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz",
- "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg=="
- },
- "process": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/process/-/process-0.5.2.tgz",
- "integrity": "sha1-FjjYqONML0QKkduVq5rrZ3/Bhc8="
- },
- "process-nextick-args": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz",
- "integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M="
- },
- "progress": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.0.tgz",
- "integrity": "sha1-ihvjZr+Pwj2yvSPxDG/pILQ4nR8="
- },
- "promise": {
- "version": "7.3.1",
- "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
- "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
- "requires": {
- "asap": "2.0.6"
- }
- },
- "prop-types": {
- "version": "15.6.0",
- "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.0.tgz",
- "integrity": "sha1-zq8IMCL8RrSjX2nhPvda7Q1jmFY=",
- "requires": {
- "fbjs": "0.8.16",
- "loose-envify": "1.3.1",
- "object-assign": "4.1.1"
- }
- },
- "prr": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz",
- "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY="
- },
- "pseudomap": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz",
- "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM="
- },
- "public-encrypt": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.0.tgz",
- "integrity": "sha1-OfaZ86RlYN1eusvKaTyvfGXBjMY=",
- "requires": {
- "bn.js": "4.11.8",
- "browserify-rsa": "4.0.1",
- "create-hash": "1.1.3",
- "parse-asn1": "5.1.0",
- "randombytes": "2.0.6"
- }
- },
- "pullstream": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/pullstream/-/pullstream-0.4.1.tgz",
- "integrity": "sha1-1vs79a7Wl+gxFQ6xACwlo/iuExQ=",
- "requires": {
- "over": "0.0.5",
- "readable-stream": "1.0.34",
- "setimmediate": "1.0.5",
- "slice-stream": "1.0.0"
- },
- "dependencies": {
- "isarray": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8="
- },
- "readable-stream": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
- "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
- "requires": {
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "0.0.1",
- "string_decoder": "0.10.31"
- }
- },
- "string_decoder": {
- "version": "0.10.31",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
- }
- }
- },
- "punycode": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
- "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
- },
- "q": {
- "version": "1.5.1",
- "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz",
- "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc="
- },
- "qs": {
- "version": "6.5.1",
- "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz",
- "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A=="
- },
- "query-string": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/query-string/-/query-string-4.3.4.tgz",
- "integrity": "sha1-u7aTucqRXCMlFbIosaArYJBD2+s=",
- "requires": {
- "object-assign": "4.1.1",
- "strict-uri-encode": "1.1.0"
- }
- },
- "querystring": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
- "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA="
- },
- "querystring-es3": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz",
- "integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM="
- },
- "raf": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.0.tgz",
- "integrity": "sha512-pDP/NMRAXoTfrhCfyfSEwJAKLaxBU9eApMeBPB1TkDouZmvPerIClV8lTAd+uF8ZiTaVl69e1FCxQrAd/VTjGw==",
- "requires": {
- "performance-now": "2.1.0"
- }
- },
- "rafl": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/rafl/-/rafl-1.2.2.tgz",
- "integrity": "sha1-/pMPdYIRAg1H44gV9Rlqi+QVB0A=",
- "requires": {
- "global": "4.3.2"
- }
- },
- "randomatic": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-1.1.7.tgz",
- "integrity": "sha512-D5JUjPyJbaJDkuAazpVnSfVkLlpeO3wDlPROTMLGKG1zMFNFRgrciKo1ltz/AzNTkqE0HzDx655QOL51N06how==",
- "requires": {
- "is-number": "3.0.0",
- "kind-of": "4.0.0"
- },
- "dependencies": {
- "is-number": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz",
- "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=",
- "requires": {
- "kind-of": "3.2.2"
- },
- "dependencies": {
- "kind-of": {
- "version": "3.2.2",
- "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
- "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
- "requires": {
- "is-buffer": "1.1.6"
- }
- }
- }
- },
- "kind-of": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz",
- "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=",
- "requires": {
- "is-buffer": "1.1.6"
- }
- }
- }
- },
- "randombytes": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.6.tgz",
- "integrity": "sha512-CIQ5OFxf4Jou6uOKe9t1AOgqpeU5fd70A8NPdHSGeYXqXsPe6peOwI0cUl88RWZ6sP1vPMV3avd/R6cZ5/sP1A==",
- "requires": {
- "safe-buffer": "5.1.1"
- }
- },
- "randomfill": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.3.tgz",
- "integrity": "sha512-YL6GrhrWoic0Eq8rXVbMptH7dAxCs0J+mh5Y0euNekPPYaxEmdVGim6GdoxoRzKW2yJoU8tueifS7mYxvcFDEQ==",
- "requires": {
- "randombytes": "2.0.6",
- "safe-buffer": "5.1.1"
- }
- },
- "react": {
- "version": "16.2.0",
- "resolved": "https://registry.npmjs.org/react/-/react-16.2.0.tgz",
- "integrity": "sha512-ZmIomM7EE1DvPEnSFAHZn9Vs9zJl5A9H7el0EGTE6ZbW9FKe/14IYAlPbC8iH25YarEQxZL+E8VW7Mi7kfQrDQ==",
- "requires": {
- "fbjs": "0.8.16",
- "loose-envify": "1.3.1",
- "object-assign": "4.1.1",
- "prop-types": "15.6.0"
- }
- },
- "react-dom": {
- "version": "16.2.0",
- "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.2.0.tgz",
- "integrity": "sha512-zpGAdwHVn9K0091d+hr+R0qrjoJ84cIBFL2uU60KvWBPfZ7LPSrfqviTxGHWN0sjPZb2hxWzMexwrvJdKePvjg==",
- "requires": {
- "fbjs": "0.8.16",
- "loose-envify": "1.3.1",
- "object-assign": "4.1.1",
- "prop-types": "15.6.0"
- }
- },
- "react-event-listener": {
- "version": "0.5.3",
- "resolved": "https://registry.npmjs.org/react-event-listener/-/react-event-listener-0.5.3.tgz",
- "integrity": "sha512-fTGYvhe7eTsqq0m664Km0rxKQcqLIGZWZINmy1LU0fu312tay8Mt3Twq2P5Xj1dfDVvvzT1Ql3/FDkiMPJ1MOg==",
- "requires": {
- "babel-runtime": "6.26.0",
- "fbjs": "0.8.16",
- "prop-types": "15.6.0",
- "warning": "3.0.0"
- }
- },
- "react-fa": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/react-fa/-/react-fa-5.0.0.tgz",
- "integrity": "sha512-pBEJigNkDJPAP/P9mQXT55VbJbbtwqi4ayieXuFvGpd+gl3aZ9IbjjVKJihdhdysJP0XRgrSa3sT3yOmkQi8wQ==",
- "requires": {
- "font-awesome": "4.7.0",
- "prop-types": "15.6.0"
- }
- },
- "react-jss": {
- "version": "8.2.1",
- "resolved": "https://registry.npmjs.org/react-jss/-/react-jss-8.2.1.tgz",
- "integrity": "sha512-H1fm32xG8pi4LMHkXjqpLyFOvSDsravd0HI6Dtlb/iyma1tfi7qqqSH2bf0kKyTAJV5hvYL0ls0qvRJWKfDPcA==",
- "requires": {
- "hoist-non-react-statics": "2.3.1",
- "jss": "9.5.1",
- "jss-preset-default": "4.1.0",
- "prop-types": "15.6.0",
- "theming": "1.3.0"
- }
- },
- "react-popper": {
- "version": "0.7.5",
- "resolved": "https://registry.npmjs.org/react-popper/-/react-popper-0.7.5.tgz",
- "integrity": "sha512-ya9dhhGCf74JTOB2uyksEHhIGw7w9tNZRUJF73lEq2h4H5JT6MBa4PdT4G+sx6fZwq+xKZAL/sVNAIuojPn7Dg==",
- "requires": {
- "popper.js": "1.12.9",
- "prop-types": "15.6.0"
- }
- },
- "react-resize-detector": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-1.1.0.tgz",
- "integrity": "sha512-68KVcQlhcWQGXMAie82YueCa4f4yqwEoiQbVyYlSgJEin1zMtNBLLeU/+6FLNf1TTgjwSfpbMTJTw/uU0HNgtQ==",
- "requires": {
- "prop-types": "15.6.0"
- }
- },
- "react-scrollbar-size": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/react-scrollbar-size/-/react-scrollbar-size-2.0.2.tgz",
- "integrity": "sha512-scpDs2PZFf9CJteBeDu7jkk7s+YX06Si4rQGVHsH6vjR/p7417q1Jv5SpOblLLesOgNrfWekwoHQG1g0/p3tvw==",
- "requires": {
- "babel-runtime": "6.26.0",
- "prop-types": "15.6.0",
- "react-event-listener": "0.5.3"
- }
- },
- "react-smooth": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-1.0.0.tgz",
- "integrity": "sha1-sp2+vd3bBtIbWwiWIWf7nqwYl9g=",
- "requires": {
- "lodash": "4.17.4",
- "prop-types": "15.6.0",
- "raf": "3.4.0",
- "react-transition-group": "2.2.1"
- }
- },
- "react-transition-group": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.2.1.tgz",
- "integrity": "sha512-q54UBM22bs/CekG8r3+vi9TugSqh0t7qcEVycaRc9M0p0aCEu+h6rp/RFiW7fHfgd1IKpd9oILFTl5QK+FpiPA==",
- "requires": {
- "chain-function": "1.0.0",
- "classnames": "2.2.5",
- "dom-helpers": "3.3.1",
- "loose-envify": "1.3.1",
- "prop-types": "15.6.0",
- "warning": "3.0.0"
- }
- },
- "read-pkg": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz",
- "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=",
- "requires": {
- "load-json-file": "2.0.0",
- "normalize-package-data": "2.4.0",
- "path-type": "2.0.0"
- }
- },
- "read-pkg-up": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz",
- "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=",
- "requires": {
- "find-up": "2.1.0",
- "read-pkg": "2.0.0"
- }
- },
- "readable-stream": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.3.tgz",
- "integrity": "sha512-m+qzzcn7KUxEmd1gMbchF+Y2eIUbieUaxkWtptyHywrX0rE8QEYqPC07Vuy4Wm32/xE16NcdBctb8S0Xe/5IeQ==",
- "requires": {
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "1.0.0",
- "process-nextick-args": "1.0.7",
- "safe-buffer": "5.1.1",
- "string_decoder": "1.0.3",
- "util-deprecate": "1.0.2"
- }
- },
- "readdirp": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.1.0.tgz",
- "integrity": "sha1-TtCtBg3zBzMAxIRANz9y0cxkLXg=",
- "requires": {
- "graceful-fs": "4.1.11",
- "minimatch": "3.0.4",
- "readable-stream": "2.3.3",
- "set-immediate-shim": "1.0.1"
- }
- },
- "recharts": {
- "version": "1.0.0-beta.9",
- "resolved": "https://registry.npmjs.org/recharts/-/recharts-1.0.0-beta.9.tgz",
- "integrity": "sha1-8A/33Jt7AXyLT2ahC5bzmyDNE1M=",
- "requires": {
- "classnames": "2.2.5",
- "core-js": "2.5.1",
- "d3-interpolate": "1.1.6",
- "d3-scale": "1.0.6",
- "d3-shape": "1.2.0",
- "lodash": "4.17.4",
- "prop-types": "15.6.0",
- "react-resize-detector": "1.1.0",
- "react-smooth": "1.0.0",
- "recharts-scale": "0.3.2",
- "reduce-css-calc": "1.3.0"
- },
- "dependencies": {
- "core-js": {
- "version": "2.5.1",
- "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.5.1.tgz",
- "integrity": "sha1-rmh03GaTd4m4B1T/VCjfZoGcpQs="
- }
- }
- },
- "recharts-scale": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.3.2.tgz",
- "integrity": "sha1-2sdiFxSkdl0VLLKtvDDHO4MSCMk="
- },
- "recompose": {
- "version": "0.26.0",
- "resolved": "https://registry.npmjs.org/recompose/-/recompose-0.26.0.tgz",
- "integrity": "sha512-KwOu6ztO0mN5vy3+zDcc45lgnaUoaQse/a5yLVqtzTK13czSWnFGmXbQVmnoMgDkI5POd1EwIKSbjU1V7xdZog==",
- "requires": {
- "change-emitter": "0.1.6",
- "fbjs": "0.8.16",
- "hoist-non-react-statics": "2.3.1",
- "symbol-observable": "1.1.0"
- }
- },
- "reduce-css-calc": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz",
- "integrity": "sha1-dHyRTgSWFKTJz7umKYca0dKSdxY=",
- "requires": {
- "balanced-match": "0.4.2",
- "math-expression-evaluator": "1.2.17",
- "reduce-function-call": "1.0.2"
- },
- "dependencies": {
- "balanced-match": {
- "version": "0.4.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz",
- "integrity": "sha1-yz8+PHMtwPAe5wtAPzAuYddwmDg="
- }
- }
- },
- "reduce-function-call": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/reduce-function-call/-/reduce-function-call-1.0.2.tgz",
- "integrity": "sha1-WiAL+S4ON3UXUv5FsKszD9S2vpk=",
- "requires": {
- "balanced-match": "0.4.2"
- },
- "dependencies": {
- "balanced-match": {
- "version": "0.4.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz",
- "integrity": "sha1-yz8+PHMtwPAe5wtAPzAuYddwmDg="
- }
- }
- },
- "regenerate": {
- "version": "1.3.3",
- "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.3.3.tgz",
- "integrity": "sha512-jVpo1GadrDAK59t/0jRx5VxYWQEDkkEKi6+HjE3joFVLfDOh9Xrdh0dF1eSq+BI/SwvTQ44gSscJ8N5zYL61sg=="
- },
- "regenerator-runtime": {
- "version": "0.11.1",
- "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz",
- "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg=="
- },
- "regenerator-transform": {
- "version": "0.10.1",
- "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.10.1.tgz",
- "integrity": "sha512-PJepbvDbuK1xgIgnau7Y90cwaAmO/LCLMI2mPvaXq2heGMR3aWW5/BQvYrhJ8jgmQjXewXvBjzfqKcVOmhjZ6Q==",
- "requires": {
- "babel-runtime": "6.26.0",
- "babel-types": "6.26.0",
- "private": "0.1.8"
- }
- },
- "regex-cache": {
- "version": "0.4.4",
- "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.4.tgz",
- "integrity": "sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ==",
- "requires": {
- "is-equal-shallow": "0.1.3"
- }
- },
- "regexpu-core": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-2.0.0.tgz",
- "integrity": "sha1-SdA4g3uNz4v6W5pCE5k45uoq4kA=",
- "requires": {
- "regenerate": "1.3.3",
- "regjsgen": "0.2.0",
- "regjsparser": "0.1.5"
- }
- },
- "regjsgen": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz",
- "integrity": "sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc="
- },
- "regjsparser": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz",
- "integrity": "sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=",
- "requires": {
- "jsesc": "0.5.0"
- },
- "dependencies": {
- "jsesc": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz",
- "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0="
- }
- }
- },
- "remove-trailing-separator": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz",
- "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8="
- },
- "repeat-element": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.2.tgz",
- "integrity": "sha1-7wiaF40Ug7quTZPrmLT55OEdmQo="
- },
- "repeat-string": {
- "version": "1.6.1",
- "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
- "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc="
- },
- "repeating": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz",
- "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=",
- "requires": {
- "is-finite": "1.0.2"
- }
- },
- "request": {
- "version": "2.83.0",
- "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz",
- "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==",
- "requires": {
- "aws-sign2": "0.7.0",
- "aws4": "1.6.0",
- "caseless": "0.12.0",
- "combined-stream": "1.0.5",
- "extend": "3.0.1",
- "forever-agent": "0.6.1",
- "form-data": "2.3.1",
- "har-validator": "5.0.3",
- "hawk": "6.0.2",
- "http-signature": "1.2.0",
- "is-typedarray": "1.0.0",
- "isstream": "0.1.2",
- "json-stringify-safe": "5.0.1",
- "mime-types": "2.1.17",
- "oauth-sign": "0.8.2",
- "performance-now": "2.1.0",
- "qs": "6.5.1",
- "safe-buffer": "5.1.1",
- "stringstream": "0.0.5",
- "tough-cookie": "2.3.3",
- "tunnel-agent": "0.6.0",
- "uuid": "3.2.1"
- }
- },
- "require-directory": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
- },
- "require-main-filename": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz",
- "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE="
- },
- "require-uncached": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz",
- "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=",
- "requires": {
- "caller-path": "0.1.0",
- "resolve-from": "1.0.1"
- }
- },
- "resolve": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.5.0.tgz",
- "integrity": "sha512-hgoSGrc3pjzAPHNBg+KnFcK2HwlHTs/YrAGUr6qgTVUZmXv1UEXXl0bZNBKMA9fud6lRYFdPGz0xXxycPzmmiw==",
- "requires": {
- "path-parse": "1.0.5"
- }
- },
- "resolve-from": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz",
- "integrity": "sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY="
- },
- "restore-cursor": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
- "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=",
- "requires": {
- "onetime": "2.0.1",
- "signal-exit": "3.0.2"
- }
- },
- "right-align": {
- "version": "0.1.3",
- "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz",
- "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=",
- "requires": {
- "align-text": "0.1.4"
- }
- },
- "rimraf": {
- "version": "2.6.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz",
- "integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==",
- "requires": {
- "glob": "7.1.2"
- }
- },
- "ripemd160": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.1.tgz",
- "integrity": "sha1-D0WEKVxTo2KK9+bXmsohzlfRxuc=",
- "requires": {
- "hash-base": "2.0.2",
- "inherits": "2.0.3"
- }
- },
- "run-async": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
- "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
- "requires": {
- "is-promise": "2.1.0"
- }
- },
- "rx-lite": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz",
- "integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ="
- },
- "rx-lite-aggregates": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz",
- "integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=",
- "requires": {
- "rx-lite": "4.0.8"
- }
- },
- "safe-buffer": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
- "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
- },
- "sax": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
- "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
- },
- "schema-utils": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.3.0.tgz",
- "integrity": "sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8=",
- "requires": {
- "ajv": "5.5.2"
- }
- },
- "scroll": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/scroll/-/scroll-2.0.1.tgz",
- "integrity": "sha1-tMfSfovPOuiligQvJyaK4/VfnM0=",
- "requires": {
- "rafl": "1.2.2"
- }
- },
- "semver": {
- "version": "5.5.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz",
- "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA=="
- },
- "set-blocking": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
- "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
- },
- "set-immediate-shim": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz",
- "integrity": "sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E="
- },
- "setimmediate": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
- "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU="
- },
- "sha.js": {
- "version": "2.4.10",
- "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.10.tgz",
- "integrity": "sha512-vnwmrFDlOExK4Nm16J2KMWHLrp14lBrjxMxBJpu++EnsuBmpiYaM/MEs46Vxxm/4FvdP5yTwuCTO9it5FSjrqA==",
- "requires": {
- "inherits": "2.0.3",
- "safe-buffer": "5.1.1"
- }
- },
- "shebang-command": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz",
- "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=",
- "requires": {
- "shebang-regex": "1.0.0"
- }
- },
- "shebang-regex": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz",
- "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM="
- },
- "signal-exit": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
- "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0="
- },
- "slash": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
- "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU="
- },
- "slice-ansi": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz",
- "integrity": "sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg==",
- "requires": {
- "is-fullwidth-code-point": "2.0.0"
- }
- },
- "slice-stream": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/slice-stream/-/slice-stream-1.0.0.tgz",
- "integrity": "sha1-WzO9ZvATsaf4ZGCwPUY97DmtPqA=",
- "requires": {
- "readable-stream": "1.0.34"
- },
- "dependencies": {
- "isarray": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8="
- },
- "readable-stream": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
- "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
- "requires": {
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "0.0.1",
- "string_decoder": "0.10.31"
- }
- },
- "string_decoder": {
- "version": "0.10.31",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
- }
- }
- },
- "sntp": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz",
- "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==",
- "requires": {
- "hoek": "4.2.0"
- }
- },
- "sort-keys": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz",
- "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=",
- "requires": {
- "is-plain-obj": "1.1.0"
- }
- },
- "source-list-map": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.0.tgz",
- "integrity": "sha512-I2UmuJSRr/T8jisiROLU3A3ltr+swpniSmNPI4Ml3ZCX6tVnDsuZzK7F2hl5jTqbZBWCEKlj5HRQiPExXLgE8A=="
- },
- "source-map": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
- "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w="
- },
- "source-map-support": {
- "version": "0.4.18",
- "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.4.18.tgz",
- "integrity": "sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA==",
- "requires": {
- "source-map": "0.5.7"
- }
- },
- "spdx-correct": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.2.tgz",
- "integrity": "sha1-SzBz2TP/UfORLwOsVRlJikFQ20A=",
- "requires": {
- "spdx-license-ids": "1.2.2"
- }
- },
- "spdx-expression-parse": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz",
- "integrity": "sha1-m98vIOH0DtRH++JzJmGR/O1RYmw="
- },
- "spdx-license-ids": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz",
- "integrity": "sha1-yd96NCRZSt5r0RkA1ZZpbcBrrFc="
- },
- "sprintf-js": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
- "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
- },
- "sshpk": {
- "version": "1.13.1",
- "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz",
- "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=",
- "requires": {
- "asn1": "0.2.3",
- "assert-plus": "1.0.0",
- "bcrypt-pbkdf": "1.0.1",
- "dashdash": "1.14.1",
- "ecc-jsbn": "0.1.1",
- "getpass": "0.1.7",
- "jsbn": "0.1.1",
- "tweetnacl": "0.14.5"
- }
- },
- "stream-browserify": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz",
- "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=",
- "requires": {
- "inherits": "2.0.3",
- "readable-stream": "2.3.3"
- }
- },
- "stream-http": {
- "version": "2.8.0",
- "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-2.8.0.tgz",
- "integrity": "sha512-sZOFxI/5xw058XIRHl4dU3dZ+TTOIGJR78Dvo0oEAejIt4ou27k+3ne1zYmCV+v7UucbxIFQuOgnkTVHh8YPnw==",
- "requires": {
- "builtin-status-codes": "3.0.0",
- "inherits": "2.0.3",
- "readable-stream": "2.3.3",
- "to-arraybuffer": "1.0.1",
- "xtend": "4.0.1"
- }
- },
- "strict-uri-encode": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz",
- "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM="
- },
- "string-width": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
- "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
- "requires": {
- "is-fullwidth-code-point": "2.0.0",
- "strip-ansi": "4.0.0"
- },
- "dependencies": {
- "ansi-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
- "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
- },
- "strip-ansi": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
- "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
- "requires": {
- "ansi-regex": "3.0.0"
- }
- }
- }
- },
- "string_decoder": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz",
- "integrity": "sha512-4AH6Z5fzNNBcH+6XDMfA/BTt87skxqJlO0lAh3Dker5zThcAxG6mKz+iGu308UKoPPQ8Dcqx/4JhujzltRa+hQ==",
- "requires": {
- "safe-buffer": "5.1.1"
- }
- },
- "stringstream": {
- "version": "0.0.5",
- "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz",
- "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg="
- },
- "strip-ansi": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
- "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
- "requires": {
- "ansi-regex": "2.1.1"
- }
- },
- "strip-bom": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
- "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM="
- },
- "strip-eof": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz",
- "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8="
- },
- "strip-json-comments": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
- "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
- },
- "style-loader": {
- "version": "0.19.1",
- "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-0.19.1.tgz",
- "integrity": "sha512-IRE+ijgojrygQi3rsqT0U4dd+UcPCqcVvauZpCnQrGAlEe+FUIyrK93bUDScamesjP08JlQNsFJU+KmPedP5Og==",
- "requires": {
- "loader-utils": "1.1.0",
- "schema-utils": "0.3.0"
- }
- },
- "supports-color": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
- "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc="
- },
- "svgo": {
- "version": "0.7.2",
- "resolved": "https://registry.npmjs.org/svgo/-/svgo-0.7.2.tgz",
- "integrity": "sha1-n1dyQTlSE1xv779Ar+ak+qiLS7U=",
- "requires": {
- "coa": "1.0.4",
- "colors": "1.1.2",
- "csso": "2.3.2",
- "js-yaml": "3.7.0",
- "mkdirp": "0.5.1",
- "sax": "1.2.4",
- "whet.extend": "0.9.9"
- }
- },
- "symbol-observable": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.1.0.tgz",
- "integrity": "sha512-dQoid9tqQ+uotGhuTKEY11X4xhyYePVnqGSoSm3OGKh2E8LZ6RPULp1uXTctk33IeERlrRJYoVSBglsL05F5Uw=="
- },
- "table": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/table/-/table-4.0.2.tgz",
- "integrity": "sha512-UUkEAPdSGxtRpiV9ozJ5cMTtYiqz7Ni1OGqLXRCynrvzdtR1p+cfOWe2RJLwvUG8hNanaSRjecIqwOjqeatDsA==",
- "requires": {
- "ajv": "5.5.2",
- "ajv-keywords": "2.1.1",
- "chalk": "2.3.0",
- "lodash": "4.17.4",
- "slice-ansi": "1.0.0",
- "string-width": "2.1.1"
- },
- "dependencies": {
- "ansi-styles": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
- "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
- "requires": {
- "color-convert": "1.9.1"
- }
- },
- "chalk": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.0.tgz",
- "integrity": "sha512-Az5zJR2CBujap2rqXGaJKaPHyJ0IrUimvYNX+ncCy8PJP4ltOGTrHUIo097ZaL2zMeKYpiCdqDvS6zdrTFok3Q==",
- "requires": {
- "ansi-styles": "3.2.0",
- "escape-string-regexp": "1.0.5",
- "supports-color": "4.5.0"
- }
- },
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- }
- }
- },
- "tapable": {
- "version": "0.2.8",
- "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.2.8.tgz",
- "integrity": "sha1-mTcqXJmb8t8WCvwNdL7U9HlIzSI="
- },
- "text-table": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
- "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ="
- },
- "theming": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/theming/-/theming-1.3.0.tgz",
- "integrity": "sha512-ya5Ef7XDGbTPBv5ENTwrwkPUexrlPeiAg/EI9kdlUAZhNlRbCdhMKRgjNX1IcmsmiPcqDQZE6BpSaH+cr31FKw==",
- "requires": {
- "brcast": "3.0.1",
- "is-function": "1.0.1",
- "is-plain-object": "2.0.4",
- "prop-types": "15.6.0"
- }
- },
- "through": {
- "version": "2.3.8",
- "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
- "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
- },
- "timed-out": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz",
- "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8="
- },
- "timers-browserify": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.4.tgz",
- "integrity": "sha512-uZYhyU3EX8O7HQP+J9fTVYwsq90Vr68xPEFo7yrVImIxYvHgukBEgOB/SgGoorWVTzGM/3Z+wUNnboA4M8jWrg==",
- "requires": {
- "setimmediate": "1.0.5"
- }
- },
- "tmp": {
- "version": "0.0.33",
- "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
- "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
- "requires": {
- "os-tmpdir": "1.0.2"
- }
- },
- "to-arraybuffer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz",
- "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M="
- },
- "to-fast-properties": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz",
- "integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc="
- },
- "tough-cookie": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz",
- "integrity": "sha1-C2GKVWW23qkL80JdBNVe3EdadWE=",
- "requires": {
- "punycode": "1.4.1"
- }
- },
- "traverse": {
- "version": "0.3.9",
- "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz",
- "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk="
- },
- "trim-right": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz",
- "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM="
- },
- "tty-browserify": {
- "version": "0.0.0",
- "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz",
- "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY="
- },
- "tunnel-agent": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
- "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
- "requires": {
- "safe-buffer": "5.1.1"
- }
- },
- "tweetnacl": {
- "version": "0.14.5",
- "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
- "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
- "optional": true
- },
- "type-check": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
- "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=",
- "requires": {
- "prelude-ls": "1.1.2"
- }
- },
- "typedarray": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
- "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c="
- },
- "ua-parser-js": {
- "version": "0.7.17",
- "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.17.tgz",
- "integrity": "sha512-uRdSdu1oA1rncCQL7sCj8vSyZkgtL7faaw9Tc9rZ3mGgraQ7+Pdx7w5mnOSF3gw9ZNG6oc+KXfkon3bKuROm0g=="
- },
- "uglify-js": {
- "version": "2.8.29",
- "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz",
- "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=",
- "requires": {
- "source-map": "0.5.7",
- "uglify-to-browserify": "1.0.2",
- "yargs": "3.10.0"
- },
- "dependencies": {
- "camelcase": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz",
- "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk="
- },
- "cliui": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz",
- "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=",
- "requires": {
- "center-align": "0.1.3",
- "right-align": "0.1.3",
- "wordwrap": "0.0.2"
- }
- },
- "window-size": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz",
- "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0="
- },
- "wordwrap": {
- "version": "0.0.2",
- "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz",
- "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8="
- },
- "yargs": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz",
- "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=",
- "requires": {
- "camelcase": "1.2.1",
- "cliui": "2.1.0",
- "decamelize": "1.2.0",
- "window-size": "0.1.0"
- }
- }
- }
- },
- "uglify-to-browserify": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz",
- "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=",
- "optional": true
- },
- "uglifyjs-webpack-plugin": {
- "version": "0.4.6",
- "resolved": "https://registry.npmjs.org/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz",
- "integrity": "sha1-uVH0q7a9YX5m9j64kUmOORdj4wk=",
- "requires": {
- "source-map": "0.5.7",
- "uglify-js": "2.8.29",
- "webpack-sources": "1.1.0"
- }
- },
- "uniq": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz",
- "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8="
- },
- "uniqid": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/uniqid/-/uniqid-4.1.1.tgz",
- "integrity": "sha1-iSIN32t1GuUrX3JISGNShZa7hME=",
- "requires": {
- "macaddress": "0.2.8"
- }
- },
- "uniqs": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz",
- "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI="
- },
- "universalify": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.1.tgz",
- "integrity": "sha1-+nG63UQ3r0wUiEHjs7Fl+enlkLc="
- },
- "unzip": {
- "version": "0.1.11",
- "resolved": "https://registry.npmjs.org/unzip/-/unzip-0.1.11.tgz",
- "integrity": "sha1-iXScY7BY19kNYZ+GuYqhU107l/A=",
- "requires": {
- "binary": "0.3.0",
- "fstream": "0.1.31",
- "match-stream": "0.0.2",
- "pullstream": "0.4.1",
- "readable-stream": "1.0.34",
- "setimmediate": "1.0.5"
- },
- "dependencies": {
- "isarray": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8="
- },
- "readable-stream": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
- "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
- "requires": {
- "core-util-is": "1.0.2",
- "inherits": "2.0.3",
- "isarray": "0.0.1",
- "string_decoder": "0.10.31"
- }
- },
- "string_decoder": {
- "version": "0.10.31",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
- }
- }
- },
- "url": {
- "version": "0.11.0",
- "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz",
- "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=",
- "requires": {
- "punycode": "1.3.2",
- "querystring": "0.2.0"
- },
- "dependencies": {
- "punycode": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
- "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0="
- }
- }
- },
- "url-loader": {
- "version": "0.6.2",
- "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-0.6.2.tgz",
- "integrity": "sha512-h3qf9TNn53BpuXTTcpC+UehiRrl0Cv45Yr/xWayApjw6G8Bg2dGke7rIwDQ39piciWCWrC+WiqLjOh3SUp9n0Q==",
- "requires": {
- "loader-utils": "1.1.0",
- "mime": "1.6.0",
- "schema-utils": "0.3.0"
- }
- },
- "url-parse-lax": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz",
- "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=",
- "requires": {
- "prepend-http": "1.0.4"
- }
- },
- "url-to-options": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz",
- "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k="
- },
- "util": {
- "version": "0.10.3",
- "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz",
- "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=",
- "requires": {
- "inherits": "2.0.1"
- },
- "dependencies": {
- "inherits": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz",
- "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE="
- }
- }
- },
- "util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
- },
- "uuid": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz",
- "integrity": "sha512-jZnMwlb9Iku/O3smGWvZhauCf6cvvpKi4BKRiliS3cxnI+Gz9j5MEpTz2UFuXiKPJocb7gnsLHwiS05ige5BEA=="
- },
- "validate-npm-package-license": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz",
- "integrity": "sha1-KAS6vnEq0zeUWaz74kdGqywwP7w=",
- "requires": {
- "spdx-correct": "1.0.2",
- "spdx-expression-parse": "1.0.4"
- }
- },
- "vendors": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.1.tgz",
- "integrity": "sha1-N61zyO5Bf7PVgOeFMSMH0nSEfyI="
- },
- "verror": {
- "version": "1.10.0",
- "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
- "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
- "requires": {
- "assert-plus": "1.0.0",
- "core-util-is": "1.0.2",
- "extsprintf": "1.3.0"
- }
- },
- "vm-browserify": {
- "version": "0.0.4",
- "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz",
- "integrity": "sha1-XX6kW7755Kb/ZflUOOCofDV9WnM=",
- "requires": {
- "indexof": "0.0.1"
- }
- },
- "warning": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz",
- "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=",
- "requires": {
- "loose-envify": "1.3.1"
- }
- },
- "watchpack": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.4.0.tgz",
- "integrity": "sha1-ShRyvLuVK9Cpu0A2gB+VTfs5+qw=",
- "requires": {
- "async": "2.6.0",
- "chokidar": "1.7.0",
- "graceful-fs": "4.1.11"
- }
- },
- "webpack": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/webpack/-/webpack-3.10.0.tgz",
- "integrity": "sha512-fxxKXoicjdXNUMY7LIdY89tkJJJ0m1Oo8PQutZ5rLgWbV5QVKI15Cn7+/IHnRTd3vfKfiwBx6SBqlorAuNA8LA==",
- "requires": {
- "acorn": "5.3.0",
- "acorn-dynamic-import": "2.0.2",
- "ajv": "5.5.2",
- "ajv-keywords": "2.1.1",
- "async": "2.6.0",
- "enhanced-resolve": "3.4.1",
- "escope": "3.6.0",
- "interpret": "1.1.0",
- "json-loader": "0.5.7",
- "json5": "0.5.1",
- "loader-runner": "2.3.0",
- "loader-utils": "1.1.0",
- "memory-fs": "0.4.1",
- "mkdirp": "0.5.1",
- "node-libs-browser": "2.1.0",
- "source-map": "0.5.7",
- "supports-color": "4.5.0",
- "tapable": "0.2.8",
- "uglifyjs-webpack-plugin": "0.4.6",
- "watchpack": "1.4.0",
- "webpack-sources": "1.1.0",
- "yargs": "8.0.2"
- },
- "dependencies": {
- "camelcase": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz",
- "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0="
- },
- "os-locale": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz",
- "integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==",
- "requires": {
- "execa": "0.7.0",
- "lcid": "1.0.0",
- "mem": "1.1.0"
- }
- },
- "supports-color": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
- "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
- "requires": {
- "has-flag": "2.0.0"
- }
- },
- "which-module": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
- "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho="
- },
- "yargs": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/yargs/-/yargs-8.0.2.tgz",
- "integrity": "sha1-YpmpBVsc78lp/355wdkY3Osiw2A=",
- "requires": {
- "camelcase": "4.1.0",
- "cliui": "3.2.0",
- "decamelize": "1.2.0",
- "get-caller-file": "1.0.2",
- "os-locale": "2.1.0",
- "read-pkg-up": "2.0.0",
- "require-directory": "2.1.1",
- "require-main-filename": "1.0.1",
- "set-blocking": "2.0.0",
- "string-width": "2.1.1",
- "which-module": "2.0.0",
- "y18n": "3.2.1",
- "yargs-parser": "7.0.0"
- }
- },
- "yargs-parser": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-7.0.0.tgz",
- "integrity": "sha1-jQrELxbqVd69MyyvTEA4s+P139k=",
- "requires": {
- "camelcase": "4.1.0"
- }
- }
- }
- },
- "webpack-sources": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.1.0.tgz",
- "integrity": "sha512-aqYp18kPphgoO5c/+NaUvEeACtZjMESmDChuD3NBciVpah3XpMEU9VAAtIaB1BsfJWWTSdv8Vv1m3T0aRk2dUw==",
- "requires": {
- "source-list-map": "2.0.0",
- "source-map": "0.6.1"
- },
- "dependencies": {
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
- }
- }
- },
- "whatwg-fetch": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.3.tgz",
- "integrity": "sha1-nITsLc9oGH/wC8ZOEnS0QhduHIQ="
- },
- "whet.extend": {
- "version": "0.9.9",
- "resolved": "https://registry.npmjs.org/whet.extend/-/whet.extend-0.9.9.tgz",
- "integrity": "sha1-+HfVv2SMl+WqVC+twW1qJZucEaE="
- },
- "which": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz",
- "integrity": "sha512-xcJpopdamTuY5duC/KnTTNBraPK54YwpenP4lzxU8H91GudWpFv38u0CKjclE1Wi2EH2EDz5LRcHcKbCIzqGyg==",
- "requires": {
- "isexe": "2.0.0"
- }
- },
- "which-module": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz",
- "integrity": "sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8="
- },
- "window-size": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.2.0.tgz",
- "integrity": "sha1-tDFbtCFKPXBY6+7okuE/ok2YsHU="
- },
- "wordwrap": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
- "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus="
- },
- "wrap-ansi": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz",
- "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=",
- "requires": {
- "string-width": "1.0.2",
- "strip-ansi": "3.0.1"
- },
- "dependencies": {
- "is-fullwidth-code-point": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
- "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
- "requires": {
- "number-is-nan": "1.0.1"
- }
- },
- "string-width": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
- "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
- "requires": {
- "code-point-at": "1.1.0",
- "is-fullwidth-code-point": "1.0.0",
- "strip-ansi": "3.0.1"
- }
- }
- }
- },
- "wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
- },
- "write": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/write/-/write-0.2.1.tgz",
- "integrity": "sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c=",
- "requires": {
- "mkdirp": "0.5.1"
- }
- },
- "xtend": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
- "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68="
- },
- "y18n": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz",
- "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE="
- },
- "yallist": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz",
- "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI="
- },
- "yargs": {
- "version": "4.8.1",
- "resolved": "https://registry.npmjs.org/yargs/-/yargs-4.8.1.tgz",
- "integrity": "sha1-wMQpJMpKqmsObaFznfshZDn53cA=",
- "requires": {
- "cliui": "3.2.0",
- "decamelize": "1.2.0",
- "get-caller-file": "1.0.2",
- "lodash.assign": "4.2.0",
- "os-locale": "1.4.0",
- "read-pkg-up": "1.0.1",
- "require-directory": "2.1.1",
- "require-main-filename": "1.0.1",
- "set-blocking": "2.0.0",
- "string-width": "1.0.2",
- "which-module": "1.0.0",
- "window-size": "0.2.0",
- "y18n": "3.2.1",
- "yargs-parser": "2.4.1"
- },
- "dependencies": {
- "find-up": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz",
- "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=",
- "requires": {
- "path-exists": "2.1.0",
- "pinkie-promise": "2.0.1"
- }
- },
- "is-fullwidth-code-point": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
- "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
- "requires": {
- "number-is-nan": "1.0.1"
- }
- },
- "load-json-file": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz",
- "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=",
- "requires": {
- "graceful-fs": "4.1.11",
- "parse-json": "2.2.0",
- "pify": "2.3.0",
- "pinkie-promise": "2.0.1",
- "strip-bom": "2.0.0"
- }
- },
- "path-exists": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz",
- "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=",
- "requires": {
- "pinkie-promise": "2.0.1"
- }
- },
- "path-type": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz",
- "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=",
- "requires": {
- "graceful-fs": "4.1.11",
- "pify": "2.3.0",
- "pinkie-promise": "2.0.1"
- }
- },
- "pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
- },
- "read-pkg": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz",
- "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=",
- "requires": {
- "load-json-file": "1.1.0",
- "normalize-package-data": "2.4.0",
- "path-type": "1.1.0"
- }
- },
- "read-pkg-up": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz",
- "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=",
- "requires": {
- "find-up": "1.1.2",
- "read-pkg": "1.1.0"
- }
- },
- "string-width": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
- "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
- "requires": {
- "code-point-at": "1.1.0",
- "is-fullwidth-code-point": "1.0.0",
- "strip-ansi": "3.0.1"
- }
- },
- "strip-bom": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz",
- "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=",
- "requires": {
- "is-utf8": "0.2.1"
- }
- }
- }
- },
- "yargs-parser": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-2.4.1.tgz",
- "integrity": "sha1-hVaN488VD/SfpRgl8DqMiA3cxcQ=",
- "requires": {
- "camelcase": "3.0.0",
- "lodash.assign": "4.2.0"
- }
- }
- }
-}
diff --git a/dashboard/assets/package.json b/dashboard/assets/package.json
index 847fd4bc1..5ac179b9e 100644
--- a/dashboard/assets/package.json
+++ b/dashboard/assets/package.json
@@ -15,11 +15,11 @@
"css-loader": "^0.28.9",
"eslint": "^4.16.0",
"eslint-config-airbnb": "^16.1.0",
- "eslint-loader": "^1.9.0",
+ "eslint-loader": "^2.0.0",
+ "eslint-plugin-flowtype": "^2.41.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-jsx-a11y": "^6.0.3",
"eslint-plugin-react": "^7.5.1",
- "eslint-plugin-flowtype": "^2.41.0",
"file-loader": "^1.1.6",
"flow-bin": "^0.63.1",
"flow-bin-loader": "^1.0.2",
@@ -35,6 +35,13 @@
"style-loader": "^0.19.1",
"url": "^0.11.0",
"url-loader": "^0.6.2",
- "webpack": "^3.10.0"
+ "webpack": "^3.10.0",
+ "webpack-dev-server": "^2.11.1"
+ },
+ "scripts": {
+ "build": "NODE_ENV=production webpack",
+ "stats": "webpack --profile --json > stats.json",
+ "dev": "webpack-dev-server --port 8081",
+ "flow": "flow-typed install"
}
}
diff --git a/dashboard/assets/types/content.jsx b/dashboard/assets/types/content.jsx
index 546125397..0e2e0b49d 100644
--- a/dashboard/assets/types/content.jsx
+++ b/dashboard/assets/types/content.jsx
@@ -26,22 +26,6 @@ export type Content = {
logs: Logs,
};
-export type General = {
- version: ?string,
- commit: ?string,
-};
-
-export type Home = {
- activeMemory: ChartEntries,
- virtualMemory: ChartEntries,
- networkIngress: ChartEntries,
- networkEgress: ChartEntries,
- processCPU: ChartEntries,
- systemCPU: ChartEntries,
- diskRead: ChartEntries,
- diskWrite: ChartEntries,
-};
-
export type ChartEntries = Array<ChartEntry>;
export type ChartEntry = {
@@ -49,6 +33,15 @@ export type ChartEntry = {
value: number,
};
+export type General = {
+ version: ?string,
+ commit: ?string,
+};
+
+export type Home = {
+ /* TODO (kurkomisi) */
+};
+
export type Chain = {
/* TODO (kurkomisi) */
};
@@ -62,7 +55,14 @@ export type Network = {
};
export type System = {
- /* TODO (kurkomisi) */
+ activeMemory: ChartEntries,
+ virtualMemory: ChartEntries,
+ networkIngress: ChartEntries,
+ networkEgress: ChartEntries,
+ processCPU: ChartEntries,
+ systemCPU: ChartEntries,
+ diskRead: ChartEntries,
+ diskWrite: ChartEntries,
};
export type Logs = {
diff --git a/dashboard/assets/webpack.config.js b/dashboard/assets/webpack.config.js
index d90c4fabd..f58d35ad1 100644
--- a/dashboard/assets/webpack.config.js
+++ b/dashboard/assets/webpack.config.js
@@ -32,6 +32,9 @@ module.exports = {
mangle: false,
beautify: true,
}),
+ new webpack.DefinePlugin({
+ PROD: process.env.NODE_ENV === 'production',
+ }),
],
module: {
rules: [
diff --git a/dashboard/assets/yarn.lock b/dashboard/assets/yarn.lock
new file mode 100644
index 000000000..7480c719f
--- /dev/null
+++ b/dashboard/assets/yarn.lock
@@ -0,0 +1,6551 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@babel/code-frame@7.0.0-beta.40", "@babel/code-frame@^7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.40.tgz#37e2b0cf7c56026b4b21d3927cadf81adec32ac6"
+ dependencies:
+ "@babel/highlight" "7.0.0-beta.40"
+
+"@babel/generator@7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.0.0-beta.40.tgz#ab61f9556f4f71dbd1138949c795bb9a21e302ea"
+ dependencies:
+ "@babel/types" "7.0.0-beta.40"
+ jsesc "^2.5.1"
+ lodash "^4.2.0"
+ source-map "^0.5.0"
+ trim-right "^1.0.1"
+
+"@babel/helper-function-name@7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.40.tgz#9d033341ab16517f40d43a73f2d81fc431ccd7b6"
+ dependencies:
+ "@babel/helper-get-function-arity" "7.0.0-beta.40"
+ "@babel/template" "7.0.0-beta.40"
+ "@babel/types" "7.0.0-beta.40"
+
+"@babel/helper-get-function-arity@7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.40.tgz#ac0419cf067b0ec16453e1274f03878195791c6e"
+ dependencies:
+ "@babel/types" "7.0.0-beta.40"
+
+"@babel/highlight@7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0-beta.40.tgz#b43d67d76bf46e1d10d227f68cddcd263786b255"
+ dependencies:
+ chalk "^2.0.0"
+ esutils "^2.0.2"
+ js-tokens "^3.0.0"
+
+"@babel/template@7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.0.0-beta.40.tgz#034988c6424eb5c3268fe6a608626de1f4410fc8"
+ dependencies:
+ "@babel/code-frame" "7.0.0-beta.40"
+ "@babel/types" "7.0.0-beta.40"
+ babylon "7.0.0-beta.40"
+ lodash "^4.2.0"
+
+"@babel/traverse@^7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.0.0-beta.40.tgz#d140e449b2e093ef9fe1a2eecc28421ffb4e521e"
+ dependencies:
+ "@babel/code-frame" "7.0.0-beta.40"
+ "@babel/generator" "7.0.0-beta.40"
+ "@babel/helper-function-name" "7.0.0-beta.40"
+ "@babel/types" "7.0.0-beta.40"
+ babylon "7.0.0-beta.40"
+ debug "^3.0.1"
+ globals "^11.1.0"
+ invariant "^2.2.0"
+ lodash "^4.2.0"
+
+"@babel/types@7.0.0-beta.40", "@babel/types@^7.0.0-beta.40":
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.0.0-beta.40.tgz#25c3d7aae14126abe05fcb098c65a66b6d6b8c14"
+ dependencies:
+ esutils "^2.0.2"
+ lodash "^4.2.0"
+ to-fast-properties "^2.0.0"
+
+"@types/jss@^9.3.0":
+ version "9.5.0"
+ resolved "https://registry.yarnpkg.com/@types/jss/-/jss-9.5.0.tgz#65d9b5c61f1e95ad3acd53e9a8b2d2342aa917e8"
+ dependencies:
+ csstype "^1.6.0"
+
+"@types/react-transition-group@^2.0.6":
+ version "2.0.7"
+ resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-2.0.7.tgz#2847292d54c5685d982ae5a3ecb6960946689d87"
+ dependencies:
+ "@types/react" "*"
+
+"@types/react@*":
+ version "16.0.40"
+ resolved "https://registry.yarnpkg.com/@types/react/-/react-16.0.40.tgz#caabc2296886f40b67f6fc80f0f3464476461df9"
+
+abbrev@1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
+
+accepts@~1.3.4:
+ version "1.3.5"
+ resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.5.tgz#eb777df6011723a3b14e8a72c0805c8e86746bd2"
+ dependencies:
+ mime-types "~2.1.18"
+ negotiator "0.6.1"
+
+acorn-dynamic-import@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz#c752bd210bef679501b6c6cb7fc84f8f47158cc4"
+ dependencies:
+ acorn "^4.0.3"
+
+acorn-jsx@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b"
+ dependencies:
+ acorn "^3.0.4"
+
+acorn@^3.0.4:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a"
+
+acorn@^4.0.3:
+ version "4.0.13"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
+
+acorn@^5.0.0, acorn@^5.4.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.5.0.tgz#1abb587fbf051f94e3de20e6b26ef910b1828298"
+
+ajv-keywords@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762"
+
+ajv-keywords@^3.0.0, ajv-keywords@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.1.0.tgz#ac2b27939c543e95d2c06e7f7f5c27be4aa543be"
+
+ajv@^4.9.1:
+ version "4.11.8"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
+ dependencies:
+ co "^4.6.0"
+ json-stable-stringify "^1.0.1"
+
+ajv@^5.0.0, ajv@^5.2.3, ajv@^5.3.0:
+ version "5.5.2"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965"
+ dependencies:
+ co "^4.6.0"
+ fast-deep-equal "^1.0.0"
+ fast-json-stable-stringify "^2.0.0"
+ json-schema-traverse "^0.3.0"
+
+ajv@^6.0.1, ajv@^6.1.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.2.0.tgz#afac295bbaa0152449e522742e4547c1ae9328d2"
+ dependencies:
+ fast-deep-equal "^1.0.0"
+ fast-json-stable-stringify "^2.0.0"
+ json-schema-traverse "^0.3.0"
+
+align-text@^0.1.1, align-text@^0.1.3:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117"
+ dependencies:
+ kind-of "^3.0.2"
+ longest "^1.0.1"
+ repeat-string "^1.5.2"
+
+alphanum-sort@^1.0.1, alphanum-sort@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3"
+
+ansi-escapes@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.0.0.tgz#ec3e8b4e9f8064fc02c3ac9b65f1c275bda8ef92"
+
+ansi-html@0.0.7:
+ version "0.0.7"
+ resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e"
+
+ansi-regex@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
+
+ansi-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
+
+ansi-styles@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
+
+ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ dependencies:
+ color-convert "^1.9.0"
+
+anymatch@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb"
+ dependencies:
+ micromatch "^3.1.4"
+ normalize-path "^2.1.1"
+
+aproba@^1.0.3:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
+
+are-we-there-yet@~1.1.2:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d"
+ dependencies:
+ delegates "^1.0.0"
+ readable-stream "^2.0.6"
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ dependencies:
+ sprintf-js "~1.0.2"
+
+aria-query@^0.7.0:
+ version "0.7.1"
+ resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-0.7.1.tgz#26cbb5aff64144b0a825be1846e0b16cfa00b11e"
+ dependencies:
+ ast-types-flow "0.0.7"
+ commander "^2.11.0"
+
+arr-diff@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf"
+ dependencies:
+ arr-flatten "^1.0.1"
+
+arr-diff@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
+
+arr-flatten@^1.0.1, arr-flatten@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1"
+
+arr-union@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
+
+array-find-index@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1"
+
+array-flatten@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+
+array-flatten@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296"
+
+array-includes@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.0.3.tgz#184b48f62d92d7452bb31b323165c7f8bd02266d"
+ dependencies:
+ define-properties "^1.1.2"
+ es-abstract "^1.7.0"
+
+array-union@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
+ dependencies:
+ array-uniq "^1.0.1"
+
+array-uniq@^1.0.1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6"
+
+array-unique@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53"
+
+array-unique@^0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
+
+arrify@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
+
+asap@~2.0.3:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
+
+asn1.js@^4.0.0:
+ version "4.10.1"
+ resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0"
+ dependencies:
+ bn.js "^4.0.0"
+ inherits "^2.0.1"
+ minimalistic-assert "^1.0.0"
+
+asn1@~0.2.3:
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86"
+
+assert-plus@1.0.0, assert-plus@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525"
+
+assert-plus@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234"
+
+assert@^1.1.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/assert/-/assert-1.4.1.tgz#99912d591836b5a6f5b345c0f07eefc08fc65d91"
+ dependencies:
+ util "0.10.3"
+
+assign-symbols@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
+
+ast-types-flow@0.0.7:
+ version "0.0.7"
+ resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
+
+async-each@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d"
+
+async@^1.5.2:
+ version "1.5.2"
+ resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
+
+async@^2.1.2:
+ version "2.6.0"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.6.0.tgz#61a29abb6fcc026fea77e56d1c6ec53a795951f4"
+ dependencies:
+ lodash "^4.14.0"
+
+asynckit@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+
+atob@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/atob/-/atob-2.0.3.tgz#19c7a760473774468f20b2d2d03372ad7d4cbf5d"
+
+autoprefixer@^6.3.1:
+ version "6.7.7"
+ resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-6.7.7.tgz#1dbd1c835658e35ce3f9984099db00585c782014"
+ dependencies:
+ browserslist "^1.7.6"
+ caniuse-db "^1.0.30000634"
+ normalize-range "^0.1.2"
+ num2fraction "^1.2.2"
+ postcss "^5.2.16"
+ postcss-value-parser "^3.2.3"
+
+aws-sign2@~0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f"
+
+aws4@^1.2.1:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e"
+
+axobject-query@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-0.1.0.tgz#62f59dbc59c9f9242759ca349960e7a2fe3c36c0"
+ dependencies:
+ ast-types-flow "0.0.7"
+
+babel-code-frame@^6.22.0, babel-code-frame@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
+ dependencies:
+ chalk "^1.1.3"
+ esutils "^2.0.2"
+ js-tokens "^3.0.2"
+
+babel-core@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.0.tgz#af32f78b31a6fcef119c87b0fd8d9753f03a0bb8"
+ dependencies:
+ babel-code-frame "^6.26.0"
+ babel-generator "^6.26.0"
+ babel-helpers "^6.24.1"
+ babel-messages "^6.23.0"
+ babel-register "^6.26.0"
+ babel-runtime "^6.26.0"
+ babel-template "^6.26.0"
+ babel-traverse "^6.26.0"
+ babel-types "^6.26.0"
+ babylon "^6.18.0"
+ convert-source-map "^1.5.0"
+ debug "^2.6.8"
+ json5 "^0.5.1"
+ lodash "^4.17.4"
+ minimatch "^3.0.4"
+ path-is-absolute "^1.0.1"
+ private "^0.1.7"
+ slash "^1.0.0"
+ source-map "^0.5.6"
+
+babel-eslint@^8.2.1:
+ version "8.2.2"
+ resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-8.2.2.tgz#1102273354c6f0b29b4ea28a65f97d122296b68b"
+ dependencies:
+ "@babel/code-frame" "^7.0.0-beta.40"
+ "@babel/traverse" "^7.0.0-beta.40"
+ "@babel/types" "^7.0.0-beta.40"
+ babylon "^7.0.0-beta.40"
+ eslint-scope "~3.7.1"
+ eslint-visitor-keys "^1.0.0"
+
+babel-generator@^6.26.0:
+ version "6.26.1"
+ resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90"
+ dependencies:
+ babel-messages "^6.23.0"
+ babel-runtime "^6.26.0"
+ babel-types "^6.26.0"
+ detect-indent "^4.0.0"
+ jsesc "^1.3.0"
+ lodash "^4.17.4"
+ source-map "^0.5.7"
+ trim-right "^1.0.1"
+
+babel-helper-bindify-decorators@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-bindify-decorators/-/babel-helper-bindify-decorators-6.24.1.tgz#14c19e5f142d7b47f19a52431e52b1ccbc40a330"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-builder-binary-assignment-operator-visitor@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz#cce4517ada356f4220bcae8a02c2b346f9a56664"
+ dependencies:
+ babel-helper-explode-assignable-expression "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-helper-builder-react-jsx@^6.24.1:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-helper-builder-react-jsx/-/babel-helper-builder-react-jsx-6.26.0.tgz#39ff8313b75c8b65dceff1f31d383e0ff2a408a0"
+ dependencies:
+ babel-runtime "^6.26.0"
+ babel-types "^6.26.0"
+ esutils "^2.0.2"
+
+babel-helper-call-delegate@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz#ece6aacddc76e41c3461f88bfc575bd0daa2df8d"
+ dependencies:
+ babel-helper-hoist-variables "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-define-map@^6.24.1:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz#a5f56dab41a25f97ecb498c7ebaca9819f95be5f"
+ dependencies:
+ babel-helper-function-name "^6.24.1"
+ babel-runtime "^6.26.0"
+ babel-types "^6.26.0"
+ lodash "^4.17.4"
+
+babel-helper-explode-assignable-expression@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz#f25b82cf7dc10433c55f70592d5746400ac22caa"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-explode-class@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-explode-class/-/babel-helper-explode-class-6.24.1.tgz#7dc2a3910dee007056e1e31d640ced3d54eaa9eb"
+ dependencies:
+ babel-helper-bindify-decorators "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-function-name@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz#d3475b8c03ed98242a25b48351ab18399d3580a9"
+ dependencies:
+ babel-helper-get-function-arity "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-get-function-arity@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz#8f7782aa93407c41d3aa50908f89b031b1b6853d"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-helper-hoist-variables@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz#1ecb27689c9d25513eadbc9914a73f5408be7a76"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-helper-optimise-call-expression@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz#f7a13427ba9f73f8f4fa993c54a97882d1244257"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-helper-regex@^6.24.1:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz#325c59f902f82f24b74faceed0363954f6495e72"
+ dependencies:
+ babel-runtime "^6.26.0"
+ babel-types "^6.26.0"
+ lodash "^4.17.4"
+
+babel-helper-remap-async-to-generator@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz#5ec581827ad723fecdd381f1c928390676e4551b"
+ dependencies:
+ babel-helper-function-name "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helper-replace-supers@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz#bf6dbfe43938d17369a213ca8a8bf74b6a90ab1a"
+ dependencies:
+ babel-helper-optimise-call-expression "^6.24.1"
+ babel-messages "^6.23.0"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-helpers@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-loader@^7.1.2:
+ version "7.1.3"
+ resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-7.1.3.tgz#ff5b440da716e9153abb946251a9ab7670037b16"
+ dependencies:
+ find-cache-dir "^1.0.0"
+ loader-utils "^1.0.2"
+ mkdirp "^0.5.1"
+
+babel-messages@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-check-es2015-constants@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz#35157b101426fd2ffd3da3f75c7d1e91835bbf8a"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-syntax-async-functions@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95"
+
+babel-plugin-syntax-async-generators@^6.5.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-generators/-/babel-plugin-syntax-async-generators-6.13.0.tgz#6bc963ebb16eccbae6b92b596eb7f35c342a8b9a"
+
+babel-plugin-syntax-class-constructor-call@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-constructor-call/-/babel-plugin-syntax-class-constructor-call-6.18.0.tgz#9cb9d39fe43c8600bec8146456ddcbd4e1a76416"
+
+babel-plugin-syntax-class-properties@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz#d7eb23b79a317f8543962c505b827c7d6cac27de"
+
+babel-plugin-syntax-decorators@^6.1.18, babel-plugin-syntax-decorators@^6.13.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-decorators/-/babel-plugin-syntax-decorators-6.13.0.tgz#312563b4dbde3cc806cee3e416cceeaddd11ac0b"
+
+babel-plugin-syntax-do-expressions@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-do-expressions/-/babel-plugin-syntax-do-expressions-6.13.0.tgz#5747756139aa26d390d09410b03744ba07e4796d"
+
+babel-plugin-syntax-dynamic-import@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz#8d6a26229c83745a9982a441051572caa179b1da"
+
+babel-plugin-syntax-exponentiation-operator@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz#9ee7e8337290da95288201a6a57f4170317830de"
+
+babel-plugin-syntax-export-extensions@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-export-extensions/-/babel-plugin-syntax-export-extensions-6.13.0.tgz#70a1484f0f9089a4e84ad44bac353c95b9b12721"
+
+babel-plugin-syntax-flow@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-flow/-/babel-plugin-syntax-flow-6.18.0.tgz#4c3ab20a2af26aa20cd25995c398c4eb70310c8d"
+
+babel-plugin-syntax-function-bind@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-function-bind/-/babel-plugin-syntax-function-bind-6.13.0.tgz#48c495f177bdf31a981e732f55adc0bdd2601f46"
+
+babel-plugin-syntax-jsx@^6.3.13, babel-plugin-syntax-jsx@^6.8.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946"
+
+babel-plugin-syntax-object-rest-spread@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5"
+
+babel-plugin-syntax-trailing-function-commas@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz#ba0360937f8d06e40180a43fe0d5616fff532cf3"
+
+babel-plugin-transform-async-generator-functions@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-async-generator-functions/-/babel-plugin-transform-async-generator-functions-6.24.1.tgz#f058900145fd3e9907a6ddf28da59f215258a5db"
+ dependencies:
+ babel-helper-remap-async-to-generator "^6.24.1"
+ babel-plugin-syntax-async-generators "^6.5.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-async-to-generator@^6.22.0, babel-plugin-transform-async-to-generator@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz#6536e378aff6cb1d5517ac0e40eb3e9fc8d08761"
+ dependencies:
+ babel-helper-remap-async-to-generator "^6.24.1"
+ babel-plugin-syntax-async-functions "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-class-constructor-call@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-constructor-call/-/babel-plugin-transform-class-constructor-call-6.24.1.tgz#80dc285505ac067dcb8d6c65e2f6f11ab7765ef9"
+ dependencies:
+ babel-plugin-syntax-class-constructor-call "^6.18.0"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-class-properties@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz#6a79763ea61d33d36f37b611aa9def81a81b46ac"
+ dependencies:
+ babel-helper-function-name "^6.24.1"
+ babel-plugin-syntax-class-properties "^6.8.0"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-decorators-legacy@^1.3.4:
+ version "1.3.4"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-decorators-legacy/-/babel-plugin-transform-decorators-legacy-1.3.4.tgz#741b58f6c5bce9e6027e0882d9c994f04f366925"
+ dependencies:
+ babel-plugin-syntax-decorators "^6.1.18"
+ babel-runtime "^6.2.0"
+ babel-template "^6.3.0"
+
+babel-plugin-transform-decorators@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-decorators/-/babel-plugin-transform-decorators-6.24.1.tgz#788013d8f8c6b5222bdf7b344390dfd77569e24d"
+ dependencies:
+ babel-helper-explode-class "^6.24.1"
+ babel-plugin-syntax-decorators "^6.13.0"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-do-expressions@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-do-expressions/-/babel-plugin-transform-do-expressions-6.22.0.tgz#28ccaf92812d949c2cd1281f690c8fdc468ae9bb"
+ dependencies:
+ babel-plugin-syntax-do-expressions "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-arrow-functions@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-block-scoped-functions@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz#bbc51b49f964d70cb8d8e0b94e820246ce3a6141"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-block-scoping@^6.23.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz#d70f5299c1308d05c12f463813b0a09e73b1895f"
+ dependencies:
+ babel-runtime "^6.26.0"
+ babel-template "^6.26.0"
+ babel-traverse "^6.26.0"
+ babel-types "^6.26.0"
+ lodash "^4.17.4"
+
+babel-plugin-transform-es2015-classes@^6.23.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz#5a4c58a50c9c9461e564b4b2a3bfabc97a2584db"
+ dependencies:
+ babel-helper-define-map "^6.24.1"
+ babel-helper-function-name "^6.24.1"
+ babel-helper-optimise-call-expression "^6.24.1"
+ babel-helper-replace-supers "^6.24.1"
+ babel-messages "^6.23.0"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-computed-properties@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz#6fe2a8d16895d5634f4cd999b6d3480a308159b3"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-es2015-destructuring@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz#997bb1f1ab967f682d2b0876fe358d60e765c56d"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-duplicate-keys@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz#73eb3d310ca969e3ef9ec91c53741a6f1576423e"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-for-of@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz#f47c95b2b613df1d3ecc2fdb7573623c75248691"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-function-name@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz#834c89853bc36b1af0f3a4c5dbaa94fd8eacaa8b"
+ dependencies:
+ babel-helper-function-name "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-literals@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz#4f54a02d6cd66cf915280019a31d31925377ca2e"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-modules-amd@^6.22.0, babel-plugin-transform-es2015-modules-amd@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz#3b3e54017239842d6d19c3011c4bd2f00a00d154"
+ dependencies:
+ babel-plugin-transform-es2015-modules-commonjs "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-es2015-modules-commonjs@^6.23.0, babel-plugin-transform-es2015-modules-commonjs@^6.24.1:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.0.tgz#0d8394029b7dc6abe1a97ef181e00758dd2e5d8a"
+ dependencies:
+ babel-plugin-transform-strict-mode "^6.24.1"
+ babel-runtime "^6.26.0"
+ babel-template "^6.26.0"
+ babel-types "^6.26.0"
+
+babel-plugin-transform-es2015-modules-systemjs@^6.23.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz#ff89a142b9119a906195f5f106ecf305d9407d23"
+ dependencies:
+ babel-helper-hoist-variables "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-es2015-modules-umd@^6.23.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz#ac997e6285cd18ed6176adb607d602344ad38468"
+ dependencies:
+ babel-plugin-transform-es2015-modules-amd "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+
+babel-plugin-transform-es2015-object-super@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz#24cef69ae21cb83a7f8603dad021f572eb278f8d"
+ dependencies:
+ babel-helper-replace-supers "^6.24.1"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-parameters@^6.23.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz#57ac351ab49caf14a97cd13b09f66fdf0a625f2b"
+ dependencies:
+ babel-helper-call-delegate "^6.24.1"
+ babel-helper-get-function-arity "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-template "^6.24.1"
+ babel-traverse "^6.24.1"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-shorthand-properties@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz#24f875d6721c87661bbd99a4622e51f14de38aa0"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-spread@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz#d6d68a99f89aedc4536c81a542e8dd9f1746f8d1"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-sticky-regex@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz#00c1cdb1aca71112cdf0cf6126c2ed6b457ccdbc"
+ dependencies:
+ babel-helper-regex "^6.24.1"
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-plugin-transform-es2015-template-literals@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz#a84b3450f7e9f8f1f6839d6d687da84bb1236d8d"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-typeof-symbol@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz#dec09f1cddff94b52ac73d505c84df59dcceb372"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-es2015-unicode-regex@^6.22.0:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz#d38b12f42ea7323f729387f18a7c5ae1faeb35e9"
+ dependencies:
+ babel-helper-regex "^6.24.1"
+ babel-runtime "^6.22.0"
+ regexpu-core "^2.0.0"
+
+babel-plugin-transform-exponentiation-operator@^6.22.0, babel-plugin-transform-exponentiation-operator@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz#2ab0c9c7f3098fa48907772bb813fe41e8de3a0e"
+ dependencies:
+ babel-helper-builder-binary-assignment-operator-visitor "^6.24.1"
+ babel-plugin-syntax-exponentiation-operator "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-export-extensions@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-export-extensions/-/babel-plugin-transform-export-extensions-6.22.0.tgz#53738b47e75e8218589eea946cbbd39109bbe653"
+ dependencies:
+ babel-plugin-syntax-export-extensions "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-flow-strip-types@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-flow-strip-types/-/babel-plugin-transform-flow-strip-types-6.22.0.tgz#84cb672935d43714fdc32bce84568d87441cf7cf"
+ dependencies:
+ babel-plugin-syntax-flow "^6.18.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-function-bind@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-function-bind/-/babel-plugin-transform-function-bind-6.22.0.tgz#c6fb8e96ac296a310b8cf8ea401462407ddf6a97"
+ dependencies:
+ babel-plugin-syntax-function-bind "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-object-rest-spread@^6.22.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz#0f36692d50fef6b7e2d4b3ac1478137a963b7b06"
+ dependencies:
+ babel-plugin-syntax-object-rest-spread "^6.8.0"
+ babel-runtime "^6.26.0"
+
+babel-plugin-transform-react-display-name@^6.23.0:
+ version "6.25.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-display-name/-/babel-plugin-transform-react-display-name-6.25.0.tgz#67e2bf1f1e9c93ab08db96792e05392bf2cc28d1"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-react-jsx-self@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-jsx-self/-/babel-plugin-transform-react-jsx-self-6.22.0.tgz#df6d80a9da2612a121e6ddd7558bcbecf06e636e"
+ dependencies:
+ babel-plugin-syntax-jsx "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-react-jsx-source@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-jsx-source/-/babel-plugin-transform-react-jsx-source-6.22.0.tgz#66ac12153f5cd2d17b3c19268f4bf0197f44ecd6"
+ dependencies:
+ babel-plugin-syntax-jsx "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-react-jsx@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-jsx/-/babel-plugin-transform-react-jsx-6.24.1.tgz#840a028e7df460dfc3a2d29f0c0d91f6376e66a3"
+ dependencies:
+ babel-helper-builder-react-jsx "^6.24.1"
+ babel-plugin-syntax-jsx "^6.8.0"
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-regenerator@^6.22.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz#e0703696fbde27f0a3efcacf8b4dca2f7b3a8f2f"
+ dependencies:
+ regenerator-transform "^0.10.0"
+
+babel-plugin-transform-runtime@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-runtime/-/babel-plugin-transform-runtime-6.23.0.tgz#88490d446502ea9b8e7efb0fe09ec4d99479b1ee"
+ dependencies:
+ babel-runtime "^6.22.0"
+
+babel-plugin-transform-strict-mode@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758"
+ dependencies:
+ babel-runtime "^6.22.0"
+ babel-types "^6.24.1"
+
+babel-polyfill@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153"
+ dependencies:
+ babel-runtime "^6.26.0"
+ core-js "^2.5.0"
+ regenerator-runtime "^0.10.5"
+
+babel-preset-env@^1.6.1:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.6.1.tgz#a18b564cc9b9afdf4aae57ae3c1b0d99188e6f48"
+ dependencies:
+ babel-plugin-check-es2015-constants "^6.22.0"
+ babel-plugin-syntax-trailing-function-commas "^6.22.0"
+ babel-plugin-transform-async-to-generator "^6.22.0"
+ babel-plugin-transform-es2015-arrow-functions "^6.22.0"
+ babel-plugin-transform-es2015-block-scoped-functions "^6.22.0"
+ babel-plugin-transform-es2015-block-scoping "^6.23.0"
+ babel-plugin-transform-es2015-classes "^6.23.0"
+ babel-plugin-transform-es2015-computed-properties "^6.22.0"
+ babel-plugin-transform-es2015-destructuring "^6.23.0"
+ babel-plugin-transform-es2015-duplicate-keys "^6.22.0"
+ babel-plugin-transform-es2015-for-of "^6.23.0"
+ babel-plugin-transform-es2015-function-name "^6.22.0"
+ babel-plugin-transform-es2015-literals "^6.22.0"
+ babel-plugin-transform-es2015-modules-amd "^6.22.0"
+ babel-plugin-transform-es2015-modules-commonjs "^6.23.0"
+ babel-plugin-transform-es2015-modules-systemjs "^6.23.0"
+ babel-plugin-transform-es2015-modules-umd "^6.23.0"
+ babel-plugin-transform-es2015-object-super "^6.22.0"
+ babel-plugin-transform-es2015-parameters "^6.23.0"
+ babel-plugin-transform-es2015-shorthand-properties "^6.22.0"
+ babel-plugin-transform-es2015-spread "^6.22.0"
+ babel-plugin-transform-es2015-sticky-regex "^6.22.0"
+ babel-plugin-transform-es2015-template-literals "^6.22.0"
+ babel-plugin-transform-es2015-typeof-symbol "^6.23.0"
+ babel-plugin-transform-es2015-unicode-regex "^6.22.0"
+ babel-plugin-transform-exponentiation-operator "^6.22.0"
+ babel-plugin-transform-regenerator "^6.22.0"
+ browserslist "^2.1.2"
+ invariant "^2.2.2"
+ semver "^5.3.0"
+
+babel-preset-flow@^6.23.0:
+ version "6.23.0"
+ resolved "https://registry.yarnpkg.com/babel-preset-flow/-/babel-preset-flow-6.23.0.tgz#e71218887085ae9a24b5be4169affb599816c49d"
+ dependencies:
+ babel-plugin-transform-flow-strip-types "^6.22.0"
+
+babel-preset-react@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-react/-/babel-preset-react-6.24.1.tgz#ba69dfaea45fc3ec639b6a4ecea6e17702c91380"
+ dependencies:
+ babel-plugin-syntax-jsx "^6.3.13"
+ babel-plugin-transform-react-display-name "^6.23.0"
+ babel-plugin-transform-react-jsx "^6.24.1"
+ babel-plugin-transform-react-jsx-self "^6.22.0"
+ babel-plugin-transform-react-jsx-source "^6.22.0"
+ babel-preset-flow "^6.23.0"
+
+babel-preset-stage-0@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-stage-0/-/babel-preset-stage-0-6.24.1.tgz#5642d15042f91384d7e5af8bc88b1db95b039e6a"
+ dependencies:
+ babel-plugin-transform-do-expressions "^6.22.0"
+ babel-plugin-transform-function-bind "^6.22.0"
+ babel-preset-stage-1 "^6.24.1"
+
+babel-preset-stage-1@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-stage-1/-/babel-preset-stage-1-6.24.1.tgz#7692cd7dcd6849907e6ae4a0a85589cfb9e2bfb0"
+ dependencies:
+ babel-plugin-transform-class-constructor-call "^6.24.1"
+ babel-plugin-transform-export-extensions "^6.22.0"
+ babel-preset-stage-2 "^6.24.1"
+
+babel-preset-stage-2@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-stage-2/-/babel-preset-stage-2-6.24.1.tgz#d9e2960fb3d71187f0e64eec62bc07767219bdc1"
+ dependencies:
+ babel-plugin-syntax-dynamic-import "^6.18.0"
+ babel-plugin-transform-class-properties "^6.24.1"
+ babel-plugin-transform-decorators "^6.24.1"
+ babel-preset-stage-3 "^6.24.1"
+
+babel-preset-stage-3@^6.24.1:
+ version "6.24.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-stage-3/-/babel-preset-stage-3-6.24.1.tgz#836ada0a9e7a7fa37cb138fb9326f87934a48395"
+ dependencies:
+ babel-plugin-syntax-trailing-function-commas "^6.22.0"
+ babel-plugin-transform-async-generator-functions "^6.24.1"
+ babel-plugin-transform-async-to-generator "^6.24.1"
+ babel-plugin-transform-exponentiation-operator "^6.24.1"
+ babel-plugin-transform-object-rest-spread "^6.22.0"
+
+babel-register@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071"
+ dependencies:
+ babel-core "^6.26.0"
+ babel-runtime "^6.26.0"
+ core-js "^2.5.0"
+ home-or-tmp "^2.0.0"
+ lodash "^4.17.4"
+ mkdirp "^0.5.1"
+ source-map-support "^0.4.15"
+
+babel-runtime@^6.18.0, babel-runtime@^6.2.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
+ dependencies:
+ core-js "^2.4.0"
+ regenerator-runtime "^0.11.0"
+
+babel-template@^6.24.1, babel-template@^6.26.0, babel-template@^6.3.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02"
+ dependencies:
+ babel-runtime "^6.26.0"
+ babel-traverse "^6.26.0"
+ babel-types "^6.26.0"
+ babylon "^6.18.0"
+ lodash "^4.17.4"
+
+babel-traverse@^6.24.1, babel-traverse@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee"
+ dependencies:
+ babel-code-frame "^6.26.0"
+ babel-messages "^6.23.0"
+ babel-runtime "^6.26.0"
+ babel-types "^6.26.0"
+ babylon "^6.18.0"
+ debug "^2.6.8"
+ globals "^9.18.0"
+ invariant "^2.2.2"
+ lodash "^4.17.4"
+
+babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497"
+ dependencies:
+ babel-runtime "^6.26.0"
+ esutils "^2.0.2"
+ lodash "^4.17.4"
+ to-fast-properties "^1.0.3"
+
+babylon@7.0.0-beta.40, babylon@^7.0.0-beta.40:
+ version "7.0.0-beta.40"
+ resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.40.tgz#91fc8cd56d5eb98b28e6fde41045f2957779940a"
+
+babylon@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
+
+balanced-match@^0.4.2:
+ version "0.4.2"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838"
+
+balanced-match@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
+
+base64-js@^1.0.2:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.3.tgz#fb13668233d9614cf5fb4bce95a9ba4096cdf801"
+
+base@^0.11.1:
+ version "0.11.2"
+ resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f"
+ dependencies:
+ cache-base "^1.0.1"
+ class-utils "^0.3.5"
+ component-emitter "^1.2.1"
+ define-property "^1.0.0"
+ isobject "^3.0.1"
+ mixin-deep "^1.2.0"
+ pascalcase "^0.1.1"
+
+batch@0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
+
+bcrypt-pbkdf@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d"
+ dependencies:
+ tweetnacl "^0.14.3"
+
+big.js@^3.1.3:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e"
+
+binary-extensions@^1.0.0:
+ version "1.11.0"
+ resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.11.0.tgz#46aa1751fb6a2f93ee5e689bb1087d4b14c6c205"
+
+"binary@>= 0.3.0 < 1":
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79"
+ dependencies:
+ buffers "~0.1.1"
+ chainsaw "~0.1.0"
+
+block-stream@*:
+ version "0.0.9"
+ resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a"
+ dependencies:
+ inherits "~2.0.0"
+
+bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0:
+ version "4.11.8"
+ resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f"
+
+body-parser@1.18.2:
+ version "1.18.2"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.2.tgz#87678a19d84b47d859b83199bd59bce222b10454"
+ dependencies:
+ bytes "3.0.0"
+ content-type "~1.0.4"
+ debug "2.6.9"
+ depd "~1.1.1"
+ http-errors "~1.6.2"
+ iconv-lite "0.4.19"
+ on-finished "~2.3.0"
+ qs "6.5.1"
+ raw-body "2.3.2"
+ type-is "~1.6.15"
+
+bonjour@^3.5.0:
+ version "3.5.0"
+ resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5"
+ dependencies:
+ array-flatten "^2.1.0"
+ deep-equal "^1.0.1"
+ dns-equal "^1.0.0"
+ dns-txt "^2.0.2"
+ multicast-dns "^6.0.1"
+ multicast-dns-service-types "^1.1.0"
+
+boom@2.x.x:
+ version "2.10.1"
+ resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f"
+ dependencies:
+ hoek "2.x.x"
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+braces@^1.8.2:
+ version "1.8.5"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7"
+ dependencies:
+ expand-range "^1.8.1"
+ preserve "^0.2.0"
+ repeat-element "^1.1.2"
+
+braces@^2.3.0, braces@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.1.tgz#7086c913b4e5a08dbe37ac0ee6a2500c4ba691bb"
+ dependencies:
+ arr-flatten "^1.1.0"
+ array-unique "^0.3.2"
+ define-property "^1.0.0"
+ extend-shallow "^2.0.1"
+ fill-range "^4.0.0"
+ isobject "^3.0.1"
+ kind-of "^6.0.2"
+ repeat-element "^1.1.2"
+ snapdragon "^0.8.1"
+ snapdragon-node "^2.0.1"
+ split-string "^3.0.2"
+ to-regex "^3.0.1"
+
+brcast@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/brcast/-/brcast-3.0.1.tgz#6256a8349b20de9eed44257a9b24d71493cd48dd"
+
+brorand@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
+
+browserify-aes@^1.0.0, browserify-aes@^1.0.4:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.1.1.tgz#38b7ab55edb806ff2dcda1a7f1620773a477c49f"
+ dependencies:
+ buffer-xor "^1.0.3"
+ cipher-base "^1.0.0"
+ create-hash "^1.1.0"
+ evp_bytestokey "^1.0.3"
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+browserify-cipher@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.0.tgz#9988244874bf5ed4e28da95666dcd66ac8fc363a"
+ dependencies:
+ browserify-aes "^1.0.4"
+ browserify-des "^1.0.0"
+ evp_bytestokey "^1.0.0"
+
+browserify-des@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.0.tgz#daa277717470922ed2fe18594118a175439721dd"
+ dependencies:
+ cipher-base "^1.0.1"
+ des.js "^1.0.0"
+ inherits "^2.0.1"
+
+browserify-rsa@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524"
+ dependencies:
+ bn.js "^4.1.0"
+ randombytes "^2.0.1"
+
+browserify-sign@^4.0.0:
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298"
+ dependencies:
+ bn.js "^4.1.1"
+ browserify-rsa "^4.0.0"
+ create-hash "^1.1.0"
+ create-hmac "^1.1.2"
+ elliptic "^6.0.0"
+ inherits "^2.0.1"
+ parse-asn1 "^5.0.0"
+
+browserify-zlib@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f"
+ dependencies:
+ pako "~1.0.5"
+
+browserslist@^1.3.6, browserslist@^1.5.2, browserslist@^1.7.6:
+ version "1.7.7"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-1.7.7.tgz#0bd76704258be829b2398bb50e4b62d1a166b0b9"
+ dependencies:
+ caniuse-db "^1.0.30000639"
+ electron-to-chromium "^1.2.7"
+
+browserslist@^2.1.2:
+ version "2.11.3"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.11.3.tgz#fe36167aed1bbcde4827ebfe71347a2cc70b99b2"
+ dependencies:
+ caniuse-lite "^1.0.30000792"
+ electron-to-chromium "^1.3.30"
+
+buffer-indexof@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c"
+
+buffer-xor@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
+
+buffer@^4.3.0:
+ version "4.9.1"
+ resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298"
+ dependencies:
+ base64-js "^1.0.2"
+ ieee754 "^1.1.4"
+ isarray "^1.0.0"
+
+buffers@~0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb"
+
+builtin-modules@^1.0.0, builtin-modules@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f"
+
+builtin-status-codes@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
+
+bytes@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
+
+cache-base@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2"
+ dependencies:
+ collection-visit "^1.0.0"
+ component-emitter "^1.2.1"
+ get-value "^2.0.6"
+ has-value "^1.0.0"
+ isobject "^3.0.1"
+ set-value "^2.0.0"
+ to-object-path "^0.3.0"
+ union-value "^1.0.0"
+ unset-value "^1.0.0"
+
+caller-path@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f"
+ dependencies:
+ callsites "^0.2.0"
+
+callsites@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca"
+
+camelcase-keys@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7"
+ dependencies:
+ camelcase "^2.0.0"
+ map-obj "^1.0.0"
+
+camelcase@^1.0.2:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39"
+
+camelcase@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f"
+
+camelcase@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a"
+
+camelcase@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
+
+caniuse-api@^1.5.2:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-1.6.1.tgz#b534e7c734c4f81ec5fbe8aca2ad24354b962c6c"
+ dependencies:
+ browserslist "^1.3.6"
+ caniuse-db "^1.0.30000529"
+ lodash.memoize "^4.1.2"
+ lodash.uniq "^4.5.0"
+
+caniuse-db@^1.0.30000529, caniuse-db@^1.0.30000634, caniuse-db@^1.0.30000639:
+ version "1.0.30000811"
+ resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000811.tgz#19efb9238393d40078332c34485c818d641c4305"
+
+caniuse-lite@^1.0.30000792:
+ version "1.0.30000811"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000811.tgz#0b6e40f2efccc27bd3cb52f91ee7ca4673d77d10"
+
+caseless@~0.12.0:
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
+
+center-align@^0.1.1:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad"
+ dependencies:
+ align-text "^0.1.3"
+ lazy-cache "^1.0.3"
+
+chain-function@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/chain-function/-/chain-function-1.0.0.tgz#0d4ab37e7e18ead0bdc47b920764118ce58733dc"
+
+chainsaw@~0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98"
+ dependencies:
+ traverse ">=0.3.0 <0.4"
+
+chalk@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
+ dependencies:
+ ansi-styles "^2.2.1"
+ escape-string-regexp "^1.0.2"
+ has-ansi "^2.0.0"
+ strip-ansi "^3.0.0"
+ supports-color "^2.0.0"
+
+chalk@^2.0.0, chalk@^2.1.0, chalk@^2.3.1:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.2.tgz#250dc96b07491bfd601e648d66ddf5f60c7a5c65"
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+change-emitter@^0.1.2:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/change-emitter/-/change-emitter-0.1.6.tgz#e8b2fe3d7f1ab7d69a32199aff91ea6931409515"
+
+chardet@^0.4.0:
+ version "0.4.2"
+ resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2"
+
+charenc@~0.0.1:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
+
+chokidar@^2.0.0, chokidar@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.0.2.tgz#4dc65139eeb2714977735b6a35d06e97b494dfd7"
+ dependencies:
+ anymatch "^2.0.0"
+ async-each "^1.0.0"
+ braces "^2.3.0"
+ glob-parent "^3.1.0"
+ inherits "^2.0.1"
+ is-binary-path "^1.0.0"
+ is-glob "^4.0.0"
+ normalize-path "^2.1.1"
+ path-is-absolute "^1.0.0"
+ readdirp "^2.0.0"
+ upath "^1.0.0"
+ optionalDependencies:
+ fsevents "^1.0.0"
+
+cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de"
+ dependencies:
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+circular-json@^0.3.1:
+ version "0.3.3"
+ resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66"
+
+clap@^1.0.9:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/clap/-/clap-1.2.3.tgz#4f36745b32008492557f46412d66d50cb99bce51"
+ dependencies:
+ chalk "^1.1.3"
+
+class-utils@^0.3.5:
+ version "0.3.6"
+ resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463"
+ dependencies:
+ arr-union "^3.1.0"
+ define-property "^0.2.5"
+ isobject "^3.0.0"
+ static-extend "^0.1.1"
+
+classnames@2.2.5, classnames@^2.2.5:
+ version "2.2.5"
+ resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.2.5.tgz#fb3801d453467649ef3603c7d61a02bd129bde6d"
+
+cli-cursor@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5"
+ dependencies:
+ restore-cursor "^2.0.0"
+
+cli-width@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639"
+
+cliui@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1"
+ dependencies:
+ center-align "^0.1.1"
+ right-align "^0.1.1"
+ wordwrap "0.0.2"
+
+cliui@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d"
+ dependencies:
+ string-width "^1.0.1"
+ strip-ansi "^3.0.1"
+ wrap-ansi "^2.0.0"
+
+clone@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.3.tgz#298d7e2231660f40c003c2ed3140decf3f53085f"
+
+co@^4.6.0:
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
+
+coa@~1.0.1:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/coa/-/coa-1.0.4.tgz#a9ef153660d6a86a8bdec0289a5c684d217432fd"
+ dependencies:
+ q "^1.1.2"
+
+code-point-at@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
+
+collection-visit@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0"
+ dependencies:
+ map-visit "^1.0.0"
+ object-visit "^1.0.0"
+
+color-convert@^1.3.0, color-convert@^1.9.0:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed"
+ dependencies:
+ color-name "^1.1.1"
+
+color-name@^1.0.0, color-name@^1.1.1:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+
+color-string@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/color-string/-/color-string-0.3.0.tgz#27d46fb67025c5c2fa25993bfbf579e47841b991"
+ dependencies:
+ color-name "^1.0.0"
+
+color@^0.11.0:
+ version "0.11.4"
+ resolved "https://registry.yarnpkg.com/color/-/color-0.11.4.tgz#6d7b5c74fb65e841cd48792ad1ed5e07b904d764"
+ dependencies:
+ clone "^1.0.2"
+ color-convert "^1.3.0"
+ color-string "^0.3.0"
+
+colormin@^1.0.5:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/colormin/-/colormin-1.1.2.tgz#ea2f7420a72b96881a38aae59ec124a6f7298133"
+ dependencies:
+ color "^0.11.0"
+ css-color-names "0.0.4"
+ has "^1.0.1"
+
+colors@^1.1.2, colors@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63"
+
+combined-stream@^1.0.5, combined-stream@~1.0.5:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.6.tgz#723e7df6e801ac5613113a7e445a9b69cb632818"
+ dependencies:
+ delayed-stream "~1.0.0"
+
+commander@^2.11.0:
+ version "2.14.1"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-2.14.1.tgz#2235123e37af8ca3c65df45b026dbd357b01b9aa"
+
+commondir@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
+
+component-emitter@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
+
+compressible@~2.0.13:
+ version "2.0.13"
+ resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.13.tgz#0d1020ab924b2fdb4d6279875c7d6daba6baa7a9"
+ dependencies:
+ mime-db ">= 1.33.0 < 2"
+
+compression@^1.5.2:
+ version "1.7.2"
+ resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.2.tgz#aaffbcd6aaf854b44ebb280353d5ad1651f59a69"
+ dependencies:
+ accepts "~1.3.4"
+ bytes "3.0.0"
+ compressible "~2.0.13"
+ debug "2.6.9"
+ on-headers "~1.0.1"
+ safe-buffer "5.1.1"
+ vary "~1.1.2"
+
+concat-map@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+
+concat-stream@^1.6.0:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.1.tgz#261b8f518301f1d834e36342b9fea095d2620a26"
+ dependencies:
+ inherits "^2.0.3"
+ readable-stream "^2.2.2"
+ typedarray "^0.0.6"
+
+connect-history-api-fallback@^1.3.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#b06873934bc5e344fef611a196a6faae0aee015a"
+
+console-browserify@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10"
+ dependencies:
+ date-now "^0.1.4"
+
+console-control-strings@^1.0.0, console-control-strings@~1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
+
+constants-browserify@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
+
+contains-path@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a"
+
+content-disposition@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4"
+
+content-type@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
+
+convert-source-map@^1.5.0:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5"
+
+cookie-signature@1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
+
+cookie@0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
+
+copy-descriptor@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
+
+core-js@2.5.1:
+ version "2.5.1"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.1.tgz#ae6874dc66937789b80754ff5428df66819ca50b"
+
+core-js@^1.0.0:
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636"
+
+core-js@^2.4.0, core-js@^2.5.0:
+ version "2.5.3"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.3.tgz#8acc38345824f16d8365b7c9b4259168e8ed603e"
+
+core-util-is@1.0.2, core-util-is@~1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
+
+create-ecdh@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d"
+ dependencies:
+ bn.js "^4.1.0"
+ elliptic "^6.0.0"
+
+create-hash@^1.1.0, create-hash@^1.1.2:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd"
+ dependencies:
+ cipher-base "^1.0.1"
+ inherits "^2.0.1"
+ ripemd160 "^2.0.0"
+ sha.js "^2.4.0"
+
+create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.6.tgz#acb9e221a4e17bdb076e90657c42b93e3726cf06"
+ dependencies:
+ cipher-base "^1.0.3"
+ create-hash "^1.1.0"
+ inherits "^2.0.1"
+ ripemd160 "^2.0.0"
+ safe-buffer "^5.0.1"
+ sha.js "^2.4.8"
+
+cross-spawn@^5.0.1, cross-spawn@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
+ dependencies:
+ lru-cache "^4.0.1"
+ shebang-command "^1.2.0"
+ which "^1.2.9"
+
+crypt@~0.0.1:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
+
+cryptiles@2.x.x:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
+ dependencies:
+ boom "2.x.x"
+
+crypto-browserify@^3.11.0:
+ version "3.12.0"
+ resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
+ dependencies:
+ browserify-cipher "^1.0.0"
+ browserify-sign "^4.0.0"
+ create-ecdh "^4.0.0"
+ create-hash "^1.1.0"
+ create-hmac "^1.1.0"
+ diffie-hellman "^5.0.0"
+ inherits "^2.0.1"
+ pbkdf2 "^3.0.3"
+ public-encrypt "^4.0.0"
+ randombytes "^2.0.0"
+ randomfill "^1.0.3"
+
+css-color-names@0.0.4:
+ version "0.0.4"
+ resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0"
+
+css-loader@^0.28.9:
+ version "0.28.10"
+ resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-0.28.10.tgz#40282e79230f7bcb4e483efa631d670b735ebf42"
+ dependencies:
+ babel-code-frame "^6.26.0"
+ css-selector-tokenizer "^0.7.0"
+ cssnano "^3.10.0"
+ icss-utils "^2.1.0"
+ loader-utils "^1.0.2"
+ lodash.camelcase "^4.3.0"
+ object-assign "^4.1.1"
+ postcss "^5.0.6"
+ postcss-modules-extract-imports "^1.2.0"
+ postcss-modules-local-by-default "^1.2.0"
+ postcss-modules-scope "^1.1.0"
+ postcss-modules-values "^1.3.0"
+ postcss-value-parser "^3.3.0"
+ source-list-map "^2.0.0"
+
+css-selector-tokenizer@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.0.tgz#e6988474ae8c953477bf5e7efecfceccd9cf4c86"
+ dependencies:
+ cssesc "^0.1.0"
+ fastparse "^1.1.1"
+ regexpu-core "^1.0.0"
+
+css-vendor@^0.3.8:
+ version "0.3.8"
+ resolved "https://registry.yarnpkg.com/css-vendor/-/css-vendor-0.3.8.tgz#6421cfd3034ce664fe7673972fd0119fc28941fa"
+ dependencies:
+ is-in-browser "^1.0.2"
+
+cssesc@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-0.1.0.tgz#c814903e45623371a0477b40109aaafbeeaddbb4"
+
+cssnano@^3.10.0:
+ version "3.10.0"
+ resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-3.10.0.tgz#4f38f6cea2b9b17fa01490f23f1dc68ea65c1c38"
+ dependencies:
+ autoprefixer "^6.3.1"
+ decamelize "^1.1.2"
+ defined "^1.0.0"
+ has "^1.0.1"
+ object-assign "^4.0.1"
+ postcss "^5.0.14"
+ postcss-calc "^5.2.0"
+ postcss-colormin "^2.1.8"
+ postcss-convert-values "^2.3.4"
+ postcss-discard-comments "^2.0.4"
+ postcss-discard-duplicates "^2.0.1"
+ postcss-discard-empty "^2.0.1"
+ postcss-discard-overridden "^0.1.1"
+ postcss-discard-unused "^2.2.1"
+ postcss-filter-plugins "^2.0.0"
+ postcss-merge-idents "^2.1.5"
+ postcss-merge-longhand "^2.0.1"
+ postcss-merge-rules "^2.0.3"
+ postcss-minify-font-values "^1.0.2"
+ postcss-minify-gradients "^1.0.1"
+ postcss-minify-params "^1.0.4"
+ postcss-minify-selectors "^2.0.4"
+ postcss-normalize-charset "^1.1.0"
+ postcss-normalize-url "^3.0.7"
+ postcss-ordered-values "^2.1.0"
+ postcss-reduce-idents "^2.2.2"
+ postcss-reduce-initial "^1.0.0"
+ postcss-reduce-transforms "^1.0.3"
+ postcss-svgo "^2.1.1"
+ postcss-unique-selectors "^2.0.2"
+ postcss-value-parser "^3.2.3"
+ postcss-zindex "^2.0.1"
+
+csso@~2.3.1:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/csso/-/csso-2.3.2.tgz#ddd52c587033f49e94b71fc55569f252e8ff5f85"
+ dependencies:
+ clap "^1.0.9"
+ source-map "^0.5.3"
+
+csstype@^1.6.0:
+ version "1.8.2"
+ resolved "https://registry.yarnpkg.com/csstype/-/csstype-1.8.2.tgz#2c0f16da08b99f13fe7fbb242e87d1a19dbe77a7"
+
+currently-unhandled@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea"
+ dependencies:
+ array-find-index "^1.0.1"
+
+d3-array@^1.2.0:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.1.tgz#d1ca33de2f6ac31efadb8e050a021d7e2396d5dc"
+
+d3-collection@1:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/d3-collection/-/d3-collection-1.0.4.tgz#342dfd12837c90974f33f1cc0a785aea570dcdc2"
+
+d3-color@1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-1.0.3.tgz#bc7643fca8e53a8347e2fbdaffa236796b58509b"
+
+d3-format@1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-1.2.2.tgz#1a39c479c8a57fe5051b2e67a3bee27061a74e7a"
+
+d3-interpolate@1, d3-interpolate@^1.1.5:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-1.1.6.tgz#2cf395ae2381804df08aa1bf766b7f97b5f68fb6"
+ dependencies:
+ d3-color "1"
+
+d3-path@1:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.5.tgz#241eb1849bd9e9e8021c0d0a799f8a0e8e441764"
+
+d3-scale@1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-1.0.6.tgz#bce19da80d3a0cf422c9543ae3322086220b34ed"
+ dependencies:
+ d3-array "^1.2.0"
+ d3-collection "1"
+ d3-color "1"
+ d3-format "1"
+ d3-interpolate "1"
+ d3-time "1"
+ d3-time-format "2"
+
+d3-shape@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.2.0.tgz#45d01538f064bafd05ea3d6d2cb748fd8c41f777"
+ dependencies:
+ d3-path "1"
+
+d3-time-format@2:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-2.1.1.tgz#85b7cdfbc9ffca187f14d3c456ffda268081bb31"
+ dependencies:
+ d3-time "1"
+
+d3-time@1:
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.0.8.tgz#dbd2d6007bf416fe67a76d17947b784bffea1e84"
+
+d@1:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f"
+ dependencies:
+ es5-ext "^0.10.9"
+
+damerau-levenshtein@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.4.tgz#03191c432cb6eea168bb77f3a55ffdccb8978514"
+
+dashdash@^1.12.0:
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
+ dependencies:
+ assert-plus "^1.0.0"
+
+date-now@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b"
+
+debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.6, debug@^2.6.8, debug@^2.6.9:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ dependencies:
+ ms "2.0.0"
+
+debug@^3.0.1, debug@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
+ dependencies:
+ ms "2.0.0"
+
+decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
+
+decode-uri-component@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
+
+decompress-response@^3.2.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3"
+ dependencies:
+ mimic-response "^1.0.0"
+
+deep-equal@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5"
+
+deep-extend@~0.4.0:
+ version "0.4.2"
+ resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f"
+
+deep-is@~0.1.3:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
+
+deepmerge@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-2.0.1.tgz#25c1c24f110fb914f80001b925264dd77f3f4312"
+
+define-properties@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94"
+ dependencies:
+ foreach "^2.0.5"
+ object-keys "^1.0.8"
+
+define-property@^0.2.5:
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116"
+ dependencies:
+ is-descriptor "^0.1.0"
+
+define-property@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6"
+ dependencies:
+ is-descriptor "^1.0.0"
+
+define-property@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d"
+ dependencies:
+ is-descriptor "^1.0.2"
+ isobject "^3.0.1"
+
+defined@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
+
+del@^2.0.2:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8"
+ dependencies:
+ globby "^5.0.0"
+ is-path-cwd "^1.0.0"
+ is-path-in-cwd "^1.0.0"
+ object-assign "^4.0.1"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+ rimraf "^2.2.8"
+
+del@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/del/-/del-3.0.0.tgz#53ecf699ffcbcb39637691ab13baf160819766e5"
+ dependencies:
+ globby "^6.1.0"
+ is-path-cwd "^1.0.0"
+ is-path-in-cwd "^1.0.0"
+ p-map "^1.1.1"
+ pify "^3.0.0"
+ rimraf "^2.2.8"
+
+delayed-stream@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+
+delegates@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
+
+depd@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359"
+
+depd@~1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
+
+des.js@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc"
+ dependencies:
+ inherits "^2.0.1"
+ minimalistic-assert "^1.0.0"
+
+destroy@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
+
+detect-indent@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208"
+ dependencies:
+ repeating "^2.0.0"
+
+detect-libc@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
+
+detect-node@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.3.tgz#a2033c09cc8e158d37748fbde7507832bd6ce127"
+
+diffie-hellman@^5.0.0:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.2.tgz#b5835739270cfe26acf632099fded2a07f209e5e"
+ dependencies:
+ bn.js "^4.1.0"
+ miller-rabin "^4.0.0"
+ randombytes "^2.0.0"
+
+dns-equal@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d"
+
+dns-packet@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.1.tgz#12aa426981075be500b910eedcd0b47dd7deda5a"
+ dependencies:
+ ip "^1.1.0"
+ safe-buffer "^5.0.1"
+
+dns-txt@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6"
+ dependencies:
+ buffer-indexof "^1.0.0"
+
+doctrine@1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa"
+ dependencies:
+ esutils "^2.0.2"
+ isarray "^1.0.0"
+
+doctrine@^2.0.2, doctrine@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
+ dependencies:
+ esutils "^2.0.2"
+
+dom-helpers@^3.2.0, dom-helpers@^3.2.1:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-3.3.1.tgz#fc1a4e15ffdf60ddde03a480a9c0fece821dd4a6"
+
+dom-walk@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.1.tgz#672226dc74c8f799ad35307df936aba11acd6018"
+
+domain-browser@^1.1.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda"
+
+duplexer3@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
+
+ecc-jsbn@~0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505"
+ dependencies:
+ jsbn "~0.1.0"
+
+ee-first@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+
+electron-to-chromium@^1.2.7, electron-to-chromium@^1.3.30:
+ version "1.3.34"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.34.tgz#d93498f40391bb0c16a603d8241b9951404157ed"
+
+elliptic@^6.0.0:
+ version "6.4.0"
+ resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.4.0.tgz#cac9af8762c85836187003c8dfe193e5e2eae5df"
+ dependencies:
+ bn.js "^4.4.0"
+ brorand "^1.0.1"
+ hash.js "^1.0.0"
+ hmac-drbg "^1.0.0"
+ inherits "^2.0.1"
+ minimalistic-assert "^1.0.0"
+ minimalistic-crypto-utils "^1.0.0"
+
+emoji-regex@^6.1.0:
+ version "6.5.1"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-6.5.1.tgz#9baea929b155565c11ea41c6626eaa65cef992c2"
+
+emojis-list@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
+
+encodeurl@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
+
+encoding@^0.1.11:
+ version "0.1.12"
+ resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.12.tgz#538b66f3ee62cd1ab51ec323829d1f9480c74beb"
+ dependencies:
+ iconv-lite "~0.4.13"
+
+enhanced-resolve@^3.4.0:
+ version "3.4.1"
+ resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e"
+ dependencies:
+ graceful-fs "^4.1.2"
+ memory-fs "^0.4.0"
+ object-assign "^4.0.1"
+ tapable "^0.2.7"
+
+errno@^0.1.3:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618"
+ dependencies:
+ prr "~1.0.1"
+
+error-ex@^1.2.0:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc"
+ dependencies:
+ is-arrayish "^0.2.1"
+
+es-abstract@^1.7.0:
+ version "1.10.0"
+ resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.10.0.tgz#1ecb36c197842a00d8ee4c2dfd8646bb97d60864"
+ dependencies:
+ es-to-primitive "^1.1.1"
+ function-bind "^1.1.1"
+ has "^1.0.1"
+ is-callable "^1.1.3"
+ is-regex "^1.0.4"
+
+es-to-primitive@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d"
+ dependencies:
+ is-callable "^1.1.1"
+ is-date-object "^1.0.1"
+ is-symbol "^1.0.1"
+
+es5-ext@^0.10.14, es5-ext@^0.10.35, es5-ext@^0.10.9, es5-ext@~0.10.14:
+ version "0.10.39"
+ resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.39.tgz#fca21b67559277ca4ac1a1ed7048b107b6f76d87"
+ dependencies:
+ es6-iterator "~2.0.3"
+ es6-symbol "~3.1.1"
+
+es6-iterator@^2.0.1, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
+ dependencies:
+ d "1"
+ es5-ext "^0.10.35"
+ es6-symbol "^3.1.1"
+
+es6-map@^0.1.3:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+ es6-iterator "~2.0.1"
+ es6-set "~0.1.5"
+ es6-symbol "~3.1.1"
+ event-emitter "~0.3.5"
+
+es6-set@~0.1.5:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+ es6-iterator "~2.0.1"
+ es6-symbol "3.1.1"
+ event-emitter "~0.3.5"
+
+es6-symbol@3.1.1, es6-symbol@^3.1.1, es6-symbol@~3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+
+es6-weak-map@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f"
+ dependencies:
+ d "1"
+ es5-ext "^0.10.14"
+ es6-iterator "^2.0.1"
+ es6-symbol "^3.1.1"
+
+escape-html@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
+
+escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+
+escope@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3"
+ dependencies:
+ es6-map "^0.1.3"
+ es6-weak-map "^2.0.1"
+ esrecurse "^4.1.0"
+ estraverse "^4.1.1"
+
+eslint-config-airbnb-base@^12.1.0:
+ version "12.1.0"
+ resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-12.1.0.tgz#386441e54a12ccd957b0a92564a4bafebd747944"
+ dependencies:
+ eslint-restricted-globals "^0.1.1"
+
+eslint-config-airbnb@^16.1.0:
+ version "16.1.0"
+ resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-16.1.0.tgz#2546bfb02cc9fe92284bf1723ccf2e87bc45ca46"
+ dependencies:
+ eslint-config-airbnb-base "^12.1.0"
+
+eslint-import-resolver-node@^0.3.1:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz#58f15fb839b8d0576ca980413476aab2472db66a"
+ dependencies:
+ debug "^2.6.9"
+ resolve "^1.5.0"
+
+eslint-loader@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/eslint-loader/-/eslint-loader-2.0.0.tgz#d136619b5c684e36531ffc28c60a56e404608f5d"
+ dependencies:
+ loader-fs-cache "^1.0.0"
+ loader-utils "^1.0.2"
+ object-assign "^4.0.1"
+ object-hash "^1.1.4"
+ rimraf "^2.6.1"
+
+eslint-module-utils@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.1.1.tgz#abaec824177613b8a95b299639e1b6facf473449"
+ dependencies:
+ debug "^2.6.8"
+ pkg-dir "^1.0.0"
+
+eslint-plugin-flowtype@^2.41.0:
+ version "2.46.1"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-2.46.1.tgz#c4f81d580cd89c82bc3a85a1ccf4ae3a915143a4"
+ dependencies:
+ lodash "^4.15.0"
+
+eslint-plugin-import@^2.8.0:
+ version "2.9.0"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.9.0.tgz#26002efbfca5989b7288ac047508bd24f217b169"
+ dependencies:
+ builtin-modules "^1.1.1"
+ contains-path "^0.1.0"
+ debug "^2.6.8"
+ doctrine "1.5.0"
+ eslint-import-resolver-node "^0.3.1"
+ eslint-module-utils "^2.1.1"
+ has "^1.0.1"
+ lodash "^4.17.4"
+ minimatch "^3.0.3"
+ read-pkg-up "^2.0.0"
+
+eslint-plugin-jsx-a11y@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.0.3.tgz#54583d1ae442483162e040e13cc31865465100e5"
+ dependencies:
+ aria-query "^0.7.0"
+ array-includes "^3.0.3"
+ ast-types-flow "0.0.7"
+ axobject-query "^0.1.0"
+ damerau-levenshtein "^1.0.0"
+ emoji-regex "^6.1.0"
+ jsx-ast-utils "^2.0.0"
+
+eslint-plugin-react@^7.5.1:
+ version "7.7.0"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.7.0.tgz#f606c719dbd8a1a2b3d25c16299813878cca0160"
+ dependencies:
+ doctrine "^2.0.2"
+ has "^1.0.1"
+ jsx-ast-utils "^2.0.1"
+ prop-types "^15.6.0"
+
+eslint-restricted-globals@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/eslint-restricted-globals/-/eslint-restricted-globals-0.1.1.tgz#35f0d5cbc64c2e3ed62e93b4b1a7af05ba7ed4d7"
+
+eslint-scope@^3.7.1, eslint-scope@~3.7.1:
+ version "3.7.1"
+ resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8"
+ dependencies:
+ esrecurse "^4.1.0"
+ estraverse "^4.1.1"
+
+eslint-visitor-keys@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d"
+
+eslint@^4.16.0:
+ version "4.18.2"
+ resolved "https://registry.yarnpkg.com/eslint/-/eslint-4.18.2.tgz#0f81267ad1012e7d2051e186a9004cc2267b8d45"
+ dependencies:
+ ajv "^5.3.0"
+ babel-code-frame "^6.22.0"
+ chalk "^2.1.0"
+ concat-stream "^1.6.0"
+ cross-spawn "^5.1.0"
+ debug "^3.1.0"
+ doctrine "^2.1.0"
+ eslint-scope "^3.7.1"
+ eslint-visitor-keys "^1.0.0"
+ espree "^3.5.2"
+ esquery "^1.0.0"
+ esutils "^2.0.2"
+ file-entry-cache "^2.0.0"
+ functional-red-black-tree "^1.0.1"
+ glob "^7.1.2"
+ globals "^11.0.1"
+ ignore "^3.3.3"
+ imurmurhash "^0.1.4"
+ inquirer "^3.0.6"
+ is-resolvable "^1.0.0"
+ js-yaml "^3.9.1"
+ json-stable-stringify-without-jsonify "^1.0.1"
+ levn "^0.3.0"
+ lodash "^4.17.4"
+ minimatch "^3.0.2"
+ mkdirp "^0.5.1"
+ natural-compare "^1.4.0"
+ optionator "^0.8.2"
+ path-is-inside "^1.0.2"
+ pluralize "^7.0.0"
+ progress "^2.0.0"
+ require-uncached "^1.0.3"
+ semver "^5.3.0"
+ strip-ansi "^4.0.0"
+ strip-json-comments "~2.0.1"
+ table "4.0.2"
+ text-table "~0.2.0"
+
+espree@^3.5.2:
+ version "3.5.3"
+ resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.3.tgz#931e0af64e7fbbed26b050a29daad1fc64799fa6"
+ dependencies:
+ acorn "^5.4.0"
+ acorn-jsx "^3.0.0"
+
+esprima@^2.6.0:
+ version "2.7.3"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581"
+
+esprima@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804"
+
+esquery@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.0.tgz#cfba8b57d7fba93f17298a8a006a04cda13d80fa"
+ dependencies:
+ estraverse "^4.0.0"
+
+esrecurse@^4.1.0:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf"
+ dependencies:
+ estraverse "^4.1.0"
+
+estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
+
+esutils@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
+
+etag@~1.8.1:
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
+
+event-emitter@~0.3.5:
+ version "0.3.5"
+ resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+
+eventemitter3@1.x.x:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-1.2.0.tgz#1c86991d816ad1e504750e73874224ecf3bec508"
+
+events@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
+
+eventsource@0.1.6:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-0.1.6.tgz#0acede849ed7dd1ccc32c811bb11b944d4f29232"
+ dependencies:
+ original ">=0.0.5"
+
+evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02"
+ dependencies:
+ md5.js "^1.3.4"
+ safe-buffer "^5.1.1"
+
+execa@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777"
+ dependencies:
+ cross-spawn "^5.0.1"
+ get-stream "^3.0.0"
+ is-stream "^1.1.0"
+ npm-run-path "^2.0.0"
+ p-finally "^1.0.0"
+ signal-exit "^3.0.0"
+ strip-eof "^1.0.0"
+
+expand-brackets@^0.1.4:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b"
+ dependencies:
+ is-posix-bracket "^0.1.0"
+
+expand-brackets@^2.1.4:
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622"
+ dependencies:
+ debug "^2.3.3"
+ define-property "^0.2.5"
+ extend-shallow "^2.0.1"
+ posix-character-classes "^0.1.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+expand-range@^1.8.1:
+ version "1.8.2"
+ resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337"
+ dependencies:
+ fill-range "^2.1.0"
+
+express@^4.16.2:
+ version "4.16.2"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.16.2.tgz#e35c6dfe2d64b7dca0a5cd4f21781be3299e076c"
+ dependencies:
+ accepts "~1.3.4"
+ array-flatten "1.1.1"
+ body-parser "1.18.2"
+ content-disposition "0.5.2"
+ content-type "~1.0.4"
+ cookie "0.3.1"
+ cookie-signature "1.0.6"
+ debug "2.6.9"
+ depd "~1.1.1"
+ encodeurl "~1.0.1"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ finalhandler "1.1.0"
+ fresh "0.5.2"
+ merge-descriptors "1.0.1"
+ methods "~1.1.2"
+ on-finished "~2.3.0"
+ parseurl "~1.3.2"
+ path-to-regexp "0.1.7"
+ proxy-addr "~2.0.2"
+ qs "6.5.1"
+ range-parser "~1.2.0"
+ safe-buffer "5.1.1"
+ send "0.16.1"
+ serve-static "1.13.1"
+ setprototypeof "1.1.0"
+ statuses "~1.3.1"
+ type-is "~1.6.15"
+ utils-merge "1.0.1"
+ vary "~1.1.2"
+
+extend-shallow@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
+ dependencies:
+ is-extendable "^0.1.0"
+
+extend-shallow@^3.0.0, extend-shallow@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
+ dependencies:
+ assign-symbols "^1.0.0"
+ is-extendable "^1.0.1"
+
+extend@~3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444"
+
+external-editor@^2.0.4:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.1.0.tgz#3d026a21b7f95b5726387d4200ac160d372c3b48"
+ dependencies:
+ chardet "^0.4.0"
+ iconv-lite "^0.4.17"
+ tmp "^0.0.33"
+
+extglob@^0.3.1:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1"
+ dependencies:
+ is-extglob "^1.0.0"
+
+extglob@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543"
+ dependencies:
+ array-unique "^0.3.2"
+ define-property "^1.0.0"
+ expand-brackets "^2.1.4"
+ extend-shallow "^2.0.1"
+ fragment-cache "^0.2.1"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+extsprintf@1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
+
+extsprintf@^1.2.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f"
+
+fast-deep-equal@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614"
+
+fast-json-stable-stringify@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2"
+
+fast-levenshtein@~2.0.4:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
+
+fastparse@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.1.tgz#d1e2643b38a94d7583b479060e6c4affc94071f8"
+
+faye-websocket@^0.10.0:
+ version "0.10.0"
+ resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4"
+ dependencies:
+ websocket-driver ">=0.5.1"
+
+faye-websocket@~0.11.0:
+ version "0.11.1"
+ resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.1.tgz#f0efe18c4f56e4f40afc7e06c719fd5ee6188f38"
+ dependencies:
+ websocket-driver ">=0.5.1"
+
+fbjs@^0.8.1, fbjs@^0.8.16:
+ version "0.8.16"
+ resolved "https://registry.yarnpkg.com/fbjs/-/fbjs-0.8.16.tgz#5e67432f550dc41b572bf55847b8aca64e5337db"
+ dependencies:
+ core-js "^1.0.0"
+ isomorphic-fetch "^2.1.1"
+ loose-envify "^1.0.0"
+ object-assign "^4.1.0"
+ promise "^7.1.1"
+ setimmediate "^1.0.5"
+ ua-parser-js "^0.7.9"
+
+figures@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
+ dependencies:
+ escape-string-regexp "^1.0.5"
+
+file-entry-cache@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361"
+ dependencies:
+ flat-cache "^1.2.1"
+ object-assign "^4.0.1"
+
+file-loader@^1.1.6:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-1.1.11.tgz#6fe886449b0f2a936e43cabaac0cdbfb369506f8"
+ dependencies:
+ loader-utils "^1.0.2"
+ schema-utils "^0.4.5"
+
+filename-regex@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26"
+
+fill-range@^2.1.0:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723"
+ dependencies:
+ is-number "^2.1.0"
+ isobject "^2.0.0"
+ randomatic "^1.1.3"
+ repeat-element "^1.1.2"
+ repeat-string "^1.5.2"
+
+fill-range@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
+ dependencies:
+ extend-shallow "^2.0.1"
+ is-number "^3.0.0"
+ repeat-string "^1.6.1"
+ to-regex-range "^2.1.0"
+
+finalhandler@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.0.tgz#ce0b6855b45853e791b2fcc680046d88253dd7f5"
+ dependencies:
+ debug "2.6.9"
+ encodeurl "~1.0.1"
+ escape-html "~1.0.3"
+ on-finished "~2.3.0"
+ parseurl "~1.3.2"
+ statuses "~1.3.1"
+ unpipe "~1.0.0"
+
+find-cache-dir@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-0.1.1.tgz#c8defae57c8a52a8a784f9e31c57c742e993a0b9"
+ dependencies:
+ commondir "^1.0.1"
+ mkdirp "^0.5.1"
+ pkg-dir "^1.0.0"
+
+find-cache-dir@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-1.0.0.tgz#9288e3e9e3cc3748717d39eade17cf71fc30ee6f"
+ dependencies:
+ commondir "^1.0.1"
+ make-dir "^1.0.0"
+ pkg-dir "^2.0.0"
+
+find-up@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f"
+ dependencies:
+ path-exists "^2.0.0"
+ pinkie-promise "^2.0.0"
+
+find-up@^2.0.0, find-up@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
+ dependencies:
+ locate-path "^2.0.0"
+
+flat-cache@^1.2.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481"
+ dependencies:
+ circular-json "^0.3.1"
+ del "^2.0.2"
+ graceful-fs "^4.1.2"
+ write "^0.2.1"
+
+flatten@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.2.tgz#dae46a9d78fbe25292258cc1e780a41d95c03782"
+
+flow-bin-loader@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/flow-bin-loader/-/flow-bin-loader-1.0.3.tgz#1b95260437bea24786a5abc022ef3efa02df77c5"
+
+flow-bin@^0.63.1:
+ version "0.63.1"
+ resolved "https://registry.yarnpkg.com/flow-bin/-/flow-bin-0.63.1.tgz#ab00067c197169a5fb5b4996c8f6927b06694828"
+
+flow-typed@^2.2.3:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/flow-typed/-/flow-typed-2.3.0.tgz#0f8604faab60691b885024e16ec0e3256e3b680e"
+ dependencies:
+ babel-polyfill "^6.26.0"
+ colors "^1.1.2"
+ fs-extra "^5.0.0"
+ github "0.2.4"
+ glob "^7.1.2"
+ got "^7.1.0"
+ md5 "^2.1.0"
+ mkdirp "^0.5.1"
+ rimraf "^2.6.2"
+ semver "^5.5.0"
+ table "^4.0.2"
+ through "^2.3.8"
+ unzip "^0.1.11"
+ which "^1.3.0"
+ yargs "^4.2.0"
+
+font-awesome@^4.3.0:
+ version "4.7.0"
+ resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133"
+
+for-in@^1.0.1, for-in@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
+
+for-own@^0.1.4:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce"
+ dependencies:
+ for-in "^1.0.1"
+
+foreach@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99"
+
+forever-agent@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
+
+form-data@~2.1.1:
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1"
+ dependencies:
+ asynckit "^0.4.0"
+ combined-stream "^1.0.5"
+ mime-types "^2.1.12"
+
+forwarded@~0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84"
+
+fragment-cache@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19"
+ dependencies:
+ map-cache "^0.2.2"
+
+fresh@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
+
+fs-extra@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-5.0.0.tgz#414d0110cdd06705734d055652c5411260c31abd"
+ dependencies:
+ graceful-fs "^4.1.2"
+ jsonfile "^4.0.0"
+ universalify "^0.1.0"
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+
+fsevents@^1.0.0:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.3.tgz#11f82318f5fe7bb2cd22965a108e9306208216d8"
+ dependencies:
+ nan "^2.3.0"
+ node-pre-gyp "^0.6.39"
+
+fstream-ignore@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105"
+ dependencies:
+ fstream "^1.0.0"
+ inherits "2"
+ minimatch "^3.0.0"
+
+"fstream@>= 0.1.30 < 1":
+ version "0.1.31"
+ resolved "https://registry.yarnpkg.com/fstream/-/fstream-0.1.31.tgz#7337f058fbbbbefa8c9f561a28cab0849202c988"
+ dependencies:
+ graceful-fs "~3.0.2"
+ inherits "~2.0.0"
+ mkdirp "0.5"
+ rimraf "2"
+
+fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171"
+ dependencies:
+ graceful-fs "^4.1.2"
+ inherits "~2.0.0"
+ mkdirp ">=0.5 0"
+ rimraf "2"
+
+function-bind@^1.0.2, function-bind@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
+
+functional-red-black-tree@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
+
+gauge@~2.7.3:
+ version "2.7.4"
+ resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
+ dependencies:
+ aproba "^1.0.3"
+ console-control-strings "^1.0.0"
+ has-unicode "^2.0.0"
+ object-assign "^4.1.0"
+ signal-exit "^3.0.0"
+ string-width "^1.0.1"
+ strip-ansi "^3.0.1"
+ wide-align "^1.1.0"
+
+get-caller-file@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5"
+
+get-stdin@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe"
+
+get-stream@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
+
+get-value@^2.0.3, get-value@^2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28"
+
+getpass@^0.1.1:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
+ dependencies:
+ assert-plus "^1.0.0"
+
+github@0.2.4:
+ version "0.2.4"
+ resolved "https://registry.yarnpkg.com/github/-/github-0.2.4.tgz#24fa7f0e13fa11b946af91134c51982a91ce538b"
+ dependencies:
+ mime "^1.2.11"
+
+glob-base@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4"
+ dependencies:
+ glob-parent "^2.0.0"
+ is-glob "^2.0.0"
+
+glob-parent@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28"
+ dependencies:
+ is-glob "^2.0.0"
+
+glob-parent@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae"
+ dependencies:
+ is-glob "^3.1.0"
+ path-dirname "^1.0.0"
+
+glob@^7.0.3, glob@^7.0.5, glob@^7.1.2:
+ version "7.1.2"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.0.4"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+global@~4.3.0:
+ version "4.3.2"
+ resolved "https://registry.yarnpkg.com/global/-/global-4.3.2.tgz#e76989268a6c74c38908b1305b10fc0e394e9d0f"
+ dependencies:
+ min-document "^2.19.0"
+ process "~0.5.1"
+
+globals@^11.0.1, globals@^11.1.0:
+ version "11.3.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-11.3.0.tgz#e04fdb7b9796d8adac9c8f64c14837b2313378b0"
+
+globals@^9.18.0:
+ version "9.18.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a"
+
+globby@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d"
+ dependencies:
+ array-union "^1.0.1"
+ arrify "^1.0.0"
+ glob "^7.0.3"
+ object-assign "^4.0.1"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+
+globby@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
+ dependencies:
+ array-union "^1.0.1"
+ glob "^7.0.3"
+ object-assign "^4.0.1"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+
+got@^7.1.0:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a"
+ dependencies:
+ decompress-response "^3.2.0"
+ duplexer3 "^0.1.4"
+ get-stream "^3.0.0"
+ is-plain-obj "^1.1.0"
+ is-retry-allowed "^1.0.0"
+ is-stream "^1.0.0"
+ isurl "^1.0.0-alpha5"
+ lowercase-keys "^1.0.0"
+ p-cancelable "^0.3.0"
+ p-timeout "^1.1.1"
+ safe-buffer "^5.0.1"
+ timed-out "^4.0.0"
+ url-parse-lax "^1.0.0"
+ url-to-options "^1.0.1"
+
+graceful-fs@^4.1.2, graceful-fs@^4.1.6:
+ version "4.1.11"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658"
+
+graceful-fs@~3.0.2:
+ version "3.0.11"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-3.0.11.tgz#7613c778a1afea62f25c630a086d7f3acbbdd818"
+ dependencies:
+ natives "^1.1.0"
+
+handle-thing@^1.2.5:
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-1.2.5.tgz#fd7aad726bf1a5fd16dfc29b2f7a6601d27139c4"
+
+har-schema@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e"
+
+har-validator@~4.2.1:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a"
+ dependencies:
+ ajv "^4.9.1"
+ har-schema "^1.0.5"
+
+has-ansi@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
+ dependencies:
+ ansi-regex "^2.0.0"
+
+has-flag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa"
+
+has-flag@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51"
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+
+has-symbol-support-x@^1.4.1:
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455"
+
+has-to-string-tag-x@^1.2.0:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d"
+ dependencies:
+ has-symbol-support-x "^1.4.1"
+
+has-unicode@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
+
+has-value@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
+ dependencies:
+ get-value "^2.0.3"
+ has-values "^0.1.4"
+ isobject "^2.0.0"
+
+has-value@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177"
+ dependencies:
+ get-value "^2.0.6"
+ has-values "^1.0.0"
+ isobject "^3.0.0"
+
+has-values@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771"
+
+has-values@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f"
+ dependencies:
+ is-number "^3.0.0"
+ kind-of "^4.0.0"
+
+has@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28"
+ dependencies:
+ function-bind "^1.0.2"
+
+hash-base@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-2.0.2.tgz#66ea1d856db4e8a5470cadf6fce23ae5244ef2e1"
+ dependencies:
+ inherits "^2.0.1"
+
+hash-base@^3.0.0:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918"
+ dependencies:
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+hash.js@^1.0.0, hash.js@^1.0.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846"
+ dependencies:
+ inherits "^2.0.3"
+ minimalistic-assert "^1.0.0"
+
+hawk@3.1.3, hawk@~3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4"
+ dependencies:
+ boom "2.x.x"
+ cryptiles "2.x.x"
+ hoek "2.x.x"
+ sntp "1.x.x"
+
+hmac-drbg@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
+ dependencies:
+ hash.js "^1.0.3"
+ minimalistic-assert "^1.0.0"
+ minimalistic-crypto-utils "^1.0.1"
+
+hoek@2.x.x:
+ version "2.16.3"
+ resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed"
+
+hoist-non-react-statics@^2.3.1:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-2.5.0.tgz#d2ca2dfc19c5a91c5a6615ce8e564ef0347e2a40"
+
+home-or-tmp@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8"
+ dependencies:
+ os-homedir "^1.0.0"
+ os-tmpdir "^1.0.1"
+
+hosted-git-info@^2.1.4:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c"
+
+hpack.js@^2.1.6:
+ version "2.1.6"
+ resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2"
+ dependencies:
+ inherits "^2.0.1"
+ obuf "^1.0.0"
+ readable-stream "^2.0.1"
+ wbuf "^1.1.0"
+
+html-comment-regex@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.1.tgz#668b93776eaae55ebde8f3ad464b307a4963625e"
+
+html-entities@^1.2.0:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f"
+
+http-deceiver@^1.2.7:
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
+
+http-errors@1.6.2, http-errors@~1.6.2:
+ version "1.6.2"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736"
+ dependencies:
+ depd "1.1.1"
+ inherits "2.0.3"
+ setprototypeof "1.0.3"
+ statuses ">= 1.3.1 < 2"
+
+http-parser-js@>=0.4.0:
+ version "0.4.10"
+ resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4"
+
+http-proxy-middleware@~0.17.4:
+ version "0.17.4"
+ resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.17.4.tgz#642e8848851d66f09d4f124912846dbaeb41b833"
+ dependencies:
+ http-proxy "^1.16.2"
+ is-glob "^3.1.0"
+ lodash "^4.17.2"
+ micromatch "^2.3.11"
+
+http-proxy@^1.16.2:
+ version "1.16.2"
+ resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.16.2.tgz#06dff292952bf64dbe8471fa9df73066d4f37742"
+ dependencies:
+ eventemitter3 "1.x.x"
+ requires-port "1.x.x"
+
+http-signature@~1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf"
+ dependencies:
+ assert-plus "^0.2.0"
+ jsprim "^1.2.2"
+ sshpk "^1.7.0"
+
+https-browserify@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
+
+hyphenate-style-name@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.2.tgz#31160a36930adaf1fc04c6074f7eb41465d4ec4b"
+
+iconv-lite@0.4.19, iconv-lite@^0.4.17, iconv-lite@~0.4.13:
+ version "0.4.19"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b"
+
+icss-replace-symbols@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded"
+
+icss-utils@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-2.1.0.tgz#83f0a0ec378bf3246178b6c2ad9136f135b1c962"
+ dependencies:
+ postcss "^6.0.1"
+
+ieee754@^1.1.4:
+ version "1.1.8"
+ resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
+
+ignore@^3.3.3:
+ version "3.3.7"
+ resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021"
+
+import-local@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc"
+ dependencies:
+ pkg-dir "^2.0.0"
+ resolve-cwd "^2.0.0"
+
+imurmurhash@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+
+indent-string@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80"
+ dependencies:
+ repeating "^2.0.0"
+
+indexes-of@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607"
+
+indexof@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d"
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
+
+inherits@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1"
+
+ini@~1.3.0:
+ version "1.3.5"
+ resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
+
+inquirer@^3.0.6:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.3.0.tgz#9dd2f2ad765dcab1ff0443b491442a20ba227dc9"
+ dependencies:
+ ansi-escapes "^3.0.0"
+ chalk "^2.0.0"
+ cli-cursor "^2.1.0"
+ cli-width "^2.0.0"
+ external-editor "^2.0.4"
+ figures "^2.0.0"
+ lodash "^4.3.0"
+ mute-stream "0.0.7"
+ run-async "^2.2.0"
+ rx-lite "^4.0.8"
+ rx-lite-aggregates "^4.0.8"
+ string-width "^2.1.0"
+ strip-ansi "^4.0.0"
+ through "^2.3.6"
+
+internal-ip@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-1.2.0.tgz#ae9fbf93b984878785d50a8de1b356956058cf5c"
+ dependencies:
+ meow "^3.3.0"
+
+interpret@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614"
+
+invariant@^2.2.0, invariant@^2.2.2:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.3.tgz#1a827dfde7dcbd7c323f0ca826be8fa7c5e9d688"
+ dependencies:
+ loose-envify "^1.0.0"
+
+invert-kv@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6"
+
+ip@^1.1.0, ip@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
+
+ipaddr.js@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.6.0.tgz#e3fa357b773da619f26e95f049d055c72796f86b"
+
+is-absolute-url@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6"
+
+is-accessor-descriptor@^0.1.6:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6"
+ dependencies:
+ kind-of "^3.0.2"
+
+is-accessor-descriptor@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656"
+ dependencies:
+ kind-of "^6.0.0"
+
+is-arrayish@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
+
+is-binary-path@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898"
+ dependencies:
+ binary-extensions "^1.0.0"
+
+is-buffer@^1.1.5, is-buffer@~1.1.1:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
+
+is-builtin-module@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe"
+ dependencies:
+ builtin-modules "^1.0.0"
+
+is-callable@^1.1.1, is-callable@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2"
+
+is-data-descriptor@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
+ dependencies:
+ kind-of "^3.0.2"
+
+is-data-descriptor@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7"
+ dependencies:
+ kind-of "^6.0.0"
+
+is-date-object@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16"
+
+is-descriptor@^0.1.0:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca"
+ dependencies:
+ is-accessor-descriptor "^0.1.6"
+ is-data-descriptor "^0.1.4"
+ kind-of "^5.0.0"
+
+is-descriptor@^1.0.0, is-descriptor@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec"
+ dependencies:
+ is-accessor-descriptor "^1.0.0"
+ is-data-descriptor "^1.0.0"
+ kind-of "^6.0.2"
+
+is-dotfile@^1.0.0:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1"
+
+is-equal-shallow@^0.1.3:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534"
+ dependencies:
+ is-primitive "^2.0.0"
+
+is-extendable@^0.1.0, is-extendable@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
+
+is-extendable@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
+ dependencies:
+ is-plain-object "^2.0.4"
+
+is-extglob@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0"
+
+is-extglob@^2.1.0, is-extglob@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+
+is-finite@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa"
+ dependencies:
+ number-is-nan "^1.0.0"
+
+is-fullwidth-code-point@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
+ dependencies:
+ number-is-nan "^1.0.0"
+
+is-fullwidth-code-point@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
+
+is-function@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-function/-/is-function-1.0.1.tgz#12cfb98b65b57dd3d193a3121f5f6e2f437602b5"
+
+is-glob@^2.0.0, is-glob@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863"
+ dependencies:
+ is-extglob "^1.0.0"
+
+is-glob@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a"
+ dependencies:
+ is-extglob "^2.1.0"
+
+is-glob@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.0.tgz#9521c76845cc2610a85203ddf080a958c2ffabc0"
+ dependencies:
+ is-extglob "^2.1.1"
+
+is-in-browser@^1.0.2, is-in-browser@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/is-in-browser/-/is-in-browser-1.1.3.tgz#56ff4db683a078c6082eb95dad7dc62e1d04f835"
+
+is-number@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f"
+ dependencies:
+ kind-of "^3.0.2"
+
+is-number@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
+ dependencies:
+ kind-of "^3.0.2"
+
+is-number@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff"
+
+is-object@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.1.tgz#8952688c5ec2ffd6b03ecc85e769e02903083470"
+
+is-odd@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-2.0.0.tgz#7646624671fd7ea558ccd9a2795182f2958f1b24"
+ dependencies:
+ is-number "^4.0.0"
+
+is-path-cwd@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d"
+
+is-path-in-cwd@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc"
+ dependencies:
+ is-path-inside "^1.0.0"
+
+is-path-inside@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036"
+ dependencies:
+ path-is-inside "^1.0.1"
+
+is-plain-obj@^1.0.0, is-plain-obj@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
+
+is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
+ dependencies:
+ isobject "^3.0.1"
+
+is-posix-bracket@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4"
+
+is-primitive@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575"
+
+is-promise@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa"
+
+is-regex@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491"
+ dependencies:
+ has "^1.0.1"
+
+is-resolvable@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88"
+
+is-retry-allowed@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34"
+
+is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
+
+is-svg@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-2.1.0.tgz#cf61090da0d9efbcab8722deba6f032208dbb0e9"
+ dependencies:
+ html-comment-regex "^1.1.0"
+
+is-symbol@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572"
+
+is-typedarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
+
+is-utf8@^0.2.0:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72"
+
+is-windows@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
+
+is-wsl@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d"
+
+isarray@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+
+isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+
+isobject@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
+ dependencies:
+ isarray "1.0.0"
+
+isobject@^3.0.0, isobject@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
+
+isomorphic-fetch@^2.1.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz#611ae1acf14f5e81f729507472819fe9733558a9"
+ dependencies:
+ node-fetch "^1.0.1"
+ whatwg-fetch ">=0.10.0"
+
+isstream@~0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
+
+isurl@^1.0.0-alpha5:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67"
+ dependencies:
+ has-to-string-tag-x "^1.2.0"
+ is-object "^1.0.1"
+
+js-base64@^2.1.9:
+ version "2.4.3"
+ resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.4.3.tgz#2e545ec2b0f2957f41356510205214e98fad6582"
+
+js-tokens@^3.0.0, js-tokens@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
+
+js-yaml@^3.9.1:
+ version "3.10.0"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.10.0.tgz#2e78441646bd4682e963f22b6e92823c309c62dc"
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+js-yaml@~3.7.0:
+ version "3.7.0"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.7.0.tgz#5c967ddd837a9bfdca5f2de84253abe8a1c03b80"
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^2.6.0"
+
+jsbn@~0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
+
+jsesc@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b"
+
+jsesc@^2.5.1:
+ version "2.5.1"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.1.tgz#e421a2a8e20d6b0819df28908f782526b96dd1fe"
+
+jsesc@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
+
+json-loader@^0.5.4:
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.7.tgz#dca14a70235ff82f0ac9a3abeb60d337a365185d"
+
+json-schema-traverse@^0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340"
+
+json-schema@0.2.3:
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
+
+json-stable-stringify-without-jsonify@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
+
+json-stable-stringify@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af"
+ dependencies:
+ jsonify "~0.0.0"
+
+json-stringify-safe@~5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
+
+json3@^3.3.2:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1"
+
+json5@^0.5.0, json5@^0.5.1:
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821"
+
+jsonfile@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb"
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
+jsonify@~0.0.0:
+ version "0.0.0"
+ resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
+
+jsprim@^1.2.2:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
+ dependencies:
+ assert-plus "1.0.0"
+ extsprintf "1.3.0"
+ json-schema "0.2.3"
+ verror "1.10.0"
+
+jss-camel-case@^6.0.0, jss-camel-case@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/jss-camel-case/-/jss-camel-case-6.1.0.tgz#ccb1ff8d6c701c02a1fed6fb6fb6b7896e11ce44"
+ dependencies:
+ hyphenate-style-name "^1.0.2"
+
+jss-compose@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/jss-compose/-/jss-compose-5.0.0.tgz#ce01b2e4521d65c37ea42cf49116e5f7ab596484"
+ dependencies:
+ warning "^3.0.0"
+
+jss-default-unit@^8.0.2:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/jss-default-unit/-/jss-default-unit-8.0.2.tgz#cc1e889bae4c0b9419327b314ab1c8e2826890e6"
+
+jss-expand@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/jss-expand/-/jss-expand-5.1.0.tgz#b1ad74ec18631f34f65a2124fcfceb6400610e3d"
+
+jss-extend@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/jss-extend/-/jss-extend-6.2.0.tgz#4af09d0b72fb98ee229970f8ca852fec1ca2a8dc"
+ dependencies:
+ warning "^3.0.0"
+
+jss-global@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/jss-global/-/jss-global-3.0.0.tgz#e19e5c91ab2b96353c227e30aa2cbd938cdaafa2"
+
+jss-nested@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/jss-nested/-/jss-nested-6.0.1.tgz#ef992b79d6e8f63d939c4397b9d99b5cbbe824ca"
+ dependencies:
+ warning "^3.0.0"
+
+jss-preset-default@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/jss-preset-default/-/jss-preset-default-4.3.0.tgz#7bc91b0b282492557d36ed4e5c6d7c8cb3154bb8"
+ dependencies:
+ jss-camel-case "^6.1.0"
+ jss-compose "^5.0.0"
+ jss-default-unit "^8.0.2"
+ jss-expand "^5.1.0"
+ jss-extend "^6.2.0"
+ jss-global "^3.0.0"
+ jss-nested "^6.0.1"
+ jss-props-sort "^6.0.0"
+ jss-template "^1.0.1"
+ jss-vendor-prefixer "^7.0.0"
+
+jss-props-sort@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/jss-props-sort/-/jss-props-sort-6.0.0.tgz#9105101a3b5071fab61e2d85ea74cc22e9b16323"
+
+jss-template@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/jss-template/-/jss-template-1.0.1.tgz#09aed9d86cc547b07f53ef355d7e1777f7da430a"
+ dependencies:
+ warning "^3.0.0"
+
+jss-vendor-prefixer@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/jss-vendor-prefixer/-/jss-vendor-prefixer-7.0.0.tgz#0166729650015ef19d9f02437c73667231605c71"
+ dependencies:
+ css-vendor "^0.3.8"
+
+jss@^9.3.2, jss@^9.3.3:
+ version "9.8.0"
+ resolved "https://registry.yarnpkg.com/jss/-/jss-9.8.0.tgz#77830def563870103f8671ed31ce3a3d2f32aa2b"
+ dependencies:
+ is-in-browser "^1.1.3"
+ symbol-observable "^1.1.0"
+ warning "^3.0.0"
+
+jsx-ast-utils@^2.0.0, jsx-ast-utils@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-2.0.1.tgz#e801b1b39985e20fffc87b40e3748080e2dcac7f"
+ dependencies:
+ array-includes "^3.0.3"
+
+keycode@^2.1.9:
+ version "2.1.9"
+ resolved "https://registry.yarnpkg.com/keycode/-/keycode-2.1.9.tgz#964a23c54e4889405b4861a5c9f0480d45141dfa"
+
+killable@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.0.tgz#da8b84bd47de5395878f95d64d02f2449fe05e6b"
+
+kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
+ version "3.2.2"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
+ dependencies:
+ is-buffer "^1.1.5"
+
+kind-of@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57"
+ dependencies:
+ is-buffer "^1.1.5"
+
+kind-of@^5.0.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d"
+
+kind-of@^6.0.0, kind-of@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051"
+
+lazy-cache@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e"
+
+lazy-cache@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-2.0.2.tgz#b9190a4f913354694840859f8a8f7084d8822264"
+ dependencies:
+ set-getter "^0.1.0"
+
+lcid@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835"
+ dependencies:
+ invert-kv "^1.0.0"
+
+levn@^0.3.0, levn@~0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
+ dependencies:
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+
+load-json-file@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0"
+ dependencies:
+ graceful-fs "^4.1.2"
+ parse-json "^2.2.0"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+ strip-bom "^2.0.0"
+
+load-json-file@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8"
+ dependencies:
+ graceful-fs "^4.1.2"
+ parse-json "^2.2.0"
+ pify "^2.0.0"
+ strip-bom "^3.0.0"
+
+loader-fs-cache@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/loader-fs-cache/-/loader-fs-cache-1.0.1.tgz#56e0bf08bd9708b26a765b68509840c8dec9fdbc"
+ dependencies:
+ find-cache-dir "^0.1.1"
+ mkdirp "0.5.1"
+
+loader-runner@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.3.0.tgz#f482aea82d543e07921700d5a46ef26fdac6b8a2"
+
+loader-utils@^1.0.2, loader-utils@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.1.0.tgz#c98aef488bcceda2ffb5e2de646d6a754429f5cd"
+ dependencies:
+ big.js "^3.1.3"
+ emojis-list "^2.0.0"
+ json5 "^0.5.0"
+
+locate-path@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
+ dependencies:
+ p-locate "^2.0.0"
+ path-exists "^3.0.0"
+
+lodash.assign@^4.0.3, lodash.assign@^4.0.6:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7"
+
+lodash.camelcase@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
+
+lodash.memoize@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
+
+lodash.uniq@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
+
+lodash@^4.14.0, lodash@^4.15.0, lodash@^4.17.2, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@~4.17.4:
+ version "4.17.5"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511"
+
+loglevel@^1.4.1:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.1.tgz#e0fc95133b6ef276cdc8887cdaf24aa6f156f8fa"
+
+longest@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097"
+
+loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848"
+ dependencies:
+ js-tokens "^3.0.0"
+
+loud-rejection@^1.0.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f"
+ dependencies:
+ currently-unhandled "^0.4.1"
+ signal-exit "^3.0.0"
+
+lowercase-keys@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306"
+
+lru-cache@^4.0.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55"
+ dependencies:
+ pseudomap "^1.0.2"
+ yallist "^2.1.2"
+
+macaddress@^0.2.8:
+ version "0.2.8"
+ resolved "https://registry.yarnpkg.com/macaddress/-/macaddress-0.2.8.tgz#5904dc537c39ec6dbefeae902327135fa8511f12"
+
+make-dir@^1.0.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.2.0.tgz#6d6a49eead4aae296c53bbf3a1a008bd6c89469b"
+ dependencies:
+ pify "^3.0.0"
+
+map-cache@^0.2.2:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
+
+map-obj@^1.0.0, map-obj@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
+
+map-visit@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f"
+ dependencies:
+ object-visit "^1.0.0"
+
+"match-stream@>= 0.0.2 < 1":
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/match-stream/-/match-stream-0.0.2.tgz#99eb050093b34dffade421b9ac0b410a9cfa17cf"
+ dependencies:
+ buffers "~0.1.1"
+ readable-stream "~1.0.0"
+
+material-ui-icons@^1.0.0-beta.17:
+ version "1.0.0-beta.35"
+ resolved "https://registry.yarnpkg.com/material-ui-icons/-/material-ui-icons-1.0.0-beta.35.tgz#f795fafa385918ef7c88ee382e5e5bf4401f008c"
+ dependencies:
+ recompose "^0.26.0"
+
+material-ui@^1.0.0-beta.30:
+ version "1.0.0-beta.35"
+ resolved "https://registry.yarnpkg.com/material-ui/-/material-ui-1.0.0-beta.35.tgz#eeac6be307c0469943c7686e5c6bd4eaa6b1b563"
+ dependencies:
+ "@types/jss" "^9.3.0"
+ "@types/react-transition-group" "^2.0.6"
+ babel-runtime "^6.26.0"
+ brcast "^3.0.1"
+ classnames "^2.2.5"
+ deepmerge "^2.0.1"
+ dom-helpers "^3.2.1"
+ hoist-non-react-statics "^2.3.1"
+ jss "^9.3.3"
+ jss-camel-case "^6.0.0"
+ jss-default-unit "^8.0.2"
+ jss-global "^3.0.0"
+ jss-nested "^6.0.1"
+ jss-props-sort "^6.0.0"
+ jss-vendor-prefixer "^7.0.0"
+ keycode "^2.1.9"
+ lodash "^4.2.0"
+ normalize-scroll-left "^0.1.2"
+ prop-types "^15.6.0"
+ react-event-listener "^0.5.1"
+ react-jss "^8.1.0"
+ react-popper "^0.8.0"
+ react-scrollbar-size "^2.0.2"
+ react-transition-group "^2.2.1"
+ recompose "^0.26.0"
+ scroll "^2.0.3"
+ warning "^3.0.0"
+
+math-expression-evaluator@^1.2.14:
+ version "1.2.17"
+ resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz#de819fdbcd84dccd8fae59c6aeb79615b9d266ac"
+
+md5.js@^1.3.4:
+ version "1.3.4"
+ resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d"
+ dependencies:
+ hash-base "^3.0.0"
+ inherits "^2.0.1"
+
+md5@^2.1.0:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/md5/-/md5-2.2.1.tgz#53ab38d5fe3c8891ba465329ea23fac0540126f9"
+ dependencies:
+ charenc "~0.0.1"
+ crypt "~0.0.1"
+ is-buffer "~1.1.1"
+
+media-typer@0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
+
+mem@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76"
+ dependencies:
+ mimic-fn "^1.0.0"
+
+memory-fs@^0.4.0, memory-fs@~0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
+ dependencies:
+ errno "^0.1.3"
+ readable-stream "^2.0.1"
+
+meow@^3.3.0:
+ version "3.7.0"
+ resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb"
+ dependencies:
+ camelcase-keys "^2.0.0"
+ decamelize "^1.1.2"
+ loud-rejection "^1.0.0"
+ map-obj "^1.0.1"
+ minimist "^1.1.3"
+ normalize-package-data "^2.3.4"
+ object-assign "^4.0.1"
+ read-pkg-up "^1.0.1"
+ redent "^1.0.0"
+ trim-newlines "^1.0.0"
+
+merge-descriptors@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
+
+methods@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+
+micromatch@^2.3.11:
+ version "2.3.11"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565"
+ dependencies:
+ arr-diff "^2.0.0"
+ array-unique "^0.2.1"
+ braces "^1.8.2"
+ expand-brackets "^0.1.4"
+ extglob "^0.3.1"
+ filename-regex "^2.0.0"
+ is-extglob "^1.0.0"
+ is-glob "^2.0.1"
+ kind-of "^3.0.2"
+ normalize-path "^2.0.1"
+ object.omit "^2.0.0"
+ parse-glob "^3.0.4"
+ regex-cache "^0.4.2"
+
+micromatch@^3.1.4:
+ version "3.1.9"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.9.tgz#15dc93175ae39e52e93087847096effc73efcf89"
+ dependencies:
+ arr-diff "^4.0.0"
+ array-unique "^0.3.2"
+ braces "^2.3.1"
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ extglob "^2.0.4"
+ fragment-cache "^0.2.1"
+ kind-of "^6.0.2"
+ nanomatch "^1.2.9"
+ object.pick "^1.3.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+miller-rabin@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d"
+ dependencies:
+ bn.js "^4.0.0"
+ brorand "^1.0.1"
+
+"mime-db@>= 1.33.0 < 2", mime-db@~1.33.0:
+ version "1.33.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db"
+
+mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.18, mime-types@~2.1.7:
+ version "2.1.18"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8"
+ dependencies:
+ mime-db "~1.33.0"
+
+mime@1.4.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.4.1.tgz#121f9ebc49e3766f311a76e1fa1c8003c4b03aa6"
+
+mime@^1.2.11, mime@^1.4.1, mime@^1.5.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+
+mimic-fn@^1.0.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022"
+
+mimic-response@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.0.tgz#df3d3652a73fded6b9b0b24146e6fd052353458e"
+
+min-document@^2.19.0:
+ version "2.19.0"
+ resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685"
+ dependencies:
+ dom-walk "^0.1.0"
+
+minimalistic-assert@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz#702be2dda6b37f4836bcb3f5db56641b64a1d3d3"
+
+minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
+
+minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist@0.0.8:
+ version "0.0.8"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
+
+minimist@^1.1.3, minimist@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
+
+mixin-deep@^1.2.0:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe"
+ dependencies:
+ for-in "^1.0.2"
+ is-extendable "^1.0.1"
+
+mkdirp@0.5, mkdirp@0.5.1, mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
+ dependencies:
+ minimist "0.0.8"
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+
+multicast-dns-service-types@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901"
+
+multicast-dns@^6.0.1:
+ version "6.2.3"
+ resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229"
+ dependencies:
+ dns-packet "^1.3.1"
+ thunky "^1.0.2"
+
+mute-stream@0.0.7:
+ version "0.0.7"
+ resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab"
+
+nan@^2.3.0:
+ version "2.9.2"
+ resolved "https://registry.yarnpkg.com/nan/-/nan-2.9.2.tgz#f564d75f5f8f36a6d9456cca7a6c4fe488ab7866"
+
+nanomatch@^1.2.9:
+ version "1.2.9"
+ resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.9.tgz#879f7150cb2dab7a471259066c104eee6e0fa7c2"
+ dependencies:
+ arr-diff "^4.0.0"
+ array-unique "^0.3.2"
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ fragment-cache "^0.2.1"
+ is-odd "^2.0.0"
+ is-windows "^1.0.2"
+ kind-of "^6.0.2"
+ object.pick "^1.3.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+natives@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.1.tgz#011acce1f7cbd87f7ba6b3093d6cd9392be1c574"
+
+natural-compare@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+
+negotiator@0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
+
+neo-async@^2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.5.0.tgz#76b1c823130cca26acfbaccc8fbaf0a2fa33b18f"
+
+node-fetch@^1.0.1:
+ version "1.7.3"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef"
+ dependencies:
+ encoding "^0.1.11"
+ is-stream "^1.0.1"
+
+node-forge@0.7.1:
+ version "0.7.1"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.1.tgz#9da611ea08982f4b94206b3beb4cc9665f20c300"
+
+node-libs-browser@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.1.0.tgz#5f94263d404f6e44767d726901fff05478d600df"
+ dependencies:
+ assert "^1.1.1"
+ browserify-zlib "^0.2.0"
+ buffer "^4.3.0"
+ console-browserify "^1.1.0"
+ constants-browserify "^1.0.0"
+ crypto-browserify "^3.11.0"
+ domain-browser "^1.1.1"
+ events "^1.0.0"
+ https-browserify "^1.0.0"
+ os-browserify "^0.3.0"
+ path-browserify "0.0.0"
+ process "^0.11.10"
+ punycode "^1.2.4"
+ querystring-es3 "^0.2.0"
+ readable-stream "^2.3.3"
+ stream-browserify "^2.0.1"
+ stream-http "^2.7.2"
+ string_decoder "^1.0.0"
+ timers-browserify "^2.0.4"
+ tty-browserify "0.0.0"
+ url "^0.11.0"
+ util "^0.10.3"
+ vm-browserify "0.0.4"
+
+node-pre-gyp@^0.6.39:
+ version "0.6.39"
+ resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.39.tgz#c00e96860b23c0e1420ac7befc5044e1d78d8649"
+ dependencies:
+ detect-libc "^1.0.2"
+ hawk "3.1.3"
+ mkdirp "^0.5.1"
+ nopt "^4.0.1"
+ npmlog "^4.0.2"
+ rc "^1.1.7"
+ request "2.81.0"
+ rimraf "^2.6.1"
+ semver "^5.3.0"
+ tar "^2.2.1"
+ tar-pack "^3.4.0"
+
+nopt@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d"
+ dependencies:
+ abbrev "1"
+ osenv "^0.1.4"
+
+normalize-package-data@^2.3.2, normalize-package-data@^2.3.4:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f"
+ dependencies:
+ hosted-git-info "^2.1.4"
+ is-builtin-module "^1.0.0"
+ semver "2 || 3 || 4 || 5"
+ validate-npm-package-license "^3.0.1"
+
+normalize-path@^2.0.1, normalize-path@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9"
+ dependencies:
+ remove-trailing-separator "^1.0.1"
+
+normalize-range@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
+
+normalize-scroll-left@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/normalize-scroll-left/-/normalize-scroll-left-0.1.2.tgz#6b79691ba79eb5fb107fa5edfbdc06b55caee2aa"
+
+normalize-url@^1.4.0:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c"
+ dependencies:
+ object-assign "^4.0.1"
+ prepend-http "^1.0.0"
+ query-string "^4.1.0"
+ sort-keys "^1.0.0"
+
+npm-run-path@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
+ dependencies:
+ path-key "^2.0.0"
+
+npmlog@^4.0.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
+ dependencies:
+ are-we-there-yet "~1.1.2"
+ console-control-strings "~1.1.0"
+ gauge "~2.7.3"
+ set-blocking "~2.0.0"
+
+num2fraction@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede"
+
+number-is-nan@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
+
+oauth-sign@~0.8.1:
+ version "0.8.2"
+ resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43"
+
+object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
+
+object-copy@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c"
+ dependencies:
+ copy-descriptor "^0.1.0"
+ define-property "^0.2.5"
+ kind-of "^3.0.3"
+
+object-hash@^1.1.4:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-1.2.0.tgz#e96af0e96981996a1d47f88ead8f74f1ebc4422b"
+
+object-keys@^1.0.8:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d"
+
+object-visit@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
+ dependencies:
+ isobject "^3.0.0"
+
+object.omit@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa"
+ dependencies:
+ for-own "^0.1.4"
+ is-extendable "^0.1.1"
+
+object.pick@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
+ dependencies:
+ isobject "^3.0.1"
+
+obuf@^1.0.0, obuf@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.1.tgz#104124b6c602c6796881a042541d36db43a5264e"
+
+on-finished@~2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
+ dependencies:
+ ee-first "1.1.1"
+
+on-headers@~1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.1.tgz#928f5d0f470d49342651ea6794b0857c100693f7"
+
+once@^1.3.0, once@^1.3.3:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ dependencies:
+ wrappy "1"
+
+onetime@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4"
+ dependencies:
+ mimic-fn "^1.0.0"
+
+opn@^5.1.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/opn/-/opn-5.2.0.tgz#71fdf934d6827d676cecbea1531f95d354641225"
+ dependencies:
+ is-wsl "^1.1.0"
+
+optionator@^0.8.2:
+ version "0.8.2"
+ resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64"
+ dependencies:
+ deep-is "~0.1.3"
+ fast-levenshtein "~2.0.4"
+ levn "~0.3.0"
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+ wordwrap "~1.0.0"
+
+original@>=0.0.5:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/original/-/original-1.0.0.tgz#9147f93fa1696d04be61e01bd50baeaca656bd3b"
+ dependencies:
+ url-parse "1.0.x"
+
+os-browserify@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
+
+os-homedir@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
+
+os-locale@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9"
+ dependencies:
+ lcid "^1.0.0"
+
+os-locale@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2"
+ dependencies:
+ execa "^0.7.0"
+ lcid "^1.0.0"
+ mem "^1.1.0"
+
+os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
+
+osenv@^0.1.4:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
+ dependencies:
+ os-homedir "^1.0.0"
+ os-tmpdir "^1.0.0"
+
+"over@>= 0.0.5 < 1":
+ version "0.0.5"
+ resolved "https://registry.yarnpkg.com/over/-/over-0.0.5.tgz#f29852e70fd7e25f360e013a8ec44c82aedb5708"
+
+p-cancelable@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa"
+
+p-finally@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+
+p-limit@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.2.0.tgz#0e92b6bedcb59f022c13d0f1949dc82d15909f1c"
+ dependencies:
+ p-try "^1.0.0"
+
+p-locate@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
+ dependencies:
+ p-limit "^1.1.0"
+
+p-map@^1.1.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.2.0.tgz#e4e94f311eabbc8633a1e79908165fca26241b6b"
+
+p-timeout@^1.1.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386"
+ dependencies:
+ p-finally "^1.0.0"
+
+p-try@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
+
+pako@~1.0.5:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.6.tgz#0101211baa70c4bca4a0f63f2206e97b7dfaf258"
+
+parse-asn1@^5.0.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.0.tgz#37c4f9b7ed3ab65c74817b5f2480937fbf97c712"
+ dependencies:
+ asn1.js "^4.0.0"
+ browserify-aes "^1.0.0"
+ create-hash "^1.1.0"
+ evp_bytestokey "^1.0.0"
+ pbkdf2 "^3.0.3"
+
+parse-glob@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c"
+ dependencies:
+ glob-base "^0.3.0"
+ is-dotfile "^1.0.0"
+ is-extglob "^1.0.0"
+ is-glob "^2.0.0"
+
+parse-json@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9"
+ dependencies:
+ error-ex "^1.2.0"
+
+parseurl@~1.3.2:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3"
+
+pascalcase@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14"
+
+path-browserify@0.0.0:
+ version "0.0.0"
+ resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.0.tgz#a0b870729aae214005b7d5032ec2cbbb0fb4451a"
+
+path-dirname@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0"
+
+path-exists@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b"
+ dependencies:
+ pinkie-promise "^2.0.0"
+
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+
+path-is-absolute@^1.0.0, path-is-absolute@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+
+path-is-inside@^1.0.1, path-is-inside@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
+
+path-key@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
+
+path-parse@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1"
+
+path-to-regexp@0.1.7:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
+
+path-type@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441"
+ dependencies:
+ graceful-fs "^4.1.2"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+
+path-type@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73"
+ dependencies:
+ pify "^2.0.0"
+
+path@^0.12.7:
+ version "0.12.7"
+ resolved "https://registry.yarnpkg.com/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f"
+ dependencies:
+ process "^0.11.1"
+ util "^0.10.3"
+
+pbkdf2@^3.0.3:
+ version "3.0.14"
+ resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.14.tgz#a35e13c64799b06ce15320f459c230e68e73bade"
+ dependencies:
+ create-hash "^1.1.2"
+ create-hmac "^1.1.4"
+ ripemd160 "^2.0.1"
+ safe-buffer "^5.0.1"
+ sha.js "^2.4.8"
+
+performance-now@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5"
+
+performance-now@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
+
+pify@^2.0.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
+
+pify@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
+
+pinkie-promise@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
+ dependencies:
+ pinkie "^2.0.0"
+
+pinkie@^2.0.0:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
+
+pkg-dir@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4"
+ dependencies:
+ find-up "^1.0.0"
+
+pkg-dir@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
+ dependencies:
+ find-up "^2.1.0"
+
+pluralize@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777"
+
+popper.js@^1.12.9:
+ version "1.12.9"
+ resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.12.9.tgz#0dfbc2dff96c451bb332edcfcfaaf566d331d5b3"
+
+portfinder@^1.0.9:
+ version "1.0.13"
+ resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.13.tgz#bb32ecd87c27104ae6ee44b5a3ccbf0ebb1aede9"
+ dependencies:
+ async "^1.5.2"
+ debug "^2.2.0"
+ mkdirp "0.5.x"
+
+posix-character-classes@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
+
+postcss-calc@^5.2.0:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-5.3.1.tgz#77bae7ca928ad85716e2fda42f261bf7c1d65b5e"
+ dependencies:
+ postcss "^5.0.2"
+ postcss-message-helpers "^2.0.0"
+ reduce-css-calc "^1.2.6"
+
+postcss-colormin@^2.1.8:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-2.2.2.tgz#6631417d5f0e909a3d7ec26b24c8a8d1e4f96e4b"
+ dependencies:
+ colormin "^1.0.5"
+ postcss "^5.0.13"
+ postcss-value-parser "^3.2.3"
+
+postcss-convert-values@^2.3.4:
+ version "2.6.1"
+ resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-2.6.1.tgz#bbd8593c5c1fd2e3d1c322bb925dcae8dae4d62d"
+ dependencies:
+ postcss "^5.0.11"
+ postcss-value-parser "^3.1.2"
+
+postcss-discard-comments@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-2.0.4.tgz#befe89fafd5b3dace5ccce51b76b81514be00e3d"
+ dependencies:
+ postcss "^5.0.14"
+
+postcss-discard-duplicates@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-2.1.0.tgz#b9abf27b88ac188158a5eb12abcae20263b91932"
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-discard-empty@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-2.1.0.tgz#d2b4bd9d5ced5ebd8dcade7640c7d7cd7f4f92b5"
+ dependencies:
+ postcss "^5.0.14"
+
+postcss-discard-overridden@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-0.1.1.tgz#8b1eaf554f686fb288cd874c55667b0aa3668d58"
+ dependencies:
+ postcss "^5.0.16"
+
+postcss-discard-unused@^2.2.1:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-2.2.3.tgz#bce30b2cc591ffc634322b5fb3464b6d934f4433"
+ dependencies:
+ postcss "^5.0.14"
+ uniqs "^2.0.0"
+
+postcss-filter-plugins@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-filter-plugins/-/postcss-filter-plugins-2.0.2.tgz#6d85862534d735ac420e4a85806e1f5d4286d84c"
+ dependencies:
+ postcss "^5.0.4"
+ uniqid "^4.0.0"
+
+postcss-merge-idents@^2.1.5:
+ version "2.1.7"
+ resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-2.1.7.tgz#4c5530313c08e1d5b3bbf3d2bbc747e278eea270"
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.10"
+ postcss-value-parser "^3.1.1"
+
+postcss-merge-longhand@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-2.0.2.tgz#23d90cd127b0a77994915332739034a1a4f3d658"
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-merge-rules@^2.0.3:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-2.1.2.tgz#d1df5dfaa7b1acc3be553f0e9e10e87c61b5f721"
+ dependencies:
+ browserslist "^1.5.2"
+ caniuse-api "^1.5.2"
+ postcss "^5.0.4"
+ postcss-selector-parser "^2.2.2"
+ vendors "^1.0.0"
+
+postcss-message-helpers@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-message-helpers/-/postcss-message-helpers-2.0.0.tgz#a4f2f4fab6e4fe002f0aed000478cdf52f9ba60e"
+
+postcss-minify-font-values@^1.0.2:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-1.0.5.tgz#4b58edb56641eba7c8474ab3526cafd7bbdecb69"
+ dependencies:
+ object-assign "^4.0.1"
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.2"
+
+postcss-minify-gradients@^1.0.1:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-1.0.5.tgz#5dbda11373703f83cfb4a3ea3881d8d75ff5e6e1"
+ dependencies:
+ postcss "^5.0.12"
+ postcss-value-parser "^3.3.0"
+
+postcss-minify-params@^1.0.4:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-1.2.2.tgz#ad2ce071373b943b3d930a3fa59a358c28d6f1f3"
+ dependencies:
+ alphanum-sort "^1.0.1"
+ postcss "^5.0.2"
+ postcss-value-parser "^3.0.2"
+ uniqs "^2.0.0"
+
+postcss-minify-selectors@^2.0.4:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-2.1.1.tgz#b2c6a98c0072cf91b932d1a496508114311735bf"
+ dependencies:
+ alphanum-sort "^1.0.2"
+ has "^1.0.1"
+ postcss "^5.0.14"
+ postcss-selector-parser "^2.0.0"
+
+postcss-modules-extract-imports@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.0.tgz#66140ecece38ef06bf0d3e355d69bf59d141ea85"
+ dependencies:
+ postcss "^6.0.1"
+
+postcss-modules-local-by-default@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069"
+ dependencies:
+ css-selector-tokenizer "^0.7.0"
+ postcss "^6.0.1"
+
+postcss-modules-scope@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90"
+ dependencies:
+ css-selector-tokenizer "^0.7.0"
+ postcss "^6.0.1"
+
+postcss-modules-values@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20"
+ dependencies:
+ icss-replace-symbols "^1.1.0"
+ postcss "^6.0.1"
+
+postcss-normalize-charset@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-1.1.1.tgz#ef9ee71212d7fe759c78ed162f61ed62b5cb93f1"
+ dependencies:
+ postcss "^5.0.5"
+
+postcss-normalize-url@^3.0.7:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-3.0.8.tgz#108f74b3f2fcdaf891a2ffa3ea4592279fc78222"
+ dependencies:
+ is-absolute-url "^2.0.0"
+ normalize-url "^1.4.0"
+ postcss "^5.0.14"
+ postcss-value-parser "^3.2.3"
+
+postcss-ordered-values@^2.1.0:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-2.2.3.tgz#eec6c2a67b6c412a8db2042e77fe8da43f95c11d"
+ dependencies:
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.1"
+
+postcss-reduce-idents@^2.2.2:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-2.4.0.tgz#c2c6d20cc958284f6abfbe63f7609bf409059ad3"
+ dependencies:
+ postcss "^5.0.4"
+ postcss-value-parser "^3.0.2"
+
+postcss-reduce-initial@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-1.0.1.tgz#68f80695f045d08263a879ad240df8dd64f644ea"
+ dependencies:
+ postcss "^5.0.4"
+
+postcss-reduce-transforms@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-1.0.4.tgz#ff76f4d8212437b31c298a42d2e1444025771ae1"
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.8"
+ postcss-value-parser "^3.0.1"
+
+postcss-selector-parser@^2.0.0, postcss-selector-parser@^2.2.2:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz#f9437788606c3c9acee16ffe8d8b16297f27bb90"
+ dependencies:
+ flatten "^1.0.2"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+
+postcss-svgo@^2.1.1:
+ version "2.1.6"
+ resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-2.1.6.tgz#b6df18aa613b666e133f08adb5219c2684ac108d"
+ dependencies:
+ is-svg "^2.0.0"
+ postcss "^5.0.14"
+ postcss-value-parser "^3.2.3"
+ svgo "^0.7.0"
+
+postcss-unique-selectors@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-2.0.2.tgz#981d57d29ddcb33e7b1dfe1fd43b8649f933ca1d"
+ dependencies:
+ alphanum-sort "^1.0.1"
+ postcss "^5.0.4"
+ uniqs "^2.0.0"
+
+postcss-value-parser@^3.0.1, postcss-value-parser@^3.0.2, postcss-value-parser@^3.1.1, postcss-value-parser@^3.1.2, postcss-value-parser@^3.2.3, postcss-value-parser@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15"
+
+postcss-zindex@^2.0.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-2.2.0.tgz#d2109ddc055b91af67fc4cb3b025946639d2af22"
+ dependencies:
+ has "^1.0.1"
+ postcss "^5.0.4"
+ uniqs "^2.0.0"
+
+postcss@^5.0.10, postcss@^5.0.11, postcss@^5.0.12, postcss@^5.0.13, postcss@^5.0.14, postcss@^5.0.16, postcss@^5.0.2, postcss@^5.0.4, postcss@^5.0.5, postcss@^5.0.6, postcss@^5.0.8, postcss@^5.2.16:
+ version "5.2.18"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.18.tgz#badfa1497d46244f6390f58b319830d9107853c5"
+ dependencies:
+ chalk "^1.1.3"
+ js-base64 "^2.1.9"
+ source-map "^0.5.6"
+ supports-color "^3.2.3"
+
+postcss@^6.0.1:
+ version "6.0.19"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.19.tgz#76a78386f670b9d9494a655bf23ac012effd1555"
+ dependencies:
+ chalk "^2.3.1"
+ source-map "^0.6.1"
+ supports-color "^5.2.0"
+
+prelude-ls@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
+
+prepend-http@^1.0.0, prepend-http@^1.0.1:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"
+
+preserve@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b"
+
+private@^0.1.6, private@^0.1.7:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
+
+process-nextick-args@~2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
+
+process@^0.11.1, process@^0.11.10:
+ version "0.11.10"
+ resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
+
+process@~0.5.1:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/process/-/process-0.5.2.tgz#1638d8a8e34c2f440a91db95ab9aeb677fc185cf"
+
+progress@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.0.tgz#8a1be366bf8fc23db2bd23f10c6fe920b4389d1f"
+
+promise@^7.1.1:
+ version "7.3.1"
+ resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf"
+ dependencies:
+ asap "~2.0.3"
+
+prop-types@^15.5.10, prop-types@^15.5.8, prop-types@^15.6.0:
+ version "15.6.1"
+ resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.6.1.tgz#36644453564255ddda391191fb3a125cbdf654ca"
+ dependencies:
+ fbjs "^0.8.16"
+ loose-envify "^1.3.1"
+ object-assign "^4.1.1"
+
+proxy-addr@~2.0.2:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.3.tgz#355f262505a621646b3130a728eb647e22055341"
+ dependencies:
+ forwarded "~0.1.2"
+ ipaddr.js "1.6.0"
+
+prr@~1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476"
+
+pseudomap@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
+
+public-encrypt@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.0.tgz#39f699f3a46560dd5ebacbca693caf7c65c18cc6"
+ dependencies:
+ bn.js "^4.1.0"
+ browserify-rsa "^4.0.0"
+ create-hash "^1.1.0"
+ parse-asn1 "^5.0.0"
+ randombytes "^2.0.1"
+
+"pullstream@>= 0.4.1 < 1":
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/pullstream/-/pullstream-0.4.1.tgz#d6fb3bf5aed697e831150eb1002c25a3f8ae1314"
+ dependencies:
+ over ">= 0.0.5 < 1"
+ readable-stream "~1.0.31"
+ setimmediate ">= 1.0.2 < 2"
+ slice-stream ">= 1.0.0 < 2"
+
+punycode@1.3.2:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
+
+punycode@^1.2.4, punycode@^1.4.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
+
+q@^1.1.2:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
+
+qs@6.5.1:
+ version "6.5.1"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"
+
+qs@~6.4.0:
+ version "6.4.0"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233"
+
+query-string@^4.1.0:
+ version "4.3.4"
+ resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb"
+ dependencies:
+ object-assign "^4.1.0"
+ strict-uri-encode "^1.0.0"
+
+querystring-es3@^0.2.0:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
+
+querystring@0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
+
+querystringify@0.0.x:
+ version "0.0.4"
+ resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-0.0.4.tgz#0cf7f84f9463ff0ae51c4c4b142d95be37724d9c"
+
+querystringify@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-1.0.0.tgz#6286242112c5b712fa654e526652bf6a13ff05cb"
+
+raf@^3.2.0:
+ version "3.4.0"
+ resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.0.tgz#a28876881b4bc2ca9117d4138163ddb80f781575"
+ dependencies:
+ performance-now "^2.1.0"
+
+rafl@~1.2.1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/rafl/-/rafl-1.2.2.tgz#fe930f758211020d47e38815f5196a8be4150740"
+ dependencies:
+ global "~4.3.0"
+
+randomatic@^1.1.3:
+ version "1.1.7"
+ resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c"
+ dependencies:
+ is-number "^3.0.0"
+ kind-of "^4.0.0"
+
+randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.0.6.tgz#d302c522948588848a8d300c932b44c24231da80"
+ dependencies:
+ safe-buffer "^5.1.0"
+
+randomfill@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458"
+ dependencies:
+ randombytes "^2.0.5"
+ safe-buffer "^5.1.0"
+
+range-parser@^1.0.3, range-parser@~1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e"
+
+raw-body@2.3.2:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.2.tgz#bcd60c77d3eb93cde0050295c3f379389bc88f89"
+ dependencies:
+ bytes "3.0.0"
+ http-errors "1.6.2"
+ iconv-lite "0.4.19"
+ unpipe "1.0.0"
+
+rc@^1.1.7:
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.5.tgz#275cd687f6e3b36cc756baa26dfee80a790301fd"
+ dependencies:
+ deep-extend "~0.4.0"
+ ini "~1.3.0"
+ minimist "^1.2.0"
+ strip-json-comments "~2.0.1"
+
+react-dom@^16.2.0:
+ version "16.2.0"
+ resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.2.0.tgz#69003178601c0ca19b709b33a83369fe6124c044"
+ dependencies:
+ fbjs "^0.8.16"
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+ prop-types "^15.6.0"
+
+react-event-listener@^0.5.1:
+ version "0.5.3"
+ resolved "https://registry.yarnpkg.com/react-event-listener/-/react-event-listener-0.5.3.tgz#a8b492596ad601865314fcc2c18cb87b6ce3876e"
+ dependencies:
+ babel-runtime "^6.26.0"
+ fbjs "^0.8.16"
+ prop-types "^15.6.0"
+ warning "^3.0.0"
+
+react-fa@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/react-fa/-/react-fa-5.0.0.tgz#d571732856c6cb2c155c46daef018ba67a75b973"
+ dependencies:
+ font-awesome "^4.3.0"
+ prop-types "^15.5.8"
+
+react-jss@^8.1.0:
+ version "8.3.3"
+ resolved "https://registry.yarnpkg.com/react-jss/-/react-jss-8.3.3.tgz#677a57569d3e4f5099fcdeafeddd8d2c62ab5977"
+ dependencies:
+ hoist-non-react-statics "^2.3.1"
+ jss "^9.3.2"
+ jss-preset-default "^4.3.0"
+ prop-types "^15.6.0"
+ theming "^1.3.0"
+
+react-popper@^0.8.0:
+ version "0.8.2"
+ resolved "https://registry.yarnpkg.com/react-popper/-/react-popper-0.8.2.tgz#092095ff13933211d3856d9f325511ec3a42f12c"
+ dependencies:
+ popper.js "^1.12.9"
+ prop-types "^15.6.0"
+
+react-resize-detector@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-1.1.0.tgz#4a9831fa3caad32230478dd0185cbd2aa91a5ebf"
+ dependencies:
+ prop-types "^15.5.10"
+
+react-scrollbar-size@^2.0.2:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/react-scrollbar-size/-/react-scrollbar-size-2.1.0.tgz#105e797135cab92b1f9e16f00071db7f29f80754"
+ dependencies:
+ babel-runtime "^6.26.0"
+ prop-types "^15.6.0"
+ react-event-listener "^0.5.1"
+ stifle "^1.0.2"
+
+react-smooth@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/react-smooth/-/react-smooth-1.0.0.tgz#b29dbebddddb06d21b5b08962167fb9eac1897d8"
+ dependencies:
+ lodash "~4.17.4"
+ prop-types "^15.6.0"
+ raf "^3.2.0"
+ react-transition-group "^2.2.1"
+
+react-transition-group@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-2.2.1.tgz#e9fb677b79e6455fd391b03823afe84849df4a10"
+ dependencies:
+ chain-function "^1.0.0"
+ classnames "^2.2.5"
+ dom-helpers "^3.2.0"
+ loose-envify "^1.3.1"
+ prop-types "^15.5.8"
+ warning "^3.0.0"
+
+react@^16.2.0:
+ version "16.2.0"
+ resolved "https://registry.yarnpkg.com/react/-/react-16.2.0.tgz#a31bd2dab89bff65d42134fa187f24d054c273ba"
+ dependencies:
+ fbjs "^0.8.16"
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+ prop-types "^15.6.0"
+
+read-pkg-up@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02"
+ dependencies:
+ find-up "^1.0.0"
+ read-pkg "^1.0.0"
+
+read-pkg-up@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be"
+ dependencies:
+ find-up "^2.0.0"
+ read-pkg "^2.0.0"
+
+read-pkg@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28"
+ dependencies:
+ load-json-file "^1.0.0"
+ normalize-package-data "^2.3.2"
+ path-type "^1.0.0"
+
+read-pkg@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8"
+ dependencies:
+ load-json-file "^2.0.0"
+ normalize-package-data "^2.3.2"
+ path-type "^2.0.0"
+
+readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.2.2, readable-stream@^2.3.3:
+ version "2.3.4"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.4.tgz#c946c3f47fa7d8eabc0b6150f4a12f69a4574071"
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.0.3"
+ util-deprecate "~1.0.1"
+
+readable-stream@^2.2.9:
+ version "2.3.5"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.5.tgz#b4f85003a938cbb6ecbce2a124fb1012bd1a838d"
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.0.3"
+ util-deprecate "~1.0.1"
+
+readable-stream@~1.0.0, readable-stream@~1.0.31:
+ version "1.0.34"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c"
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.1"
+ isarray "0.0.1"
+ string_decoder "~0.10.x"
+
+readdirp@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78"
+ dependencies:
+ graceful-fs "^4.1.2"
+ minimatch "^3.0.2"
+ readable-stream "^2.0.2"
+ set-immediate-shim "^1.0.1"
+
+recharts-scale@0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/recharts-scale/-/recharts-scale-0.3.2.tgz#dac7621714a4765d152cb2adbc30c73b831208c9"
+
+recharts@^1.0.0-beta.9:
+ version "1.0.0-beta.10"
+ resolved "https://registry.yarnpkg.com/recharts/-/recharts-1.0.0-beta.10.tgz#d3cd15df6b7879d5968da3c942b5fcdaf2504fe1"
+ dependencies:
+ classnames "2.2.5"
+ core-js "2.5.1"
+ d3-interpolate "^1.1.5"
+ d3-scale "1.0.6"
+ d3-shape "1.2.0"
+ lodash "~4.17.4"
+ prop-types "^15.6.0"
+ react-resize-detector "1.1.0"
+ react-smooth "1.0.0"
+ recharts-scale "0.3.2"
+ reduce-css-calc "1.3.0"
+
+recompose@^0.26.0:
+ version "0.26.0"
+ resolved "https://registry.yarnpkg.com/recompose/-/recompose-0.26.0.tgz#9babff039cb72ba5bd17366d55d7232fbdfb2d30"
+ dependencies:
+ change-emitter "^0.1.2"
+ fbjs "^0.8.1"
+ hoist-non-react-statics "^2.3.1"
+ symbol-observable "^1.0.4"
+
+redent@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde"
+ dependencies:
+ indent-string "^2.1.0"
+ strip-indent "^1.0.1"
+
+reduce-css-calc@1.3.0, reduce-css-calc@^1.2.6:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz#747c914e049614a4c9cfbba629871ad1d2927716"
+ dependencies:
+ balanced-match "^0.4.2"
+ math-expression-evaluator "^1.2.14"
+ reduce-function-call "^1.0.1"
+
+reduce-function-call@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/reduce-function-call/-/reduce-function-call-1.0.2.tgz#5a200bf92e0e37751752fe45b0ab330fd4b6be99"
+ dependencies:
+ balanced-match "^0.4.2"
+
+regenerate@^1.2.1:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.3.tgz#0c336d3980553d755c39b586ae3b20aa49c82b7f"
+
+regenerator-runtime@^0.10.5:
+ version "0.10.5"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658"
+
+regenerator-runtime@^0.11.0:
+ version "0.11.1"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
+
+regenerator-transform@^0.10.0:
+ version "0.10.1"
+ resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.10.1.tgz#1e4996837231da8b7f3cf4114d71b5691a0680dd"
+ dependencies:
+ babel-runtime "^6.18.0"
+ babel-types "^6.19.0"
+ private "^0.1.6"
+
+regex-cache@^0.4.2:
+ version "0.4.4"
+ resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd"
+ dependencies:
+ is-equal-shallow "^0.1.3"
+
+regex-not@^1.0.0, regex-not@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
+ dependencies:
+ extend-shallow "^3.0.2"
+ safe-regex "^1.1.0"
+
+regexpu-core@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-1.0.0.tgz#86a763f58ee4d7c2f6b102e4764050de7ed90c6b"
+ dependencies:
+ regenerate "^1.2.1"
+ regjsgen "^0.2.0"
+ regjsparser "^0.1.4"
+
+regexpu-core@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240"
+ dependencies:
+ regenerate "^1.2.1"
+ regjsgen "^0.2.0"
+ regjsparser "^0.1.4"
+
+regjsgen@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7"
+
+regjsparser@^0.1.4:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c"
+ dependencies:
+ jsesc "~0.5.0"
+
+remove-trailing-separator@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
+
+repeat-element@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a"
+
+repeat-string@^1.5.2, repeat-string@^1.6.1:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
+
+repeating@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda"
+ dependencies:
+ is-finite "^1.0.0"
+
+request@2.81.0:
+ version "2.81.0"
+ resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0"
+ dependencies:
+ aws-sign2 "~0.6.0"
+ aws4 "^1.2.1"
+ caseless "~0.12.0"
+ combined-stream "~1.0.5"
+ extend "~3.0.0"
+ forever-agent "~0.6.1"
+ form-data "~2.1.1"
+ har-validator "~4.2.1"
+ hawk "~3.1.3"
+ http-signature "~1.1.0"
+ is-typedarray "~1.0.0"
+ isstream "~0.1.2"
+ json-stringify-safe "~5.0.1"
+ mime-types "~2.1.7"
+ oauth-sign "~0.8.1"
+ performance-now "^0.2.0"
+ qs "~6.4.0"
+ safe-buffer "^5.0.1"
+ stringstream "~0.0.4"
+ tough-cookie "~2.3.0"
+ tunnel-agent "^0.6.0"
+ uuid "^3.0.0"
+
+require-directory@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
+
+require-main-filename@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1"
+
+require-uncached@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3"
+ dependencies:
+ caller-path "^0.1.0"
+ resolve-from "^1.0.0"
+
+requires-port@1.0.x, requires-port@1.x.x, requires-port@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+
+resolve-cwd@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a"
+ dependencies:
+ resolve-from "^3.0.0"
+
+resolve-from@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226"
+
+resolve-from@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748"
+
+resolve-url@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
+
+resolve@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36"
+ dependencies:
+ path-parse "^1.0.5"
+
+restore-cursor@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf"
+ dependencies:
+ onetime "^2.0.0"
+ signal-exit "^3.0.2"
+
+ret@~0.1.10:
+ version "0.1.15"
+ resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
+
+right-align@^0.1.1:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef"
+ dependencies:
+ align-text "^0.1.1"
+
+rimraf@2, rimraf@^2.2.8, rimraf@^2.5.1, rimraf@^2.6.1, rimraf@^2.6.2:
+ version "2.6.2"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36"
+ dependencies:
+ glob "^7.0.5"
+
+ripemd160@^2.0.0, ripemd160@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7"
+ dependencies:
+ hash-base "^2.0.0"
+ inherits "^2.0.1"
+
+run-async@^2.2.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0"
+ dependencies:
+ is-promise "^2.1.0"
+
+rx-lite-aggregates@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be"
+ dependencies:
+ rx-lite "*"
+
+rx-lite@*, rx-lite@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444"
+
+safe-buffer@5.1.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
+
+safe-regex@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e"
+ dependencies:
+ ret "~0.1.10"
+
+sax@~1.2.1:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+
+schema-utils@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf"
+ dependencies:
+ ajv "^5.0.0"
+
+schema-utils@^0.4.5:
+ version "0.4.5"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.5.tgz#21836f0608aac17b78f9e3e24daff14a5ca13a3e"
+ dependencies:
+ ajv "^6.1.0"
+ ajv-keywords "^3.1.0"
+
+scroll@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/scroll/-/scroll-2.0.3.tgz#0951b785544205fd17753bc3d294738ba16fc2ab"
+ dependencies:
+ rafl "~1.2.1"
+
+select-hose@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
+
+selfsigned@^1.9.1:
+ version "1.10.2"
+ resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.2.tgz#b4449580d99929b65b10a48389301a6592088758"
+ dependencies:
+ node-forge "0.7.1"
+
+"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.5.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab"
+
+send@0.16.1:
+ version "0.16.1"
+ resolved "https://registry.yarnpkg.com/send/-/send-0.16.1.tgz#a70e1ca21d1382c11d0d9f6231deb281080d7ab3"
+ dependencies:
+ debug "2.6.9"
+ depd "~1.1.1"
+ destroy "~1.0.4"
+ encodeurl "~1.0.1"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ fresh "0.5.2"
+ http-errors "~1.6.2"
+ mime "1.4.1"
+ ms "2.0.0"
+ on-finished "~2.3.0"
+ range-parser "~1.2.0"
+ statuses "~1.3.1"
+
+serve-index@^1.7.2:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239"
+ dependencies:
+ accepts "~1.3.4"
+ batch "0.6.1"
+ debug "2.6.9"
+ escape-html "~1.0.3"
+ http-errors "~1.6.2"
+ mime-types "~2.1.17"
+ parseurl "~1.3.2"
+
+serve-static@1.13.1:
+ version "1.13.1"
+ resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.13.1.tgz#4c57d53404a761d8f2e7c1e8a18a47dbf278a719"
+ dependencies:
+ encodeurl "~1.0.1"
+ escape-html "~1.0.3"
+ parseurl "~1.3.2"
+ send "0.16.1"
+
+set-blocking@^2.0.0, set-blocking@~2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
+
+set-getter@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376"
+ dependencies:
+ to-object-path "^0.3.0"
+
+set-immediate-shim@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61"
+
+set-value@^0.4.3:
+ version "0.4.3"
+ resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1"
+ dependencies:
+ extend-shallow "^2.0.1"
+ is-extendable "^0.1.1"
+ is-plain-object "^2.0.1"
+ to-object-path "^0.3.0"
+
+set-value@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274"
+ dependencies:
+ extend-shallow "^2.0.1"
+ is-extendable "^0.1.1"
+ is-plain-object "^2.0.3"
+ split-string "^3.0.1"
+
+"setimmediate@>= 1.0.1 < 2", "setimmediate@>= 1.0.2 < 2", setimmediate@^1.0.4, setimmediate@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
+
+setprototypeof@1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04"
+
+setprototypeof@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
+
+sha.js@^2.4.0, sha.js@^2.4.8:
+ version "2.4.10"
+ resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.10.tgz#b1fde5cd7d11a5626638a07c604ab909cfa31f9b"
+ dependencies:
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+shebang-command@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
+ dependencies:
+ shebang-regex "^1.0.0"
+
+shebang-regex@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
+
+signal-exit@^3.0.0, signal-exit@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
+
+slash@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55"
+
+slice-ansi@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d"
+ dependencies:
+ is-fullwidth-code-point "^2.0.0"
+
+"slice-stream@>= 1.0.0 < 2":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/slice-stream/-/slice-stream-1.0.0.tgz#5b33bd66f013b1a7f86460b03d463dec39ad3ea0"
+ dependencies:
+ readable-stream "~1.0.31"
+
+snapdragon-node@^2.0.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
+ dependencies:
+ define-property "^1.0.0"
+ isobject "^3.0.0"
+ snapdragon-util "^3.0.1"
+
+snapdragon-util@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2"
+ dependencies:
+ kind-of "^3.2.0"
+
+snapdragon@^0.8.1:
+ version "0.8.1"
+ resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.1.tgz#e12b5487faded3e3dea0ac91e9400bf75b401370"
+ dependencies:
+ base "^0.11.1"
+ debug "^2.2.0"
+ define-property "^0.2.5"
+ extend-shallow "^2.0.1"
+ map-cache "^0.2.2"
+ source-map "^0.5.6"
+ source-map-resolve "^0.5.0"
+ use "^2.0.0"
+
+sntp@1.x.x:
+ version "1.0.9"
+ resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198"
+ dependencies:
+ hoek "2.x.x"
+
+sockjs-client@1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.1.4.tgz#5babe386b775e4cf14e7520911452654016c8b12"
+ dependencies:
+ debug "^2.6.6"
+ eventsource "0.1.6"
+ faye-websocket "~0.11.0"
+ inherits "^2.0.1"
+ json3 "^3.3.2"
+ url-parse "^1.1.8"
+
+sockjs@0.3.19:
+ version "0.3.19"
+ resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d"
+ dependencies:
+ faye-websocket "^0.10.0"
+ uuid "^3.0.1"
+
+sort-keys@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad"
+ dependencies:
+ is-plain-obj "^1.0.0"
+
+source-list-map@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.0.tgz#aaa47403f7b245a92fbc97ea08f250d6087ed085"
+
+source-map-resolve@^0.5.0:
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.1.tgz#7ad0f593f2281598e854df80f19aae4b92d7a11a"
+ dependencies:
+ atob "^2.0.0"
+ decode-uri-component "^0.2.0"
+ resolve-url "^0.2.1"
+ source-map-url "^0.4.0"
+ urix "^0.1.0"
+
+source-map-support@^0.4.15:
+ version "0.4.18"
+ resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f"
+ dependencies:
+ source-map "^0.5.6"
+
+source-map-url@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
+
+source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@^0.5.7, source-map@~0.5.1:
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
+
+source-map@^0.6.1, source-map@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+
+spdx-correct@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.0.0.tgz#05a5b4d7153a195bc92c3c425b69f3b2a9524c82"
+ dependencies:
+ spdx-expression-parse "^3.0.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-exceptions@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.1.0.tgz#2c7ae61056c714a5b9b9b2b2af7d311ef5c78fe9"
+
+spdx-expression-parse@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0"
+ dependencies:
+ spdx-exceptions "^2.1.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-license-ids@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.0.tgz#7a7cd28470cc6d3a1cfe6d66886f6bc430d3ac87"
+
+spdy-transport@^2.0.18:
+ version "2.0.20"
+ resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-2.0.20.tgz#735e72054c486b2354fe89e702256004a39ace4d"
+ dependencies:
+ debug "^2.6.8"
+ detect-node "^2.0.3"
+ hpack.js "^2.1.6"
+ obuf "^1.1.1"
+ readable-stream "^2.2.9"
+ safe-buffer "^5.0.1"
+ wbuf "^1.7.2"
+
+spdy@^3.4.1:
+ version "3.4.7"
+ resolved "https://registry.yarnpkg.com/spdy/-/spdy-3.4.7.tgz#42ff41ece5cc0f99a3a6c28aabb73f5c3b03acbc"
+ dependencies:
+ debug "^2.6.8"
+ handle-thing "^1.2.5"
+ http-deceiver "^1.2.7"
+ safe-buffer "^5.0.1"
+ select-hose "^2.0.0"
+ spdy-transport "^2.0.18"
+
+split-string@^3.0.1, split-string@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2"
+ dependencies:
+ extend-shallow "^3.0.0"
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+
+sshpk@^1.7.0:
+ version "1.13.1"
+ resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3"
+ dependencies:
+ asn1 "~0.2.3"
+ assert-plus "^1.0.0"
+ dashdash "^1.12.0"
+ getpass "^0.1.1"
+ optionalDependencies:
+ bcrypt-pbkdf "^1.0.0"
+ ecc-jsbn "~0.1.1"
+ jsbn "~0.1.0"
+ tweetnacl "~0.14.0"
+
+static-extend@^0.1.1:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6"
+ dependencies:
+ define-property "^0.2.5"
+ object-copy "^0.1.0"
+
+"statuses@>= 1.3.1 < 2":
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.4.0.tgz#bb73d446da2796106efcc1b601a253d6c46bd087"
+
+statuses@~1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e"
+
+stifle@^1.0.2:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/stifle/-/stifle-1.0.4.tgz#8b3bcdf52419b0a9c79e35adadce50123c1d8e99"
+
+stream-browserify@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.1.tgz#66266ee5f9bdb9940a4e4514cafb43bb71e5c9db"
+ dependencies:
+ inherits "~2.0.1"
+ readable-stream "^2.0.2"
+
+stream-http@^2.7.2:
+ version "2.8.0"
+ resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.0.tgz#fd86546dac9b1c91aff8fc5d287b98fafb41bc10"
+ dependencies:
+ builtin-status-codes "^3.0.0"
+ inherits "^2.0.1"
+ readable-stream "^2.3.3"
+ to-arraybuffer "^1.0.0"
+ xtend "^4.0.0"
+
+strict-uri-encode@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
+
+string-width@^1.0.1, string-width@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
+ dependencies:
+ code-point-at "^1.0.0"
+ is-fullwidth-code-point "^1.0.0"
+ strip-ansi "^3.0.0"
+
+string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
+ dependencies:
+ is-fullwidth-code-point "^2.0.0"
+ strip-ansi "^4.0.0"
+
+string_decoder@^1.0.0, string_decoder@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab"
+ dependencies:
+ safe-buffer "~5.1.0"
+
+string_decoder@~0.10.x:
+ version "0.10.31"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
+
+stringstream@~0.0.4:
+ version "0.0.5"
+ resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
+
+strip-ansi@^3.0.0, strip-ansi@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
+ dependencies:
+ ansi-regex "^2.0.0"
+
+strip-ansi@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
+ dependencies:
+ ansi-regex "^3.0.0"
+
+strip-bom@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e"
+ dependencies:
+ is-utf8 "^0.2.0"
+
+strip-bom@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
+
+strip-eof@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
+
+strip-indent@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2"
+ dependencies:
+ get-stdin "^4.0.1"
+
+strip-json-comments@~2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
+
+style-loader@^0.19.1:
+ version "0.19.1"
+ resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.19.1.tgz#591ffc80bcefe268b77c5d9ebc0505d772619f85"
+ dependencies:
+ loader-utils "^1.0.2"
+ schema-utils "^0.3.0"
+
+supports-color@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
+
+supports-color@^3.2.3:
+ version "3.2.3"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6"
+ dependencies:
+ has-flag "^1.0.0"
+
+supports-color@^4.2.1:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b"
+ dependencies:
+ has-flag "^2.0.0"
+
+supports-color@^5.1.0, supports-color@^5.2.0, supports-color@^5.3.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.3.0.tgz#5b24ac15db80fa927cf5227a4a33fd3c4c7676c0"
+ dependencies:
+ has-flag "^3.0.0"
+
+svgo@^0.7.0:
+ version "0.7.2"
+ resolved "https://registry.yarnpkg.com/svgo/-/svgo-0.7.2.tgz#9f5772413952135c6fefbf40afe6a4faa88b4bb5"
+ dependencies:
+ coa "~1.0.1"
+ colors "~1.1.2"
+ csso "~2.3.1"
+ js-yaml "~3.7.0"
+ mkdirp "~0.5.1"
+ sax "~1.2.1"
+ whet.extend "~0.9.9"
+
+symbol-observable@^1.0.4, symbol-observable@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
+
+table@4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/table/-/table-4.0.2.tgz#a33447375391e766ad34d3486e6e2aedc84d2e36"
+ dependencies:
+ ajv "^5.2.3"
+ ajv-keywords "^2.1.0"
+ chalk "^2.1.0"
+ lodash "^4.17.4"
+ slice-ansi "1.0.0"
+ string-width "^2.1.1"
+
+table@^4.0.2:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/table/-/table-4.0.3.tgz#00b5e2b602f1794b9acaf9ca908a76386a7813bc"
+ dependencies:
+ ajv "^6.0.1"
+ ajv-keywords "^3.0.0"
+ chalk "^2.1.0"
+ lodash "^4.17.4"
+ slice-ansi "1.0.0"
+ string-width "^2.1.1"
+
+tapable@^0.2.7:
+ version "0.2.8"
+ resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.8.tgz#99372a5c999bf2df160afc0d74bed4f47948cd22"
+
+tar-pack@^3.4.0:
+ version "3.4.1"
+ resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f"
+ dependencies:
+ debug "^2.2.0"
+ fstream "^1.0.10"
+ fstream-ignore "^1.0.5"
+ once "^1.3.3"
+ readable-stream "^2.1.4"
+ rimraf "^2.5.1"
+ tar "^2.2.1"
+ uid-number "^0.0.6"
+
+tar@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1"
+ dependencies:
+ block-stream "*"
+ fstream "^1.0.2"
+ inherits "2"
+
+text-table@~0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
+
+theming@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/theming/-/theming-1.3.0.tgz#286d5bae80be890d0adc645e5ca0498723725bdc"
+ dependencies:
+ brcast "^3.0.1"
+ is-function "^1.0.1"
+ is-plain-object "^2.0.1"
+ prop-types "^15.5.8"
+
+through@^2.3.6, through@^2.3.8:
+ version "2.3.8"
+ resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
+
+thunky@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.2.tgz#a862e018e3fb1ea2ec3fce5d55605cf57f247371"
+
+time-stamp@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-2.0.0.tgz#95c6a44530e15ba8d6f4a3ecb8c3a3fac46da357"
+
+timed-out@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f"
+
+timers-browserify@^2.0.4:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.6.tgz#241e76927d9ca05f4d959819022f5b3664b64bae"
+ dependencies:
+ setimmediate "^1.0.4"
+
+tmp@^0.0.33:
+ version "0.0.33"
+ resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
+ dependencies:
+ os-tmpdir "~1.0.2"
+
+to-arraybuffer@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
+
+to-fast-properties@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
+
+to-fast-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
+
+to-object-path@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af"
+ dependencies:
+ kind-of "^3.0.2"
+
+to-regex-range@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38"
+ dependencies:
+ is-number "^3.0.0"
+ repeat-string "^1.6.1"
+
+to-regex@^3.0.1:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce"
+ dependencies:
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ regex-not "^1.0.2"
+ safe-regex "^1.1.0"
+
+tough-cookie@~2.3.0:
+ version "2.3.4"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.4.tgz#ec60cee38ac675063ffc97a5c18970578ee83655"
+ dependencies:
+ punycode "^1.4.1"
+
+"traverse@>=0.3.0 <0.4":
+ version "0.3.9"
+ resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9"
+
+trim-newlines@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613"
+
+trim-right@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
+
+tty-browserify@0.0.0:
+ version "0.0.0"
+ resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
+
+tunnel-agent@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
+ dependencies:
+ safe-buffer "^5.0.1"
+
+tweetnacl@^0.14.3, tweetnacl@~0.14.0:
+ version "0.14.5"
+ resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
+
+type-check@~0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
+ dependencies:
+ prelude-ls "~1.1.2"
+
+type-is@~1.6.15:
+ version "1.6.16"
+ resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.16.tgz#f89ce341541c672b25ee7ae3c73dee3b2be50194"
+ dependencies:
+ media-typer "0.3.0"
+ mime-types "~2.1.18"
+
+typedarray@^0.0.6:
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
+
+ua-parser-js@^0.7.9:
+ version "0.7.17"
+ resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.17.tgz#e9ec5f9498b9ec910e7ae3ac626a805c4d09ecac"
+
+uglify-js@^2.8.29:
+ version "2.8.29"
+ resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd"
+ dependencies:
+ source-map "~0.5.1"
+ yargs "~3.10.0"
+ optionalDependencies:
+ uglify-to-browserify "~1.0.0"
+
+uglify-to-browserify@~1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7"
+
+uglifyjs-webpack-plugin@^0.4.6:
+ version "0.4.6"
+ resolved "https://registry.yarnpkg.com/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz#b951f4abb6bd617e66f63eb891498e391763e309"
+ dependencies:
+ source-map "^0.5.6"
+ uglify-js "^2.8.29"
+ webpack-sources "^1.0.1"
+
+uid-number@^0.0.6:
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81"
+
+union-value@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4"
+ dependencies:
+ arr-union "^3.1.0"
+ get-value "^2.0.6"
+ is-extendable "^0.1.1"
+ set-value "^0.4.3"
+
+uniq@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff"
+
+uniqid@^4.0.0:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/uniqid/-/uniqid-4.1.1.tgz#89220ddf6b751ae52b5f72484863528596bb84c1"
+ dependencies:
+ macaddress "^0.2.8"
+
+uniqs@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02"
+
+universalify@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.1.tgz#fa71badd4437af4c148841e3b3b165f9e9e590b7"
+
+unpipe@1.0.0, unpipe@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
+
+unset-value@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"
+ dependencies:
+ has-value "^0.3.1"
+ isobject "^3.0.0"
+
+unzip@^0.1.11:
+ version "0.1.11"
+ resolved "https://registry.yarnpkg.com/unzip/-/unzip-0.1.11.tgz#89749c63b058d7d90d619f86b98aa1535d3b97f0"
+ dependencies:
+ binary ">= 0.3.0 < 1"
+ fstream ">= 0.1.30 < 1"
+ match-stream ">= 0.0.2 < 1"
+ pullstream ">= 0.4.1 < 1"
+ readable-stream "~1.0.31"
+ setimmediate ">= 1.0.1 < 2"
+
+upath@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/upath/-/upath-1.0.4.tgz#ee2321ba0a786c50973db043a50b7bcba822361d"
+
+urix@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
+
+url-loader@^0.6.2:
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-0.6.2.tgz#a007a7109620e9d988d14bce677a1decb9a993f7"
+ dependencies:
+ loader-utils "^1.0.2"
+ mime "^1.4.1"
+ schema-utils "^0.3.0"
+
+url-parse-lax@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73"
+ dependencies:
+ prepend-http "^1.0.1"
+
+url-parse@1.0.x:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.0.5.tgz#0854860422afdcfefeb6c965c662d4800169927b"
+ dependencies:
+ querystringify "0.0.x"
+ requires-port "1.0.x"
+
+url-parse@^1.1.8:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.2.0.tgz#3a19e8aaa6d023ddd27dcc44cb4fc8f7fec23986"
+ dependencies:
+ querystringify "~1.0.0"
+ requires-port "~1.0.0"
+
+url-to-options@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9"
+
+url@^0.11.0:
+ version "0.11.0"
+ resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
+ dependencies:
+ punycode "1.3.2"
+ querystring "0.2.0"
+
+use@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/use/-/use-2.0.2.tgz#ae28a0d72f93bf22422a18a2e379993112dec8e8"
+ dependencies:
+ define-property "^0.2.5"
+ isobject "^3.0.0"
+ lazy-cache "^2.0.2"
+
+util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+
+util@0.10.3, util@^0.10.3:
+ version "0.10.3"
+ resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9"
+ dependencies:
+ inherits "2.0.1"
+
+utils-merge@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+
+uuid@^3.0.0, uuid@^3.0.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.2.1.tgz#12c528bb9d58d0b9265d9a2f6f0fe8be17ff1f14"
+
+validate-npm-package-license@^3.0.1:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.3.tgz#81643bcbef1bdfecd4623793dc4648948ba98338"
+ dependencies:
+ spdx-correct "^3.0.0"
+ spdx-expression-parse "^3.0.0"
+
+vary@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
+
+vendors@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.1.tgz#37ad73c8ee417fb3d580e785312307d274847f22"
+
+verror@1.10.0:
+ version "1.10.0"
+ resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400"
+ dependencies:
+ assert-plus "^1.0.0"
+ core-util-is "1.0.2"
+ extsprintf "^1.2.0"
+
+vm-browserify@0.0.4:
+ version "0.0.4"
+ resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73"
+ dependencies:
+ indexof "0.0.1"
+
+warning@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/warning/-/warning-3.0.0.tgz#32e5377cb572de4ab04753bdf8821c01ed605b7c"
+ dependencies:
+ loose-envify "^1.0.0"
+
+watchpack@^1.4.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.5.0.tgz#231e783af830a22f8966f65c4c4bacc814072eed"
+ dependencies:
+ chokidar "^2.0.2"
+ graceful-fs "^4.1.2"
+ neo-async "^2.5.0"
+
+wbuf@^1.1.0, wbuf@^1.7.2:
+ version "1.7.2"
+ resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.2.tgz#d697b99f1f59512df2751be42769c1580b5801fe"
+ dependencies:
+ minimalistic-assert "^1.0.0"
+
+webpack-dev-middleware@1.12.2:
+ version "1.12.2"
+ resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-1.12.2.tgz#f8fc1120ce3b4fc5680ceecb43d777966b21105e"
+ dependencies:
+ memory-fs "~0.4.1"
+ mime "^1.5.0"
+ path-is-absolute "^1.0.0"
+ range-parser "^1.0.3"
+ time-stamp "^2.0.0"
+
+webpack-dev-server@^2.11.1:
+ version "2.11.2"
+ resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-2.11.2.tgz#1f4f4c78bf1895378f376815910812daf79a216f"
+ dependencies:
+ ansi-html "0.0.7"
+ array-includes "^3.0.3"
+ bonjour "^3.5.0"
+ chokidar "^2.0.0"
+ compression "^1.5.2"
+ connect-history-api-fallback "^1.3.0"
+ debug "^3.1.0"
+ del "^3.0.0"
+ express "^4.16.2"
+ html-entities "^1.2.0"
+ http-proxy-middleware "~0.17.4"
+ import-local "^1.0.0"
+ internal-ip "1.2.0"
+ ip "^1.1.5"
+ killable "^1.0.0"
+ loglevel "^1.4.1"
+ opn "^5.1.0"
+ portfinder "^1.0.9"
+ selfsigned "^1.9.1"
+ serve-index "^1.7.2"
+ sockjs "0.3.19"
+ sockjs-client "1.1.4"
+ spdy "^3.4.1"
+ strip-ansi "^3.0.0"
+ supports-color "^5.1.0"
+ webpack-dev-middleware "1.12.2"
+ yargs "6.6.0"
+
+webpack-sources@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.1.0.tgz#a101ebae59d6507354d71d8013950a3a8b7a5a54"
+ dependencies:
+ source-list-map "^2.0.0"
+ source-map "~0.6.1"
+
+webpack@^3.10.0:
+ version "3.11.0"
+ resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.11.0.tgz#77da451b1d7b4b117adaf41a1a93b5742f24d894"
+ dependencies:
+ acorn "^5.0.0"
+ acorn-dynamic-import "^2.0.0"
+ ajv "^6.1.0"
+ ajv-keywords "^3.1.0"
+ async "^2.1.2"
+ enhanced-resolve "^3.4.0"
+ escope "^3.6.0"
+ interpret "^1.0.0"
+ json-loader "^0.5.4"
+ json5 "^0.5.1"
+ loader-runner "^2.3.0"
+ loader-utils "^1.1.0"
+ memory-fs "~0.4.1"
+ mkdirp "~0.5.0"
+ node-libs-browser "^2.0.0"
+ source-map "^0.5.3"
+ supports-color "^4.2.1"
+ tapable "^0.2.7"
+ uglifyjs-webpack-plugin "^0.4.6"
+ watchpack "^1.4.0"
+ webpack-sources "^1.0.1"
+ yargs "^8.0.2"
+
+websocket-driver@>=0.5.1:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.0.tgz#0caf9d2d755d93aee049d4bdd0d3fe2cca2a24eb"
+ dependencies:
+ http-parser-js ">=0.4.0"
+ websocket-extensions ">=0.1.1"
+
+websocket-extensions@>=0.1.1:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.3.tgz#5d2ff22977003ec687a4b87073dfbbac146ccf29"
+
+whatwg-fetch@>=0.10.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.3.tgz#9c84ec2dcf68187ff00bc64e1274b442176e1c84"
+
+whet.extend@~0.9.9:
+ version "0.9.9"
+ resolved "https://registry.yarnpkg.com/whet.extend/-/whet.extend-0.9.9.tgz#f877d5bf648c97e5aa542fadc16d6a259b9c11a1"
+
+which-module@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f"
+
+which-module@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
+
+which@^1.2.9, which@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a"
+ dependencies:
+ isexe "^2.0.0"
+
+wide-align@^1.1.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710"
+ dependencies:
+ string-width "^1.0.2"
+
+window-size@0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d"
+
+window-size@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.2.0.tgz#b4315bb4214a3d7058ebeee892e13fa24d98b075"
+
+wordwrap@0.0.2:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f"
+
+wordwrap@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
+
+wrap-ansi@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85"
+ dependencies:
+ string-width "^1.0.1"
+ strip-ansi "^3.0.1"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+
+write@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757"
+ dependencies:
+ mkdirp "^0.5.1"
+
+xtend@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
+
+y18n@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41"
+
+yallist@^2.1.2:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
+
+yargs-parser@^2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4"
+ dependencies:
+ camelcase "^3.0.0"
+ lodash.assign "^4.0.6"
+
+yargs-parser@^4.2.0:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-4.2.1.tgz#29cceac0dc4f03c6c87b4a9f217dd18c9f74871c"
+ dependencies:
+ camelcase "^3.0.0"
+
+yargs-parser@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9"
+ dependencies:
+ camelcase "^4.1.0"
+
+yargs@6.6.0:
+ version "6.6.0"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-6.6.0.tgz#782ec21ef403345f830a808ca3d513af56065208"
+ dependencies:
+ camelcase "^3.0.0"
+ cliui "^3.2.0"
+ decamelize "^1.1.1"
+ get-caller-file "^1.0.1"
+ os-locale "^1.4.0"
+ read-pkg-up "^1.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^1.0.1"
+ set-blocking "^2.0.0"
+ string-width "^1.0.2"
+ which-module "^1.0.0"
+ y18n "^3.2.1"
+ yargs-parser "^4.2.0"
+
+yargs@^4.2.0:
+ version "4.8.1"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-4.8.1.tgz#c0c42924ca4aaa6b0e6da1739dfb216439f9ddc0"
+ dependencies:
+ cliui "^3.2.0"
+ decamelize "^1.1.1"
+ get-caller-file "^1.0.1"
+ lodash.assign "^4.0.3"
+ os-locale "^1.4.0"
+ read-pkg-up "^1.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^1.0.1"
+ set-blocking "^2.0.0"
+ string-width "^1.0.1"
+ which-module "^1.0.0"
+ window-size "^0.2.0"
+ y18n "^3.2.1"
+ yargs-parser "^2.4.1"
+
+yargs@^8.0.2:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360"
+ dependencies:
+ camelcase "^4.1.0"
+ cliui "^3.2.0"
+ decamelize "^1.1.1"
+ get-caller-file "^1.0.1"
+ os-locale "^2.0.0"
+ read-pkg-up "^2.0.0"
+ require-directory "^2.1.1"
+ require-main-filename "^1.0.1"
+ set-blocking "^2.0.0"
+ string-width "^2.0.0"
+ which-module "^2.0.0"
+ y18n "^3.2.1"
+ yargs-parser "^7.0.0"
+
+yargs@~3.10.0:
+ version "3.10.0"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1"
+ dependencies:
+ camelcase "^1.0.2"
+ cliui "^2.1.0"
+ decamelize "^1.0.0"
+ window-size "0.1.0"
diff --git a/dashboard/config.go b/dashboard/config.go
index 604a5f2c9..c260ed4f0 100644
--- a/dashboard/config.go
+++ b/dashboard/config.go
@@ -38,8 +38,4 @@ type Config struct {
 // Refresh is the refresh rate of the data updates; the chartEntry will be collected this often.
Refresh time.Duration `toml:",omitempty"`
-
- // Assets offers a possibility to manually set the dashboard website's location on the server side.
- // It is useful for debugging, avoids the repeated generation of the binary.
- Assets string `toml:",omitempty"`
}
diff --git a/dashboard/dashboard.go b/dashboard/dashboard.go
index 09038638e..399fa34c0 100644
--- a/dashboard/dashboard.go
+++ b/dashboard/dashboard.go
@@ -16,19 +16,17 @@
package dashboard
-//go:generate npm --prefix ./assets install
-//go:generate ./assets/node_modules/.bin/webpack --config ./assets/webpack.config.js --context ./assets
-//go:generate go-bindata -nometadata -o assets.go -prefix assets -nocompress -pkg dashboard assets/dashboard.html assets/bundle.js
+//go:generate yarn --cwd ./assets install
+//go:generate yarn --cwd ./assets build
+//go:generate go-bindata -nometadata -o assets.go -prefix assets -nocompress -pkg dashboard assets/index.html assets/bundle.js
//go:generate sh -c "sed 's#var _bundleJs#//nolint:misspell\\\n&#' assets.go > assets.go.tmp && mv assets.go.tmp assets.go"
-//go:generate sh -c "sed 's#var _dashboardHtml#//nolint:misspell\\\n&#' assets.go > assets.go.tmp && mv assets.go.tmp assets.go"
+//go:generate sh -c "sed 's#var _indexHtml#//nolint:misspell\\\n&#' assets.go > assets.go.tmp && mv assets.go.tmp assets.go"
//go:generate gofmt -w -s assets.go
import (
"fmt"
- "io/ioutil"
"net"
"net/http"
- "path/filepath"
"runtime"
"sync"
"sync/atomic"
@@ -36,10 +34,10 @@ import (
"github.com/elastic/gosigar"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/params"
"github.com/ethereum/go-ethereum/rpc"
- "github.com/rcrowley/go-metrics"
"golang.org/x/net/websocket"
)
@@ -62,7 +60,7 @@ type Dashboard struct {
listener net.Listener
conns map[uint32]*client // Currently live websocket connections
- charts *HomeMessage
+ charts *SystemMessage
commit string
lock sync.RWMutex // Lock protecting the dashboard's internals
@@ -84,7 +82,7 @@ func New(config *Config, commit string) (*Dashboard, error) {
conns: make(map[uint32]*client),
config: config,
quit: make(chan chan error),
- charts: &HomeMessage{
+ charts: &SystemMessage{
ActiveMemory: emptyChartEntries(now, activeMemorySampleLimit, config.Refresh),
VirtualMemory: emptyChartEntries(now, virtualMemorySampleLimit, config.Refresh),
NetworkIngress: emptyChartEntries(now, networkIngressSampleLimit, config.Refresh),
@@ -180,18 +178,7 @@ func (db *Dashboard) webHandler(w http.ResponseWriter, r *http.Request) {
path := r.URL.String()
if path == "/" {
- path = "/dashboard.html"
- }
- // If the path of the assets is manually set
- if db.config.Assets != "" {
- blob, err := ioutil.ReadFile(filepath.Join(db.config.Assets, path))
- if err != nil {
- log.Warn("Failed to read file", "path", path, "err", err)
- http.Error(w, "not found", http.StatusNotFound)
- return
- }
- w.Write(blob)
- return
+ path = "/index.html"
}
blob, err := Asset(path[1:])
if err != nil {
@@ -241,7 +228,7 @@ func (db *Dashboard) apiHandler(conn *websocket.Conn) {
Version: fmt.Sprintf("v%d.%d.%d%s", params.VersionMajor, params.VersionMinor, params.VersionPatch, versionMeta),
Commit: db.commit,
},
- Home: &HomeMessage{
+ System: &SystemMessage{
ActiveMemory: db.charts.ActiveMemory,
VirtualMemory: db.charts.VirtualMemory,
NetworkIngress: db.charts.NetworkIngress,
@@ -277,12 +264,14 @@ func (db *Dashboard) collectData() {
systemCPUUsage := gosigar.Cpu{}
systemCPUUsage.Get()
var (
+ mem runtime.MemStats
+
prevNetworkIngress = metrics.DefaultRegistry.Get("p2p/InboundTraffic").(metrics.Meter).Count()
prevNetworkEgress = metrics.DefaultRegistry.Get("p2p/OutboundTraffic").(metrics.Meter).Count()
prevProcessCPUTime = getProcessCPUTime()
prevSystemCPUUsage = systemCPUUsage
- prevDiskRead = metrics.DefaultRegistry.Get("eth/db/chaindata/compact/input").(metrics.Meter).Count()
- prevDiskWrite = metrics.DefaultRegistry.Get("eth/db/chaindata/compact/output").(metrics.Meter).Count()
+ prevDiskRead = metrics.DefaultRegistry.Get("eth/db/chaindata/disk/read").(metrics.Meter).Count()
+ prevDiskWrite = metrics.DefaultRegistry.Get("eth/db/chaindata/disk/write").(metrics.Meter).Count()
frequency = float64(db.config.Refresh / time.Second)
numCPU = float64(runtime.NumCPU())
@@ -300,13 +289,13 @@ func (db *Dashboard) collectData() {
curNetworkEgress = metrics.DefaultRegistry.Get("p2p/OutboundTraffic").(metrics.Meter).Count()
curProcessCPUTime = getProcessCPUTime()
curSystemCPUUsage = systemCPUUsage
- curDiskRead = metrics.DefaultRegistry.Get("eth/db/chaindata/compact/input").(metrics.Meter).Count()
- curDiskWrite = metrics.DefaultRegistry.Get("eth/db/chaindata/compact/output").(metrics.Meter).Count()
+ curDiskRead = metrics.DefaultRegistry.Get("eth/db/chaindata/disk/read").(metrics.Meter).Count()
+ curDiskWrite = metrics.DefaultRegistry.Get("eth/db/chaindata/disk/write").(metrics.Meter).Count()
deltaNetworkIngress = float64(curNetworkIngress - prevNetworkIngress)
deltaNetworkEgress = float64(curNetworkEgress - prevNetworkEgress)
deltaProcessCPUTime = curProcessCPUTime - prevProcessCPUTime
- deltaSystemCPUUsage = systemCPUUsage.Delta(prevSystemCPUUsage)
+ deltaSystemCPUUsage = curSystemCPUUsage.Delta(prevSystemCPUUsage)
deltaDiskRead = curDiskRead - prevDiskRead
deltaDiskWrite = curDiskWrite - prevDiskWrite
)
@@ -319,7 +308,6 @@ func (db *Dashboard) collectData() {
now := time.Now()
- var mem runtime.MemStats
runtime.ReadMemStats(&mem)
activeMemory := &ChartEntry{
Time: now,
@@ -363,7 +351,7 @@ func (db *Dashboard) collectData() {
db.charts.DiskWrite = append(db.charts.DiskRead[1:], diskWrite)
db.sendToAll(&Message{
- Home: &HomeMessage{
+ System: &SystemMessage{
ActiveMemory: ChartEntries{activeMemory},
VirtualMemory: ChartEntries{virtualMemory},
NetworkIngress: ChartEntries{networkIngress},
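
A note on the collectData hunk above: the loop is a plain sample-and-diff pattern, reading each meter once per refresh tick, subtracting the previous reading, and appending the delta as one chart entry. Below is a minimal self-contained sketch of that pattern with purely illustrative names, not the actual go-ethereum helpers:

    package main

    import (
        "fmt"
        "time"
    )

    // sampleLoop reads a monotonically growing counter on every refresh tick and
    // emits the per-interval delta, which is the value the dashboard charts plot.
    func sampleLoop(read func() int64, refresh time.Duration, emit func(t time.Time, delta float64)) {
        prev := read()
        for now := range time.Tick(refresh) {
            cur := read()
            emit(now, float64(cur-prev))
            prev = cur
        }
    }

    func main() {
        var counter int64
        go sampleLoop(
            func() int64 { counter += 3; return counter }, // fake, ever-growing meter
            100*time.Millisecond,
            func(t time.Time, d float64) { fmt.Println(t.Format(time.RFC3339), d) },
        )
        time.Sleep(350 * time.Millisecond)
    }

In the real code the readings come from metrics.DefaultRegistry meters (now keyed eth/db/chaindata/disk/read and disk/write), and each delta is appended to the rolling db.charts slices before being broadcast with sendToAll.
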
diff --git a/dashboard/message.go b/dashboard/message.go
index f0d4280e8..84b8d7443 100644
--- a/dashboard/message.go
+++ b/dashboard/message.go
@@ -28,27 +28,20 @@ type Message struct {
Logs *LogsMessage `json:"logs,omitempty"`
}
+type ChartEntries []*ChartEntry
+
+type ChartEntry struct {
+ Time time.Time `json:"time,omitempty"`
+ Value float64 `json:"value,omitempty"`
+}
+
type GeneralMessage struct {
Version string `json:"version,omitempty"`
Commit string `json:"commit,omitempty"`
}
type HomeMessage struct {
- ActiveMemory ChartEntries `json:"activeMemory,omitempty"`
- VirtualMemory ChartEntries `json:"virtualMemory,omitempty"`
- NetworkIngress ChartEntries `json:"networkIngress,omitempty"`
- NetworkEgress ChartEntries `json:"networkEgress,omitempty"`
- ProcessCPU ChartEntries `json:"processCPU,omitempty"`
- SystemCPU ChartEntries `json:"systemCPU,omitempty"`
- DiskRead ChartEntries `json:"diskRead,omitempty"`
- DiskWrite ChartEntries `json:"diskWrite,omitempty"`
-}
-
-type ChartEntries []*ChartEntry
-
-type ChartEntry struct {
- Time time.Time `json:"time,omitempty"`
- Value float64 `json:"value,omitempty"`
+ /* TODO (kurkomisi) */
}
type ChainMessage struct {
@@ -64,7 +57,14 @@ type NetworkMessage struct {
}
type SystemMessage struct {
- /* TODO (kurkomisi) */
+ ActiveMemory ChartEntries `json:"activeMemory,omitempty"`
+ VirtualMemory ChartEntries `json:"virtualMemory,omitempty"`
+ NetworkIngress ChartEntries `json:"networkIngress,omitempty"`
+ NetworkEgress ChartEntries `json:"networkEgress,omitempty"`
+ ProcessCPU ChartEntries `json:"processCPU,omitempty"`
+ SystemCPU ChartEntries `json:"systemCPU,omitempty"`
+ DiskRead ChartEntries `json:"diskRead,omitempty"`
+ DiskWrite ChartEntries `json:"diskWrite,omitempty"`
}
type LogsMessage struct {
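
Since the chart payload now lives under SystemMessage rather than HomeMessage, the following self-contained sketch (types trimmed to two of the eight fields shown above, standard library only) illustrates the JSON that one such value produces; in the running dashboard it is nested inside the top-level Message before being written to the websocket:

    package main

    import (
        "encoding/json"
        "fmt"
        "time"
    )

    // Trimmed copies of the types from the new dashboard/message.go above.
    type ChartEntry struct {
        Time  time.Time `json:"time,omitempty"`
        Value float64   `json:"value,omitempty"`
    }

    type ChartEntries []*ChartEntry

    type SystemMessage struct {
        ActiveMemory   ChartEntries `json:"activeMemory,omitempty"`
        NetworkIngress ChartEntries `json:"networkIngress,omitempty"`
    }

    func main() {
        msg := &SystemMessage{
            ActiveMemory: ChartEntries{{Time: time.Unix(0, 0).UTC(), Value: 42}},
        }
        out, _ := json.Marshal(msg)
        fmt.Println(string(out))
        // Prints: {"activeMemory":[{"time":"1970-01-01T00:00:00Z","value":42}]}
    }
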
diff --git a/eth/api.go b/eth/api.go
index 0db3eb554..a345b57e4 100644
--- a/eth/api.go
+++ b/eth/api.go
@@ -462,11 +462,11 @@ func (api *PrivateDebugAPI) getModifiedAccounts(startBlock, endBlock *types.Bloc
return nil, fmt.Errorf("start block height (%d) must be less than end block height (%d)", startBlock.Number().Uint64(), endBlock.Number().Uint64())
}
- oldTrie, err := trie.NewSecure(startBlock.Root(), api.eth.chainDb, 0)
+ oldTrie, err := trie.NewSecure(startBlock.Root(), trie.NewDatabase(api.eth.chainDb), 0)
if err != nil {
return nil, err
}
- newTrie, err := trie.NewSecure(endBlock.Root(), api.eth.chainDb, 0)
+ newTrie, err := trie.NewSecure(endBlock.Root(), trie.NewDatabase(api.eth.chainDb), 0)
if err != nil {
return nil, err
}
diff --git a/eth/api_backend.go b/eth/api_backend.go
index 91f392f94..ecd5488a2 100644
--- a/eth/api_backend.go
+++ b/eth/api_backend.go
@@ -104,6 +104,18 @@ func (b *EthApiBackend) GetReceipts(ctx context.Context, blockHash common.Hash)
return core.GetBlockReceipts(b.eth.chainDb, blockHash, core.GetBlockNumber(b.eth.chainDb, blockHash)), nil
}
+func (b *EthApiBackend) GetLogs(ctx context.Context, blockHash common.Hash) ([][]*types.Log, error) {
+ receipts := core.GetBlockReceipts(b.eth.chainDb, blockHash, core.GetBlockNumber(b.eth.chainDb, blockHash))
+ if receipts == nil {
+ return nil, nil
+ }
+ logs := make([][]*types.Log, len(receipts))
+ for i, receipt := range receipts {
+ logs[i] = receipt.Logs
+ }
+ return logs, nil
+}
+
func (b *EthApiBackend) GetTd(blockHash common.Hash) *big.Int {
return b.eth.blockchain.GetTdByHash(blockHash)
}
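
The new GetLogs helper returns logs grouped per transaction rather than flattened, so filter code can keep the transaction index. A small sketch of that grouping with stand-in types (Log and Receipt here are minimal local copies, not core/types):

package main

import "fmt"

// Minimal stand-ins for core/types - just enough shape for the example.
type Log struct{ Index uint }
type Receipt struct{ Logs []*Log }

// groupLogs mirrors the shape of the new GetLogs helper: one log slice per
// receipt, preserving transaction order so callers can still tell which
// transaction emitted which log.
func groupLogs(receipts []*Receipt) [][]*Log {
    logs := make([][]*Log, len(receipts))
    for i, receipt := range receipts {
        logs[i] = receipt.Logs
    }
    return logs
}

func main() {
    receipts := []*Receipt{
        {Logs: []*Log{{Index: 0}, {Index: 1}}},
        {Logs: nil}, // a transaction without logs keeps its slot
        {Logs: []*Log{{Index: 2}}},
    }
    for txIdx, txLogs := range groupLogs(receipts) {
        fmt.Printf("tx %d emitted %d log(s)\n", txIdx, len(txLogs))
    }
}
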
diff --git a/eth/api_test.go b/eth/api_test.go
index 248bc3ab6..900a82bb6 100644
--- a/eth/api_test.go
+++ b/eth/api_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/eth/api_tracer.go b/eth/api_tracer.go
index d49f077ae..07c4457bc 100644
--- a/eth/api_tracer.go
+++ b/eth/api_tracer.go
@@ -24,7 +24,6 @@ import (
"io/ioutil"
"runtime"
"sync"
- "sync/atomic"
"time"
"github.com/ethereum/go-ethereum/common"
@@ -34,7 +33,6 @@ import (
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/core/vm"
"github.com/ethereum/go-ethereum/eth/tracers"
- "github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/internal/ethapi"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/rlp"
@@ -72,6 +70,7 @@ type txTraceResult struct {
type blockTraceTask struct {
statedb *state.StateDB // Intermediate state prepped for tracing
block *types.Block // Block to trace the transactions from
+ rootref common.Hash // Trie root reference held for this task
results []*txTraceResult // Trace results produced by the task
}
@@ -90,59 +89,6 @@ type txTraceTask struct {
index int // Transaction offset in the block
}
-// ephemeralDatabase is a memory wrapper around a proper database, which acts as
-// an ephemeral write layer. This construct is used by the chain tracer to write
-// state tries for intermediate blocks without serializing to disk, but at the
-// same time to allow disk fallback for reads that do no hit the memory layer.
-type ephemeralDatabase struct {
- diskdb ethdb.Database // Persistent disk database to fall back to with reads
- memdb *ethdb.MemDatabase // Ephemeral memory database for primary reads and writes
-}
-
-func (db *ephemeralDatabase) Put(key []byte, value []byte) error { return db.memdb.Put(key, value) }
-func (db *ephemeralDatabase) Delete(key []byte) error { return errors.New("delete not supported") }
-func (db *ephemeralDatabase) Close() { db.memdb.Close() }
-func (db *ephemeralDatabase) NewBatch() ethdb.Batch {
- return db.memdb.NewBatch()
-}
-func (db *ephemeralDatabase) Has(key []byte) (bool, error) {
- if has, _ := db.memdb.Has(key); has {
- return has, nil
- }
- return db.diskdb.Has(key)
-}
-func (db *ephemeralDatabase) Get(key []byte) ([]byte, error) {
- if blob, _ := db.memdb.Get(key); blob != nil {
- return blob, nil
- }
- return db.diskdb.Get(key)
-}
-
-// Prune does a state sync into a new memory write layer and replaces the old one.
-// This allows us to discard entries that are no longer referenced from the current
-// state.
-func (db *ephemeralDatabase) Prune(root common.Hash) {
- // Pull the still relevant state data into memory
- sync := state.NewStateSync(root, db.diskdb)
- for sync.Pending() > 0 {
- hash := sync.Missing(1)[0]
-
- // Move the next trie node from the memory layer into a sync struct
- node, err := db.memdb.Get(hash[:])
- if err != nil {
- panic(err) // memdb must have the data
- }
- if _, _, err := sync.Process([]trie.SyncResult{{Hash: hash, Data: node}}); err != nil {
- panic(err) // it's not possible to fail processing a node
- }
- }
- // Discard the old memory layer and write a new one
- db.memdb, _ = ethdb.NewMemDatabaseWithCap(db.memdb.Len())
- if _, err := sync.Commit(db); err != nil {
- panic(err) // writing into a memdb cannot fail
- }
-}
-
// TraceChain returns the structured logs created during the execution of EVM
// between two blocks (excluding start) and returns them as a JSON object.
func (api *PrivateDebugAPI) TraceChain(ctx context.Context, start, end rpc.BlockNumber, config *TraceConfig) (*rpc.Subscription, error) {
@@ -188,19 +134,15 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
// Ensure we have a valid starting state before doing any work
origin := start.NumberU64()
+ database := state.NewDatabase(api.eth.ChainDb())
- memdb, _ := ethdb.NewMemDatabase()
- db := &ephemeralDatabase{
- diskdb: api.eth.ChainDb(),
- memdb: memdb,
- }
if number := start.NumberU64(); number > 0 {
start = api.eth.blockchain.GetBlock(start.ParentHash(), start.NumberU64()-1)
if start == nil {
return nil, fmt.Errorf("parent block #%d not found", number-1)
}
}
- statedb, err := state.New(start.Root(), state.NewDatabase(db))
+ statedb, err := state.New(start.Root(), database)
if err != nil {
// If the starting state is missing, allow some number of blocks to be reexecuted
reexec := defaultTraceReexec
@@ -213,7 +155,7 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
if start == nil {
break
}
- if statedb, err = state.New(start.Root(), state.NewDatabase(db)); err == nil {
+ if statedb, err = state.New(start.Root(), database); err == nil {
break
}
}
@@ -256,7 +198,7 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
res, err := api.traceTx(ctx, msg, vmctx, task.statedb, config)
if err != nil {
task.results[i] = &txTraceResult{Error: err.Error()}
- log.Warn("Tracing failed", "err", err)
+ log.Warn("Tracing failed", "hash", tx.Hash(), "block", task.block.NumberU64(), "err", err)
break
}
task.statedb.DeleteSuicides()
@@ -273,7 +215,6 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
}
// Start a goroutine to feed all the blocks into the tracers
begin := time.Now()
- complete := start.NumberU64()
go func() {
var (
@@ -281,6 +222,7 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
number uint64
traced uint64
failed error
+ proot common.Hash
)
// Ensure everything is properly cleaned up on any exit path
defer func() {
@@ -308,7 +250,7 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
// Print progress logs if long enough time elapsed
if time.Since(logged) > 8*time.Second {
if number > origin {
- log.Info("Tracing chain segment", "start", origin, "end", end.NumberU64(), "current", number, "transactions", traced, "elapsed", time.Since(begin))
+ log.Info("Tracing chain segment", "start", origin, "end", end.NumberU64(), "current", number, "transactions", traced, "elapsed", time.Since(begin), "memory", database.TrieDB().Size())
} else {
log.Info("Preparing state for chain trace", "block", number, "start", origin, "elapsed", time.Since(begin))
}
@@ -325,13 +267,11 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
txs := block.Transactions()
select {
- case tasks <- &blockTraceTask{statedb: statedb.Copy(), block: block, results: make([]*txTraceResult, len(txs))}:
+ case tasks <- &blockTraceTask{statedb: statedb.Copy(), block: block, rootref: proot, results: make([]*txTraceResult, len(txs))}:
case <-notifier.Closed():
return
}
traced += uint64(len(txs))
- } else {
- atomic.StoreUint64(&complete, number)
}
// Generate the next state snapshot fast without tracing
_, _, _, err := api.eth.blockchain.Processor().Process(block, statedb, vm.Config{})
@@ -340,7 +280,7 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
break
}
// Finalize the state so any modifications are written to the trie
- root, err := statedb.CommitTo(db, true)
+ root, err := statedb.Commit(true)
if err != nil {
failed = err
break
@@ -349,26 +289,14 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
failed = err
break
}
- // After every N blocks, prune the database to only retain relevant data
- if (number-start.NumberU64())%4096 == 0 {
- // Wait until currently pending trace jobs finish
- for atomic.LoadUint64(&complete) != number {
- select {
- case <-time.After(100 * time.Millisecond):
- case <-notifier.Closed():
- return
- }
- }
- // No more concurrent access at this point, prune the database
- var (
- nodes = db.memdb.Len()
- start = time.Now()
- )
- db.Prune(root)
- log.Info("Pruned tracer state entries", "deleted", nodes-db.memdb.Len(), "left", db.memdb.Len(), "elapsed", time.Since(start))
-
- statedb, _ = state.New(root, state.NewDatabase(db))
+ // Reference the trie twice, once for us, once for the tracer
+ database.TrieDB().Reference(root, common.Hash{})
+ if number >= origin {
+ database.TrieDB().Reference(root, common.Hash{})
}
+ // Dereference all past tries we ourselves are done working with
+ database.TrieDB().Dereference(proot, common.Hash{})
+ proot = root
}
}()
@@ -387,12 +315,14 @@ func (api *PrivateDebugAPI) traceChain(ctx context.Context, start, end *types.Bl
}
done[uint64(result.Block)] = result
+ // Dereference any parent tries held in memory by this task
+ database.TrieDB().Dereference(res.rootref, common.Hash{})
+
// Stream completed traces to the user, aborting on the first error
for result, ok := done[next]; ok; result, ok = done[next] {
if len(result.Traces) > 0 || next == end.NumberU64() {
notifier.Notify(sub.ID, result)
}
- atomic.StoreUint64(&complete, next)
delete(done, next)
next++
}
@@ -544,18 +474,14 @@ func (api *PrivateDebugAPI) computeStateDB(block *types.Block, reexec uint64) (*
}
// Otherwise try to reexec blocks until we find a state or reach our limit
origin := block.NumberU64()
+ database := state.NewDatabase(api.eth.ChainDb())
- memdb, _ := ethdb.NewMemDatabase()
- db := &ephemeralDatabase{
- diskdb: api.eth.ChainDb(),
- memdb: memdb,
- }
for i := uint64(0); i < reexec; i++ {
block = api.eth.blockchain.GetBlock(block.ParentHash(), block.NumberU64()-1)
if block == nil {
break
}
- if statedb, err = state.New(block.Root(), state.NewDatabase(db)); err == nil {
+ if statedb, err = state.New(block.Root(), database); err == nil {
break
}
}
@@ -571,6 +497,7 @@ func (api *PrivateDebugAPI) computeStateDB(block *types.Block, reexec uint64) (*
var (
start = time.Now()
logged time.Time
+ proot common.Hash
)
for block.NumberU64() < origin {
// Print progress logs if long enough time elapsed
@@ -587,26 +514,18 @@ func (api *PrivateDebugAPI) computeStateDB(block *types.Block, reexec uint64) (*
return nil, err
}
// Finalize the state so any modifications are written to the trie
- root, err := statedb.CommitTo(db, true)
+ root, err := statedb.Commit(true)
if err != nil {
return nil, err
}
if err := statedb.Reset(root); err != nil {
return nil, err
}
- // After every N blocks, prune the database to only retain relevant data
- if block.NumberU64()%4096 == 0 || block.NumberU64() == origin {
- var (
- nodes = db.memdb.Len()
- begin = time.Now()
- )
- db.Prune(root)
- log.Info("Pruned tracer state entries", "deleted", nodes-db.memdb.Len(), "left", db.memdb.Len(), "elapsed", time.Since(begin))
-
- statedb, _ = state.New(root, state.NewDatabase(db))
- }
+ database.TrieDB().Reference(root, common.Hash{})
+ database.TrieDB().Dereference(proot, common.Hash{})
+ proot = root
}
- log.Info("Historical state regenerated", "block", block.NumberU64(), "elapsed", time.Since(start))
+ log.Info("Historical state regenerated", "block", block.NumberU64(), "elapsed", time.Since(start), "size", database.TrieDB().Size())
return statedb, nil
}
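
Both tracing paths now follow the same memory-bounding pattern: commit the state, Reference the fresh root in the in-memory trie database, Dereference the previous one, and remember the new root as proot. A toy, self-contained sketch of that reference-counting dance (refDB is a hypothetical stand-in, not trie.Database):

package main

import "fmt"

// refDB is a toy stand-in for the reference-counted in-memory trie database
// (trie.Database in the real code); it only tracks root counts so the rolling
// Reference/Dereference pattern can be shown in isolation.
type refDB struct {
    refs map[string]int
}

func (db *refDB) Reference(root string) { db.refs[root]++ }

func (db *refDB) Dereference(root string) {
    if db.refs[root] > 0 {
        db.refs[root]--
        if db.refs[root] == 0 {
            delete(db.refs, root) // drop tries nobody holds any more
        }
    }
}

func main() {
    db := &refDB{refs: make(map[string]int)}

    // Walk a few "blocks": pin the fresh root, release the previous one and
    // remember the new root - the proot bookkeeping the tracer now performs.
    var proot string
    for _, root := range []string{"root-1", "root-2", "root-3"} {
        db.Reference(root)
        db.Dereference(proot)
        proot = root
        fmt.Println("live roots:", len(db.refs)) // stays at 1
    }
}
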
diff --git a/eth/backend.go b/eth/backend.go
index c39974a2c..94aad2310 100644
--- a/eth/backend.go
+++ b/eth/backend.go
@@ -144,9 +144,11 @@ func New(ctx *node.ServiceContext, config *Config) (*Ethereum, error) {
}
core.WriteBlockChainVersion(chainDb, core.BlockChainVersion)
}
-
- vmConfig := vm.Config{EnablePreimageRecording: config.EnablePreimageRecording}
- eth.blockchain, err = core.NewBlockChain(chainDb, eth.chainConfig, eth.engine, vmConfig)
+ var (
+ vmConfig = vm.Config{EnablePreimageRecording: config.EnablePreimageRecording}
+ cacheConfig = &core.CacheConfig{Disabled: config.NoPruning, TrieNodeLimit: config.TrieCache, TrieTimeLimit: config.TrieTimeout}
+ )
+ eth.blockchain, err = core.NewBlockChain(chainDb, cacheConfig, eth.chainConfig, eth.engine, vmConfig)
if err != nil {
return nil, err
}
@@ -393,10 +395,10 @@ func (s *Ethereum) Start(srvr *p2p.Server) error {
// Figure out a max peers count based on the server limits
maxPeers := srvr.MaxPeers
if s.config.LightServ > 0 {
- maxPeers -= s.config.LightPeers
- if maxPeers < srvr.MaxPeers/2 {
- maxPeers = srvr.MaxPeers / 2
+ if s.config.LightPeers >= srvr.MaxPeers {
+ return fmt.Errorf("invalid peer config: light peer count (%d) >= total peer count (%d)", s.config.LightPeers, srvr.MaxPeers)
}
+ maxPeers -= s.config.LightPeers
}
// Start the networking layer and the light server if requested
s.protocolManager.Start(maxPeers)
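
The rewritten peer budgeting fails fast instead of silently clamping: when light serving is enabled, the light peer allowance must stay strictly below the server's peer limit, and only the remainder goes to the eth protocol. A standalone sketch of that check (function name and error text are illustrative, not go-ethereum API):

package main

import (
    "errors"
    "fmt"
)

// reserveLightSlots reproduces the peer-budget check in isolation: when light
// serving is on, the light peer allowance must stay strictly below the total
// peer limit, and only the remainder is left for the eth protocol manager.
func reserveLightSlots(maxPeers, lightServ, lightPeers int) (int, error) {
    if lightServ > 0 {
        if lightPeers >= maxPeers {
            return 0, errors.New("light peer count must be below total peer count")
        }
        maxPeers -= lightPeers
    }
    return maxPeers, nil
}

func main() {
    if _, err := reserveLightSlots(25, 50, 100); err != nil {
        fmt.Println("rejected:", err)
    }
    left, _ := reserveLightSlots(125, 50, 100)
    fmt.Println("eth peers left:", left) // 25
}
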
diff --git a/eth/bind.go b/eth/bind.go
deleted file mode 100644
index 769a6c741..000000000
--- a/eth/bind.go
+++ /dev/null
@@ -1,136 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package eth
-
-import (
- "context"
- "math/big"
-
- "github.com/ethereum/go-ethereum"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/common/hexutil"
- "github.com/ethereum/go-ethereum/core/types"
- "github.com/ethereum/go-ethereum/internal/ethapi"
- "github.com/ethereum/go-ethereum/rlp"
- "github.com/ethereum/go-ethereum/rpc"
-)
-
-// ContractBackend implements bind.ContractBackend with direct calls to Ethereum
-// internals to support operating on contracts within subprotocols like eth and
-// swarm.
-//
-// Internally this backend uses the already exposed API endpoints of the Ethereum
-// object. These should be rewritten to internal Go method calls when the Go API
-// is refactored to support a clean library use.
-type ContractBackend struct {
- eapi *ethapi.PublicEthereumAPI // Wrapper around the Ethereum object to access metadata
- bcapi *ethapi.PublicBlockChainAPI // Wrapper around the blockchain to access chain data
- txapi *ethapi.PublicTransactionPoolAPI // Wrapper around the transaction pool to access transaction data
-}
-
-// NewContractBackend creates a new native contract backend using an existing
-// Ethereum object.
-func NewContractBackend(apiBackend ethapi.Backend) *ContractBackend {
- return &ContractBackend{
- eapi: ethapi.NewPublicEthereumAPI(apiBackend),
- bcapi: ethapi.NewPublicBlockChainAPI(apiBackend),
- txapi: ethapi.NewPublicTransactionPoolAPI(apiBackend, new(ethapi.AddrLocker)),
- }
-}
-
-// CodeAt retrieves any code associated with the contract from the local API.
-func (b *ContractBackend) CodeAt(ctx context.Context, contract common.Address, blockNum *big.Int) ([]byte, error) {
- return b.bcapi.GetCode(ctx, contract, toBlockNumber(blockNum))
-}
-
-// CodeAt retrieves any code associated with the contract from the local API.
-func (b *ContractBackend) PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error) {
- return b.bcapi.GetCode(ctx, contract, rpc.PendingBlockNumber)
-}
-
-// ContractCall implements bind.ContractCaller executing an Ethereum contract
-// call with the specified data as the input. The pending flag requests execution
-// against the pending block, not the stable head of the chain.
-func (b *ContractBackend) CallContract(ctx context.Context, msg ethereum.CallMsg, blockNum *big.Int) ([]byte, error) {
- out, err := b.bcapi.Call(ctx, toCallArgs(msg), toBlockNumber(blockNum))
- return out, err
-}
-
-// ContractCall implements bind.ContractCaller executing an Ethereum contract
-// call with the specified data as the input. The pending flag requests execution
-// against the pending block, not the stable head of the chain.
-func (b *ContractBackend) PendingCallContract(ctx context.Context, msg ethereum.CallMsg) ([]byte, error) {
- out, err := b.bcapi.Call(ctx, toCallArgs(msg), rpc.PendingBlockNumber)
- return out, err
-}
-
-func toCallArgs(msg ethereum.CallMsg) ethapi.CallArgs {
- args := ethapi.CallArgs{
- To: msg.To,
- From: msg.From,
- Data: msg.Data,
- Gas: hexutil.Uint64(msg.Gas),
- }
- if msg.GasPrice != nil {
- args.GasPrice = hexutil.Big(*msg.GasPrice)
- }
- if msg.Value != nil {
- args.Value = hexutil.Big(*msg.Value)
- }
- return args
-}
-
-func toBlockNumber(num *big.Int) rpc.BlockNumber {
- if num == nil {
- return rpc.LatestBlockNumber
- }
- return rpc.BlockNumber(num.Int64())
-}
-
-// PendingAccountNonce implements bind.ContractTransactor retrieving the current
-// pending nonce associated with an account.
-func (b *ContractBackend) PendingNonceAt(ctx context.Context, account common.Address) (nonce uint64, err error) {
- out, err := b.txapi.GetTransactionCount(ctx, account, rpc.PendingBlockNumber)
- if out != nil {
- nonce = uint64(*out)
- }
- return nonce, err
-}
-
-// SuggestGasPrice implements bind.ContractTransactor retrieving the currently
-// suggested gas price to allow a timely execution of a transaction.
-func (b *ContractBackend) SuggestGasPrice(ctx context.Context) (*big.Int, error) {
- return b.eapi.GasPrice(ctx)
-}
-
-// EstimateGasLimit implements bind.ContractTransactor triing to estimate the gas
-// needed to execute a specific transaction based on the current pending state of
-// the backend blockchain. There is no guarantee that this is the true gas limit
-// requirement as other transactions may be added or removed by miners, but it
-// should provide a basis for setting a reasonable default.
-func (b *ContractBackend) EstimateGas(ctx context.Context, msg ethereum.CallMsg) (uint64, error) {
- gas, err := b.bcapi.EstimateGas(ctx, toCallArgs(msg))
- return uint64(gas), err
-}
-
-// SendTransaction implements bind.ContractTransactor injects the transaction
-// into the pending pool for execution.
-func (b *ContractBackend) SendTransaction(ctx context.Context, tx *types.Transaction) error {
- raw, _ := rlp.EncodeToBytes(tx)
- _, err := b.txapi.SendRawTransaction(ctx, raw)
- return err
-}
diff --git a/eth/config.go b/eth/config.go
index 4399560fa..dd7f42c7d 100644
--- a/eth/config.go
+++ b/eth/config.go
@@ -22,6 +22,7 @@ import (
"os/user"
"path/filepath"
"runtime"
+ "time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
@@ -43,8 +44,10 @@ var DefaultConfig = Config{
DatasetsOnDisk: 2,
},
NetworkId: 1,
- LightPeers: 20,
- DatabaseCache: 128,
+ LightPeers: 100,
+ DatabaseCache: 768,
+ TrieCache: 256,
+ TrieTimeout: 5 * time.Minute,
GasPrice: big.NewInt(18 * params.Shannon),
TxPool: core.DefaultTxPoolConfig,
@@ -78,6 +81,7 @@ type Config struct {
// Protocol options
NetworkId uint64 // Network ID to use for selecting peers to connect to
SyncMode downloader.SyncMode
+ NoPruning bool
// Light client options
LightServ int `toml:",omitempty"` // Maximum percentage of time allowed for serving LES requests
@@ -87,6 +91,8 @@ type Config struct {
SkipBcVersionCheck bool `toml:"-"`
DatabaseHandles int `toml:"-"`
DatabaseCache int
+ TrieCache int
+ TrieTimeout time.Duration
// Mining-related options
Etherbase common.Address `toml:",omitempty"`
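
The new cache knobs travel from eth.Config into core.CacheConfig (see eth/backend.go above). A trimmed-down sketch of a config literal using the defaults from this diff; the struct below is a local stand-in, and its field comments reflect my reading of the options rather than documented behaviour.

package main

import (
    "fmt"
    "time"
)

// Config is a trimmed-down local stand-in for eth.Config, keeping only the
// cache/pruning knobs added in this diff.
type Config struct {
    NoPruning     bool          // keep writing every trie to disk instead of pruning in memory
    DatabaseCache int           // megabytes handed to the database cache
    TrieCache     int           // megabytes allowed for cached trie nodes in memory
    TrieTimeout   time.Duration // longest a trie may stay memory-only before being flushed
}

func main() {
    // Defaults taken from the DefaultConfig hunk above.
    cfg := Config{
        NoPruning:     false,
        DatabaseCache: 768,
        TrieCache:     256,
        TrieTimeout:   5 * time.Minute,
    }
    fmt.Printf("%+v\n", cfg)
}
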
diff --git a/eth/downloader/downloader.go b/eth/downloader/downloader.go
index b338129e0..62842adbc 100644
--- a/eth/downloader/downloader.go
+++ b/eth/downloader/downloader.go
@@ -18,10 +18,8 @@
package downloader
import (
- "crypto/rand"
"errors"
"fmt"
- "math"
"math/big"
"sync"
"sync/atomic"
@@ -29,12 +27,13 @@ import (
ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/params"
- "github.com/rcrowley/go-metrics"
)
var (
@@ -61,12 +60,11 @@ var (
maxHeadersProcess = 2048 // Number of header download results to import at once into the chain
maxResultsProcess = 2048 // Number of content download results to import at once into the chain
- fsHeaderCheckFrequency = 100 // Verification frequency of the downloaded headers during fast sync
- fsHeaderSafetyNet = 2048 // Number of headers to discard in case a chain violation is detected
- fsHeaderForceVerify = 24 // Number of headers to verify before and after the pivot to accept it
- fsPivotInterval = 256 // Number of headers out of which to randomize the pivot point
- fsMinFullBlocks = 64 // Number of blocks to retrieve fully even in fast sync
- fsCriticalTrials = uint32(32) // Number of times to retry in the cricical section before bailing
+ fsHeaderCheckFrequency = 100 // Verification frequency of the downloaded headers during fast sync
+ fsHeaderSafetyNet = 2048 // Number of headers to discard in case a chain violation is detected
+ fsHeaderForceVerify = 24 // Number of headers to verify before and after the pivot to accept it
+ fsHeaderContCheck = 3 * time.Second // Time interval to check for header continuations during state download
+ fsMinFullBlocks = 64 // Number of blocks to retrieve fully even in fast sync
)
var (
@@ -102,9 +100,6 @@ type Downloader struct {
peers *peerSet // Set of active peers from which download can proceed
stateDB ethdb.Database
- fsPivotLock *types.Header // Pivot header on critical section entry (cannot change between retries)
- fsPivotFails uint32 // Number of subsequent fast sync failures in the critical section
-
rttEstimate uint64 // Round trip time to target for download requests
rttConfidence uint64 // Confidence in the estimated RTT (unit: millionths to allow atomic ops)
@@ -124,6 +119,7 @@ type Downloader struct {
synchroniseMock func(id string, hash common.Hash) error // Replacement for synchronise during testing
synchronising int32
notified int32
+ committed int32
// Channels
headerCh chan dataPack // [eth/62] Channel receiving inbound block headers
@@ -156,7 +152,7 @@ type Downloader struct {
// LightChain encapsulates functions required to synchronise a light chain.
type LightChain interface {
// HasHeader verifies a header's presence in the local chain.
- HasHeader(h common.Hash, number uint64) bool
+ HasHeader(common.Hash, uint64) bool
// GetHeaderByHash retrieves a header from the local chain.
GetHeaderByHash(common.Hash) *types.Header
@@ -164,8 +160,8 @@ type LightChain interface {
// CurrentHeader retrieves the head header from the local chain.
CurrentHeader() *types.Header
- // GetTdByHash returns the total difficulty of a local block.
- GetTdByHash(common.Hash) *big.Int
+ // GetTd returns the total difficulty of a local block.
+ GetTd(common.Hash, uint64) *big.Int
// InsertHeaderChain inserts a batch of headers into the local chain.
InsertHeaderChain([]*types.Header, int) (int, error)
@@ -178,8 +174,8 @@ type LightChain interface {
type BlockChain interface {
LightChain
- // HasBlockAndState verifies block and associated states' presence in the local chain.
- HasBlockAndState(common.Hash) bool
+ // HasBlock verifies a block's presence in the local chain.
+ HasBlock(common.Hash, uint64) bool
// GetBlockByHash retrieves a block from the local chain.
GetBlockByHash(common.Hash) *types.Block
@@ -226,7 +222,10 @@ func New(mode SyncMode, stateDb ethdb.Database, mux *event.TypeMux, chain BlockC
quitCh: make(chan struct{}),
stateCh: make(chan dataPack),
stateSyncStart: make(chan *stateSync),
- trackStateReq: make(chan *stateReq),
+ syncStatsState: stateSyncStats{
+ processed: core.GetTrieSyncProgress(stateDb),
+ },
+ trackStateReq: make(chan *stateReq),
}
go dl.qosTuner()
go dl.stateFetcher()
@@ -271,7 +270,6 @@ func (d *Downloader) Synchronising() bool {
// RegisterPeer injects a new download peer into the set of block source to be
// used for fetching hashes and blocks from.
func (d *Downloader) RegisterPeer(id string, version int, peer Peer) error {
-
logger := log.New("peer", id)
logger.Trace("Registering sync peer")
if err := d.peers.Register(newPeerConnection(id, version, peer, logger)); err != nil {
@@ -324,8 +322,13 @@ func (d *Downloader) Synchronise(id string, head common.Hash, td *big.Int, mode
errEmptyHeaderSet, errPeersUnavailable, errTooOld,
errInvalidAncestor, errInvalidChain:
log.Warn("Synchronisation failed, dropping peer", "peer", id, "err", err)
- d.dropPeer(id)
-
+ if d.dropPeer == nil {
+ // The dropPeer method is nil when `--copydb` is used for a local copy.
+ // Timeouts can occur if e.g. compaction hits at the wrong time, and can be ignored
+ log.Warn("Downloader wants to drop peer, but peerdrop-function is not set", "peer", id)
+ } else {
+ d.dropPeer(id)
+ }
default:
log.Warn("Synchronisation failed, retrying", "err", err)
}
@@ -386,9 +389,7 @@ func (d *Downloader) synchronise(id string, hash common.Hash, td *big.Int, mode
// Set the requested sync mode, unless it's forbidden
d.mode = mode
- if d.mode == FastSync && atomic.LoadUint32(&d.fsPivotFails) >= fsCriticalTrials {
- d.mode = FullSync
- }
+
// Retrieve the origin peer and initiate the downloading process
p := d.peers.Peer(id)
if p == nil {
@@ -436,57 +437,40 @@ func (d *Downloader) syncWithPeer(p *peerConnection, hash common.Hash, td *big.I
d.syncStatsChainHeight = height
d.syncStatsLock.Unlock()
- // Initiate the sync using a concurrent header and content retrieval algorithm
+ // Ensure our origin point is below any fast sync pivot point
pivot := uint64(0)
- switch d.mode {
- case LightSync:
- pivot = height
- case FastSync:
- // Calculate the new fast/slow sync pivot point
- if d.fsPivotLock == nil {
- pivotOffset, err := rand.Int(rand.Reader, big.NewInt(int64(fsPivotInterval)))
- if err != nil {
- panic(fmt.Sprintf("Failed to access crypto random source: %v", err))
- }
- if height > uint64(fsMinFullBlocks)+pivotOffset.Uint64() {
- pivot = height - uint64(fsMinFullBlocks) - pivotOffset.Uint64()
- }
+ if d.mode == FastSync {
+ if height <= uint64(fsMinFullBlocks) {
+ origin = 0
} else {
- // Pivot point locked in, use this and do not pick a new one!
- pivot = d.fsPivotLock.Number.Uint64()
- }
- // If the point is below the origin, move origin back to ensure state download
- if pivot < origin {
- if pivot > 0 {
+ pivot = height - uint64(fsMinFullBlocks)
+ if pivot <= origin {
origin = pivot - 1
- } else {
- origin = 0
}
}
- log.Debug("Fast syncing until pivot block", "pivot", pivot)
}
- d.queue.Prepare(origin+1, d.mode, pivot, latest)
+ d.committed = 1
+ if d.mode == FastSync && pivot != 0 {
+ d.committed = 0
+ }
+ // Initiate the sync using a concurrent header and content retrieval algorithm
+ d.queue.Prepare(origin+1, d.mode)
if d.syncInitHook != nil {
d.syncInitHook(origin, height)
}
fetchers := []func() error{
- func() error { return d.fetchHeaders(p, origin+1) }, // Headers are always retrieved
- func() error { return d.fetchBodies(origin + 1) }, // Bodies are retrieved during normal and fast sync
- func() error { return d.fetchReceipts(origin + 1) }, // Receipts are retrieved during fast sync
- func() error { return d.processHeaders(origin+1, td) },
+ func() error { return d.fetchHeaders(p, origin+1, pivot) }, // Headers are always retrieved
+ func() error { return d.fetchBodies(origin + 1) }, // Bodies are retrieved during normal and fast sync
+ func() error { return d.fetchReceipts(origin + 1) }, // Receipts are retrieved during fast sync
+ func() error { return d.processHeaders(origin+1, pivot, td) },
}
if d.mode == FastSync {
fetchers = append(fetchers, func() error { return d.processFastSyncContent(latest) })
} else if d.mode == FullSync {
fetchers = append(fetchers, d.processFullSyncContent)
}
- err = d.spawnSync(fetchers)
- if err != nil && d.mode == FastSync && d.fsPivotLock != nil {
- // If sync failed in the critical section, bump the fail counter.
- atomic.AddUint32(&d.fsPivotFails, 1)
- }
- return err
+ return d.spawnSync(fetchers)
}
// spawnSync runs d.process and all given fetcher functions to completion in
@@ -602,7 +586,6 @@ func (d *Downloader) findAncestor(p *peerConnection, height uint64) (uint64, err
// Figure out the valid ancestor range to prevent rewrite attacks
floor, ceil := int64(-1), d.lightchain.CurrentHeader().Number.Uint64()
- p.log.Debug("Looking for common ancestor", "local", ceil, "remote", height)
if d.mode == FullSync {
ceil = d.blockchain.CurrentBlock().NumberU64()
} else if d.mode == FastSync {
@@ -611,6 +594,8 @@ func (d *Downloader) findAncestor(p *peerConnection, height uint64) (uint64, err
if ceil >= MaxForkAncestry {
floor = int64(ceil - MaxForkAncestry)
}
+ p.log.Debug("Looking for common ancestor", "local", ceil, "remote", height)
+
// Request the topmost blocks to short circuit binary ancestor lookup
head := ceil
if head > height {
@@ -666,7 +651,7 @@ func (d *Downloader) findAncestor(p *peerConnection, height uint64) (uint64, err
continue
}
// Otherwise check if we already know the header or not
- if (d.mode == FullSync && d.blockchain.HasBlockAndState(headers[i].Hash())) || (d.mode != FullSync && d.lightchain.HasHeader(headers[i].Hash(), headers[i].Number.Uint64())) {
+ if (d.mode == FullSync && d.blockchain.HasBlock(headers[i].Hash(), headers[i].Number.Uint64())) || (d.mode != FullSync && d.lightchain.HasHeader(headers[i].Hash(), headers[i].Number.Uint64())) {
number, hash = headers[i].Number.Uint64(), headers[i].Hash()
// If every header is known, even future ones, the peer straight out lied about its head
@@ -731,7 +716,7 @@ func (d *Downloader) findAncestor(p *peerConnection, height uint64) (uint64, err
arrived = true
// Modify the search interval based on the response
- if (d.mode == FullSync && !d.blockchain.HasBlockAndState(headers[0].Hash())) || (d.mode != FullSync && !d.lightchain.HasHeader(headers[0].Hash(), headers[0].Number.Uint64())) {
+ if (d.mode == FullSync && !d.blockchain.HasBlock(headers[0].Hash(), headers[0].Number.Uint64())) || (d.mode != FullSync && !d.lightchain.HasHeader(headers[0].Hash(), headers[0].Number.Uint64())) {
end = check
break
}
@@ -769,7 +754,7 @@ func (d *Downloader) findAncestor(p *peerConnection, height uint64) (uint64, err
// other peers are only accepted if they map cleanly to the skeleton. If no one
// can fill in the skeleton - not even the origin peer - it's assumed invalid and
// the origin is dropped.
-func (d *Downloader) fetchHeaders(p *peerConnection, from uint64) error {
+func (d *Downloader) fetchHeaders(p *peerConnection, from uint64, pivot uint64) error {
p.log.Debug("Directing header downloads", "origin", from)
defer p.log.Debug("Header download terminated")
@@ -820,6 +805,18 @@ func (d *Downloader) fetchHeaders(p *peerConnection, from uint64) error {
}
// If no more headers are inbound, notify the content fetchers and return
if packet.Items() == 0 {
+ // Don't abort header fetches while the pivot is downloading
+ if atomic.LoadInt32(&d.committed) == 0 && pivot <= from {
+ p.log.Debug("No headers, waiting for pivot commit")
+ select {
+ case <-time.After(fsHeaderContCheck):
+ getHeaders(from)
+ continue
+ case <-d.cancelCh:
+ return errCancelHeaderFetch
+ }
+ }
+ // Pivot done (or not in fast sync) and no more headers, terminate the process
p.log.Debug("No more headers available")
select {
case d.headerProcCh <- nil:
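
The header fetcher no longer terminates the moment a peer runs out of headers: while the fast-sync pivot is still being committed (d.committed == 0) it re-polls on a timer and only gives up on cancellation. A self-contained sketch of that retry loop (names and the shortened poll interval are illustrative):

package main

import (
    "fmt"
    "sync/atomic"
    "time"
)

// The real downloader polls every fsHeaderContCheck (3s); shortened here.
const headerContCheck = 50 * time.Millisecond

// waitForPivot shows the retry pattern added to fetchHeaders: when the peer
// has no more headers but the pivot block is still being committed
// (committed == 0), keep re-polling instead of terminating the header fetch,
// and bail out promptly if the sync is cancelled.
func waitForPivot(committed *int32, cancel <-chan struct{}, poll func()) error {
    for atomic.LoadInt32(committed) == 0 {
        select {
        case <-time.After(headerContCheck):
            poll() // ask the peer for headers again
        case <-cancel:
            return fmt.Errorf("header fetch cancelled")
        }
    }
    return nil
}

func main() {
    var committed int32
    cancel := make(chan struct{})

    // Simulate the pivot commit finishing a little later.
    go func() {
        time.Sleep(120 * time.Millisecond)
        atomic.StoreInt32(&committed, 1)
    }()

    polls := 0
    if err := waitForPivot(&committed, cancel, func() { polls++ }); err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println("pivot committed after", polls, "extra polls")
}
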
@@ -853,6 +850,12 @@ func (d *Downloader) fetchHeaders(p *peerConnection, from uint64) error {
getHeaders(from)
case <-timeout.C:
+ if d.dropPeer == nil {
+ // The dropPeer method is nil when `--copydb` is used for a local copy.
+ // Timeouts can occur if e.g. compaction hits at the wrong time, and can be ignored
+ p.log.Warn("Downloader wants to drop peer, but peerdrop-function is not set", "peer", p.id)
+ break
+ }
// Header retrieval timed out, consider the peer bad and drop
p.log.Debug("Header request timed out", "elapsed", ttl)
headerTimeoutMeter.Mark(1)
@@ -1071,7 +1074,13 @@ func (d *Downloader) fetchParts(errCancel error, deliveryCh chan dataPack, deliv
setIdle(peer, 0)
} else {
peer.log.Debug("Stalling delivery, dropping", "type", kind)
- d.dropPeer(pid)
+ if d.dropPeer == nil {
+ // The dropPeer method is nil when `--copydb` is used for a local copy.
+ // Timeouts can occur if e.g. compaction hits at the wrong time, and can be ignored
+ peer.log.Warn("Downloader wants to drop peer, but peerdrop-function is not set", "peer", pid)
+ } else {
+ d.dropPeer(pid)
+ }
}
}
}
@@ -1112,10 +1121,8 @@ func (d *Downloader) fetchParts(errCancel error, deliveryCh chan dataPack, deliv
}
if request.From > 0 {
peer.log.Trace("Requesting new batch of data", "type", kind, "from", request.From)
- } else if len(request.Headers) > 0 {
- peer.log.Trace("Requesting new batch of data", "type", kind, "count", len(request.Headers), "from", request.Headers[0].Number)
} else {
- peer.log.Trace("Requesting new batch of data", "type", kind, "count", len(request.Hashes))
+ peer.log.Trace("Requesting new batch of data", "type", kind, "count", len(request.Headers), "from", request.Headers[0].Number)
}
// Fetch the chunk and make sure any errors return the hashes to the queue
if fetchHook != nil {
@@ -1143,10 +1150,7 @@ func (d *Downloader) fetchParts(errCancel error, deliveryCh chan dataPack, deliv
// processHeaders takes batches of retrieved headers from an input channel and
// keeps processing and scheduling them into the header chain and downloader's
// queue until the stream ends or a failure occurs.
-func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
- // Calculate the pivoting point for switching from fast to slow sync
- pivot := d.queue.FastSyncPivot()
-
+func (d *Downloader) processHeaders(origin uint64, pivot uint64, td *big.Int) error {
// Keep a count of uncertain headers to roll back
rollback := []*types.Header{}
defer func() {
@@ -1171,19 +1175,6 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
"header", fmt.Sprintf("%d->%d", lastHeader, d.lightchain.CurrentHeader().Number),
"fast", fmt.Sprintf("%d->%d", lastFastBlock, curFastBlock),
"block", fmt.Sprintf("%d->%d", lastBlock, curBlock))
-
- // If we're already past the pivot point, this could be an attack, thread carefully
- if rollback[len(rollback)-1].Number.Uint64() > pivot {
- // If we didn't ever fail, lock in the pivot header (must! not! change!)
- if atomic.LoadUint32(&d.fsPivotFails) == 0 {
- for _, header := range rollback {
- if header.Number.Uint64() == pivot {
- log.Warn("Fast-sync pivot locked in", "number", pivot, "hash", header.Hash())
- d.fsPivotLock = header
- }
- }
- }
- }
}
}()
@@ -1218,7 +1209,8 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
// L: Request new headers up from 11 (R's TD was higher, it must have something)
// R: Nothing to give
if d.mode != LightSync {
- if !gotHeaders && td.Cmp(d.blockchain.GetTdByHash(d.blockchain.CurrentBlock().Hash())) > 0 {
+ head := d.blockchain.CurrentBlock()
+ if !gotHeaders && td.Cmp(d.blockchain.GetTd(head.Hash(), head.NumberU64())) > 0 {
return errStallingPeer
}
}
@@ -1230,7 +1222,8 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
// queued for processing when the header download completes. However, as long as the
// peer gave us something useful, we're already happy/progressed (above check).
if d.mode == FastSync || d.mode == LightSync {
- if td.Cmp(d.lightchain.GetTdByHash(d.lightchain.CurrentHeader().Hash())) > 0 {
+ head := d.lightchain.CurrentHeader()
+ if td.Cmp(d.lightchain.GetTd(head.Hash(), head.Number.Uint64())) > 0 {
return errStallingPeer
}
}
@@ -1283,13 +1276,6 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
rollback = append(rollback[:0], rollback[len(rollback)-fsHeaderSafetyNet:]...)
}
}
- // If we're fast syncing and just pulled in the pivot, make sure it's the one locked in
- if d.mode == FastSync && d.fsPivotLock != nil && chunk[0].Number.Uint64() <= pivot && chunk[len(chunk)-1].Number.Uint64() >= pivot {
- if pivot := chunk[int(pivot-chunk[0].Number.Uint64())]; pivot.Hash() != d.fsPivotLock.Hash() {
- log.Warn("Pivot doesn't match locked in one", "remoteNumber", pivot.Number, "remoteHash", pivot.Hash(), "localNumber", d.fsPivotLock.Number, "localHash", d.fsPivotLock.Hash())
- return errInvalidChain
- }
- }
// Unless we're doing light chains, schedule the headers for associated content retrieval
if d.mode == FullSync || d.mode == FastSync {
// If we've reached the allowed number of pending headers, stall a bit
@@ -1310,6 +1296,14 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
headers = headers[limit:]
origin += uint64(limit)
}
+
+ // Update the highest block number we know if a higher one is found.
+ d.syncStatsLock.Lock()
+ if d.syncStatsChainHeight < origin {
+ d.syncStatsChainHeight = origin - 1
+ }
+ d.syncStatsLock.Unlock()
+
// Signal the content downloaders of the availability of new tasks
for _, ch := range []chan bool{d.bodyWakeCh, d.receiptWakeCh} {
select {
@@ -1324,7 +1318,7 @@ func (d *Downloader) processHeaders(origin uint64, td *big.Int) error {
// processFullSyncContent takes fetch results from the queue and imports them into the chain.
func (d *Downloader) processFullSyncContent() error {
for {
- results := d.queue.WaitResults()
+ results := d.queue.Results(true)
if len(results) == 0 {
return nil
}
@@ -1338,30 +1332,28 @@ func (d *Downloader) processFullSyncContent() error {
}
func (d *Downloader) importBlockResults(results []*fetchResult) error {
- for len(results) != 0 {
- // Check for any termination requests. This makes clean shutdown faster.
- select {
- case <-d.quitCh:
- return errCancelContentProcessing
- default:
- }
- // Retrieve the a batch of results to import
- items := int(math.Min(float64(len(results)), float64(maxResultsProcess)))
- first, last := results[0].Header, results[items-1].Header
- log.Debug("Inserting downloaded chain", "items", len(results),
- "firstnum", first.Number, "firsthash", first.Hash(),
- "lastnum", last.Number, "lasthash", last.Hash(),
- )
- blocks := make([]*types.Block, items)
- for i, result := range results[:items] {
- blocks[i] = types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
- }
- if index, err := d.blockchain.InsertChain(blocks); err != nil {
- log.Debug("Downloaded item processing failed", "number", results[index].Header.Number, "hash", results[index].Header.Hash(), "err", err)
- return errInvalidChain
- }
- // Shift the results to the next batch
- results = results[items:]
+ // Check for any early termination requests
+ if len(results) == 0 {
+ return nil
+ }
+ select {
+ case <-d.quitCh:
+ return errCancelContentProcessing
+ default:
+ }
+ // Retrieve the batch of results to import
+ first, last := results[0].Header, results[len(results)-1].Header
+ log.Debug("Inserting downloaded chain", "items", len(results),
+ "firstnum", first.Number, "firsthash", first.Hash(),
+ "lastnum", last.Number, "lasthash", last.Hash(),
+ )
+ blocks := make([]*types.Block, len(results))
+ for i, result := range results {
+ blocks[i] = types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
+ }
+ if index, err := d.blockchain.InsertChain(blocks); err != nil {
+ log.Debug("Downloaded item processing failed", "number", results[index].Header.Number, "hash", results[index].Header.Hash(), "err", err)
+ return errInvalidChain
}
return nil
}
@@ -1369,35 +1361,92 @@ func (d *Downloader) importBlockResults(results []*fetchResult) error {
// processFastSyncContent takes fetch results from the queue and writes them to the
// database. It also controls the synchronisation of state nodes of the pivot block.
func (d *Downloader) processFastSyncContent(latest *types.Header) error {
- // Start syncing state of the reported head block.
- // This should get us most of the state of the pivot block.
+ // Start syncing state of the reported head block. This should get us most of
+ // the state of the pivot block.
stateSync := d.syncState(latest.Root)
defer stateSync.Cancel()
go func() {
- if err := stateSync.Wait(); err != nil {
+ if err := stateSync.Wait(); err != nil && err != errCancelStateFetch {
d.queue.Close() // wake up WaitResults
}
}()
-
- pivot := d.queue.FastSyncPivot()
+ // Figure out the ideal pivot block. Note that this goalpost may move if the
+ // sync takes long enough for the chain head to move significantly.
+ pivot := uint64(0)
+ if height := latest.Number.Uint64(); height > uint64(fsMinFullBlocks) {
+ pivot = height - uint64(fsMinFullBlocks)
+ }
+ // To cater for moving pivot points, track the pivot block and subsequently
+ // accumulated download results separately.
+ var (
+ oldPivot *fetchResult // Locked in pivot block, might change eventually
+ oldTail []*fetchResult // Downloaded content after the pivot
+ )
for {
- results := d.queue.WaitResults()
+ // Wait for the next batch of downloaded data to be available, and if the pivot
+ // block became stale, move the goalpost
+ results := d.queue.Results(oldPivot == nil) // Block if we're not monitoring pivot staleness
if len(results) == 0 {
- return stateSync.Cancel()
+ // If pivot sync is done, stop
+ if oldPivot == nil {
+ return stateSync.Cancel()
+ }
+ // If sync failed, stop
+ select {
+ case <-d.cancelCh:
+ return stateSync.Cancel()
+ default:
+ }
}
if d.chainInsertHook != nil {
d.chainInsertHook(results)
}
+ if oldPivot != nil {
+ results = append(append([]*fetchResult{oldPivot}, oldTail...), results...)
+ }
+ // Split around the pivot block and process the two sides via fast/full sync
+ if atomic.LoadInt32(&d.committed) == 0 {
+ latest = results[len(results)-1].Header
+ if height := latest.Number.Uint64(); height > pivot+2*uint64(fsMinFullBlocks) {
+ log.Warn("Pivot became stale, moving", "old", pivot, "new", height-uint64(fsMinFullBlocks))
+ pivot = height - uint64(fsMinFullBlocks)
+ }
+ }
P, beforeP, afterP := splitAroundPivot(pivot, results)
if err := d.commitFastSyncData(beforeP, stateSync); err != nil {
return err
}
if P != nil {
- stateSync.Cancel()
- if err := d.commitPivotBlock(P); err != nil {
- return err
+ // If new pivot block found, cancel old state retrieval and restart
+ if oldPivot != P {
+ stateSync.Cancel()
+
+ stateSync = d.syncState(P.Header.Root)
+ defer stateSync.Cancel()
+ go func() {
+ if err := stateSync.Wait(); err != nil && err != errCancelStateFetch {
+ d.queue.Close() // wake up WaitResults
+ }
+ }()
+ oldPivot = P
+ }
+ // Wait for completion, occasionally checking for pivot staleness
+ select {
+ case <-stateSync.done:
+ if stateSync.err != nil {
+ return stateSync.err
+ }
+ if err := d.commitPivotBlock(P); err != nil {
+ return err
+ }
+ oldPivot = nil
+
+ case <-time.After(time.Second):
+ oldTail = afterP
+ continue
}
}
+ // Fast sync done, pivot commit done, full import
if err := d.importBlockResults(afterP); err != nil {
return err
}
@@ -1420,52 +1469,49 @@ func splitAroundPivot(pivot uint64, results []*fetchResult) (p *fetchResult, bef
}
func (d *Downloader) commitFastSyncData(results []*fetchResult, stateSync *stateSync) error {
- for len(results) != 0 {
- // Check for any termination requests.
- select {
- case <-d.quitCh:
- return errCancelContentProcessing
- case <-stateSync.done:
- if err := stateSync.Wait(); err != nil {
- return err
- }
- default:
- }
- // Retrieve the a batch of results to import
- items := int(math.Min(float64(len(results)), float64(maxResultsProcess)))
- first, last := results[0].Header, results[items-1].Header
- log.Debug("Inserting fast-sync blocks", "items", len(results),
- "firstnum", first.Number, "firsthash", first.Hash(),
- "lastnumn", last.Number, "lasthash", last.Hash(),
- )
- blocks := make([]*types.Block, items)
- receipts := make([]types.Receipts, items)
- for i, result := range results[:items] {
- blocks[i] = types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
- receipts[i] = result.Receipts
- }
- if index, err := d.blockchain.InsertReceiptChain(blocks, receipts); err != nil {
- log.Debug("Downloaded item processing failed", "number", results[index].Header.Number, "hash", results[index].Header.Hash(), "err", err)
- return errInvalidChain
+ // Check for any early termination requests
+ if len(results) == 0 {
+ return nil
+ }
+ select {
+ case <-d.quitCh:
+ return errCancelContentProcessing
+ case <-stateSync.done:
+ if err := stateSync.Wait(); err != nil {
+ return err
}
- // Shift the results to the next batch
- results = results[items:]
+ default:
+ }
+ // Retrieve the batch of results to import
+ first, last := results[0].Header, results[len(results)-1].Header
+ log.Debug("Inserting fast-sync blocks", "items", len(results),
+ "firstnum", first.Number, "firsthash", first.Hash(),
+ "lastnumn", last.Number, "lasthash", last.Hash(),
+ )
+ blocks := make([]*types.Block, len(results))
+ receipts := make([]types.Receipts, len(results))
+ for i, result := range results {
+ blocks[i] = types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
+ receipts[i] = result.Receipts
+ }
+ if index, err := d.blockchain.InsertReceiptChain(blocks, receipts); err != nil {
+ log.Debug("Downloaded item processing failed", "number", results[index].Header.Number, "hash", results[index].Header.Hash(), "err", err)
+ return errInvalidChain
}
return nil
}
func (d *Downloader) commitPivotBlock(result *fetchResult) error {
- b := types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
- // Sync the pivot block state. This should complete reasonably quickly because
- // we've already synced up to the reported head block state earlier.
- if err := d.syncState(b.Root()).Wait(); err != nil {
+ block := types.NewBlockWithHeader(result.Header).WithBody(result.Transactions, result.Uncles)
+ log.Debug("Committing fast sync pivot as new head", "number", block.Number(), "hash", block.Hash())
+ if _, err := d.blockchain.InsertReceiptChain([]*types.Block{block}, []types.Receipts{result.Receipts}); err != nil {
return err
}
- log.Debug("Committing fast sync pivot as new head", "number", b.Number(), "hash", b.Hash())
- if _, err := d.blockchain.InsertReceiptChain([]*types.Block{b}, []types.Receipts{result.Receipts}); err != nil {
+ if err := d.blockchain.FastSyncCommitHead(block.Hash()); err != nil {
return err
}
- return d.blockchain.FastSyncCommitHead(b.Hash())
+ atomic.StoreInt32(&d.committed, 1)
+ return nil
}
// DeliverHeaders injects a new batch of block headers received from a remote
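
Fast sync now derives its pivot directly from the advertised head instead of a random offset, and processFastSyncContent splits each result batch around that pivot. A minimal sketch of the logic of those two helpers, using a stand-in result type that carries only the block number:

package main

import "fmt"

const fsMinFullBlocks = 64 // value from the downloader constants above

// result is a stand-in for fetchResult carrying only the block number,
// which is all the split needs.
type result struct{ number uint64 }

// pivotFor mirrors the new pivot rule: the pivot sits fsMinFullBlocks behind
// the advertised head, and only the blocks above it are executed in full.
func pivotFor(height uint64) uint64 {
    if height <= fsMinFullBlocks {
        return 0
    }
    return height - fsMinFullBlocks
}

// splitAroundPivot partitions downloaded results into the pivot block itself,
// everything before it (receipt import) and everything after it (full block
// import), matching the helper used by processFastSyncContent.
func splitAroundPivot(pivot uint64, results []*result) (p *result, before, after []*result) {
    for _, r := range results {
        switch {
        case r.number < pivot:
            before = append(before, r)
        case r.number == pivot:
            p = r
        default:
            after = append(after, r)
        }
    }
    return p, before, after
}

func main() {
    pivot := pivotFor(1000) // 936

    var results []*result
    for n := pivot - 2; n <= pivot+2; n++ {
        results = append(results, &result{number: n})
    }
    p, before, after := splitAroundPivot(pivot, results)
    fmt.Println("pivot:", p.number, "before:", len(before), "after:", len(after))
}
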
diff --git a/eth/downloader/downloader_test.go b/eth/downloader/downloader_test.go
index ad5a62c48..cb671a7df 100644
--- a/eth/downloader/downloader_test.go
+++ b/eth/downloader/downloader_test.go
@@ -28,7 +28,6 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/consensus/ethash"
"github.com/ethereum/go-ethereum/core"
- "github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethdb"
@@ -45,8 +44,8 @@ var (
// Reduce some of the parameters to make the tester faster.
func init() {
MaxForkAncestry = uint64(10000)
- blockCacheLimit = 1024
- fsCriticalTrials = 10
+ blockCacheItems = 1024
+ fsHeaderContCheck = 500 * time.Millisecond
}
// downloadTester is a test simulator for mocking out local block chain.
@@ -222,14 +221,9 @@ func (dl *downloadTester) HasHeader(hash common.Hash, number uint64) bool {
return dl.GetHeaderByHash(hash) != nil
}
-// HasBlockAndState checks if a block and associated state is present in the testers canonical chain.
-func (dl *downloadTester) HasBlockAndState(hash common.Hash) bool {
- block := dl.GetBlockByHash(hash)
- if block == nil {
- return false
- }
- _, err := dl.stateDb.Get(block.Root().Bytes())
- return err == nil
+// HasBlock checks if a block is present in the testers canonical chain.
+func (dl *downloadTester) HasBlock(hash common.Hash, number uint64) bool {
+ return dl.GetBlockByHash(hash) != nil
}
// GetHeader retrieves a header from the testers canonical chain.
@@ -293,14 +287,14 @@ func (dl *downloadTester) CurrentFastBlock() *types.Block {
func (dl *downloadTester) FastSyncCommitHead(hash common.Hash) error {
// For now only check that the state trie is correct
if block := dl.GetBlockByHash(hash); block != nil {
- _, err := trie.NewSecure(block.Root(), dl.stateDb, 0)
+ _, err := trie.NewSecure(block.Root(), trie.NewDatabase(dl.stateDb), 0)
return err
}
return fmt.Errorf("non existent block: %x", hash[:4])
}
-// GetTdByHash retrieves the block's total difficulty from the canonical chain.
-func (dl *downloadTester) GetTdByHash(hash common.Hash) *big.Int {
+// GetTd retrieves the block's total difficulty from the canonical chain.
+func (dl *downloadTester) GetTd(hash common.Hash, number uint64) *big.Int {
dl.lock.RLock()
defer dl.lock.RUnlock()
@@ -619,28 +613,22 @@ func assertOwnChain(t *testing.T, tester *downloadTester, length int) {
// number of items of the various chain components.
func assertOwnForkedChain(t *testing.T, tester *downloadTester, common int, lengths []int) {
// Initialize the counters for the first fork
- headers, blocks := lengths[0], lengths[0]
+ headers, blocks, receipts := lengths[0], lengths[0], lengths[0]-fsMinFullBlocks
- minReceipts, maxReceipts := lengths[0]-fsMinFullBlocks-fsPivotInterval, lengths[0]-fsMinFullBlocks
- if minReceipts < 0 {
- minReceipts = 1
- }
- if maxReceipts < 0 {
- maxReceipts = 1
+ if receipts < 0 {
+ receipts = 1
}
// Update the counters for each subsequent fork
for _, length := range lengths[1:] {
headers += length - common
blocks += length - common
-
- minReceipts += length - common - fsMinFullBlocks - fsPivotInterval
- maxReceipts += length - common - fsMinFullBlocks
+ receipts += length - common - fsMinFullBlocks
}
switch tester.downloader.mode {
case FullSync:
- minReceipts, maxReceipts = 1, 1
+ receipts = 1
case LightSync:
- blocks, minReceipts, maxReceipts = 1, 1, 1
+ blocks, receipts = 1, 1
}
if hs := len(tester.ownHeaders); hs != headers {
t.Fatalf("synchronised headers mismatch: have %v, want %v", hs, headers)
@@ -648,11 +636,12 @@ func assertOwnForkedChain(t *testing.T, tester *downloadTester, common int, leng
if bs := len(tester.ownBlocks); bs != blocks {
t.Fatalf("synchronised blocks mismatch: have %v, want %v", bs, blocks)
}
- if rs := len(tester.ownReceipts); rs < minReceipts || rs > maxReceipts {
- t.Fatalf("synchronised receipts mismatch: have %v, want between [%v, %v]", rs, minReceipts, maxReceipts)
+ if rs := len(tester.ownReceipts); rs != receipts {
+ t.Fatalf("synchronised receipts mismatch: have %v, want %v", rs, receipts)
}
// Verify the state trie too for fast syncs
- if tester.downloader.mode == FastSync {
+ /*if tester.downloader.mode == FastSync {
+ pivot := uint64(0)
var index int
if pivot := int(tester.downloader.queue.fastSyncPivot); pivot < common {
index = pivot
@@ -660,11 +649,11 @@ func assertOwnForkedChain(t *testing.T, tester *downloadTester, common int, leng
index = len(tester.ownHashes) - lengths[len(lengths)-1] + int(tester.downloader.queue.fastSyncPivot)
}
if index > 0 {
- if statedb, err := state.New(tester.ownHeaders[tester.ownHashes[index]].Root, state.NewDatabase(tester.stateDb)); statedb == nil || err != nil {
+ if statedb, err := state.New(tester.ownHeaders[tester.ownHashes[index]].Root, state.NewDatabase(trie.NewDatabase(tester.stateDb))); statedb == nil || err != nil {
t.Fatalf("state reconstruction failed: %v", err)
}
}
- }
+ }*/
}
// Tests that simple synchronization against a canonical chain works correctly.
@@ -684,7 +673,7 @@ func testCanonicalSynchronisation(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
tester.newPeer("peer", protocol, hashes, headers, blocks, receipts)
@@ -710,7 +699,7 @@ func testThrottling(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a long block chain to download and the tester
- targetBlocks := 8 * blockCacheLimit
+ targetBlocks := 8 * blockCacheItems
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
tester.newPeer("peer", protocol, hashes, headers, blocks, receipts)
@@ -745,9 +734,9 @@ func testThrottling(t *testing.T, protocol int, mode SyncMode) {
cached = len(tester.downloader.queue.blockDonePool)
if mode == FastSync {
if receipts := len(tester.downloader.queue.receiptDonePool); receipts < cached {
- if tester.downloader.queue.resultCache[receipts].Header.Number.Uint64() < tester.downloader.queue.fastSyncPivot {
- cached = receipts
- }
+ //if tester.downloader.queue.resultCache[receipts].Header.Number.Uint64() < tester.downloader.queue.fastSyncPivot {
+ cached = receipts
+ //}
}
}
frozen = int(atomic.LoadUint32(&blocked))
@@ -755,7 +744,7 @@ func testThrottling(t *testing.T, protocol int, mode SyncMode) {
tester.downloader.queue.lock.Unlock()
tester.lock.Unlock()
- if cached == blockCacheLimit || retrieved+cached+frozen == targetBlocks+1 {
+ if cached == blockCacheItems || retrieved+cached+frozen == targetBlocks+1 {
break
}
}
@@ -765,8 +754,8 @@ func testThrottling(t *testing.T, protocol int, mode SyncMode) {
tester.lock.RLock()
retrieved = len(tester.ownBlocks)
tester.lock.RUnlock()
- if cached != blockCacheLimit && retrieved+cached+frozen != targetBlocks+1 {
- t.Fatalf("block count mismatch: have %v, want %v (owned %v, blocked %v, target %v)", cached, blockCacheLimit, retrieved, frozen, targetBlocks+1)
+ if cached != blockCacheItems && retrieved+cached+frozen != targetBlocks+1 {
+ t.Fatalf("block count mismatch: have %v, want %v (owned %v, blocked %v, target %v)", cached, blockCacheItems, retrieved, frozen, targetBlocks+1)
}
// Permit the blocked blocks to import
if atomic.LoadUint32(&blocked) > 0 {
@@ -974,7 +963,7 @@ func testCancel(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download and the tester
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
if targetBlocks >= MaxHashFetch {
targetBlocks = MaxHashFetch - 15
}
@@ -1016,12 +1005,12 @@ func testMultiSynchronisation(t *testing.T, protocol int, mode SyncMode) {
// Create various peers with various parts of the chain
targetPeers := 8
- targetBlocks := targetPeers*blockCacheLimit - 15
+ targetBlocks := targetPeers*blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
for i := 0; i < targetPeers; i++ {
id := fmt.Sprintf("peer #%d", i)
- tester.newPeer(id, protocol, hashes[i*blockCacheLimit:], headers, blocks, receipts)
+ tester.newPeer(id, protocol, hashes[i*blockCacheItems:], headers, blocks, receipts)
}
if err := tester.sync("peer #0", nil, mode); err != nil {
t.Fatalf("failed to synchronise blocks: %v", err)
@@ -1045,7 +1034,7 @@ func testMultiProtoSync(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Create peers of every type
@@ -1084,7 +1073,7 @@ func testEmptyShortCircuit(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a block chain to download
- targetBlocks := 2*blockCacheLimit - 15
+ targetBlocks := 2*blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
tester.newPeer("peer", protocol, hashes, headers, blocks, receipts)
@@ -1110,8 +1099,8 @@ func testEmptyShortCircuit(t *testing.T, protocol int, mode SyncMode) {
bodiesNeeded++
}
}
- for hash, receipt := range receipts {
- if mode == FastSync && len(receipt) > 0 && headers[hash].Number.Uint64() <= tester.downloader.queue.fastSyncPivot {
+ for _, receipt := range receipts {
+ if mode == FastSync && len(receipt) > 0 {
receiptsNeeded++
}
}
@@ -1139,7 +1128,7 @@ func testMissingHeaderAttack(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Attempt a full sync with an attacker feeding gapped headers
@@ -1174,7 +1163,7 @@ func testShiftedHeaderAttack(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Attempt a full sync with an attacker feeding shifted headers
@@ -1208,7 +1197,7 @@ func testInvalidHeaderRollback(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := 3*fsHeaderSafetyNet + fsPivotInterval + fsMinFullBlocks
+ targetBlocks := 3*fsHeaderSafetyNet + 256 + fsMinFullBlocks
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Attempt to sync with an attacker that feeds junk during the fast sync phase.
@@ -1248,7 +1237,6 @@ func testInvalidHeaderRollback(t *testing.T, protocol int, mode SyncMode) {
tester.newPeer("withhold-attack", protocol, hashes, headers, blocks, receipts)
missing = 3*fsHeaderSafetyNet + MaxHeaderFetch + 1
- tester.downloader.fsPivotFails = 0
tester.downloader.syncInitHook = func(uint64, uint64) {
for i := missing; i <= len(hashes); i++ {
delete(tester.peerHeaders["withhold-attack"], hashes[len(hashes)-i])
@@ -1267,8 +1255,6 @@ func testInvalidHeaderRollback(t *testing.T, protocol int, mode SyncMode) {
t.Errorf("fast sync pivot block #%d not rolled back", head)
}
}
- tester.downloader.fsPivotFails = fsCriticalTrials
-
// Synchronise with the valid peer and make sure sync succeeds. Since the last
// rollback should also disable fast syncing for this process, verify that we
// did a fresh full sync. Note, we can't assert anything about the receipts
@@ -1383,7 +1369,7 @@ func testSyncProgress(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Set a sync init hook to catch progress changes
@@ -1532,7 +1518,7 @@ func testFailedSyncProgress(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small enough block chain to download
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
// Set a sync init hook to catch progress changes
@@ -1609,7 +1595,7 @@ func testFakedSyncProgress(t *testing.T, protocol int, mode SyncMode) {
defer tester.terminate()
// Create a small block chain
- targetBlocks := blockCacheLimit - 15
+ targetBlocks := blockCacheItems - 15
hashes, headers, blocks, receipts := tester.makeChain(targetBlocks+3, 0, tester.genesis, nil, false)
// Set a sync init hook to catch progress changes
@@ -1697,6 +1683,7 @@ func TestDeliverHeadersHang(t *testing.T) {
type floodingTestPeer struct {
peer Peer
tester *downloadTester
+ pend sync.WaitGroup
}
func (ftp *floodingTestPeer) Head() (common.Hash, *big.Int) { return ftp.peer.Head() }
@@ -1717,9 +1704,12 @@ func (ftp *floodingTestPeer) RequestHeadersByNumber(from uint64, count, skip int
deliveriesDone := make(chan struct{}, 500)
for i := 0; i < cap(deliveriesDone); i++ {
peer := fmt.Sprintf("fake-peer%d", i)
+ ftp.pend.Add(1)
+
go func() {
ftp.tester.downloader.DeliverHeaders(peer, []*types.Header{{}, {}, {}, {}})
deliveriesDone <- struct{}{}
+ ftp.pend.Done()
}()
}
// Deliver the actual requested headers.
@@ -1751,110 +1741,15 @@ func testDeliverHeadersHang(t *testing.T, protocol int, mode SyncMode) {
// Whenever the downloader requests headers, flood it with
// a lot of unrequested header deliveries.
tester.downloader.peers.peers["peer"].peer = &floodingTestPeer{
- tester.downloader.peers.peers["peer"].peer,
- tester,
+ peer: tester.downloader.peers.peers["peer"].peer,
+ tester: tester,
}
if err := tester.sync("peer", nil, mode); err != nil {
- t.Errorf("sync failed: %v", err)
+ t.Errorf("test %d: sync failed: %v", i, err)
}
tester.terminate()
- }
-}
-// Tests that if fast sync aborts in the critical section, it can restart a few
-// times before giving up.
-// We use data driven subtests to manage this so that it will be parallel on its own
-// and not with the other tests, avoiding intermittent failures.
-func TestFastCriticalRestarts(t *testing.T) {
- testCases := []struct {
- protocol int
- progress bool
- }{
- {63, false},
- {64, false},
- {63, true},
- {64, true},
- }
- for _, tc := range testCases {
- t.Run(fmt.Sprintf("protocol %d progress %v", tc.protocol, tc.progress), func(t *testing.T) {
- testFastCriticalRestarts(t, tc.protocol, tc.progress)
- })
- }
-}
-
-func testFastCriticalRestarts(t *testing.T, protocol int, progress bool) {
- t.Parallel()
-
- tester := newTester()
- defer tester.terminate()
-
- // Create a large enough blockchin to actually fast sync on
- targetBlocks := fsMinFullBlocks + 2*fsPivotInterval - 15
- hashes, headers, blocks, receipts := tester.makeChain(targetBlocks, 0, tester.genesis, nil, false)
-
- // Create a tester peer with a critical section header missing (force failures)
- tester.newPeer("peer", protocol, hashes, headers, blocks, receipts)
- delete(tester.peerHeaders["peer"], hashes[fsMinFullBlocks-1])
- tester.downloader.dropPeer = func(id string) {} // We reuse the same "faulty" peer throughout the test
-
- // Remove all possible pivot state roots and slow down replies (test failure resets later)
- for i := 0; i < fsPivotInterval; i++ {
- tester.peerMissingStates["peer"][headers[hashes[fsMinFullBlocks+i]].Root] = true
- }
- (tester.downloader.peers.peers["peer"].peer).(*downloadTesterPeer).setDelay(500 * time.Millisecond) // Enough to reach the critical section
-
- // Synchronise with the peer a few times and make sure they fail until the retry limit
- for i := 0; i < int(fsCriticalTrials)-1; i++ {
- // Attempt a sync and ensure it fails properly
- if err := tester.sync("peer", nil, FastSync); err == nil {
- t.Fatalf("failing fast sync succeeded: %v", err)
- }
- time.Sleep(150 * time.Millisecond) // Make sure no in-flight requests remain
-
- // If it's the first failure, pivot should be locked => reenable all others to detect pivot changes
- if i == 0 {
- time.Sleep(150 * time.Millisecond) // Make sure no in-flight requests remain
- if tester.downloader.fsPivotLock == nil {
- time.Sleep(400 * time.Millisecond) // Make sure the first huge timeout expires too
- t.Fatalf("pivot block not locked in after critical section failure")
- }
- tester.lock.Lock()
- tester.peerHeaders["peer"][hashes[fsMinFullBlocks-1]] = headers[hashes[fsMinFullBlocks-1]]
- tester.peerMissingStates["peer"] = map[common.Hash]bool{tester.downloader.fsPivotLock.Root: true}
- (tester.downloader.peers.peers["peer"].peer).(*downloadTesterPeer).setDelay(0)
- tester.lock.Unlock()
- }
- }
- // Return all nodes if we're testing fast sync progression
- if progress {
- tester.lock.Lock()
- tester.peerMissingStates["peer"] = map[common.Hash]bool{}
- tester.lock.Unlock()
-
- if err := tester.sync("peer", nil, FastSync); err != nil {
- t.Fatalf("failed to synchronise blocks in progressed fast sync: %v", err)
- }
- time.Sleep(150 * time.Millisecond) // Make sure no in-flight requests remain
-
- if fails := atomic.LoadUint32(&tester.downloader.fsPivotFails); fails != 1 {
- t.Fatalf("progressed pivot trial count mismatch: have %v, want %v", fails, 1)
- }
- assertOwnChain(t, tester, targetBlocks+1)
- } else {
- if err := tester.sync("peer", nil, FastSync); err == nil {
- t.Fatalf("succeeded to synchronise blocks in failed fast sync")
- }
- time.Sleep(150 * time.Millisecond) // Make sure no in-flight requests remain
-
- if fails := atomic.LoadUint32(&tester.downloader.fsPivotFails); fails != fsCriticalTrials {
- t.Fatalf("failed pivot trial count mismatch: have %v, want %v", fails, fsCriticalTrials)
- }
- }
- // Retry limit exhausted, downloader will switch to full sync, should succeed
- if err := tester.sync("peer", nil, FastSync); err != nil {
- t.Fatalf("failed to synchronise blocks in slow sync: %v", err)
+ // Flush all goroutines to prevent messing with subsequent tests
+ tester.downloader.peers.peers["peer"].peer.(*floodingTestPeer).pend.Wait()
}
- // Note, we can't assert the chain here because the test asserter assumes sync
- // completed using a single mode of operation, whereas fast-then-slow can result
- // in arbitrary intermediate state that's not cleanly verifiable.
}
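
The downloader_test.go changes above thread a sync.WaitGroup through floodingTestPeer so each test iteration can flush its flooding goroutines before the next one reuses the downloader. A minimal, self-contained sketch of that wait-group pattern; the names below are illustrative only, not part of the patch:

package main

import (
	"fmt"
	"sync"
)

func main() {
	var pend sync.WaitGroup
	for i := 0; i < 4; i++ {
		pend.Add(1) // register before the goroutine starts, mirroring ftp.pend.Add(1)
		go func(id int) {
			defer pend.Done() // mirrors ftp.pend.Done() once the delivery returns
			fmt.Printf("flood delivery %d finished\n", id)
		}(i)
	}
	pend.Wait() // mirrors the final pend.Wait() that flushes goroutines between test cases
}
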
diff --git a/eth/downloader/metrics.go b/eth/downloader/metrics.go
index 58764ccf0..d4eb33794 100644
--- a/eth/downloader/metrics.go
+++ b/eth/downloader/metrics.go
@@ -23,21 +23,21 @@ import (
)
var (
- headerInMeter = metrics.NewMeter("eth/downloader/headers/in")
- headerReqTimer = metrics.NewTimer("eth/downloader/headers/req")
- headerDropMeter = metrics.NewMeter("eth/downloader/headers/drop")
- headerTimeoutMeter = metrics.NewMeter("eth/downloader/headers/timeout")
+ headerInMeter = metrics.NewRegisteredMeter("eth/downloader/headers/in", nil)
+ headerReqTimer = metrics.NewRegisteredTimer("eth/downloader/headers/req", nil)
+ headerDropMeter = metrics.NewRegisteredMeter("eth/downloader/headers/drop", nil)
+ headerTimeoutMeter = metrics.NewRegisteredMeter("eth/downloader/headers/timeout", nil)
- bodyInMeter = metrics.NewMeter("eth/downloader/bodies/in")
- bodyReqTimer = metrics.NewTimer("eth/downloader/bodies/req")
- bodyDropMeter = metrics.NewMeter("eth/downloader/bodies/drop")
- bodyTimeoutMeter = metrics.NewMeter("eth/downloader/bodies/timeout")
+ bodyInMeter = metrics.NewRegisteredMeter("eth/downloader/bodies/in", nil)
+ bodyReqTimer = metrics.NewRegisteredTimer("eth/downloader/bodies/req", nil)
+ bodyDropMeter = metrics.NewRegisteredMeter("eth/downloader/bodies/drop", nil)
+ bodyTimeoutMeter = metrics.NewRegisteredMeter("eth/downloader/bodies/timeout", nil)
- receiptInMeter = metrics.NewMeter("eth/downloader/receipts/in")
- receiptReqTimer = metrics.NewTimer("eth/downloader/receipts/req")
- receiptDropMeter = metrics.NewMeter("eth/downloader/receipts/drop")
- receiptTimeoutMeter = metrics.NewMeter("eth/downloader/receipts/timeout")
+ receiptInMeter = metrics.NewRegisteredMeter("eth/downloader/receipts/in", nil)
+ receiptReqTimer = metrics.NewRegisteredTimer("eth/downloader/receipts/req", nil)
+ receiptDropMeter = metrics.NewRegisteredMeter("eth/downloader/receipts/drop", nil)
+ receiptTimeoutMeter = metrics.NewRegisteredMeter("eth/downloader/receipts/timeout", nil)
- stateInMeter = metrics.NewMeter("eth/downloader/states/in")
- stateDropMeter = metrics.NewMeter("eth/downloader/states/drop")
+ stateInMeter = metrics.NewRegisteredMeter("eth/downloader/states/in", nil)
+ stateDropMeter = metrics.NewRegisteredMeter("eth/downloader/states/drop", nil)
)
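
The metrics variables above switch from NewMeter/NewTimer to NewRegisteredMeter/NewRegisteredTimer, where passing a nil registry selects the package default, so creation and registration happen in one call. A minimal sketch of the new constructor in use; the Mark call assumes the standard go-metrics Meter interface that this package wraps:

package main

import "github.com/ethereum/go-ethereum/metrics"

// One registered meter, as in the hunk above; nil means the package's default registry.
var headerInMeter = metrics.NewRegisteredMeter("eth/downloader/headers/in", nil)

func main() {
	headerInMeter.Mark(42) // Mark(int64) is part of the go-metrics Meter interface
}
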
diff --git a/eth/downloader/queue.go b/eth/downloader/queue.go
index 6926f1d8c..359cce54b 100644
--- a/eth/downloader/queue.go
+++ b/eth/downloader/queue.go
@@ -28,11 +28,15 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/log"
- "github.com/rcrowley/go-metrics"
+ "github.com/ethereum/go-ethereum/metrics"
"gopkg.in/karalabe/cookiejar.v2/collections/prque"
)
-var blockCacheLimit = 8192 // Maximum number of blocks to cache before throttling the download
+var (
+ blockCacheItems = 8192 // Maximum number of blocks to cache before throttling the download
+ blockCacheMemory = 64 * 1024 * 1024 // Maximum amount of memory to use for block caching
+ blockCacheSizeWeight = 0.1 // Multiplier to approximate the average block size based on past ones
+)
var (
errNoFetchesPending = errors.New("no fetches pending")
@@ -41,17 +45,17 @@ var (
// fetchRequest is a currently running data retrieval operation.
type fetchRequest struct {
- Peer *peerConnection // Peer to which the request was sent
- From uint64 // [eth/62] Requested chain element index (used for skeleton fills only)
- Hashes map[common.Hash]int // [eth/61] Requested hashes with their insertion index (priority)
- Headers []*types.Header // [eth/62] Requested headers, sorted by request order
- Time time.Time // Time when the request was made
+ Peer *peerConnection // Peer to which the request was sent
+ From uint64 // [eth/62] Requested chain element index (used for skeleton fills only)
+ Headers []*types.Header // [eth/62] Requested headers, sorted by request order
+ Time time.Time // Time when the request was made
}
// fetchResult is a struct collecting partial results from data fetchers until
// all outstanding pieces complete and the result as a whole can be processed.
type fetchResult struct {
- Pending int // Number of data fetches still pending
+ Pending int // Number of data fetches still pending
+ Hash common.Hash // Hash of the header to prevent recalculating
Header *types.Header
Uncles []*types.Header
@@ -61,12 +65,10 @@ type fetchResult struct {
// queue represents hashes that either need fetching or are being fetched
type queue struct {
- mode SyncMode // Synchronisation mode to decide on the block parts to schedule for fetching
- fastSyncPivot uint64 // Block number where the fast sync pivots into archive synchronisation mode
-
- headerHead common.Hash // [eth/62] Hash of the last queued header to verify order
+ mode SyncMode // Synchronisation mode to decide on the block parts to schedule for fetching
// Headers are "special", they download in batches, supported by a skeleton chain
+ headerHead common.Hash // [eth/62] Hash of the last queued header to verify order
headerTaskPool map[uint64]*types.Header // [eth/62] Pending header retrieval tasks, mapping starting indexes to skeleton headers
headerTaskQueue *prque.Prque // [eth/62] Priority queue of the skeleton indexes to fetch the filling headers for
headerPeerMiss map[string]map[uint64]struct{} // [eth/62] Set of per-peer header batches known to be unavailable
@@ -87,8 +89,9 @@ type queue struct {
receiptPendPool map[string]*fetchRequest // [eth/63] Currently pending receipt retrieval operations
receiptDonePool map[common.Hash]struct{} // [eth/63] Set of the completed receipt fetches
- resultCache []*fetchResult // Downloaded but not yet delivered fetch results
- resultOffset uint64 // Offset of the first cached fetch result in the block chain
+ resultCache []*fetchResult // Downloaded but not yet delivered fetch results
+ resultOffset uint64 // Offset of the first cached fetch result in the block chain
+ resultSize common.StorageSize // Approximate size of a block (exponential moving average)
lock *sync.Mutex
active *sync.Cond
@@ -109,7 +112,7 @@ func newQueue() *queue {
receiptTaskQueue: prque.New(),
receiptPendPool: make(map[string]*fetchRequest),
receiptDonePool: make(map[common.Hash]struct{}),
- resultCache: make([]*fetchResult, blockCacheLimit),
+ resultCache: make([]*fetchResult, blockCacheItems),
active: sync.NewCond(lock),
lock: lock,
}
@@ -122,10 +125,8 @@ func (q *queue) Reset() {
q.closed = false
q.mode = FullSync
- q.fastSyncPivot = 0
q.headerHead = common.Hash{}
-
q.headerPendPool = make(map[string]*fetchRequest)
q.blockTaskPool = make(map[common.Hash]*types.Header)
@@ -138,7 +139,7 @@ func (q *queue) Reset() {
q.receiptPendPool = make(map[string]*fetchRequest)
q.receiptDonePool = make(map[common.Hash]struct{})
- q.resultCache = make([]*fetchResult, blockCacheLimit)
+ q.resultCache = make([]*fetchResult, blockCacheItems)
q.resultOffset = 0
}
@@ -214,27 +215,13 @@ func (q *queue) Idle() bool {
return (queued + pending + cached) == 0
}
-// FastSyncPivot retrieves the currently used fast sync pivot point.
-func (q *queue) FastSyncPivot() uint64 {
- q.lock.Lock()
- defer q.lock.Unlock()
-
- return q.fastSyncPivot
-}
-
// ShouldThrottleBlocks checks if the download should be throttled (active block (body)
// fetches exceed block cache).
func (q *queue) ShouldThrottleBlocks() bool {
q.lock.Lock()
defer q.lock.Unlock()
- // Calculate the currently in-flight block (body) requests
- pending := 0
- for _, request := range q.blockPendPool {
- pending += len(request.Hashes) + len(request.Headers)
- }
- // Throttle if more blocks (bodies) are in-flight than free space in the cache
- return pending >= len(q.resultCache)-len(q.blockDonePool)
+ return q.resultSlots(q.blockPendPool, q.blockDonePool) <= 0
}
// ShouldThrottleReceipts checks if the download should be throttled (active receipt
@@ -243,13 +230,39 @@ func (q *queue) ShouldThrottleReceipts() bool {
q.lock.Lock()
defer q.lock.Unlock()
- // Calculate the currently in-flight receipt requests
+ return q.resultSlots(q.receiptPendPool, q.receiptDonePool) <= 0
+}
+
+// resultSlots calculates the number of result slots available for requests
+// whilst adhering to both the item limit and the memory limit of the results
+// cache.
+func (q *queue) resultSlots(pendPool map[string]*fetchRequest, donePool map[common.Hash]struct{}) int {
+ // Calculate the maximum length capped by the memory limit
+ limit := len(q.resultCache)
+ if common.StorageSize(len(q.resultCache))*q.resultSize > common.StorageSize(blockCacheMemory) {
+ limit = int((common.StorageSize(blockCacheMemory) + q.resultSize - 1) / q.resultSize)
+ }
+ // Calculate the number of slots already finished
+ finished := 0
+ for _, result := range q.resultCache[:limit] {
+ if result == nil {
+ break
+ }
+ if _, ok := donePool[result.Hash]; ok {
+ finished++
+ }
+ }
+ // Calculate the number of slots currently downloading
pending := 0
- for _, request := range q.receiptPendPool {
- pending += len(request.Headers)
+ for _, request := range pendPool {
+ for _, header := range request.Headers {
+ if header.Number.Uint64() < q.resultOffset+uint64(limit) {
+ pending++
+ }
+ }
}
- // Throttle if more receipts are in-flight than free space in the cache
- return pending >= len(q.resultCache)-len(q.receiptDonePool)
+ // Return the free slots to distribute
+ return limit - finished - pending
}
// ScheduleSkeleton adds a batch of header retrieval tasks to the queue to fill
@@ -323,8 +336,7 @@ func (q *queue) Schedule(headers []*types.Header, from uint64) []*types.Header {
q.blockTaskPool[hash] = header
q.blockTaskQueue.Push(header, -float32(header.Number.Uint64()))
- if q.mode == FastSync && header.Number.Uint64() <= q.fastSyncPivot {
- // Fast phase of the fast sync, retrieve receipts too
+ if q.mode == FastSync {
q.receiptTaskPool[hash] = header
q.receiptTaskQueue.Push(header, -float32(header.Number.Uint64()))
}
@@ -335,18 +347,25 @@ func (q *queue) Schedule(headers []*types.Header, from uint64) []*types.Header {
return inserts
}
-// WaitResults retrieves and permanently removes a batch of fetch
-// results from the cache. the result slice will be empty if the queue
-// has been closed.
-func (q *queue) WaitResults() []*fetchResult {
+// Results retrieves and permanently removes a batch of fetch results from
+// the cache. The result slice will be empty if the queue has been closed.
+func (q *queue) Results(block bool) []*fetchResult {
q.lock.Lock()
defer q.lock.Unlock()
+ // Count the number of items available for processing
nproc := q.countProcessableItems()
for nproc == 0 && !q.closed {
+ if !block {
+ return nil
+ }
q.active.Wait()
nproc = q.countProcessableItems()
}
+ // Since we have a batch limit, don't pull more into "dangling" memory
+ if nproc > maxResultsProcess {
+ nproc = maxResultsProcess
+ }
results := make([]*fetchResult, nproc)
copy(results, q.resultCache[:nproc])
if len(results) > 0 {
@@ -363,6 +382,21 @@ func (q *queue) WaitResults() []*fetchResult {
}
// Advance the expected block number of the first cache entry.
q.resultOffset += uint64(nproc)
+
+ // Recalculate the result item weights to prevent memory exhaustion
+ for _, result := range results {
+ size := result.Header.Size()
+ for _, uncle := range result.Uncles {
+ size += uncle.Size()
+ }
+ for _, receipt := range result.Receipts {
+ size += receipt.Size()
+ }
+ for _, tx := range result.Transactions {
+ size += tx.Size()
+ }
+ q.resultSize = common.StorageSize(blockCacheSizeWeight)*size + (1-common.StorageSize(blockCacheSizeWeight))*q.resultSize
+ }
}
return results
}
@@ -370,21 +404,9 @@ func (q *queue) WaitResults() []*fetchResult {
// countProcessableItems counts the processable items.
func (q *queue) countProcessableItems() int {
for i, result := range q.resultCache {
- // Don't process incomplete or unavailable items.
if result == nil || result.Pending > 0 {
return i
}
- // Stop before processing the pivot block to ensure that
- // resultCache has space for fsHeaderForceVerify items. Not
- // doing this could leave us unable to download the required
- // amount of headers.
- if q.mode == FastSync && result.Header.Number.Uint64() == q.fastSyncPivot {
- for j := 0; j < fsHeaderForceVerify; j++ {
- if i+j+1 >= len(q.resultCache) || q.resultCache[i+j+1] == nil {
- return i
- }
- }
- }
}
return len(q.resultCache)
}
@@ -473,10 +495,8 @@ func (q *queue) reserveHeaders(p *peerConnection, count int, taskPool map[common
return nil, false, nil
}
// Calculate an upper limit on the items we might fetch (i.e. throttling)
- space := len(q.resultCache) - len(donePool)
- for _, request := range pendPool {
- space -= len(request.Headers)
- }
+ space := q.resultSlots(pendPool, donePool)
+
// Retrieve a batch of tasks, skipping previously failed ones
send := make([]*types.Header, 0, count)
skip := make([]*types.Header, 0)
@@ -484,6 +504,7 @@ func (q *queue) reserveHeaders(p *peerConnection, count int, taskPool map[common
progress := false
for proc := 0; proc < space && len(send) < count && !taskQueue.Empty(); proc++ {
header := taskQueue.PopItem().(*types.Header)
+ hash := header.Hash()
// If we're the first to request this task, initialise the result container
index := int(header.Number.Int64() - int64(q.resultOffset))
@@ -493,18 +514,19 @@ func (q *queue) reserveHeaders(p *peerConnection, count int, taskPool map[common
}
if q.resultCache[index] == nil {
components := 1
- if q.mode == FastSync && header.Number.Uint64() <= q.fastSyncPivot {
+ if q.mode == FastSync {
components = 2
}
q.resultCache[index] = &fetchResult{
Pending: components,
+ Hash: hash,
Header: header,
}
}
// If this fetch task is a noop, skip this fetch operation
if isNoop(header) {
- donePool[header.Hash()] = struct{}{}
- delete(taskPool, header.Hash())
+ donePool[hash] = struct{}{}
+ delete(taskPool, hash)
space, proc = space-1, proc-1
q.resultCache[index].Pending--
@@ -512,7 +534,7 @@ func (q *queue) reserveHeaders(p *peerConnection, count int, taskPool map[common
continue
}
// Otherwise unless the peer is known not to have the data, add to the retrieve list
- if p.Lacks(header.Hash()) {
+ if p.Lacks(hash) {
skip = append(skip, header)
} else {
send = append(send, header)
@@ -565,9 +587,6 @@ func (q *queue) cancel(request *fetchRequest, taskQueue *prque.Prque, pendPool m
if request.From > 0 {
taskQueue.Push(request.From, -float32(request.From))
}
- for hash, index := range request.Hashes {
- taskQueue.Push(hash, float32(index))
- }
for _, header := range request.Headers {
taskQueue.Push(header, -float32(header.Number.Uint64()))
}
@@ -640,18 +659,11 @@ func (q *queue) expire(timeout time.Duration, pendPool map[string]*fetchRequest,
if request.From > 0 {
taskQueue.Push(request.From, -float32(request.From))
}
- for hash, index := range request.Hashes {
- taskQueue.Push(hash, float32(index))
- }
for _, header := range request.Headers {
taskQueue.Push(header, -float32(header.Number.Uint64()))
}
// Add the peer to the expiry report along with the number of failed requests
- expirations := len(request.Hashes)
- if expirations < len(request.Headers) {
- expirations = len(request.Headers)
- }
- expiries[id] = expirations
+ expiries[id] = len(request.Headers)
}
}
// Remove the expired requests from the pending pool
@@ -828,14 +840,16 @@ func (q *queue) deliver(id string, taskPool map[common.Hash]*types.Header, taskQ
failure = err
break
}
- donePool[header.Hash()] = struct{}{}
+ hash := header.Hash()
+
+ donePool[hash] = struct{}{}
q.resultCache[index].Pending--
useful = true
accepted++
// Clean up a successful fetch
request.Headers[i] = nil
- delete(taskPool, header.Hash())
+ delete(taskPool, hash)
}
// Return all failed or missing fetches to the queue
for _, header := range request.Headers {
@@ -860,7 +874,7 @@ func (q *queue) deliver(id string, taskPool map[common.Hash]*types.Header, taskQ
// Prepare configures the result cache to allow accepting and caching inbound
// fetch results.
-func (q *queue) Prepare(offset uint64, mode SyncMode, pivot uint64, head *types.Header) {
+func (q *queue) Prepare(offset uint64, mode SyncMode) {
q.lock.Lock()
defer q.lock.Unlock()
@@ -868,6 +882,5 @@ func (q *queue) Prepare(offset uint64, mode SyncMode, pivot uint64, head *types.
if q.resultOffset < offset {
q.resultOffset = offset
}
- q.fastSyncPivot = pivot
q.mode = mode
}
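
The queue.go changes above replace the fixed blockCacheLimit with an item cap (blockCacheItems) plus a memory budget (blockCacheMemory), tracked through an exponential moving average of recent block sizes weighted by blockCacheSizeWeight; resultSlots then caps reservations by whichever limit is tighter. A self-contained sketch of that sizing logic, with illustrative helper names and numbers that are not part of the patch:

package main

import "fmt"

const (
	blockCacheItems      = 8192             // item cap, as in the patch
	blockCacheMemory     = 64 * 1024 * 1024 // memory cap in bytes, as in the patch
	blockCacheSizeWeight = 0.1              // EMA weight for new size samples, as in the patch
)

// resultSlots returns the free slots once the memory budget shrinks the usable
// window and finished-but-undelivered plus in-flight items are subtracted.
func resultSlots(avgBlockSize float64, finished, pending int) int {
	limit := blockCacheItems
	if float64(limit)*avgBlockSize > blockCacheMemory {
		limit = int((blockCacheMemory + avgBlockSize - 1) / avgBlockSize)
	}
	return limit - finished - pending
}

// updateAverage is the exponential moving average used to estimate block size.
func updateAverage(avg, sample float64) float64 {
	return blockCacheSizeWeight*sample + (1-blockCacheSizeWeight)*avg
}

func main() {
	avg := updateAverage(20000, 40000) // e.g. a 40 KB block after a 20 KB average
	fmt.Println(resultSlots(avg, 100, 300), avg)
}
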
diff --git a/eth/downloader/statesync.go b/eth/downloader/statesync.go
index 937828b94..ee6c7b491 100644
--- a/eth/downloader/statesync.go
+++ b/eth/downloader/statesync.go
@@ -20,10 +20,10 @@ import (
"fmt"
"hash"
"sync"
- "sync/atomic"
"time"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/crypto/sha3"
"github.com/ethereum/go-ethereum/ethdb"
@@ -294,6 +294,9 @@ func (s *stateSync) loop() error {
case <-s.cancel:
return errCancelStateFetch
+ case <-s.d.cancelCh:
+ return errCancelStateFetch
+
case req := <-s.deliver:
// Response, disconnect or timeout triggered, drop the peer if stalling
log.Trace("Received node data response", "peer", req.peer.id, "count", len(req.response), "dropped", req.dropped, "timeout", !req.dropped && req.timedOut())
@@ -304,15 +307,11 @@ func (s *stateSync) loop() error {
s.d.dropPeer(req.peer.id)
}
// Process all the received blobs and check for stale delivery
- stale, err := s.process(req)
- if err != nil {
+ if err := s.process(req); err != nil {
log.Warn("Node data write error", "err", err)
return err
}
- // The the delivery contains requested data, mark the node idle (otherwise it's a timed out delivery)
- if !stale {
- req.peer.SetNodeDataIdle(len(req.response))
- }
+ req.peer.SetNodeDataIdle(len(req.response))
}
}
return s.commit(true)
@@ -352,6 +351,7 @@ func (s *stateSync) assignTasks() {
case s.d.trackStateReq <- req:
req.peer.FetchNodeData(req.items)
case <-s.cancel:
+ case <-s.d.cancelCh:
}
}
}
@@ -390,7 +390,7 @@ func (s *stateSync) fillTasks(n int, req *stateReq) {
// process iterates over a batch of delivered state data, injecting each item
// into a running state sync, re-queuing any items that were requested but not
// delivered.
-func (s *stateSync) process(req *stateReq) (bool, error) {
+func (s *stateSync) process(req *stateReq) error {
// Collect processing stats and update progress if valid data was received
duplicate, unexpected := 0, 0
@@ -401,7 +401,7 @@ func (s *stateSync) process(req *stateReq) (bool, error) {
}(time.Now())
// Iterate over all the delivered data and inject one-by-one into the trie
- progress, stale := false, len(req.response) > 0
+ progress := false
for _, blob := range req.response {
prog, hash, err := s.processNodeData(blob)
@@ -415,20 +415,12 @@ func (s *stateSync) process(req *stateReq) (bool, error) {
case trie.ErrAlreadyProcessed:
duplicate++
default:
- return stale, fmt.Errorf("invalid state node %s: %v", hash.TerminalString(), err)
+ return fmt.Errorf("invalid state node %s: %v", hash.TerminalString(), err)
}
- // If the node delivered a requested item, mark the delivery non-stale
if _, ok := req.tasks[hash]; ok {
delete(req.tasks, hash)
- stale = false
}
}
- // If we're inside the critical section, reset fail counter since we progressed.
- if progress && atomic.LoadUint32(&s.d.fsPivotFails) > 1 {
- log.Trace("Fast-sync progressed, resetting fail counter", "previous", atomic.LoadUint32(&s.d.fsPivotFails))
- atomic.StoreUint32(&s.d.fsPivotFails, 1) // Don't ever reset to 0, as that will unlock the pivot block
- }
-
// Put unfulfilled tasks back into the retry queue
npeers := s.d.peers.Len()
for hash, task := range req.tasks {
@@ -441,12 +433,12 @@ func (s *stateSync) process(req *stateReq) (bool, error) {
// If we've requested the node too many times already, it may be a malicious
// sync where nobody has the right data. Abort.
if len(task.attempts) >= npeers {
- return stale, fmt.Errorf("state node %s failed with all peers (%d tries, %d peers)", hash.TerminalString(), len(task.attempts), npeers)
+ return fmt.Errorf("state node %s failed with all peers (%d tries, %d peers)", hash.TerminalString(), len(task.attempts), npeers)
}
// Missing item, place into the retry queue.
s.tasks[hash] = task
}
- return stale, nil
+ return nil
}
// processNodeData tries to inject a trie node data blob delivered from a remote
@@ -475,4 +467,7 @@ func (s *stateSync) updateStats(written, duplicate, unexpected int, duration tim
if written > 0 || duplicate > 0 || unexpected > 0 {
log.Info("Imported new state entries", "count", written, "elapsed", common.PrettyDuration(duration), "processed", s.d.syncStatsState.processed, "pending", s.d.syncStatsState.pending, "retry", len(s.tasks), "duplicate", s.d.syncStatsState.duplicate, "unexpected", s.d.syncStatsState.unexpected)
}
+ if written > 0 {
+ core.WriteTrieSyncProgress(s.d.stateDB, s.d.syncStatsState.processed)
+ }
}
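
The statesync.go hunks add the downloader's cancelCh as a second cancellation source next to the sync's own cancel channel, so state retrieval unblocks when either is closed. A small self-contained sketch of selecting on two close-only cancel channels; all names below are illustrative:

package main

import "fmt"

// waitOrCancel blocks until work arrives or either cancel channel is closed,
// mirroring the extra `case <-s.d.cancelCh` added in the patch.
func waitOrCancel(work <-chan int, localCancel, parentCancel <-chan struct{}) (int, bool) {
	select {
	case v := <-work:
		return v, true
	case <-localCancel:
		return 0, false
	case <-parentCancel:
		return 0, false
	}
}

func main() {
	work := make(chan int, 1)
	parent := make(chan struct{})
	close(parent) // parent (downloader-level) cancellation wins even if no work arrives
	v, ok := waitOrCancel(work, make(chan struct{}), parent)
	fmt.Println(v, ok)
}
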
diff --git a/eth/fetcher/fetcher.go b/eth/fetcher/fetcher.go
index 50966f5ee..db554e144 100644
--- a/eth/fetcher/fetcher.go
+++ b/eth/fetcher/fetcher.go
@@ -633,7 +633,7 @@ func (f *Fetcher) enqueue(peer string, block *types.Block) {
}
// insert spawns a new goroutine to run a block insertion into the chain. If the
-// block's number is at the same height as the current import phase, if updates
+// block's number is at the same height as the current import phase, it updates
// the phase states accordingly.
func (f *Fetcher) insert(peer string, block *types.Block) {
hash := block.Hash()
diff --git a/eth/fetcher/metrics.go b/eth/fetcher/metrics.go
index 1ed8075bf..d68d12f00 100644
--- a/eth/fetcher/metrics.go
+++ b/eth/fetcher/metrics.go
@@ -23,21 +23,21 @@ import (
)
var (
- propAnnounceInMeter = metrics.NewMeter("eth/fetcher/prop/announces/in")
- propAnnounceOutTimer = metrics.NewTimer("eth/fetcher/prop/announces/out")
- propAnnounceDropMeter = metrics.NewMeter("eth/fetcher/prop/announces/drop")
- propAnnounceDOSMeter = metrics.NewMeter("eth/fetcher/prop/announces/dos")
+ propAnnounceInMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/announces/in", nil)
+ propAnnounceOutTimer = metrics.NewRegisteredTimer("eth/fetcher/prop/announces/out", nil)
+ propAnnounceDropMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/announces/drop", nil)
+ propAnnounceDOSMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/announces/dos", nil)
- propBroadcastInMeter = metrics.NewMeter("eth/fetcher/prop/broadcasts/in")
- propBroadcastOutTimer = metrics.NewTimer("eth/fetcher/prop/broadcasts/out")
- propBroadcastDropMeter = metrics.NewMeter("eth/fetcher/prop/broadcasts/drop")
- propBroadcastDOSMeter = metrics.NewMeter("eth/fetcher/prop/broadcasts/dos")
+ propBroadcastInMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/broadcasts/in", nil)
+ propBroadcastOutTimer = metrics.NewRegisteredTimer("eth/fetcher/prop/broadcasts/out", nil)
+ propBroadcastDropMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/broadcasts/drop", nil)
+ propBroadcastDOSMeter = metrics.NewRegisteredMeter("eth/fetcher/prop/broadcasts/dos", nil)
- headerFetchMeter = metrics.NewMeter("eth/fetcher/fetch/headers")
- bodyFetchMeter = metrics.NewMeter("eth/fetcher/fetch/bodies")
+ headerFetchMeter = metrics.NewRegisteredMeter("eth/fetcher/fetch/headers", nil)
+ bodyFetchMeter = metrics.NewRegisteredMeter("eth/fetcher/fetch/bodies", nil)
- headerFilterInMeter = metrics.NewMeter("eth/fetcher/filter/headers/in")
- headerFilterOutMeter = metrics.NewMeter("eth/fetcher/filter/headers/out")
- bodyFilterInMeter = metrics.NewMeter("eth/fetcher/filter/bodies/in")
- bodyFilterOutMeter = metrics.NewMeter("eth/fetcher/filter/bodies/out")
+ headerFilterInMeter = metrics.NewRegisteredMeter("eth/fetcher/filter/headers/in", nil)
+ headerFilterOutMeter = metrics.NewRegisteredMeter("eth/fetcher/filter/headers/out", nil)
+ bodyFilterInMeter = metrics.NewRegisteredMeter("eth/fetcher/filter/bodies/in", nil)
+ bodyFilterOutMeter = metrics.NewRegisteredMeter("eth/fetcher/filter/bodies/out", nil)
)
diff --git a/eth/filters/api.go b/eth/filters/api.go
index 03c1d6afc..406c9442e 100644
--- a/eth/filters/api.go
+++ b/eth/filters/api.go
@@ -25,6 +25,7 @@ import (
"sync"
"time"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/core/types"
@@ -240,7 +241,7 @@ func (api *PublicFilterAPI) Logs(ctx context.Context, crit FilterCriteria) (*rpc
matchedLogs = make(chan []*types.Log)
)
- logsSub, err := api.events.SubscribeLogs(crit, matchedLogs)
+ logsSub, err := api.events.SubscribeLogs(ethereum.FilterQuery(crit), matchedLogs)
if err != nil {
return nil, err
}
@@ -267,6 +268,8 @@ func (api *PublicFilterAPI) Logs(ctx context.Context, crit FilterCriteria) (*rpc
}
// FilterCriteria represents a request to create a new filter.
+//
+// TODO(karalabe): Kill this in favor of ethereum.FilterQuery.
type FilterCriteria struct {
FromBlock *big.Int
ToBlock *big.Int
@@ -289,7 +292,7 @@ type FilterCriteria struct {
// https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_newfilter
func (api *PublicFilterAPI) NewFilter(crit FilterCriteria) (rpc.ID, error) {
logs := make(chan []*types.Log)
- logsSub, err := api.events.SubscribeLogs(crit, logs)
+ logsSub, err := api.events.SubscribeLogs(ethereum.FilterQuery(crit), logs)
if err != nil {
return rpc.ID(""), err
}
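
The api.go changes convert FilterCriteria values to ethereum.FilterQuery with a plain Go type conversion, which is legal because the two structs share an identical field layout. A self-contained illustration of that language rule using local stand-in types rather than the real ones:

package main

import (
	"fmt"
	"math/big"
)

// Two distinct named types with identical underlying struct layouts.
type localCriteria struct {
	FromBlock *big.Int
	ToBlock   *big.Int
	Addresses []string
}

type localQuery struct {
	FromBlock *big.Int
	ToBlock   *big.Int
	Addresses []string
}

func main() {
	crit := localCriteria{FromBlock: big.NewInt(2), Addresses: []string{"0xabc"}}
	// The conversion compiles because the underlying types are identical, which is
	// exactly what ethereum.FilterQuery(crit) relies on in the hunks above.
	q := localQuery(crit)
	fmt.Println(q.FromBlock, q.Addresses)
}
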
diff --git a/eth/filters/filter.go b/eth/filters/filter.go
index 43d7e2a81..5dfe60e77 100644
--- a/eth/filters/filter.go
+++ b/eth/filters/filter.go
@@ -34,6 +34,7 @@ type Backend interface {
EventMux() *event.TypeMux
HeaderByNumber(ctx context.Context, blockNr rpc.BlockNumber) (*types.Header, error)
GetReceipts(ctx context.Context, blockHash common.Hash) (types.Receipts, error)
+ GetLogs(ctx context.Context, blockHash common.Hash) ([][]*types.Log, error)
SubscribeTxPreEvent(chan<- core.TxPreEvent) event.Subscription
SubscribeChainEvent(ch chan<- core.ChainEvent) event.Subscription
@@ -201,16 +202,28 @@ func (f *Filter) unindexedLogs(ctx context.Context, end uint64) ([]*types.Log, e
// match the filter criteria. This function is called when the bloom filter signals a potential match.
func (f *Filter) checkMatches(ctx context.Context, header *types.Header) (logs []*types.Log, err error) {
// Get the logs of the block
- receipts, err := f.backend.GetReceipts(ctx, header.Hash())
+ logsList, err := f.backend.GetLogs(ctx, header.Hash())
if err != nil {
return nil, err
}
var unfiltered []*types.Log
- for _, receipt := range receipts {
- unfiltered = append(unfiltered, receipt.Logs...)
+ for _, logs := range logsList {
+ unfiltered = append(unfiltered, logs...)
}
logs = filterLogs(unfiltered, nil, nil, f.addresses, f.topics)
if len(logs) > 0 {
+ // We have matching logs, check if we need to resolve full logs via the light client
+ if logs[0].TxHash == (common.Hash{}) {
+ receipts, err := f.backend.GetReceipts(ctx, header.Hash())
+ if err != nil {
+ return nil, err
+ }
+ unfiltered = unfiltered[:0]
+ for _, receipt := range receipts {
+ unfiltered = append(unfiltered, receipt.Logs...)
+ }
+ logs = filterLogs(unfiltered, nil, nil, f.addresses, f.topics)
+ }
return logs, nil
}
return nil, nil
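
checkMatches now pulls per-block logs through the new Backend.GetLogs method and only falls back to GetReceipts when the matched logs lack derived fields such as TxHash. A hedged sketch of how a backend might satisfy GetLogs by flattening stored receipts, mirroring the test backend further down in this diff; the receipt and log types below are placeholders, not core/types:

package main

import "fmt"

type log struct{ Data string }
type receipt struct{ Logs []*log }

// getLogs groups logs per transaction: one slice of logs for each receipt in
// the block, which is the shape Backend.GetLogs is expected to return.
func getLogs(receipts []*receipt) [][]*log {
	logs := make([][]*log, len(receipts))
	for i, r := range receipts {
		logs[i] = r.Logs
	}
	return logs
}

func main() {
	rs := []*receipt{{Logs: []*log{{Data: "a"}}}, {Logs: nil}}
	fmt.Println(len(getLogs(rs))) // one log slice per receipt
}
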
diff --git a/eth/filters/filter_system.go b/eth/filters/filter_system.go
index e08cedb27..f8097c7b9 100644
--- a/eth/filters/filter_system.go
+++ b/eth/filters/filter_system.go
@@ -25,6 +25,7 @@ import (
"sync"
"time"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
@@ -75,7 +76,7 @@ type subscription struct {
id rpc.ID
typ Type
created time.Time
- logsCrit FilterCriteria
+ logsCrit ethereum.FilterQuery
logs chan []*types.Log
hashes chan common.Hash
headers chan *types.Header
@@ -162,7 +163,7 @@ func (es *EventSystem) subscribe(sub *subscription) *Subscription {
// SubscribeLogs creates a subscription that will write all logs matching the
// given criteria to the given logs channel. Default value for the from and to
// block is "latest". If the fromBlock > toBlock an error is returned.
-func (es *EventSystem) SubscribeLogs(crit FilterCriteria, logs chan []*types.Log) (*Subscription, error) {
+func (es *EventSystem) SubscribeLogs(crit ethereum.FilterQuery, logs chan []*types.Log) (*Subscription, error) {
var from, to rpc.BlockNumber
if crit.FromBlock == nil {
from = rpc.LatestBlockNumber
@@ -200,7 +201,7 @@ func (es *EventSystem) SubscribeLogs(crit FilterCriteria, logs chan []*types.Log
// subscribeMinedPendingLogs creates a subscription that returns mined and
// pending logs that match the given criteria.
-func (es *EventSystem) subscribeMinedPendingLogs(crit FilterCriteria, logs chan []*types.Log) *Subscription {
+func (es *EventSystem) subscribeMinedPendingLogs(crit ethereum.FilterQuery, logs chan []*types.Log) *Subscription {
sub := &subscription{
id: rpc.NewID(),
typ: MinedAndPendingLogsSubscription,
@@ -217,7 +218,7 @@ func (es *EventSystem) subscribeMinedPendingLogs(crit FilterCriteria, logs chan
// subscribeLogs creates a subscription that will write all logs matching the
// given criteria to the given logs channel.
-func (es *EventSystem) subscribeLogs(crit FilterCriteria, logs chan []*types.Log) *Subscription {
+func (es *EventSystem) subscribeLogs(crit ethereum.FilterQuery, logs chan []*types.Log) *Subscription {
sub := &subscription{
id: rpc.NewID(),
typ: LogsSubscription,
@@ -234,7 +235,7 @@ func (es *EventSystem) subscribeLogs(crit FilterCriteria, logs chan []*types.Log
// subscribePendingLogs creates a subscription that writes logs for transactions
// that enter the transaction pool.
-func (es *EventSystem) subscribePendingLogs(crit FilterCriteria, logs chan []*types.Log) *Subscription {
+func (es *EventSystem) subscribePendingLogs(crit ethereum.FilterQuery, logs chan []*types.Log) *Subscription {
sub := &subscription{
id: rpc.NewID(),
typ: PendingLogsSubscription,
@@ -374,19 +375,35 @@ func (es *EventSystem) lightFilterLogs(header *types.Header, addresses []common.
// Get the logs of the block
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
- receipts, err := es.backend.GetReceipts(ctx, header.Hash())
+ logsList, err := es.backend.GetLogs(ctx, header.Hash())
if err != nil {
return nil
}
var unfiltered []*types.Log
- for _, receipt := range receipts {
- for _, log := range receipt.Logs {
+ for _, logs := range logsList {
+ for _, log := range logs {
logcopy := *log
logcopy.Removed = remove
unfiltered = append(unfiltered, &logcopy)
}
}
logs := filterLogs(unfiltered, nil, nil, addresses, topics)
+ if len(logs) > 0 && logs[0].TxHash == (common.Hash{}) {
+ // We have matching but non-derived logs
+ receipts, err := es.backend.GetReceipts(ctx, header.Hash())
+ if err != nil {
+ return nil
+ }
+ unfiltered = unfiltered[:0]
+ for _, receipt := range receipts {
+ for _, log := range receipt.Logs {
+ logcopy := *log
+ logcopy.Removed = remove
+ unfiltered = append(unfiltered, &logcopy)
+ }
+ }
+ logs = filterLogs(unfiltered, nil, nil, addresses, topics)
+ }
return logs
}
return nil
diff --git a/eth/filters/filter_system_test.go b/eth/filters/filter_system_test.go
index dd6d4433d..61761151a 100644
--- a/eth/filters/filter_system_test.go
+++ b/eth/filters/filter_system_test.go
@@ -25,6 +25,7 @@ import (
"testing"
"time"
+ ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/consensus/ethash"
"github.com/ethereum/go-ethereum/core"
@@ -68,8 +69,19 @@ func (b *testBackend) HeaderByNumber(ctx context.Context, blockNr rpc.BlockNumbe
}
func (b *testBackend) GetReceipts(ctx context.Context, blockHash common.Hash) (types.Receipts, error) {
- num := core.GetBlockNumber(b.db, blockHash)
- return core.GetBlockReceipts(b.db, blockHash, num), nil
+ number := core.GetBlockNumber(b.db, blockHash)
+ return core.GetBlockReceipts(b.db, blockHash, number), nil
+}
+
+func (b *testBackend) GetLogs(ctx context.Context, blockHash common.Hash) ([][]*types.Log, error) {
+ number := core.GetBlockNumber(b.db, blockHash)
+ receipts := core.GetBlockReceipts(b.db, blockHash, number)
+
+ logs := make([][]*types.Log, len(receipts))
+ for i, receipt := range receipts {
+ logs[i] = receipt.Logs
+ }
+ return logs, nil
}
func (b *testBackend) SubscribeTxPreEvent(ch chan<- core.TxPreEvent) event.Subscription {
@@ -488,27 +500,27 @@ func TestPendingLogsSubscription(t *testing.T) {
}
testCases = []struct {
- crit FilterCriteria
+ crit ethereum.FilterQuery
expected []*types.Log
c chan []*types.Log
sub *Subscription
}{
// match all
- {FilterCriteria{}, convertLogs(allLogs), nil, nil},
+ {ethereum.FilterQuery{}, convertLogs(allLogs), nil, nil},
// match none due to no matching addresses
- {FilterCriteria{Addresses: []common.Address{{}, notUsedAddress}, Topics: [][]common.Hash{nil}}, []*types.Log{}, nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{{}, notUsedAddress}, Topics: [][]common.Hash{nil}}, []*types.Log{}, nil, nil},
// match logs based on addresses, ignore topics
- {FilterCriteria{Addresses: []common.Address{firstAddr}}, append(convertLogs(allLogs[:2]), allLogs[5].Logs[3]), nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{firstAddr}}, append(convertLogs(allLogs[:2]), allLogs[5].Logs[3]), nil, nil},
// match none due to no matching topics (match with address)
- {FilterCriteria{Addresses: []common.Address{secondAddr}, Topics: [][]common.Hash{{notUsedTopic}}}, []*types.Log{}, nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{secondAddr}, Topics: [][]common.Hash{{notUsedTopic}}}, []*types.Log{}, nil, nil},
// match logs based on addresses and topics
- {FilterCriteria{Addresses: []common.Address{thirdAddress}, Topics: [][]common.Hash{{firstTopic, secondTopic}}}, append(convertLogs(allLogs[3:5]), allLogs[5].Logs[0]), nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{thirdAddress}, Topics: [][]common.Hash{{firstTopic, secondTopic}}}, append(convertLogs(allLogs[3:5]), allLogs[5].Logs[0]), nil, nil},
// match logs based on multiple addresses and "or" topics
- {FilterCriteria{Addresses: []common.Address{secondAddr, thirdAddress}, Topics: [][]common.Hash{{firstTopic, secondTopic}}}, append(convertLogs(allLogs[2:5]), allLogs[5].Logs[0]), nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{secondAddr, thirdAddress}, Topics: [][]common.Hash{{firstTopic, secondTopic}}}, append(convertLogs(allLogs[2:5]), allLogs[5].Logs[0]), nil, nil},
// block numbers are ignored for filters created with New***Filter, these return all logs that match the given criteria when the state changes
- {FilterCriteria{Addresses: []common.Address{firstAddr}, FromBlock: big.NewInt(2), ToBlock: big.NewInt(3)}, append(convertLogs(allLogs[:2]), allLogs[5].Logs[3]), nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{firstAddr}, FromBlock: big.NewInt(2), ToBlock: big.NewInt(3)}, append(convertLogs(allLogs[:2]), allLogs[5].Logs[3]), nil, nil},
// multiple pending logs, should match only 2 topics from the logs in block 5
- {FilterCriteria{Addresses: []common.Address{thirdAddress}, Topics: [][]common.Hash{{firstTopic, fourthTopic}}}, []*types.Log{allLogs[5].Logs[0], allLogs[5].Logs[2]}, nil, nil},
+ {ethereum.FilterQuery{Addresses: []common.Address{thirdAddress}, Topics: [][]common.Hash{{firstTopic, fourthTopic}}}, []*types.Log{allLogs[5].Logs[0], allLogs[5].Logs[2]}, nil, nil},
}
)
diff --git a/eth/handler.go b/eth/handler.go
index 074cffd96..3fae0cd00 100644
--- a/eth/handler.go
+++ b/eth/handler.go
@@ -71,7 +71,6 @@ type ProtocolManager struct {
txpool txPool
blockchain *core.BlockChain
- chaindb ethdb.Database
chainconfig *params.ChainConfig
maxPeers int
@@ -106,7 +105,6 @@ func NewProtocolManager(config *params.ChainConfig, mode downloader.SyncMode, ne
eventMux: mux,
txpool: txpool,
blockchain: blockchain,
- chaindb: chaindb,
chainconfig: config,
peers: newPeerSet(),
newPeerCh: make(chan *peer),
@@ -251,14 +249,21 @@ func (pm *ProtocolManager) newPeer(pv int, p *p2p.Peer, rw p2p.MsgReadWriter) *p
// handle is the callback invoked to manage the life cycle of an eth peer. When
// this function terminates, the peer is disconnected.
func (pm *ProtocolManager) handle(p *peer) error {
- if pm.peers.Len() >= pm.maxPeers {
+ // Ignore maxPeers if this is a trusted peer
+ if pm.peers.Len() >= pm.maxPeers && !p.Peer.Info().Network.Trusted {
return p2p.DiscTooManyPeers
}
p.Log().Debug("Ethereum peer connected", "name", p.Name())
// Execute the Ethereum handshake
- td, head, genesis := pm.blockchain.Status()
- if err := p.Handshake(pm.networkId, td, head, genesis); err != nil {
+ var (
+ genesis = pm.blockchain.Genesis()
+ head = pm.blockchain.CurrentHeader()
+ hash = head.Hash()
+ number = head.Number.Uint64()
+ td = pm.blockchain.GetTd(hash, number)
+ )
+ if err := p.Handshake(pm.networkId, td, hash, genesis.Hash()); err != nil {
p.Log().Debug("Ethereum handshake failed", "err", err)
return err
}
@@ -532,7 +537,7 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
// Retrieve the requested state entry, stopping if enough was found
- if entry, err := pm.chaindb.Get(hash.Bytes()); err == nil {
+ if entry, err := pm.blockchain.TrieNode(hash); err == nil {
data = append(data, entry)
bytes += len(entry)
}
@@ -570,7 +575,7 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
// Retrieve the requested block's receipts, skipping if unknown to us
- results := core.GetBlockReceipts(pm.chaindb, hash, core.GetBlockNumber(pm.chaindb, hash))
+ results := pm.blockchain.GetReceiptsByHash(hash)
if results == nil {
if header := pm.blockchain.GetHeaderByHash(hash); header == nil || header.ReceiptHash != types.EmptyRootHash {
continue
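
The handle change above admits trusted peers even when the peer cap is reached. A tiny sketch of that gate; in the real code the flag comes from p.Peer.Info().Network.Trusted:

package main

import "fmt"

// acceptPeer mirrors the new cap check: reject only when the slots are full and
// the peer is not marked trusted.
func acceptPeer(current, max int, trusted bool) bool {
	return current < max || trusted
}

func main() {
	fmt.Println(acceptPeer(50, 50, false)) // false: full and untrusted
	fmt.Println(acceptPeer(50, 50, true))  // true: trusted peers bypass the cap
}
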
diff --git a/eth/handler_test.go b/eth/handler_test.go
index 9a02eddfb..e336dfa28 100644
--- a/eth/handler_test.go
+++ b/eth/handler_test.go
@@ -56,7 +56,7 @@ func TestProtocolCompatibility(t *testing.T) {
for i, tt := range tests {
ProtocolVersions = []uint{tt.version}
- pm, err := newTestProtocolManager(tt.mode, 0, nil, nil)
+ pm, _, err := newTestProtocolManager(tt.mode, 0, nil, nil)
if pm != nil {
defer pm.Stop()
}
@@ -71,7 +71,7 @@ func TestGetBlockHeaders62(t *testing.T) { testGetBlockHeaders(t, 62) }
func TestGetBlockHeaders63(t *testing.T) { testGetBlockHeaders(t, 63) }
func testGetBlockHeaders(t *testing.T, protocol int) {
- pm := newTestProtocolManagerMust(t, downloader.FullSync, downloader.MaxHashFetch+15, nil, nil)
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, downloader.MaxHashFetch+15, nil, nil)
peer, _ := newTestPeer("peer", protocol, pm, true)
defer peer.close()
@@ -230,7 +230,7 @@ func TestGetBlockBodies62(t *testing.T) { testGetBlockBodies(t, 62) }
func TestGetBlockBodies63(t *testing.T) { testGetBlockBodies(t, 63) }
func testGetBlockBodies(t *testing.T, protocol int) {
- pm := newTestProtocolManagerMust(t, downloader.FullSync, downloader.MaxBlockFetch+15, nil, nil)
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, downloader.MaxBlockFetch+15, nil, nil)
peer, _ := newTestPeer("peer", protocol, pm, true)
defer peer.close()
@@ -337,13 +337,13 @@ func testGetNodeData(t *testing.T, protocol int) {
}
}
// Assemble the test environment
- pm := newTestProtocolManagerMust(t, downloader.FullSync, 4, generator, nil)
+ pm, db := newTestProtocolManagerMust(t, downloader.FullSync, 4, generator, nil)
peer, _ := newTestPeer("peer", protocol, pm, true)
defer peer.close()
// Fetch for now the entire chain db
hashes := []common.Hash{}
- for _, key := range pm.chaindb.(*ethdb.MemDatabase).Keys() {
+ for _, key := range db.Keys() {
if len(key) == len(common.Hash{}) {
hashes = append(hashes, common.BytesToHash(key))
}
@@ -429,7 +429,7 @@ func testGetReceipt(t *testing.T, protocol int) {
}
}
// Assemble the test environment
- pm := newTestProtocolManagerMust(t, downloader.FullSync, 4, generator, nil)
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, 4, generator, nil)
peer, _ := newTestPeer("peer", protocol, pm, true)
defer peer.close()
@@ -439,7 +439,7 @@ func testGetReceipt(t *testing.T, protocol int) {
block := pm.blockchain.GetBlockByNumber(i)
hashes = append(hashes, block.Hash())
- receipts = append(receipts, core.GetBlockReceipts(pm.chaindb, block.Hash(), block.NumberU64()))
+ receipts = append(receipts, pm.blockchain.GetReceiptsByHash(block.Hash()))
}
// Send the hash request and verify the response
p2p.Send(peer.app, 0x0f, hashes)
@@ -472,7 +472,7 @@ func testDAOChallenge(t *testing.T, localForked, remoteForked bool, timeout bool
config = &params.ChainConfig{DAOForkBlock: big.NewInt(1), DAOForkSupport: localForked}
gspec = &core.Genesis{Config: config}
genesis = gspec.MustCommit(db)
- blockchain, _ = core.NewBlockChain(db, config, pow, vm.Config{})
+ blockchain, _ = core.NewBlockChain(db, nil, config, pow, vm.Config{})
)
pm, err := NewProtocolManager(config, downloader.FullSync, DefaultConfig.NetworkId, evmux, new(testTxPool), pow, blockchain, db)
if err != nil {
diff --git a/eth/helper_test.go b/eth/helper_test.go
index d44574b86..2b05cea80 100644
--- a/eth/helper_test.go
+++ b/eth/helper_test.go
@@ -49,7 +49,7 @@ var (
// newTestProtocolManager creates a new protocol manager for testing purposes,
// with the given number of blocks already known, and potential notification
// channels for different events.
-func newTestProtocolManager(mode downloader.SyncMode, blocks int, generator func(int, *core.BlockGen), newtx chan<- []*types.Transaction) (*ProtocolManager, error) {
+func newTestProtocolManager(mode downloader.SyncMode, blocks int, generator func(int, *core.BlockGen), newtx chan<- []*types.Transaction) (*ProtocolManager, *ethdb.MemDatabase, error) {
var (
evmux = new(event.TypeMux)
engine = ethash.NewFaker()
@@ -59,7 +59,7 @@ func newTestProtocolManager(mode downloader.SyncMode, blocks int, generator func
Alloc: core.GenesisAlloc{testBank: {Balance: big.NewInt(1000000)}},
}
genesis = gspec.MustCommit(db)
- blockchain, _ = core.NewBlockChain(db, gspec.Config, engine, vm.Config{})
+ blockchain, _ = core.NewBlockChain(db, nil, gspec.Config, engine, vm.Config{})
)
chain, _ := core.GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, blocks, generator)
if _, err := blockchain.InsertChain(chain); err != nil {
@@ -68,22 +68,22 @@ func newTestProtocolManager(mode downloader.SyncMode, blocks int, generator func
pm, err := NewProtocolManager(gspec.Config, mode, DefaultConfig.NetworkId, evmux, &testTxPool{added: newtx}, engine, blockchain, db)
if err != nil {
- return nil, err
+ return nil, nil, err
}
pm.Start(1000)
- return pm, nil
+ return pm, db, nil
}
// newTestProtocolManagerMust creates a new protocol manager for testing purposes,
// with the given number of blocks already known, and potential notification
// channels for different events. In case of an error, the constructor force-
// fails the test.
-func newTestProtocolManagerMust(t *testing.T, mode downloader.SyncMode, blocks int, generator func(int, *core.BlockGen), newtx chan<- []*types.Transaction) *ProtocolManager {
- pm, err := newTestProtocolManager(mode, blocks, generator, newtx)
+func newTestProtocolManagerMust(t *testing.T, mode downloader.SyncMode, blocks int, generator func(int, *core.BlockGen), newtx chan<- []*types.Transaction) (*ProtocolManager, *ethdb.MemDatabase) {
+ pm, db, err := newTestProtocolManager(mode, blocks, generator, newtx)
if err != nil {
t.Fatalf("Failed to create protocol manager: %v", err)
}
- return pm
+ return pm, db
}
// testTxPool is a fake, helper transaction pool for testing purposes
@@ -166,8 +166,12 @@ func newTestPeer(name string, version int, pm *ProtocolManager, shake bool) (*te
tp := &testPeer{app: app, net: net, peer: peer}
// Execute any implicitly requested handshakes and return
if shake {
- td, head, genesis := pm.blockchain.Status()
- tp.handshake(nil, td, head, genesis)
+ var (
+ genesis = pm.blockchain.Genesis()
+ head = pm.blockchain.CurrentHeader()
+ td = pm.blockchain.GetTd(head.Hash(), head.Number.Uint64())
+ )
+ tp.handshake(nil, td, head.Hash(), genesis.Hash())
}
return tp, errc
}
diff --git a/eth/metrics.go b/eth/metrics.go
index 5fa2597d4..0533a2a87 100644
--- a/eth/metrics.go
+++ b/eth/metrics.go
@@ -22,38 +22,38 @@ import (
)
var (
- propTxnInPacketsMeter = metrics.NewMeter("eth/prop/txns/in/packets")
- propTxnInTrafficMeter = metrics.NewMeter("eth/prop/txns/in/traffic")
- propTxnOutPacketsMeter = metrics.NewMeter("eth/prop/txns/out/packets")
- propTxnOutTrafficMeter = metrics.NewMeter("eth/prop/txns/out/traffic")
- propHashInPacketsMeter = metrics.NewMeter("eth/prop/hashes/in/packets")
- propHashInTrafficMeter = metrics.NewMeter("eth/prop/hashes/in/traffic")
- propHashOutPacketsMeter = metrics.NewMeter("eth/prop/hashes/out/packets")
- propHashOutTrafficMeter = metrics.NewMeter("eth/prop/hashes/out/traffic")
- propBlockInPacketsMeter = metrics.NewMeter("eth/prop/blocks/in/packets")
- propBlockInTrafficMeter = metrics.NewMeter("eth/prop/blocks/in/traffic")
- propBlockOutPacketsMeter = metrics.NewMeter("eth/prop/blocks/out/packets")
- propBlockOutTrafficMeter = metrics.NewMeter("eth/prop/blocks/out/traffic")
- reqHeaderInPacketsMeter = metrics.NewMeter("eth/req/headers/in/packets")
- reqHeaderInTrafficMeter = metrics.NewMeter("eth/req/headers/in/traffic")
- reqHeaderOutPacketsMeter = metrics.NewMeter("eth/req/headers/out/packets")
- reqHeaderOutTrafficMeter = metrics.NewMeter("eth/req/headers/out/traffic")
- reqBodyInPacketsMeter = metrics.NewMeter("eth/req/bodies/in/packets")
- reqBodyInTrafficMeter = metrics.NewMeter("eth/req/bodies/in/traffic")
- reqBodyOutPacketsMeter = metrics.NewMeter("eth/req/bodies/out/packets")
- reqBodyOutTrafficMeter = metrics.NewMeter("eth/req/bodies/out/traffic")
- reqStateInPacketsMeter = metrics.NewMeter("eth/req/states/in/packets")
- reqStateInTrafficMeter = metrics.NewMeter("eth/req/states/in/traffic")
- reqStateOutPacketsMeter = metrics.NewMeter("eth/req/states/out/packets")
- reqStateOutTrafficMeter = metrics.NewMeter("eth/req/states/out/traffic")
- reqReceiptInPacketsMeter = metrics.NewMeter("eth/req/receipts/in/packets")
- reqReceiptInTrafficMeter = metrics.NewMeter("eth/req/receipts/in/traffic")
- reqReceiptOutPacketsMeter = metrics.NewMeter("eth/req/receipts/out/packets")
- reqReceiptOutTrafficMeter = metrics.NewMeter("eth/req/receipts/out/traffic")
- miscInPacketsMeter = metrics.NewMeter("eth/misc/in/packets")
- miscInTrafficMeter = metrics.NewMeter("eth/misc/in/traffic")
- miscOutPacketsMeter = metrics.NewMeter("eth/misc/out/packets")
- miscOutTrafficMeter = metrics.NewMeter("eth/misc/out/traffic")
+ propTxnInPacketsMeter = metrics.NewRegisteredMeter("eth/prop/txns/in/packets", nil)
+ propTxnInTrafficMeter = metrics.NewRegisteredMeter("eth/prop/txns/in/traffic", nil)
+ propTxnOutPacketsMeter = metrics.NewRegisteredMeter("eth/prop/txns/out/packets", nil)
+ propTxnOutTrafficMeter = metrics.NewRegisteredMeter("eth/prop/txns/out/traffic", nil)
+ propHashInPacketsMeter = metrics.NewRegisteredMeter("eth/prop/hashes/in/packets", nil)
+ propHashInTrafficMeter = metrics.NewRegisteredMeter("eth/prop/hashes/in/traffic", nil)
+ propHashOutPacketsMeter = metrics.NewRegisteredMeter("eth/prop/hashes/out/packets", nil)
+ propHashOutTrafficMeter = metrics.NewRegisteredMeter("eth/prop/hashes/out/traffic", nil)
+ propBlockInPacketsMeter = metrics.NewRegisteredMeter("eth/prop/blocks/in/packets", nil)
+ propBlockInTrafficMeter = metrics.NewRegisteredMeter("eth/prop/blocks/in/traffic", nil)
+ propBlockOutPacketsMeter = metrics.NewRegisteredMeter("eth/prop/blocks/out/packets", nil)
+ propBlockOutTrafficMeter = metrics.NewRegisteredMeter("eth/prop/blocks/out/traffic", nil)
+ reqHeaderInPacketsMeter = metrics.NewRegisteredMeter("eth/req/headers/in/packets", nil)
+ reqHeaderInTrafficMeter = metrics.NewRegisteredMeter("eth/req/headers/in/traffic", nil)
+ reqHeaderOutPacketsMeter = metrics.NewRegisteredMeter("eth/req/headers/out/packets", nil)
+ reqHeaderOutTrafficMeter = metrics.NewRegisteredMeter("eth/req/headers/out/traffic", nil)
+ reqBodyInPacketsMeter = metrics.NewRegisteredMeter("eth/req/bodies/in/packets", nil)
+ reqBodyInTrafficMeter = metrics.NewRegisteredMeter("eth/req/bodies/in/traffic", nil)
+ reqBodyOutPacketsMeter = metrics.NewRegisteredMeter("eth/req/bodies/out/packets", nil)
+ reqBodyOutTrafficMeter = metrics.NewRegisteredMeter("eth/req/bodies/out/traffic", nil)
+ reqStateInPacketsMeter = metrics.NewRegisteredMeter("eth/req/states/in/packets", nil)
+ reqStateInTrafficMeter = metrics.NewRegisteredMeter("eth/req/states/in/traffic", nil)
+ reqStateOutPacketsMeter = metrics.NewRegisteredMeter("eth/req/states/out/packets", nil)
+ reqStateOutTrafficMeter = metrics.NewRegisteredMeter("eth/req/states/out/traffic", nil)
+ reqReceiptInPacketsMeter = metrics.NewRegisteredMeter("eth/req/receipts/in/packets", nil)
+ reqReceiptInTrafficMeter = metrics.NewRegisteredMeter("eth/req/receipts/in/traffic", nil)
+ reqReceiptOutPacketsMeter = metrics.NewRegisteredMeter("eth/req/receipts/out/packets", nil)
+ reqReceiptOutTrafficMeter = metrics.NewRegisteredMeter("eth/req/receipts/out/traffic", nil)
+ miscInPacketsMeter = metrics.NewRegisteredMeter("eth/misc/in/packets", nil)
+ miscInTrafficMeter = metrics.NewRegisteredMeter("eth/misc/in/traffic", nil)
+ miscOutPacketsMeter = metrics.NewRegisteredMeter("eth/misc/out/packets", nil)
+ miscOutTrafficMeter = metrics.NewRegisteredMeter("eth/misc/out/traffic", nil)
)
// meteredMsgReadWriter is a wrapper around a p2p.MsgReadWriter, capable of
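A minimal sketch of how the new constructor above is typically used, assuming (as the nil second argument suggests) that metrics.NewRegisteredMeter falls back to metrics.DefaultRegistry when given a nil registry:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/metrics"
)

func main() {
	// NewRegisteredMeter both creates the meter and registers it; with a nil
	// registry it is assumed to land in metrics.DefaultRegistry, which is the
	// registry the reporting code iterates over.
	m := metrics.NewRegisteredMeter("example/in/packets", nil)
	m.Mark(1)

	// Walk the default registry to confirm the meter is visible to reporters.
	metrics.DefaultRegistry.Each(func(name string, i interface{}) {
		fmt.Println("registered:", name)
	})
}
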
diff --git a/eth/protocol_test.go b/eth/protocol_test.go
index d3a44ae91..b2f93d8dd 100644
--- a/eth/protocol_test.go
+++ b/eth/protocol_test.go
@@ -41,8 +41,12 @@ func TestStatusMsgErrors62(t *testing.T) { testStatusMsgErrors(t, 62) }
func TestStatusMsgErrors63(t *testing.T) { testStatusMsgErrors(t, 63) }
func testStatusMsgErrors(t *testing.T, protocol int) {
- pm := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, nil)
- td, currentBlock, genesis := pm.blockchain.Status()
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, nil)
+ var (
+ genesis = pm.blockchain.Genesis()
+ head = pm.blockchain.CurrentHeader()
+ td = pm.blockchain.GetTd(head.Hash(), head.Number.Uint64())
+ )
defer pm.Stop()
tests := []struct {
@@ -55,16 +59,16 @@ func testStatusMsgErrors(t *testing.T, protocol int) {
wantError: errResp(ErrNoStatusMsg, "first msg has code 2 (!= 0)"),
},
{
- code: StatusMsg, data: statusData{10, DefaultConfig.NetworkId, td, currentBlock, genesis},
+ code: StatusMsg, data: statusData{10, DefaultConfig.NetworkId, td, head.Hash(), genesis.Hash()},
wantError: errResp(ErrProtocolVersionMismatch, "10 (!= %d)", protocol),
},
{
- code: StatusMsg, data: statusData{uint32(protocol), 999, td, currentBlock, genesis},
+ code: StatusMsg, data: statusData{uint32(protocol), 999, td, head.Hash(), genesis.Hash()},
wantError: errResp(ErrNetworkIdMismatch, "999 (!= 1)"),
},
{
- code: StatusMsg, data: statusData{uint32(protocol), DefaultConfig.NetworkId, td, currentBlock, common.Hash{3}},
- wantError: errResp(ErrGenesisBlockMismatch, "0300000000000000 (!= %x)", genesis[:8]),
+ code: StatusMsg, data: statusData{uint32(protocol), DefaultConfig.NetworkId, td, head.Hash(), common.Hash{3}},
+ wantError: errResp(ErrGenesisBlockMismatch, "0300000000000000 (!= %x)", genesis.Hash().Bytes()[:8]),
},
}
@@ -94,7 +98,7 @@ func TestRecvTransactions63(t *testing.T) { testRecvTransactions(t, 63) }
func testRecvTransactions(t *testing.T, protocol int) {
txAdded := make(chan []*types.Transaction)
- pm := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, txAdded)
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, txAdded)
pm.acceptTxs = 1 // mark synced to accept transactions
p, _ := newTestPeer("peer", protocol, pm, true)
defer pm.Stop()
@@ -121,7 +125,7 @@ func TestSendTransactions62(t *testing.T) { testSendTransactions(t, 62) }
func TestSendTransactions63(t *testing.T) { testSendTransactions(t, 63) }
func testSendTransactions(t *testing.T, protocol int) {
- pm := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, nil)
+ pm, _ := newTestProtocolManagerMust(t, downloader.FullSync, 0, nil, nil)
defer pm.Stop()
// Fill the pool with big transactions.
diff --git a/eth/sync.go b/eth/sync.go
index a8ae64617..e49e40087 100644
--- a/eth/sync.go
+++ b/eth/sync.go
@@ -188,19 +188,22 @@ func (pm *ProtocolManager) synchronise(peer *peer) {
atomic.StoreUint32(&pm.fastSync, 1)
mode = downloader.FastSync
}
- // Run the sync cycle, and disable fast sync if we've went past the pivot block
- err := pm.downloader.Synchronise(peer.id, pHead, pTd, mode)
- if atomic.LoadUint32(&pm.fastSync) == 1 {
- // Disable fast sync if we indeed have something in our chain
- if pm.blockchain.CurrentBlock().NumberU64() > 0 {
- log.Info("Fast sync complete, auto disabling")
- atomic.StoreUint32(&pm.fastSync, 0)
+ if mode == downloader.FastSync {
+ // Make sure the total difficulty of the peer we are synchronizing with is higher than ours.
+ if pm.blockchain.GetTdByHash(pm.blockchain.CurrentFastBlock().Hash()).Cmp(pTd) >= 0 {
+ return
}
}
- if err != nil {
+
+ // Run the sync cycle, and disable fast sync if we've gone past the pivot block
+ if err := pm.downloader.Synchronise(peer.id, pHead, pTd, mode); err != nil {
return
}
+ if atomic.LoadUint32(&pm.fastSync) == 1 {
+ log.Info("Fast sync complete, auto disabling")
+ atomic.StoreUint32(&pm.fastSync, 0)
+ }
atomic.StoreUint32(&pm.acceptTxs, 1) // Mark initial sync done
if head := pm.blockchain.CurrentBlock(); head.NumberU64() > 0 {
// We've completed a sync cycle, notify all peers of new state. This path is
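The new guard above compares total difficulties before even starting a fast sync cycle. A standalone sketch of that comparison, with hypothetical localTd and peerTd values:

package main

import (
	"fmt"
	"math/big"
)

// worthFastSyncing mirrors the guard in synchronise: only start a fast sync
// cycle when the peer advertises a strictly higher total difficulty than the
// local fast-sync head. Both inputs here are hypothetical.
func worthFastSyncing(localTd, peerTd *big.Int) bool {
	return localTd.Cmp(peerTd) < 0
}

func main() {
	local := big.NewInt(1000000)
	peer := big.NewInt(999999)
	fmt.Println(worthFastSyncing(local, peer)) // false: nothing to sync from this peer
}
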
diff --git a/eth/sync_test.go b/eth/sync_test.go
index 9eaa1156f..88c10c7f7 100644
--- a/eth/sync_test.go
+++ b/eth/sync_test.go
@@ -30,12 +30,12 @@ import (
// imported into the blockchain.
func TestFastSyncDisabling(t *testing.T) {
// Create a pristine protocol manager, check that fast sync is left enabled
- pmEmpty := newTestProtocolManagerMust(t, downloader.FastSync, 0, nil, nil)
+ pmEmpty, _ := newTestProtocolManagerMust(t, downloader.FastSync, 0, nil, nil)
if atomic.LoadUint32(&pmEmpty.fastSync) == 0 {
t.Fatalf("fast sync disabled on pristine blockchain")
}
// Create a full protocol manager, check that fast sync gets disabled
- pmFull := newTestProtocolManagerMust(t, downloader.FastSync, 1024, nil, nil)
+ pmFull, _ := newTestProtocolManagerMust(t, downloader.FastSync, 1024, nil, nil)
if atomic.LoadUint32(&pmFull.fastSync) == 1 {
t.Fatalf("fast sync not disabled on non-empty blockchain")
}
diff --git a/eth/tracers/tracer.go b/eth/tracers/tracer.go
index f3f848fc1..4cec9e633 100644
--- a/eth/tracers/tracer.go
+++ b/eth/tracers/tracer.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/eth/tracers/tracer_test.go b/eth/tracers/tracer_test.go
index 7224a1489..117c376b8 100644
--- a/eth/tracers/tracer_test.go
+++ b/eth/tracers/tracer_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/ethdb/database.go b/ethdb/database.go
index 93755dd7e..8c557e482 100644
--- a/ethdb/database.go
+++ b/ethdb/database.go
@@ -29,8 +29,6 @@ import (
"github.com/syndtr/goleveldb/leveldb/filter"
"github.com/syndtr/goleveldb/leveldb/iterator"
"github.com/syndtr/goleveldb/leveldb/opt"
-
- gometrics "github.com/rcrowley/go-metrics"
)
var OpenFileLimit = 64
@@ -39,15 +37,11 @@ type LDBDatabase struct {
fn string // filename for reporting
db *leveldb.DB // LevelDB instance
- getTimer gometrics.Timer // Timer for measuring the database get request counts and latencies
- putTimer gometrics.Timer // Timer for measuring the database put request counts and latencies
- delTimer gometrics.Timer // Timer for measuring the database delete request counts and latencies
- missMeter gometrics.Meter // Meter for measuring the missed database get requests
- readMeter gometrics.Meter // Meter for measuring the database get request data usage
- writeMeter gometrics.Meter // Meter for measuring the database put request data usage
- compTimeMeter gometrics.Meter // Meter for measuring the total time spent in database compaction
- compReadMeter gometrics.Meter // Meter for measuring the data read during compaction
- compWriteMeter gometrics.Meter // Meter for measuring the data written during compaction
+ compTimeMeter metrics.Meter // Meter for measuring the total time spent in database compaction
+ compReadMeter metrics.Meter // Meter for measuring the data read during compaction
+ compWriteMeter metrics.Meter // Meter for measuring the data written during compaction
+ diskReadMeter metrics.Meter // Meter for measuring the effective amount of data read
+ diskWriteMeter metrics.Meter // Meter for measuring the effective amount of data written
quitLock sync.Mutex // Mutex protecting the quit channel access
quitChan chan chan error // Quit channel to stop the metrics collection before closing the database
@@ -96,16 +90,9 @@ func (db *LDBDatabase) Path() string {
// Put puts the given key / value to the queue
func (db *LDBDatabase) Put(key []byte, value []byte) error {
- // Measure the database put latency, if requested
- if db.putTimer != nil {
- defer db.putTimer.UpdateSince(time.Now())
- }
// Generate the data to write to disk, update the meter and write
//value = rle.Compress(value)
- if db.writeMeter != nil {
- db.writeMeter.Mark(int64(len(value)))
- }
return db.db.Put(key, value, nil)
}
@@ -115,32 +102,17 @@ func (db *LDBDatabase) Has(key []byte) (bool, error) {
// Get returns the given key if it's present.
func (db *LDBDatabase) Get(key []byte) ([]byte, error) {
- // Measure the database get latency, if requested
- if db.getTimer != nil {
- defer db.getTimer.UpdateSince(time.Now())
- }
// Retrieve the key and increment the miss counter if not found
dat, err := db.db.Get(key, nil)
if err != nil {
- if db.missMeter != nil {
- db.missMeter.Mark(1)
- }
return nil, err
}
- // Otherwise update the actually retrieved amount of data
- if db.readMeter != nil {
- db.readMeter.Mark(int64(len(dat)))
- }
return dat, nil
//return rle.Decompress(dat)
}
// Delete deletes the key from the queue and database
func (db *LDBDatabase) Delete(key []byte) error {
- // Measure the database delete latency, if requested
- if db.delTimer != nil {
- defer db.delTimer.UpdateSince(time.Now())
- }
// Execute the actual operation
return db.db.Delete(key, nil)
}
@@ -180,15 +152,11 @@ func (db *LDBDatabase) Meter(prefix string) {
return
}
// Initialize all the metrics collector at the requested prefix
- db.getTimer = metrics.NewTimer(prefix + "user/gets")
- db.putTimer = metrics.NewTimer(prefix + "user/puts")
- db.delTimer = metrics.NewTimer(prefix + "user/dels")
- db.missMeter = metrics.NewMeter(prefix + "user/misses")
- db.readMeter = metrics.NewMeter(prefix + "user/reads")
- db.writeMeter = metrics.NewMeter(prefix + "user/writes")
- db.compTimeMeter = metrics.NewMeter(prefix + "compact/time")
- db.compReadMeter = metrics.NewMeter(prefix + "compact/input")
- db.compWriteMeter = metrics.NewMeter(prefix + "compact/output")
+ db.compTimeMeter = metrics.NewRegisteredMeter(prefix+"compact/time", nil)
+ db.compReadMeter = metrics.NewRegisteredMeter(prefix+"compact/input", nil)
+ db.compWriteMeter = metrics.NewRegisteredMeter(prefix+"compact/output", nil)
+ db.diskReadMeter = metrics.NewRegisteredMeter(prefix+"disk/read", nil)
+ db.diskWriteMeter = metrics.NewRegisteredMeter(prefix+"disk/write", nil)
// Create a quit channel for the periodic collector and run it
db.quitLock.Lock()
@@ -209,12 +177,17 @@ func (db *LDBDatabase) Meter(prefix string) {
// 1 | 85 | 109.27913 | 28.09293 | 213.92493 | 214.26294
// 2 | 523 | 1000.37159 | 7.26059 | 66.86342 | 66.77884
// 3 | 570 | 1113.18458 | 0.00000 | 0.00000 | 0.00000
+//
+// This is what the iostats look like (currently):
+// Read(MB):3895.04860 Write(MB):3654.64712
func (db *LDBDatabase) meter(refresh time.Duration) {
- // Create the counters to store current and previous values
- counters := make([][]float64, 2)
+ // Create the counters to store current and previous compaction values
+ compactions := make([][]float64, 2)
for i := 0; i < 2; i++ {
- counters[i] = make([]float64, 3)
+ compactions[i] = make([]float64, 3)
}
+ // Create storage for iostats.
+ var iostats [2]float64
// Iterate ad infinitum and collect the stats
for i := 1; ; i++ {
// Retrieve the database stats
@@ -235,8 +208,8 @@ func (db *LDBDatabase) meter(refresh time.Duration) {
lines = lines[3:]
// Iterate over all the table rows, and accumulate the entries
- for j := 0; j < len(counters[i%2]); j++ {
- counters[i%2][j] = 0
+ for j := 0; j < len(compactions[i%2]); j++ {
+ compactions[i%2][j] = 0
}
for _, line := range lines {
parts := strings.Split(line, "|")
@@ -249,19 +222,60 @@ func (db *LDBDatabase) meter(refresh time.Duration) {
db.log.Error("Compaction entry parsing failed", "err", err)
return
}
- counters[i%2][idx] += value
+ compactions[i%2][idx] += value
}
}
// Update all the requested meters
if db.compTimeMeter != nil {
- db.compTimeMeter.Mark(int64((counters[i%2][0] - counters[(i-1)%2][0]) * 1000 * 1000 * 1000))
+ db.compTimeMeter.Mark(int64((compactions[i%2][0] - compactions[(i-1)%2][0]) * 1000 * 1000 * 1000))
}
if db.compReadMeter != nil {
- db.compReadMeter.Mark(int64((counters[i%2][1] - counters[(i-1)%2][1]) * 1024 * 1024))
+ db.compReadMeter.Mark(int64((compactions[i%2][1] - compactions[(i-1)%2][1]) * 1024 * 1024))
}
if db.compWriteMeter != nil {
- db.compWriteMeter.Mark(int64((counters[i%2][2] - counters[(i-1)%2][2]) * 1024 * 1024))
+ db.compWriteMeter.Mark(int64((compactions[i%2][2] - compactions[(i-1)%2][2]) * 1024 * 1024))
+ }
+
+ // Retrieve the database iostats.
+ ioStats, err := db.db.GetProperty("leveldb.iostats")
+ if err != nil {
+ db.log.Error("Failed to read database iostats", "err", err)
+ return
+ }
+ parts := strings.Split(ioStats, " ")
+ if len(parts) < 2 {
+ db.log.Error("Bad syntax of ioStats", "ioStats", ioStats)
+ return
+ }
+ r := strings.Split(parts[0], ":")
+ if len(r) < 2 {
+ db.log.Error("Bad syntax of read entry", "entry", parts[0])
+ return
}
+ read, err := strconv.ParseFloat(r[1], 64)
+ if err != nil {
+ db.log.Error("Read entry parsing failed", "err", err)
+ return
+ }
+ w := strings.Split(parts[1], ":")
+ if len(w) < 2 {
+ db.log.Error("Bad syntax of write entry", "entry", parts[1])
+ return
+ }
+ write, err := strconv.ParseFloat(w[1], 64)
+ if err != nil {
+ db.log.Error("Write entry parsing failed", "err", err)
+ return
+ }
+ if db.diskReadMeter != nil {
+ db.diskReadMeter.Mark(int64((read - iostats[0]) * 1024 * 1024))
+ }
+ if db.diskWriteMeter != nil {
+ db.diskWriteMeter.Mark(int64((write - iostats[1]) * 1024 * 1024))
+ }
+ iostats[0] = read
+ iostats[1] = write
+
// Sleep a bit, then repeat the stats collection
select {
case errc := <-db.quitChan:
@@ -299,6 +313,11 @@ func (b *ldbBatch) ValueSize() int {
return b.size
}
+func (b *ldbBatch) Reset() {
+ b.b.Reset()
+ b.size = 0
+}
+
type table struct {
db Database
prefix string
@@ -358,3 +377,7 @@ func (tb *tableBatch) Write() error {
func (tb *tableBatch) ValueSize() int {
return tb.batch.ValueSize()
}
+
+func (tb *tableBatch) Reset() {
+ tb.batch.Reset()
+}
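The iostats handling added above boils down to splitting the property string on spaces and colons and parsing the two floats. A self-contained sketch of the same parsing, using the sample string from the comment in the diff:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseIostats splits a "Read(MB):<r> Write(MB):<w>" property string into its
// two float values, mirroring the logic in LDBDatabase.meter.
func parseIostats(s string) (read, write float64, err error) {
	parts := strings.Split(s, " ")
	if len(parts) < 2 {
		return 0, 0, fmt.Errorf("bad iostats syntax: %q", s)
	}
	r := strings.Split(parts[0], ":")
	w := strings.Split(parts[1], ":")
	if len(r) < 2 || len(w) < 2 {
		return 0, 0, fmt.Errorf("bad iostats entry: %q", s)
	}
	if read, err = strconv.ParseFloat(r[1], 64); err != nil {
		return 0, 0, err
	}
	if write, err = strconv.ParseFloat(w[1], 64); err != nil {
		return 0, 0, err
	}
	return read, write, nil
}

func main() {
	read, write, err := parseIostats("Read(MB):3895.04860 Write(MB):3654.64712")
	fmt.Println(read, write, err) // 3895.0486 3654.64712 <nil>
}
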
diff --git a/ethdb/interface.go b/ethdb/interface.go
index 99a5b770d..537312003 100644
--- a/ethdb/interface.go
+++ b/ethdb/interface.go
@@ -41,4 +41,6 @@ type Batch interface {
Putter
ValueSize() int // amount of data in the batch
Write() error
+ // Reset resets the batch for reuse
+ Reset()
}
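The new Reset method (also added to the leveldb and table batches above) lets callers reuse a batch across flushes instead of allocating a new one each time. A small usage sketch against the in-memory database; the flush condition here is arbitrary:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	db, _ := ethdb.NewMemDatabase()
	batch := db.NewBatch()

	for i := 0; i < 3; i++ {
		batch.Put([]byte{byte(i)}, []byte("value"))
		if batch.ValueSize() > 0 { // flush threshold is arbitrary in this sketch
			if err := batch.Write(); err != nil {
				panic(err)
			}
			batch.Reset() // reuse the same batch for the next round
		}
	}
	fmt.Println("batch flushed and reused")
}
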
diff --git a/ethdb/memory_database.go b/ethdb/memory_database.go
index 0dd93a279..8efd7bf84 100644
--- a/ethdb/memory_database.go
+++ b/ethdb/memory_database.go
@@ -123,3 +123,8 @@ func (b *memBatch) Write() error {
func (b *memBatch) ValueSize() int {
return b.size
}
+
+func (b *memBatch) Reset() {
+ b.writes = b.writes[:0]
+ b.size = 0
+}
diff --git a/internal/build/env.go b/internal/build/env.go
index c9848bf82..b553e0ed8 100644
--- a/internal/build/env.go
+++ b/internal/build/env.go
@@ -94,7 +94,7 @@ func LocalEnv() Environment {
}
if env.Branch == "" {
if head != "HEAD" {
- env.Branch = strings.TrimLeft(head, "refs/heads/")
+ env.Branch = strings.TrimPrefix(head, "refs/heads/")
}
}
if info, err := os.Stat(".git/objects"); err == nil && info.IsDir() && env.Tag == "" {
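The one-character-looking change above is a real bug fix: strings.TrimLeft treats its second argument as a set of characters to strip, not a prefix, so branch names that happen to start with those characters get mangled. A minimal illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	head := "refs/heads/release/1.8"

	// TrimLeft removes any leading run of the characters in "refs/heads/",
	// which also eats the leading "re" of "release".
	fmt.Println(strings.TrimLeft(head, "refs/heads/")) // "lease/1.8"

	// TrimPrefix removes the exact prefix once, which is what was intended.
	fmt.Println(strings.TrimPrefix(head, "refs/heads/")) // "release/1.8"
}
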
diff --git a/internal/cmdtest/test_cmd.go b/internal/cmdtest/test_cmd.go
index fae61cfe3..20e82ec2a 100644
--- a/internal/cmdtest/test_cmd.go
+++ b/internal/cmdtest/test_cmd.go
@@ -1,18 +1,18 @@
-// Copyright 2016 The go-ethereum Authors
-// This file is part of go-ethereum.
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
//
-// go-ethereum is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
-// go-ethereum is distributed in the hope that it will be useful,
+// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// GNU Lesser General Public License for more details.
//
-// You should have received a copy of the GNU General Public License
-// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package cmdtest
diff --git a/internal/debug/api.go b/internal/debug/api.go
index 3547b0564..048b7d763 100644
--- a/internal/debug/api.go
+++ b/internal/debug/api.go
@@ -140,10 +140,9 @@ func (h *HandlerT) GoTrace(file string, nsec uint) error {
return nil
}
-// BlockProfile turns on CPU profiling for nsec seconds and writes
-// profile data to file. It uses a profile rate of 1 for most accurate
-// information. If a different rate is desired, set the rate
-// and write the profile manually.
+// BlockProfile turns on goroutine blocking profiling for nsec seconds and writes profile
+// data to file. It uses a profile rate of 1 for most accurate information. If a different
+// rate is desired, set the rate and write the profile manually.
func (*HandlerT) BlockProfile(file string, nsec uint) error {
runtime.SetBlockProfileRate(1)
time.Sleep(time.Duration(nsec) * time.Second)
@@ -162,6 +161,26 @@ func (*HandlerT) WriteBlockProfile(file string) error {
return writeProfile("block", file)
}
+// MutexProfile turns on mutex profiling for nsec seconds and writes profile data to file.
+// It uses a profile rate of 1 for most accurate information. If a different rate is
+// desired, set the rate and write the profile manually.
+func (*HandlerT) MutexProfile(file string, nsec uint) error {
+ runtime.SetMutexProfileFraction(1)
+ time.Sleep(time.Duration(nsec) * time.Second)
+ defer runtime.SetMutexProfileFraction(0)
+ return writeProfile("mutex", file)
+}
+
+// SetMutexProfileFraction sets the rate of mutex profiling.
+func (*HandlerT) SetMutexProfileFraction(rate int) {
+ runtime.SetMutexProfileFraction(rate)
+}
+
+// WriteMutexProfile writes a mutex contention profile to the given file.
+func (*HandlerT) WriteMutexProfile(file string) error {
+ return writeProfile("mutex", file)
+}
+
// WriteMemProfile writes an allocation profile to the given file.
// Note that the profiling rate cannot be set through the API,
// it must be set on the command line.
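The new mutex profiling handlers wrap the runtime facilities added in Go 1.8. Outside the RPC surface, the same profile can be captured directly with the standard library; a minimal sketch where the filename and sleep duration are placeholders:

package main

import (
	"os"
	"runtime"
	"runtime/pprof"
	"time"
)

func main() {
	// Sample every mutex contention event, matching the rate of 1 used above.
	runtime.SetMutexProfileFraction(1)
	defer runtime.SetMutexProfileFraction(0)

	// ... run the workload under observation for a while ...
	time.Sleep(2 * time.Second)

	f, err := os.Create("mutex.prof")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// The "mutex" profile is the same one the debug_writeMutexProfile RPC dumps.
	if err := pprof.Lookup("mutex").WriteTo(f, 0); err != nil {
		panic(err)
	}
}
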
diff --git a/internal/debug/flags.go b/internal/debug/flags.go
index 6247cc7dc..1f181bf8b 100644
--- a/internal/debug/flags.go
+++ b/internal/debug/flags.go
@@ -26,6 +26,8 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/log/term"
+ "github.com/ethereum/go-ethereum/metrics"
+ "github.com/ethereum/go-ethereum/metrics/exp"
colorable "github.com/mattn/go-colorable"
"gopkg.in/urfave/cli.v1"
)
@@ -127,6 +129,10 @@ func Setup(ctx *cli.Context) error {
// pprof server
if ctx.GlobalBool(pprofFlag.Name) {
+ // Hook go-metrics into expvar on any /debug/metrics request, load all vars
+ // from the registry into expvar, and execute regular expvar handler.
+ exp.Exp(metrics.DefaultRegistry)
+
address := fmt.Sprintf("%s:%d", ctx.GlobalString(pprofAddrFlag.Name), ctx.GlobalInt(pprofPortFlag.Name))
go func() {
log.Info("Starting pprof server", "addr", fmt.Sprintf("http://%s/debug/pprof", address))
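A sketch of what the exp.Exp hook above gives you in isolation, assuming (as the comment suggests) that it installs an expvar-style handler for /debug/metrics on the default HTTP mux:

package main

import (
	"log"
	"net/http"

	"github.com/ethereum/go-ethereum/metrics"
	"github.com/ethereum/go-ethereum/metrics/exp"
)

func main() {
	// Register a sample meter so there is something to report.
	metrics.NewRegisteredMeter("demo/requests", nil).Mark(42)

	// Expose the default registry through the expvar-style /debug/metrics
	// endpoint (assumed to register itself on http.DefaultServeMux).
	exp.Exp(metrics.DefaultRegistry)

	// curl http://localhost:6060/debug/metrics should now list demo/requests.
	log.Fatal(http.ListenAndServe("localhost:6060", nil))
}
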
diff --git a/internal/ethapi/addrlock.go b/internal/ethapi/addrlock.go
index 5a9c948b8..61ddff688 100644
--- a/internal/ethapi/addrlock.go
+++ b/internal/ethapi/addrlock.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/internal/ethapi/api.go b/internal/ethapi/api.go
index 55bd5aa1b..6525aa212 100644
--- a/internal/ethapi/api.go
+++ b/internal/ethapi/api.go
@@ -333,28 +333,19 @@ func (s *PrivateAccountAPI) LockAccount(addr common.Address) bool {
return fetchKeystore(s.am).Lock(addr) == nil
}
-// SendTransaction will create a transaction from the given arguments and
-// tries to sign it with the key associated with args.To. If the given passwd isn't
-// able to decrypt the key it fails.
-func (s *PrivateAccountAPI) SendTransaction(ctx context.Context, args SendTxArgs, passwd string) (common.Hash, error) {
+// signTransactions sets defaults and signs the given transaction
+// NOTE: the caller needs to ensure that the nonceLock is held, if applicable,
+// and release it after the transaction has been submitted to the tx pool
+func (s *PrivateAccountAPI) signTransaction(ctx context.Context, args SendTxArgs, passwd string) (*types.Transaction, error) {
// Look up the wallet containing the requested signer
account := accounts.Account{Address: args.From}
-
wallet, err := s.am.Find(account)
if err != nil {
- return common.Hash{}, err
- }
-
- if args.Nonce == nil {
- // Hold the addresse's mutex around signing to prevent concurrent assignment of
- // the same nonce to multiple accounts.
- s.nonceLock.LockAddr(args.From)
- defer s.nonceLock.UnlockAddr(args.From)
+ return nil, err
}
-
// Set some sanity defaults and terminate on failure
if err := args.setDefaults(ctx, s.b); err != nil {
- return common.Hash{}, err
+ return nil, err
}
// Assemble the transaction and sign with the wallet
tx := args.toTransaction()
@@ -363,13 +354,53 @@ func (s *PrivateAccountAPI) SendTransaction(ctx context.Context, args SendTxArgs
if config := s.b.ChainConfig(); config.IsEIP155(s.b.CurrentBlock().Number()) {
chainID = config.ChainId
}
- signed, err := wallet.SignTxWithPassphrase(account, passwd, tx, chainID)
+ return wallet.SignTxWithPassphrase(account, passwd, tx, chainID)
+}
+
+// SendTransaction will create a transaction from the given arguments and
+// tries to sign it with the key associated with args.From. If the given passwd isn't
+// able to decrypt the key it fails.
+func (s *PrivateAccountAPI) SendTransaction(ctx context.Context, args SendTxArgs, passwd string) (common.Hash, error) {
+ if args.Nonce == nil {
+ // Hold the address's mutex around signing to prevent concurrent assignment of
+ // the same nonce to multiple accounts.
+ s.nonceLock.LockAddr(args.From)
+ defer s.nonceLock.UnlockAddr(args.From)
+ }
+ signed, err := s.signTransaction(ctx, args, passwd)
if err != nil {
return common.Hash{}, err
}
return submitTransaction(ctx, s.b, signed)
}
+// SignTransaction will create a transaction from the given arguments and
+// tries to sign it with the key associated with args.From. If the given passwd isn't
+// able to decrypt the key it fails. The transaction is returned in RLP form and is not
+// broadcast to other nodes.
+func (s *PrivateAccountAPI) SignTransaction(ctx context.Context, args SendTxArgs, passwd string) (*SignTransactionResult, error) {
+ // No need to obtain the noncelock mutex, since we won't be sending this
+ // tx into the transaction pool, but right back to the user
+ if args.Gas == nil {
+ return nil, fmt.Errorf("gas not specified")
+ }
+ if args.GasPrice == nil {
+ return nil, fmt.Errorf("gasPrice not specified")
+ }
+ if args.Nonce == nil {
+ return nil, fmt.Errorf("nonce not specified")
+ }
+ signed, err := s.signTransaction(ctx, args, passwd)
+ if err != nil {
+ return nil, err
+ }
+ data, err := rlp.EncodeToBytes(signed)
+ if err != nil {
+ return nil, err
+ }
+ return &SignTransactionResult{data, signed}, nil
+}
+
// signHash is a helper function that calculates a hash for the given message that can be
// safely used to calculate a signature from.
//
@@ -580,7 +611,7 @@ type CallArgs struct {
Data hexutil.Bytes `json:"data"`
}
-func (s *PublicBlockChainAPI) doCall(ctx context.Context, args CallArgs, blockNr rpc.BlockNumber, vmCfg vm.Config) ([]byte, uint64, bool, error) {
+func (s *PublicBlockChainAPI) doCall(ctx context.Context, args CallArgs, blockNr rpc.BlockNumber, vmCfg vm.Config, timeout time.Duration) ([]byte, uint64, bool, error) {
defer func(start time.Time) { log.Debug("Executing EVM call finished", "runtime", time.Since(start)) }(time.Now())
state, header, err := s.b.StateAndHeaderByNumber(ctx, blockNr)
@@ -599,7 +630,7 @@ func (s *PublicBlockChainAPI) doCall(ctx context.Context, args CallArgs, blockNr
// Set default gas & gas price if none were set
gas, gasPrice := uint64(args.Gas), args.GasPrice.ToInt()
if gas == 0 {
- gas = 50000000
+ gas = math.MaxUint64 / 2
}
if gasPrice.Sign() == 0 {
gasPrice = new(big.Int).SetUint64(defaultGasPrice)
@@ -611,14 +642,14 @@ func (s *PublicBlockChainAPI) doCall(ctx context.Context, args CallArgs, blockNr
// Setup context so it may be cancelled the call has completed
// or, in case of unmetered gas, setup a context with a timeout.
var cancel context.CancelFunc
- if vmCfg.DisableGasMetering {
- ctx, cancel = context.WithTimeout(ctx, time.Second*5)
+ if timeout > 0 {
+ ctx, cancel = context.WithTimeout(ctx, timeout)
} else {
ctx, cancel = context.WithCancel(ctx)
}
// Make sure the context is cancelled when the call has completed
// this makes sure resources are cleaned up.
- defer func() { cancel() }()
+ defer cancel()
// Get a new instance of the EVM.
evm, vmError, err := s.b.GetEVM(ctx, msg, state, header, vmCfg)
@@ -645,7 +676,7 @@ func (s *PublicBlockChainAPI) doCall(ctx context.Context, args CallArgs, blockNr
// Call executes the given transaction on the state for the given block number.
// It doesn't make and changes in the state/blockchain and is useful to execute and retrieve values.
func (s *PublicBlockChainAPI) Call(ctx context.Context, args CallArgs, blockNr rpc.BlockNumber) (hexutil.Bytes, error) {
- result, _, _, err := s.doCall(ctx, args, blockNr, vm.Config{DisableGasMetering: true})
+ result, _, _, err := s.doCall(ctx, args, blockNr, vm.Config{}, 5*time.Second)
return (hexutil.Bytes)(result), err
}
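The timeout plumbing above follows the standard context pattern: bound the call when a timeout is given, otherwise just make it cancellable, and always defer cancel(). A compact, self-contained sketch of that shape with a stubbed-out workload:

package main

import (
	"context"
	"fmt"
	"time"
)

// runWithOptionalTimeout mirrors the doCall setup: a positive timeout bounds
// the call, zero means "cancellable but unbounded". The work here is a stub.
func runWithOptionalTimeout(parent context.Context, timeout time.Duration) error {
	var (
		ctx    context.Context
		cancel context.CancelFunc
	)
	if timeout > 0 {
		ctx, cancel = context.WithTimeout(parent, timeout)
	} else {
		ctx, cancel = context.WithCancel(parent)
	}
	// Always release the context's resources once the call has completed.
	defer cancel()

	select {
	case <-time.After(100 * time.Millisecond): // pretend this is the EVM call
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func main() {
	fmt.Println(runWithOptionalTimeout(context.Background(), 5*time.Second))   // <nil>
	fmt.Println(runWithOptionalTimeout(context.Background(), time.Millisecond)) // context deadline exceeded
}
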
@@ -674,7 +705,7 @@ func (s *PublicBlockChainAPI) EstimateGas(ctx context.Context, args CallArgs) (h
executable := func(gas uint64) bool {
args.Gas = hexutil.Uint64(gas)
- _, _, failed, err := s.doCall(ctx, args, rpc.PendingBlockNumber, vm.Config{})
+ _, _, failed, err := s.doCall(ctx, args, rpc.PendingBlockNumber, vm.Config{}, 0)
if err != nil || failed {
return false
}
@@ -777,7 +808,7 @@ func (s *PublicBlockChainAPI) rpcOutputBlock(b *types.Block, inclTx bool, fullTx
"difficulty": (*hexutil.Big)(head.Difficulty),
"totalDifficulty": (*hexutil.Big)(s.b.GetTd(b.Hash())),
"extraData": hexutil.Bytes(head.Extra),
- "size": hexutil.Uint64(uint64(b.Size().Int64())),
+ "size": hexutil.Uint64(b.Size()),
"gasLimit": hexutil.Uint64(head.GasLimit),
"gasUsed": hexutil.Uint64(head.GasUsed),
"timestamp": (*hexutil.Big)(head.Time),
@@ -1001,15 +1032,19 @@ func (s *PublicTransactionPoolAPI) GetRawTransactionByHash(ctx context.Context,
}
// GetTransactionReceipt returns the transaction receipt for the given transaction hash.
-func (s *PublicTransactionPoolAPI) GetTransactionReceipt(hash common.Hash) (map[string]interface{}, error) {
+func (s *PublicTransactionPoolAPI) GetTransactionReceipt(ctx context.Context, hash common.Hash) (map[string]interface{}, error) {
tx, blockHash, blockNumber, index := core.GetTransaction(s.b.ChainDb(), hash)
if tx == nil {
- return nil, errors.New("unknown transaction")
+ return nil, nil
}
- receipt, _, _, _ := core.GetReceipt(s.b.ChainDb(), hash) // Old receipts don't have the lookup data available
- if receipt == nil {
- return nil, errors.New("unknown receipt")
+ receipts, err := s.b.GetReceipts(ctx, blockHash)
+ if err != nil {
+ return nil, err
+ }
+ if len(receipts) <= int(index) {
+ return nil, nil
}
+ receipt := receipts[index]
var signer types.Signer = types.FrontierSigner{}
if tx.Protected() {
@@ -1104,6 +1139,18 @@ func (args *SendTxArgs) setDefaults(ctx context.Context, b Backend) error {
if args.Data != nil && args.Input != nil && !bytes.Equal(*args.Data, *args.Input) {
return errors.New(`Both "data" and "input" are set and not equal. Please use "input" to pass transaction call data.`)
}
+ if args.To == nil {
+ // Contract creation
+ var input []byte
+ if args.Data != nil {
+ input = *args.Data
+ } else if args.Input != nil {
+ input = *args.Input
+ }
+ if len(input) == 0 {
+ return errors.New(`contract creation without any data provided`)
+ }
+ }
return nil
}
@@ -1221,11 +1268,14 @@ type SignTransactionResult struct {
// The node needs to have the private key of the account corresponding with
// the given from address and it needs to be unlocked.
func (s *PublicTransactionPoolAPI) SignTransaction(ctx context.Context, args SendTxArgs) (*SignTransactionResult, error) {
+ if args.Gas == nil {
+ return nil, fmt.Errorf("gas not specified")
+ }
+ if args.GasPrice == nil {
+ return nil, fmt.Errorf("gasPrice not specified")
+ }
if args.Nonce == nil {
- // Hold the addresse's mutex around signing to prevent concurrent assignment of
- // the same nonce to multiple accounts.
- s.nonceLock.LockAddr(args.From)
- defer s.nonceLock.UnlockAddr(args.From)
+ return nil, fmt.Errorf("nonce not specified")
}
if err := args.setDefaults(ctx, s.b); err != nil {
return nil, err
@@ -1287,10 +1337,10 @@ func (s *PublicTransactionPoolAPI) Resend(ctx context.Context, sendArgs SendTxAr
if pFrom, err := types.Sender(signer, p); err == nil && pFrom == sendArgs.From && signer.Hash(p) == wantSigHash {
// Match. Re-sign and send the transaction.
- if gasPrice != nil {
+ if gasPrice != nil && (*big.Int)(gasPrice).Sign() != 0 {
sendArgs.GasPrice = gasPrice
}
- if gasLimit != nil {
+ if gasLimit != nil && *gasLimit != 0 {
sendArgs.Gas = gasLimit
}
signedTx, err := s.sign(sendArgs.From, sendArgs.toTransaction())
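Seen from a client, the stricter signing surface means gas, gasPrice and nonce must now be supplied explicitly. A hedged sketch using the rpc package; the endpoint, addresses, passphrase and hex values below are placeholders:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/rpc"
)

func main() {
	// Placeholder endpoint; a real node would expose the personal API here.
	client, err := rpc.Dial("http://localhost:8545")
	if err != nil {
		panic(err)
	}
	defer client.Close()

	// All three of gas, gasPrice and nonce are mandatory: omitting any of them
	// makes personal_signTransaction (and eth_signTransaction) return an error
	// instead of silently filling in defaults.
	tx := map[string]interface{}{
		"from":     "0x0000000000000000000000000000000000000001",
		"to":       "0x0000000000000000000000000000000000000002",
		"value":    "0x1",
		"gas":      "0x5208",
		"gasPrice": "0x3b9aca00",
		"nonce":    "0x0",
	}
	var signed interface{}
	if err := client.Call(&signed, "personal_signTransaction", tx, "placeholder-passphrase"); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", signed)
}
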
diff --git a/internal/web3ext/web3ext.go b/internal/web3ext/web3ext.go
index e11aa402f..9d6ce8c6c 100644
--- a/internal/web3ext/web3ext.go
+++ b/internal/web3ext/web3ext.go
@@ -308,6 +308,21 @@ web3._extend({
params: 1
}),
new web3._extend.Method({
+ name: 'mutexProfile',
+ call: 'debug_mutexProfile',
+ params: 2
+ }),
+ new web3._extend.Method({
+ name: 'setMutexProfileRate',
+ call: 'debug_setMutexProfileRate',
+ params: 1
+ }),
+ new web3._extend.Method({
+ name: 'writeMutexProfile',
+ call: 'debug_writeMutexProfile',
+ params: 1
+ }),
+ new web3._extend.Method({
name: 'writeMemProfile',
call: 'debug_writeMemProfile',
params: 1
@@ -517,6 +532,12 @@ web3._extend({
call: 'personal_deriveAccount',
params: 3
}),
+ new web3._extend.Method({
+ name: 'signTransaction',
+ call: 'personal_signTransaction',
+ params: 2,
+ inputFormatter: [web3._extend.formatters.inputTransactionFormatter, null]
+ }),
],
properties: [
new web3._extend.Property({
diff --git a/les/api_backend.go b/les/api_backend.go
index 56f617a7d..3fc5c33a4 100644
--- a/les/api_backend.go
+++ b/les/api_backend.go
@@ -87,6 +87,10 @@ func (b *LesApiBackend) GetReceipts(ctx context.Context, blockHash common.Hash)
return light.GetBlockReceipts(ctx, b.eth.odr, blockHash, core.GetBlockNumber(b.eth.chainDb, blockHash))
}
+func (b *LesApiBackend) GetLogs(ctx context.Context, blockHash common.Hash) ([][]*types.Log, error) {
+ return light.GetBlockLogs(ctx, b.eth.odr, blockHash, core.GetBlockNumber(b.eth.chainDb, blockHash))
+}
+
func (b *LesApiBackend) GetTd(blockHash common.Hash) *big.Int {
return b.eth.blockchain.GetTdByHash(blockHash)
}
diff --git a/les/backend.go b/les/backend.go
index 798e44e85..6a324cb04 100644
--- a/les/backend.go
+++ b/les/backend.go
@@ -46,6 +46,8 @@ import (
)
type LightEthereum struct {
+ config *eth.Config
+
odr *LesOdr
relay *LesTxRelay
chainConfig *params.ChainConfig
@@ -92,6 +94,7 @@ func New(ctx *node.ServiceContext, config *eth.Config) (*LightEthereum, error) {
quitSync := make(chan struct{})
leth := &LightEthereum{
+ config: config,
chainConfig: chainConfig,
chainDb: chainDb,
eventMux: ctx.EventMux,
@@ -224,7 +227,7 @@ func (s *LightEthereum) Start(srvr *p2p.Server) error {
// clients are searching for the first advertised protocol in the list
protocolVersion := AdvertiseProtocolVersions[0]
s.serverPool.start(srvr, lesTopic(s.blockchain.Genesis().Hash(), protocolVersion))
- s.protocolManager.Start()
+ s.protocolManager.Start(s.config.LightPeers)
return nil
}
diff --git a/les/fetcher.go b/les/fetcher.go
index 3fc4df30b..e12a2c78a 100644
--- a/les/fetcher.go
+++ b/les/fetcher.go
@@ -36,24 +36,26 @@ const (
maxNodeCount = 20 // maximum number of fetcherTreeNode entries remembered for each peer
)
-// lightFetcher
+// lightFetcher implements retrieval of newly announced headers. It also provides a peerHasBlock function for the
+// ODR system to ensure that we only request data related to a certain block from peers who have already processed
+// and announced that block.
type lightFetcher struct {
pm *ProtocolManager
odr *LesOdr
chain *light.LightChain
+ lock sync.Mutex // lock protects access to the fetcher's internal state variables except sent requests
maxConfirmedTd *big.Int
peers map[*peer]*fetcherPeerInfo
lastUpdateStats *updateStatsEntry
+ syncing bool
+ syncDone chan *peer
- lock sync.Mutex // qwerqwerqwe
- deliverChn chan fetchResponse
- reqMu sync.RWMutex
+ reqMu sync.RWMutex // reqMu protects access to sent header fetch requests
requested map[uint64]fetchRequest
+ deliverChn chan fetchResponse
timeoutChn chan uint64
requestChn chan bool // true if initiated from outside
- syncing bool
- syncDone chan *peer
}
// fetcherPeerInfo holds fetcher-specific information about each active peer
@@ -425,6 +427,9 @@ func (f *lightFetcher) nextRequest() (*distReq, uint64) {
},
canSend: func(dp distPeer) bool {
p := dp.(*peer)
+ f.lock.Lock()
+ defer f.lock.Unlock()
+
fp := f.peers[p]
return fp != nil && fp.nodeByHash[bestHash] != nil
},
@@ -557,8 +562,13 @@ func (f *lightFetcher) checkAnnouncedHeaders(fp *fetcherPeerInfo, headers []*typ
return true
}
// we ran out of recently delivered headers but have not reached a node known by this peer yet, continue matching
- td = f.chain.GetTd(header.ParentHash, header.Number.Uint64()-1)
- header = f.chain.GetHeader(header.ParentHash, header.Number.Uint64()-1)
+ hash, number := header.ParentHash, header.Number.Uint64()-1
+ td = f.chain.GetTd(hash, number)
+ header = f.chain.GetHeader(hash, number)
+ if header == nil || td == nil {
+ log.Error("Missing parent of validated header", "hash", hash, "number", number)
+ return false
+ }
} else {
header = headers[i]
td = tds[i]
@@ -642,13 +652,18 @@ func (f *lightFetcher) checkKnownNode(p *peer, n *fetcherTreeNode) bool {
if td == nil {
return false
}
+ header := f.chain.GetHeader(n.hash, n.number)
+ // check the availability of both header and td because reads are not protected by chain db mutex
+ // Note: returning false is always safe here
+ if header == nil {
+ return false
+ }
fp := f.peers[p]
if fp == nil {
p.Log().Debug("Unknown peer to check known nodes")
return false
}
- header := f.chain.GetHeader(n.hash, n.number)
if !f.checkAnnouncedHeaders(fp, []*types.Header{header}, []*big.Int{td}) {
p.Log().Debug("Inconsistent announcement")
go f.pm.removePeer(p.id)
diff --git a/les/handler.go b/les/handler.go
index d627c3e18..9627f392b 100644
--- a/les/handler.go
+++ b/les/handler.go
@@ -18,7 +18,6 @@
package les
import (
- "bytes"
"encoding/binary"
"errors"
"fmt"
@@ -77,13 +76,12 @@ type BlockChain interface {
GetHeader(hash common.Hash, number uint64) *types.Header
GetHeaderByHash(hash common.Hash) *types.Header
CurrentHeader() *types.Header
- GetTdByHash(hash common.Hash) *big.Int
+ GetTd(hash common.Hash, number uint64) *big.Int
+ State() (*state.StateDB, error)
InsertHeaderChain(chain []*types.Header, checkFreq int) (int, error)
Rollback(chain []common.Hash)
- Status() (td *big.Int, currentBlock common.Hash, genesisBlock common.Hash)
GetHeaderByNumber(number uint64) *types.Header
GetBlockHashesFromHash(hash common.Hash, max uint64) []common.Hash
- LastBlockHash() common.Hash
Genesis() *types.Block
SubscribeChainHeadEvent(ch chan<- core.ChainHeadEvent) event.Subscription
}
@@ -111,6 +109,7 @@ type ProtocolManager struct {
downloader *downloader.Downloader
fetcher *lightFetcher
peers *peerSet
+ maxPeers int
SubProtocols []p2p.Protocol
@@ -218,7 +217,9 @@ func (pm *ProtocolManager) removePeer(id string) {
pm.peers.Unregister(id)
}
-func (pm *ProtocolManager) Start() {
+func (pm *ProtocolManager) Start(maxPeers int) {
+ pm.maxPeers = maxPeers
+
if pm.lightSync {
go pm.syncer()
} else {
@@ -259,12 +260,22 @@ func (pm *ProtocolManager) newPeer(pv int, nv uint64, p *p2p.Peer, rw p2p.MsgRea
// handle is the callback invoked to manage the life cycle of a les peer. When
// this function terminates, the peer is disconnected.
func (pm *ProtocolManager) handle(p *peer) error {
+ // Ignore maxPeers if this is a trusted peer
+ if pm.peers.Len() >= pm.maxPeers && !p.Peer.Info().Network.Trusted {
+ return p2p.DiscTooManyPeers
+ }
+
p.Log().Debug("Light Ethereum peer connected", "name", p.Name())
// Execute the LES handshake
- td, head, genesis := pm.blockchain.Status()
- headNum := core.GetBlockNumber(pm.chainDb, head)
- if err := p.Handshake(td, head, headNum, genesis, pm.server); err != nil {
+ var (
+ genesis = pm.blockchain.Genesis()
+ head = pm.blockchain.CurrentHeader()
+ hash = head.Hash()
+ number = head.Number.Uint64()
+ td = pm.blockchain.GetTd(hash, number)
+ )
+ if err := p.Handshake(td, hash, number, genesis.Hash(), pm.server); err != nil {
p.Log().Debug("Light Ethereum handshake failed", "err", err)
return err
}
@@ -569,17 +580,19 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
for _, req := range req.Reqs {
// Retrieve the requested state entry, stopping if enough was found
if header := core.GetHeader(pm.chainDb, req.BHash, core.GetBlockNumber(pm.chainDb, req.BHash)); header != nil {
- if trie, _ := trie.New(header.Root, pm.chainDb); trie != nil {
- sdata := trie.Get(req.AccKey)
- var acc state.Account
- if err := rlp.DecodeBytes(sdata, &acc); err == nil {
- entry, _ := pm.chainDb.Get(acc.CodeHash)
- if bytes+len(entry) >= softResponseLimit {
- break
- }
- data = append(data, entry)
- bytes += len(entry)
- }
+ statedb, err := pm.blockchain.State()
+ if err != nil {
+ continue
+ }
+ account, err := pm.getAccount(statedb, header.Root, common.BytesToHash(req.AccKey))
+ if err != nil {
+ continue
+ }
+ code, _ := statedb.Database().TrieDB().Node(common.BytesToHash(account.CodeHash))
+
+ data = append(data, code)
+ if bytes += len(code); bytes >= softResponseLimit {
+ break
}
}
}
@@ -691,25 +704,29 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
return errResp(ErrRequestRejected, "")
}
for _, req := range req.Reqs {
- if bytes >= softResponseLimit {
- break
- }
// Retrieve the requested state entry, stopping if enough was found
if header := core.GetHeader(pm.chainDb, req.BHash, core.GetBlockNumber(pm.chainDb, req.BHash)); header != nil {
- if tr, _ := trie.New(header.Root, pm.chainDb); tr != nil {
- if len(req.AccKey) > 0 {
- sdata := tr.Get(req.AccKey)
- tr = nil
- var acc state.Account
- if err := rlp.DecodeBytes(sdata, &acc); err == nil {
- tr, _ = trie.New(acc.Root, pm.chainDb)
- }
+ statedb, err := pm.blockchain.State()
+ if err != nil {
+ continue
+ }
+ var trie state.Trie
+ if len(req.AccKey) > 0 {
+ account, err := pm.getAccount(statedb, header.Root, common.BytesToHash(req.AccKey))
+ if err != nil {
+ continue
}
- if tr != nil {
- var proof light.NodeList
- tr.Prove(req.Key, 0, &proof)
- proofs = append(proofs, proof)
- bytes += proof.DataSize()
+ trie, _ = statedb.Database().OpenStorageTrie(common.BytesToHash(req.AccKey), account.Root)
+ } else {
+ trie, _ = statedb.Database().OpenTrie(header.Root)
+ }
+ if trie != nil {
+ var proof light.NodeList
+ trie.Prove(req.Key, 0, &proof)
+
+ proofs = append(proofs, proof)
+ if bytes += proof.DataSize(); bytes >= softResponseLimit {
+ break
}
}
}
@@ -730,9 +747,9 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
}
// Gather state data until the fetch or network limits is reached
var (
- lastBHash common.Hash
- lastAccKey []byte
- tr, str *trie.Trie
+ lastBHash common.Hash
+ statedb *state.StateDB
+ root common.Hash
)
reqCnt := len(req.Reqs)
if reject(uint64(reqCnt), MaxProofsFetch) {
@@ -742,41 +759,41 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
nodes := light.NewNodeSet()
for _, req := range req.Reqs {
- if nodes.DataSize() >= softResponseLimit {
- break
- }
- if tr == nil || req.BHash != lastBHash {
+ // Look up the state belonging to the request
+ if statedb == nil || req.BHash != lastBHash {
+ statedb, root, lastBHash = nil, common.Hash{}, req.BHash
+
if header := core.GetHeader(pm.chainDb, req.BHash, core.GetBlockNumber(pm.chainDb, req.BHash)); header != nil {
- tr, _ = trie.New(header.Root, pm.chainDb)
- } else {
- tr = nil
+ statedb, _ = pm.blockchain.State()
+ root = header.Root
}
- lastBHash = req.BHash
- str = nil
}
- if tr != nil {
- if len(req.AccKey) > 0 {
- if str == nil || !bytes.Equal(req.AccKey, lastAccKey) {
- sdata := tr.Get(req.AccKey)
- str = nil
- var acc state.Account
- if err := rlp.DecodeBytes(sdata, &acc); err == nil {
- str, _ = trie.New(acc.Root, pm.chainDb)
- }
- lastAccKey = common.CopyBytes(req.AccKey)
- }
- if str != nil {
- str.Prove(req.Key, req.FromLevel, nodes)
- }
- } else {
- tr.Prove(req.Key, req.FromLevel, nodes)
+ if statedb == nil {
+ continue
+ }
+ // Pull the account or storage trie of the request
+ var trie state.Trie
+ if len(req.AccKey) > 0 {
+ account, err := pm.getAccount(statedb, root, common.BytesToHash(req.AccKey))
+ if err != nil {
+ continue
}
+ trie, _ = statedb.Database().OpenStorageTrie(common.BytesToHash(req.AccKey), account.Root)
+ } else {
+ trie, _ = statedb.Database().OpenTrie(root)
+ }
+ if trie == nil {
+ continue
+ }
+ // Prove the user's request from the account or storage trie
+ trie.Prove(req.Key, req.FromLevel, nodes)
+ if nodes.DataSize() >= softResponseLimit {
+ break
}
}
- proofs := nodes.NodeList()
bv, rcost := p.fcClient.RequestProcessed(costs.baseCost + uint64(reqCnt)*costs.reqCost)
pm.server.fcCostStats.update(msg.Code, uint64(reqCnt), rcost)
- return p.SendProofsV2(req.ReqID, bv, proofs)
+ return p.SendProofsV2(req.ReqID, bv, nodes.NodeList())
case ProofsV1Msg:
if pm.odr == nil {
@@ -839,22 +856,24 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
if reject(uint64(reqCnt), MaxHelperTrieProofsFetch) {
return errResp(ErrRequestRejected, "")
}
- trieDb := ethdb.NewTable(pm.chainDb, light.ChtTablePrefix)
+ trieDb := trie.NewDatabase(ethdb.NewTable(pm.chainDb, light.ChtTablePrefix))
for _, req := range req.Reqs {
- if bytes >= softResponseLimit {
- break
- }
-
if header := pm.blockchain.GetHeaderByNumber(req.BlockNum); header != nil {
- sectionHead := core.GetCanonicalHash(pm.chainDb, req.ChtNum*light.ChtV1Frequency-1)
+ sectionHead := core.GetCanonicalHash(pm.chainDb, req.ChtNum*light.CHTFrequencyServer-1)
if root := light.GetChtRoot(pm.chainDb, req.ChtNum-1, sectionHead); root != (common.Hash{}) {
- if tr, _ := trie.New(root, trieDb); tr != nil {
- var encNumber [8]byte
- binary.BigEndian.PutUint64(encNumber[:], req.BlockNum)
- var proof light.NodeList
- tr.Prove(encNumber[:], 0, &proof)
- proofs = append(proofs, ChtResp{Header: header, Proof: proof})
- bytes += proof.DataSize() + estHeaderRlpSize
+ trie, err := trie.New(root, trieDb)
+ if err != nil {
+ continue
+ }
+ var encNumber [8]byte
+ binary.BigEndian.PutUint64(encNumber[:], req.BlockNum)
+
+ var proof light.NodeList
+ trie.Prove(encNumber[:], 0, &proof)
+
+ proofs = append(proofs, ChtResp{Header: header, Proof: proof})
+ if bytes += proof.DataSize() + estHeaderRlpSize; bytes >= softResponseLimit {
+ break
}
}
}
@@ -887,25 +906,17 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
lastIdx uint64
lastType uint
root common.Hash
- tr *trie.Trie
+ auxTrie *trie.Trie
)
-
nodes := light.NewNodeSet()
-
for _, req := range req.Reqs {
- if nodes.DataSize()+auxBytes >= softResponseLimit {
- break
- }
- if tr == nil || req.HelperTrieType != lastType || req.TrieIdx != lastIdx {
+ if auxTrie == nil || req.Type != lastType || req.TrieIdx != lastIdx {
+ auxTrie, lastType, lastIdx = nil, req.Type, req.TrieIdx
+
var prefix string
- root, prefix = pm.getHelperTrie(req.HelperTrieType, req.TrieIdx)
- if root != (common.Hash{}) {
- if t, err := trie.New(root, ethdb.NewTable(pm.chainDb, prefix)); err == nil {
- tr = t
- }
+ if root, prefix = pm.getHelperTrie(req.Type, req.TrieIdx); root != (common.Hash{}) {
+ auxTrie, _ = trie.New(root, trie.NewDatabase(ethdb.NewTable(pm.chainDb, prefix)))
}
- lastType = req.HelperTrieType
- lastIdx = req.TrieIdx
}
if req.AuxReq == auxRoot {
var data []byte
@@ -915,8 +926,8 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
auxData = append(auxData, data)
auxBytes += len(data)
} else {
- if tr != nil {
- tr.Prove(req.Key, req.FromLevel, nodes)
+ if auxTrie != nil {
+ auxTrie.Prove(req.Key, req.FromLevel, nodes)
}
if req.AuxReq != 0 {
data := pm.getHelperTrieAuxData(req)
@@ -924,11 +935,13 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
auxBytes += len(data)
}
}
+ if nodes.DataSize()+auxBytes >= softResponseLimit {
+ break
+ }
}
- proofs := nodes.NodeList()
bv, rcost := p.fcClient.RequestProcessed(costs.baseCost + uint64(reqCnt)*costs.reqCost)
pm.server.fcCostStats.update(msg.Code, uint64(reqCnt), rcost)
- return p.SendHelperTrieProofs(req.ReqID, bv, HelperTrieResps{Proofs: proofs, AuxData: auxData})
+ return p.SendHelperTrieProofs(req.ReqID, bv, HelperTrieResps{Proofs: nodes.NodeList(), AuxData: auxData})
case HeaderProofsMsg:
if pm.odr == nil {
@@ -1014,7 +1027,7 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
for i, stat := range stats {
if stat.Status == core.TxStatusUnknown {
if errs := pm.txpool.AddRemotes([]*types.Transaction{req.Txs[i]}); errs[0] != nil {
- stats[i].Error = errs[0]
+ stats[i].Error = errs[0].Error()
continue
}
stats[i] = pm.txStatus([]common.Hash{hashes[i]})[0]
@@ -1055,7 +1068,7 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
p.Log().Trace("Received tx status response")
var resp struct {
ReqID, BV uint64
- Status []core.TxStatus
+ Status []txStatus
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
@@ -1080,11 +1093,28 @@ func (pm *ProtocolManager) handleMsg(p *peer) error {
return nil
}
+// getAccount retrieves an account from the state based on the given root.
+func (pm *ProtocolManager) getAccount(statedb *state.StateDB, root, hash common.Hash) (state.Account, error) {
+ trie, err := trie.New(root, statedb.Database().TrieDB())
+ if err != nil {
+ return state.Account{}, err
+ }
+ blob, err := trie.TryGet(hash[:])
+ if err != nil {
+ return state.Account{}, err
+ }
+ var account state.Account
+ if err = rlp.DecodeBytes(blob, &account); err != nil {
+ return state.Account{}, err
+ }
+ return account, nil
+}
+
// getHelperTrie returns the post-processed trie root for the given trie ID and section index
func (pm *ProtocolManager) getHelperTrie(id uint, idx uint64) (common.Hash, string) {
switch id {
case htCanonical:
- sectionHead := core.GetCanonicalHash(pm.chainDb, (idx+1)*light.ChtFrequency-1)
+ sectionHead := core.GetCanonicalHash(pm.chainDb, (idx+1)*light.CHTFrequencyClient-1)
return light.GetChtV2Root(pm.chainDb, idx, sectionHead), light.ChtTablePrefix
case htBloomBits:
sectionHead := core.GetCanonicalHash(pm.chainDb, (idx+1)*light.BloomTrieFrequency-1)
@@ -1095,10 +1125,8 @@ func (pm *ProtocolManager) getHelperTrie(id uint, idx uint64) (common.Hash, stri
// getHelperTrieAuxData returns requested auxiliary data for the given HelperTrie request
func (pm *ProtocolManager) getHelperTrieAuxData(req HelperTrieReq) []byte {
- if req.HelperTrieType == htCanonical && req.AuxReq == auxHeader {
- if len(req.Key) != 8 {
- return nil
- }
+ switch {
+ case req.Type == htCanonical && req.AuxReq == auxHeader && len(req.Key) == 8:
blockNum := binary.BigEndian.Uint64(req.Key)
hash := core.GetCanonicalHash(pm.chainDb, blockNum)
return core.GetHeaderRLP(pm.chainDb, hash, blockNum)
@@ -1135,12 +1163,15 @@ type NodeInfo struct {
// NodeInfo retrieves some protocol metadata about the running host node.
func (self *ProtocolManager) NodeInfo() *NodeInfo {
+ head := self.blockchain.CurrentHeader()
+ hash := head.Hash()
+
return &NodeInfo{
Network: self.networkId,
- Difficulty: self.blockchain.GetTdByHash(self.blockchain.LastBlockHash()),
+ Difficulty: self.blockchain.GetTd(hash, head.Number.Uint64()),
Genesis: self.blockchain.Genesis().Hash(),
Config: self.blockchain.Config(),
- Head: self.blockchain.LastBlockHash(),
+ Head: hash,
}
}
diff --git a/les/handler_test.go b/les/handler_test.go
index 7d67af26a..9468032f6 100644
--- a/les/handler_test.go
+++ b/les/handler_test.go
@@ -17,7 +17,7 @@
package les
import (
- "bytes"
+ "encoding/binary"
"math/big"
"math/rand"
"testing"
@@ -45,27 +45,8 @@ func expectResponse(r p2p.MsgReader, msgcode, reqID, bv uint64, data interface{}
return p2p.ExpectMsg(r, msgcode, resp{reqID, bv, data})
}
-func testCheckProof(t *testing.T, exp *light.NodeSet, got light.NodeList) {
- if exp.KeyCount() > len(got) {
- t.Errorf("proof has fewer nodes than expected")
- return
- }
- if exp.KeyCount() < len(got) {
- t.Errorf("proof has more nodes than expected")
- return
- }
- for _, node := range got {
- n, _ := exp.Get(crypto.Keccak256(node))
- if !bytes.Equal(n, node) {
- t.Errorf("proof contents mismatch")
- return
- }
- }
-}
-
// Tests that block headers can be retrieved from a remote chain based on user queries.
func TestGetBlockHeadersLes1(t *testing.T) { testGetBlockHeaders(t, 1) }
-
func TestGetBlockHeadersLes2(t *testing.T) { testGetBlockHeaders(t, 2) }
func testGetBlockHeaders(t *testing.T, protocol int) {
@@ -196,7 +177,6 @@ func testGetBlockHeaders(t *testing.T, protocol int) {
// Tests that block contents can be retrieved from a remote chain based on their hashes.
func TestGetBlockBodiesLes1(t *testing.T) { testGetBlockBodies(t, 1) }
-
func TestGetBlockBodiesLes2(t *testing.T) { testGetBlockBodies(t, 2) }
func testGetBlockBodies(t *testing.T, protocol int) {
@@ -274,7 +254,6 @@ func testGetBlockBodies(t *testing.T, protocol int) {
// Tests that the contract codes can be retrieved based on account addresses.
func TestGetCodeLes1(t *testing.T) { testGetCode(t, 1) }
-
func TestGetCodeLes2(t *testing.T) { testGetCode(t, 2) }
func testGetCode(t *testing.T, protocol int) {
@@ -309,7 +288,6 @@ func testGetCode(t *testing.T, protocol int) {
// Tests that the transaction receipts can be retrieved based on hashes.
func TestGetReceiptLes1(t *testing.T) { testGetReceipt(t, 1) }
-
func TestGetReceiptLes2(t *testing.T) { testGetReceipt(t, 2) }
func testGetReceipt(t *testing.T, protocol int) {
@@ -338,7 +316,6 @@ func testGetReceipt(t *testing.T, protocol int) {
// Tests that trie merkle proofs can be retrieved
func TestGetProofsLes1(t *testing.T) { testGetProofs(t, 1) }
-
func TestGetProofsLes2(t *testing.T) { testGetProofs(t, 2) }
func testGetProofs(t *testing.T, protocol int) {
@@ -359,7 +336,7 @@ func testGetProofs(t *testing.T, protocol int) {
for i := uint64(0); i <= bc.CurrentBlock().NumberU64(); i++ {
header := bc.GetHeaderByNumber(i)
root := header.Root
- trie, _ := trie.New(root, db)
+ trie, _ := trie.New(root, trie.NewDatabase(db))
for _, acc := range accounts {
req := ProofReq{
@@ -389,27 +366,126 @@ func testGetProofs(t *testing.T, protocol int) {
case 2:
cost := peer.GetRequestCost(GetProofsV2Msg, len(proofreqs))
sendRequest(peer.app, GetProofsV2Msg, 42, cost, proofreqs)
- msg, err := peer.app.ReadMsg()
- if err != nil {
- t.Errorf("Message read error: %v", err)
- }
- var resp struct {
- ReqID, BV uint64
- Data light.NodeList
- }
- if err := msg.Decode(&resp); err != nil {
- t.Errorf("reply decode error: %v", err)
+ if err := expectResponse(peer.app, ProofsV2Msg, 42, testBufLimit, proofsV2.NodeList()); err != nil {
+ t.Errorf("proofs mismatch: %v", err)
}
- if msg.Code != ProofsV2Msg {
- t.Errorf("Message code mismatch")
+ }
+}
+
+// Tests that CHT proofs can be correctly retrieved.
+func TestGetCHTProofsLes1(t *testing.T) { testGetCHTProofs(t, 1) }
+func TestGetCHTProofsLes2(t *testing.T) { testGetCHTProofs(t, 2) }
+
+func testGetCHTProofs(t *testing.T, protocol int) {
+ // Figure out the client's CHT frequency
+ frequency := uint64(light.CHTFrequencyClient)
+ if protocol == 1 {
+ frequency = uint64(light.CHTFrequencyServer)
+ }
+ // Assemble the test environment
+ db, _ := ethdb.NewMemDatabase()
+ pm := newTestProtocolManagerMust(t, false, int(frequency)+light.HelperTrieProcessConfirmations, testChainGen, nil, nil, db)
+ bc := pm.blockchain.(*core.BlockChain)
+ peer, _ := newTestPeer(t, "peer", protocol, pm, true)
+ defer peer.close()
+
+ // Wait a while for the CHT indexer to process the new headers
+ time.Sleep(100 * time.Millisecond * time.Duration(frequency/light.CHTFrequencyServer)) // Chain indexer throttling
+ time.Sleep(250 * time.Millisecond) // CI tester slack
+
+ // Assemble the proofs from the different protocols
+ header := bc.GetHeaderByNumber(frequency)
+ rlp, _ := rlp.EncodeToBytes(header)
+
+ key := make([]byte, 8)
+ binary.BigEndian.PutUint64(key, frequency)
+
+ proofsV1 := []ChtResp{{
+ Header: header,
+ }}
+ proofsV2 := HelperTrieResps{
+ AuxData: [][]byte{rlp},
+ }
+ switch protocol {
+ case 1:
+ root := light.GetChtRoot(db, 0, bc.GetHeaderByNumber(frequency-1).Hash())
+ trie, _ := trie.New(root, trie.NewDatabase(ethdb.NewTable(db, light.ChtTablePrefix)))
+
+ var proof light.NodeList
+ trie.Prove(key, 0, &proof)
+ proofsV1[0].Proof = proof
+
+ case 2:
+ root := light.GetChtV2Root(db, 0, bc.GetHeaderByNumber(frequency-1).Hash())
+ trie, _ := trie.New(root, trie.NewDatabase(ethdb.NewTable(db, light.ChtTablePrefix)))
+ trie.Prove(key, 0, &proofsV2.Proofs)
+ }
+ // Assemble the requests for the different protocols
+ requestsV1 := []ChtReq{{
+ ChtNum: 1,
+ BlockNum: frequency,
+ }}
+ requestsV2 := []HelperTrieReq{{
+ Type: htCanonical,
+ TrieIdx: 0,
+ Key: key,
+ AuxReq: auxHeader,
+ }}
+ // Send the proof request and verify the response
+ switch protocol {
+ case 1:
+ cost := peer.GetRequestCost(GetHeaderProofsMsg, len(requestsV1))
+ sendRequest(peer.app, GetHeaderProofsMsg, 42, cost, requestsV1)
+ if err := expectResponse(peer.app, HeaderProofsMsg, 42, testBufLimit, proofsV1); err != nil {
+ t.Errorf("proofs mismatch: %v", err)
}
- if resp.ReqID != 42 {
- t.Errorf("ReqID mismatch")
+ case 2:
+ cost := peer.GetRequestCost(GetHelperTrieProofsMsg, len(requestsV2))
+ sendRequest(peer.app, GetHelperTrieProofsMsg, 42, cost, requestsV2)
+ if err := expectResponse(peer.app, HelperTrieProofsMsg, 42, testBufLimit, proofsV2); err != nil {
+ t.Errorf("proofs mismatch: %v", err)
}
- if resp.BV != testBufLimit {
- t.Errorf("BV mismatch")
+ }
+}
+
+// Tests that bloombits proofs can be correctly retrieved.
+func TestGetBloombitsProofs(t *testing.T) {
+ // Assemble the test environment
+ db, _ := ethdb.NewMemDatabase()
+ pm := newTestProtocolManagerMust(t, false, light.BloomTrieFrequency+256, testChainGen, nil, nil, db)
+ bc := pm.blockchain.(*core.BlockChain)
+ peer, _ := newTestPeer(t, "peer", 2, pm, true)
+ defer peer.close()
+
+ // Wait a while for the bloombits indexer to process the new headers
+ time.Sleep(100 * time.Millisecond * time.Duration(light.BloomTrieFrequency/4096)) // Chain indexer throttling
+ time.Sleep(250 * time.Millisecond) // CI tester slack
+
+ // Request and verify each bit of the bloom bits proofs
+ for bit := 0; bit < 2048; bit++ {
+ // Assemble the request and proofs for the bloombits
+ key := make([]byte, 10)
+
+ binary.BigEndian.PutUint16(key[:2], uint16(bit))
+ binary.BigEndian.PutUint64(key[2:], uint64(light.BloomTrieFrequency))
+
+ requests := []HelperTrieReq{{
+ Type: htBloomBits,
+ TrieIdx: 0,
+ Key: key,
+ }}
+ var proofs HelperTrieResps
+
+ root := light.GetBloomTrieRoot(db, 0, bc.GetHeaderByNumber(light.BloomTrieFrequency-1).Hash())
+ trie, _ := trie.New(root, trie.NewDatabase(ethdb.NewTable(db, light.BloomTrieTablePrefix)))
+ trie.Prove(key, 0, &proofs.Proofs)
+
+ // Send the proof request and verify the response
+ cost := peer.GetRequestCost(GetHelperTrieProofsMsg, len(requests))
+ sendRequest(peer.app, GetHelperTrieProofsMsg, 42, cost, requests)
+ if err := expectResponse(peer.app, HelperTrieProofsMsg, 42, testBufLimit, proofs); err != nil {
+ t.Errorf("bit %d: proofs mismatch: %v", bit, err)
}
- testCheckProof(t, proofsV2, resp.Data)
}
}
@@ -444,7 +520,7 @@ func TestTransactionStatusLes2(t *testing.T) {
// test error status by sending an underpriced transaction
tx0, _ := types.SignTx(types.NewTransaction(0, acc1Addr, big.NewInt(10000), params.TxGas, nil, nil), signer, testBankKey)
- test(tx0, true, txStatus{Status: core.TxStatusUnknown, Error: core.ErrUnderpriced})
+ test(tx0, true, txStatus{Status: core.TxStatusUnknown, Error: core.ErrUnderpriced.Error()})
tx1, _ := types.SignTx(types.NewTransaction(0, acc1Addr, big.NewInt(10000), params.TxGas, big.NewInt(100000000000), nil), signer, testBankKey)
test(tx1, false, txStatus{Status: core.TxStatusUnknown}) // query before sending, should be unknown
diff --git a/les/helper_test.go b/les/helper_test.go
index b881b41ce..6d997a1a3 100644
--- a/les/helper_test.go
+++ b/les/helper_test.go
@@ -31,6 +31,7 @@ import (
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/core/vm"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/eth"
"github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/les/flowcontrol"
@@ -55,6 +56,9 @@ var (
testContractCodeDeployed = testContractCode[16:]
testContractDeployed = uint64(2)
+ testEventEmitterCode = common.Hex2Bytes("60606040523415600e57600080fd5b7f57050ab73f6b9ebdd9f76b8d4997793f48cf956e965ee070551b9ca0bb71584e60405160405180910390a160358060476000396000f3006060604052600080fd00a165627a7a723058203f727efcad8b5811f8cb1fc2620ce5e8c63570d697aef968172de296ea3994140029")
+ testEventEmitterAddr common.Address
+
testBufLimit = uint64(100)
)
@@ -85,15 +89,19 @@ func testChainGen(i int, block *core.BlockGen) {
// In block 2, the test bank sends some more ether to account #1.
// acc1Addr passes it on to account #2.
// acc1Addr creates a test contract.
- tx1, _ := types.SignTx(types.NewTransaction(block.TxNonce(testBankAddress), acc1Addr, big.NewInt(1000), params.TxGas, nil, nil), signer, testBankKey)
+ // acc1Addr deploys a test event emitter contract.
nonce := block.TxNonce(acc1Addr)
+
+ tx1, _ := types.SignTx(types.NewTransaction(block.TxNonce(testBankAddress), acc1Addr, big.NewInt(1000), params.TxGas, nil, nil), signer, testBankKey)
tx2, _ := types.SignTx(types.NewTransaction(nonce, acc2Addr, big.NewInt(1000), params.TxGas, nil, nil), signer, acc1Key)
- nonce++
- tx3, _ := types.SignTx(types.NewContractCreation(nonce, big.NewInt(0), 200000, big.NewInt(0), testContractCode), signer, acc1Key)
- testContractAddr = crypto.CreateAddress(acc1Addr, nonce)
+ tx3, _ := types.SignTx(types.NewContractCreation(nonce+1, big.NewInt(0), 200000, big.NewInt(0), testContractCode), signer, acc1Key)
+ testContractAddr = crypto.CreateAddress(acc1Addr, nonce+1)
+ tx4, _ := types.SignTx(types.NewContractCreation(nonce+2, big.NewInt(0), 200000, big.NewInt(0), testEventEmitterCode), signer, acc1Key)
+ testEventEmitterAddr = crypto.CreateAddress(acc1Addr, nonce+2)
block.AddTx(tx1)
block.AddTx(tx2)
block.AddTx(tx3)
+ block.AddTx(tx4)
case 2:
// Block 3 is empty but was mined by account #2.
block.SetCoinbase(acc2Addr)
@@ -146,7 +154,17 @@ func newTestProtocolManager(lightSync bool, blocks int, generator func(int, *cor
if lightSync {
chain, _ = light.NewLightChain(odr, gspec.Config, engine)
} else {
- blockchain, _ := core.NewBlockChain(db, gspec.Config, engine, vm.Config{})
+ blockchain, _ := core.NewBlockChain(db, nil, gspec.Config, engine, vm.Config{})
+
+ chtIndexer := light.NewChtIndexer(db, false)
+ chtIndexer.Start(blockchain)
+
+ bbtIndexer := light.NewBloomTrieIndexer(db, false)
+
+ bloomIndexer := eth.NewBloomIndexer(db, params.BloomBitsBlocks)
+ bloomIndexer.AddChildIndexer(bbtIndexer)
+ bloomIndexer.Start(blockchain)
+
gchain, _ := core.GenerateChain(gspec.Config, genesis, ethash.NewFaker(), db, blocks, generator)
if _, err := blockchain.InsertChain(gchain); err != nil {
panic(err)
@@ -176,7 +194,7 @@ func newTestProtocolManager(lightSync bool, blocks int, generator func(int, *cor
srv.fcManager = flowcontrol.NewClientManager(50, 10, 1000000000)
srv.fcCostStats = newCostStats(nil)
}
- pm.Start()
+ pm.Start(1000)
return pm, nil
}
@@ -227,9 +245,12 @@ func newTestPeer(t *testing.T, name string, version int, pm *ProtocolManager, sh
}
// Execute any implicitly requested handshakes and return
if shake {
- td, head, genesis := pm.blockchain.Status()
- headNum := pm.blockchain.CurrentHeader().Number.Uint64()
- tp.handshake(t, td, head, headNum, genesis)
+ var (
+ genesis = pm.blockchain.Genesis()
+ head = pm.blockchain.CurrentHeader()
+ td = pm.blockchain.GetTd(head.Hash(), head.Number.Uint64())
+ )
+ tp.handshake(t, td, head.Hash(), head.Number.Uint64(), genesis.Hash())
}
return tp, errc
}
diff --git a/les/metrics.go b/les/metrics.go
index 0162a1d1a..c282a62a1 100644
--- a/les/metrics.go
+++ b/les/metrics.go
@@ -58,10 +58,10 @@ var (
reqReceiptInTrafficMeter = metrics.NewMeter("eth/req/receipts/in/traffic")
reqReceiptOutPacketsMeter = metrics.NewMeter("eth/req/receipts/out/packets")
reqReceiptOutTrafficMeter = metrics.NewMeter("eth/req/receipts/out/traffic")*/
- miscInPacketsMeter = metrics.NewMeter("les/misc/in/packets")
- miscInTrafficMeter = metrics.NewMeter("les/misc/in/traffic")
- miscOutPacketsMeter = metrics.NewMeter("les/misc/out/packets")
- miscOutTrafficMeter = metrics.NewMeter("les/misc/out/traffic")
+ miscInPacketsMeter = metrics.NewRegisteredMeter("les/misc/in/packets", nil)
+ miscInTrafficMeter = metrics.NewRegisteredMeter("les/misc/in/traffic", nil)
+ miscOutPacketsMeter = metrics.NewRegisteredMeter("les/misc/out/packets", nil)
+ miscOutTrafficMeter = metrics.NewRegisteredMeter("les/misc/out/traffic", nil)
)
// meteredMsgReadWriter is a wrapper around a p2p.MsgReadWriter, capable of
diff --git a/les/odr_requests.go b/les/odr_requests.go
index 937a4f1d9..34d759dd2 100644
--- a/les/odr_requests.go
+++ b/les/odr_requests.go
@@ -321,7 +321,7 @@ const (
)
type HelperTrieReq struct {
- HelperTrieType uint
+ Type uint
TrieIdx uint64
Key []byte
FromLevel, AuxReq uint
@@ -365,7 +365,7 @@ func (r *ChtRequest) CanSend(peer *peer) bool {
peer.lock.RLock()
defer peer.lock.RUnlock()
- return peer.headInfo.Number >= light.HelperTrieConfirmations && r.ChtNum <= (peer.headInfo.Number-light.HelperTrieConfirmations)/light.ChtFrequency
+ return peer.headInfo.Number >= light.HelperTrieConfirmations && r.ChtNum <= (peer.headInfo.Number-light.HelperTrieConfirmations)/light.CHTFrequencyClient
}
// Request sends an ODR request to the LES network (implementation of LesOdrRequest)
@@ -374,10 +374,10 @@ func (r *ChtRequest) Request(reqID uint64, peer *peer) error {
var encNum [8]byte
binary.BigEndian.PutUint64(encNum[:], r.BlockNum)
req := HelperTrieReq{
- HelperTrieType: htCanonical,
- TrieIdx: r.ChtNum,
- Key: encNum[:],
- AuxReq: auxHeader,
+ Type: htCanonical,
+ TrieIdx: r.ChtNum,
+ Key: encNum[:],
+ AuxReq: auxHeader,
}
return peer.RequestHelperTrieProofs(reqID, r.GetCost(peer), []HelperTrieReq{req})
}
@@ -493,14 +493,14 @@ func (r *BloomRequest) Request(reqID uint64, peer *peer) error {
reqs := make([]HelperTrieReq, len(r.SectionIdxList))
var encNumber [10]byte
- binary.BigEndian.PutUint16(encNumber[0:2], uint16(r.BitIdx))
+ binary.BigEndian.PutUint16(encNumber[:2], uint16(r.BitIdx))
for i, sectionIdx := range r.SectionIdxList {
- binary.BigEndian.PutUint64(encNumber[2:10], sectionIdx)
+ binary.BigEndian.PutUint64(encNumber[2:], sectionIdx)
reqs[i] = HelperTrieReq{
- HelperTrieType: htBloomBits,
- TrieIdx: r.BloomTrieNum,
- Key: common.CopyBytes(encNumber[:]),
+ Type: htBloomBits,
+ TrieIdx: r.BloomTrieNum,
+ Key: common.CopyBytes(encNumber[:]),
}
}
return peer.RequestHelperTrieProofs(reqID, r.GetCost(peer), reqs)
@@ -525,10 +525,10 @@ func (r *BloomRequest) Validate(db ethdb.Database, msg *Msg) error {
// Verify the proofs
var encNumber [10]byte
- binary.BigEndian.PutUint16(encNumber[0:2], uint16(r.BitIdx))
+ binary.BigEndian.PutUint16(encNumber[:2], uint16(r.BitIdx))
for i, idx := range r.SectionIdxList {
- binary.BigEndian.PutUint64(encNumber[2:10], idx)
+ binary.BigEndian.PutUint64(encNumber[2:], idx)
value, err, _ := trie.VerifyProof(r.BloomTrieRoot, encNumber[:], reads)
if err != nil {
return err
diff --git a/les/odr_test.go b/les/odr_test.go
index cf609be88..88e121cda 100644
--- a/les/odr_test.go
+++ b/les/odr_test.go
@@ -101,7 +101,6 @@ func odrAccounts(ctx context.Context, db ethdb.Database, config *params.ChainCon
res = append(res, rlp...)
}
}
-
return res
}
diff --git a/les/peer.go b/les/peer.go
index b72c80d35..caf568077 100644
--- a/les/peer.go
+++ b/les/peer.go
@@ -281,7 +281,6 @@ func (p *peer) RequestProofs(reqID, cost uint64, reqs []ProofReq) error {
default:
panic(nil)
}
-
}
// RequestHelperTrieProofs fetches a batch of HelperTrie merkle proofs from a remote node.
@@ -291,12 +290,12 @@ func (p *peer) RequestHelperTrieProofs(reqID, cost uint64, reqs []HelperTrieReq)
case lpv1:
reqsV1 := make([]ChtReq, len(reqs))
for i, req := range reqs {
- if req.HelperTrieType != htCanonical || req.AuxReq != auxHeader || len(req.Key) != 8 {
+ if req.Type != htCanonical || req.AuxReq != auxHeader || len(req.Key) != 8 {
return fmt.Errorf("Request invalid in LES/1 mode")
}
blockNum := binary.BigEndian.Uint64(req.Key)
// convert HelperTrie request to old CHT request
- reqsV1[i] = ChtReq{ChtNum: (req.TrieIdx + 1) * (light.ChtFrequency / light.ChtV1Frequency), BlockNum: blockNum, FromLevel: req.FromLevel}
+ reqsV1[i] = ChtReq{ChtNum: (req.TrieIdx + 1) * (light.CHTFrequencyClient / light.CHTFrequencyServer), BlockNum: blockNum, FromLevel: req.FromLevel}
}
return sendRequest(p.rw, GetHeaderProofsMsg, reqID, cost, reqsV1)
case lpv2:
diff --git a/les/protocol.go b/les/protocol.go
index 6a7354d1c..e1c4625bc 100644
--- a/les/protocol.go
+++ b/les/protocol.go
@@ -224,6 +224,6 @@ type proofsData [][]rlp.RawValue
type txStatus struct {
Status core.TxStatus
- Lookup *core.TxLookupEntry
- Error error
+ Lookup *core.TxLookupEntry `rlp:"nil"`
+ Error string
}
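
The txStatus change above pairs with the handler test edit earlier in this diff: RLP has no encoding for Go error values, so the status now carries the message as a plain string and marks the lookup pointer as nillable. A minimal sketch of filling in such a status under these definitions (newTxStatus is a hypothetical helper, not code from the patch):

```go
// Sketch only: mirrors the txStatus shape introduced above.
// newTxStatus is a hypothetical helper, not part of this patch.
func newTxStatus(status core.TxStatus, lookup *core.TxLookupEntry, err error) txStatus {
	s := txStatus{Status: status, Lookup: lookup} // Lookup may stay nil thanks to `rlp:"nil"`
	if err != nil {
		s.Error = err.Error() // errors cross the wire as strings, e.g. core.ErrUnderpriced.Error()
	}
	return s
}
```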
diff --git a/les/retrieve.go b/les/retrieve.go
index dd15b56ac..e262a3cb4 100644
--- a/les/retrieve.go
+++ b/les/retrieve.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/les/server.go b/les/server.go
index ec2e44fec..28b87008a 100644
--- a/les/server.go
+++ b/les/server.go
@@ -20,7 +20,6 @@ package les
import (
"crypto/ecdsa"
"encoding/binary"
- "fmt"
"math"
"sync"
@@ -38,6 +37,7 @@ import (
)
type LesServer struct {
+ config *eth.Config
protocolManager *ProtocolManager
fcManager *flowcontrol.ClientManager // nil if our node is client only
fcCostStats *requestCostStats
@@ -62,6 +62,7 @@ func NewLesServer(eth *eth.Ethereum, config *eth.Config) (*LesServer, error) {
}
srv := &LesServer{
+ config: config,
protocolManager: pm,
quitSync: quitSync,
lesTopics: lesTopics,
@@ -71,23 +72,22 @@ func NewLesServer(eth *eth.Ethereum, config *eth.Config) (*LesServer, error) {
logger := log.New()
chtV1SectionCount, _, _ := srv.chtIndexer.Sections() // indexer still uses LES/1 4k section size for backwards server compatibility
- chtV2SectionCount := chtV1SectionCount / (light.ChtFrequency / light.ChtV1Frequency)
+ chtV2SectionCount := chtV1SectionCount / (light.CHTFrequencyClient / light.CHTFrequencyServer)
if chtV2SectionCount != 0 {
// convert to LES/2 section
chtLastSection := chtV2SectionCount - 1
// convert last LES/2 section index back to LES/1 index for chtIndexer.SectionHead
- chtLastSectionV1 := (chtLastSection+1)*(light.ChtFrequency/light.ChtV1Frequency) - 1
+ chtLastSectionV1 := (chtLastSection+1)*(light.CHTFrequencyClient/light.CHTFrequencyServer) - 1
chtSectionHead := srv.chtIndexer.SectionHead(chtLastSectionV1)
chtRoot := light.GetChtV2Root(pm.chainDb, chtLastSection, chtSectionHead)
- logger.Info("CHT", "section", chtLastSection, "sectionHead", fmt.Sprintf("%064x", chtSectionHead), "root", fmt.Sprintf("%064x", chtRoot))
+ logger.Info("Loaded CHT", "section", chtLastSection, "head", chtSectionHead, "root", chtRoot)
}
-
bloomTrieSectionCount, _, _ := srv.bloomTrieIndexer.Sections()
if bloomTrieSectionCount != 0 {
bloomTrieLastSection := bloomTrieSectionCount - 1
bloomTrieSectionHead := srv.bloomTrieIndexer.SectionHead(bloomTrieLastSection)
bloomTrieRoot := light.GetBloomTrieRoot(pm.chainDb, bloomTrieLastSection, bloomTrieSectionHead)
- logger.Info("BloomTrie", "section", bloomTrieLastSection, "sectionHead", fmt.Sprintf("%064x", bloomTrieSectionHead), "root", fmt.Sprintf("%064x", bloomTrieRoot))
+ logger.Info("Loaded bloom trie", "section", bloomTrieLastSection, "head", bloomTrieSectionHead, "root", bloomTrieRoot)
}
srv.chtIndexer.Start(eth.BlockChain())
@@ -108,16 +108,18 @@ func (s *LesServer) Protocols() []p2p.Protocol {
// Start starts the LES server
func (s *LesServer) Start(srvr *p2p.Server) {
- s.protocolManager.Start()
- for _, topic := range s.lesTopics {
- topic := topic
- go func() {
- logger := log.New("topic", topic)
- logger.Info("Starting topic registration")
- defer logger.Info("Terminated topic registration")
-
- srvr.DiscV5.RegisterTopic(topic, s.quitSync)
- }()
+ s.protocolManager.Start(s.config.LightPeers)
+ if srvr.DiscV5 != nil {
+ for _, topic := range s.lesTopics {
+ topic := topic
+ go func() {
+ logger := log.New("topic", topic)
+ logger.Info("Starting topic registration")
+ defer logger.Info("Terminated topic registration")
+
+ srvr.DiscV5.RegisterTopic(topic, s.quitSync)
+ }()
+ }
}
s.privateKey = srvr.PrivateKey
s.protocolManager.blockLoop()
diff --git a/light/lightchain.go b/light/lightchain.go
index 0d97ce1a2..2784615d3 100644
--- a/light/lightchain.go
+++ b/light/lightchain.go
@@ -18,6 +18,7 @@ package light
import (
"context"
+ "errors"
"math/big"
"sync"
"sync/atomic"
@@ -26,6 +27,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/consensus"
"github.com/ethereum/go-ethereum/core"
+ "github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/event"
@@ -98,7 +100,6 @@ func NewLightChain(odr OdrBackend, config *params.ChainConfig, engine consensus.
if cp, ok := trustedCheckpoints[bc.genesisBlock.Hash()]; ok {
bc.addTrustedCheckpoint(cp)
}
-
if err := bc.loadLastState(); err != nil {
return nil, err
}
@@ -126,7 +127,7 @@ func (self *LightChain) addTrustedCheckpoint(cp trustedCheckpoint) {
if self.odr.BloomIndexer() != nil {
self.odr.BloomIndexer().AddKnownSectionHead(cp.sectionIdx, cp.sectionHead)
}
- log.Info("Added trusted checkpoint", "chain name", cp.name)
+ log.Info("Added trusted checkpoint", "chain", cp.name, "block", (cp.sectionIdx+1)*CHTFrequencyClient-1, "hash", cp.sectionHead)
}
func (self *LightChain) getProcInterrupt() bool {
@@ -170,31 +171,9 @@ func (bc *LightChain) SetHead(head uint64) {
// GasLimit returns the gas limit of the current HEAD block.
func (self *LightChain) GasLimit() uint64 {
- self.mu.RLock()
- defer self.mu.RUnlock()
-
return self.hc.CurrentHeader().GasLimit
}
-// LastBlockHash return the hash of the HEAD block.
-func (self *LightChain) LastBlockHash() common.Hash {
- self.mu.RLock()
- defer self.mu.RUnlock()
-
- return self.hc.CurrentHeader().Hash()
-}
-
-// Status returns status information about the current chain such as the HEAD Td,
-// the HEAD hash and the hash of the genesis block.
-func (self *LightChain) Status() (td *big.Int, currentBlock common.Hash, genesisBlock common.Hash) {
- self.mu.RLock()
- defer self.mu.RUnlock()
-
- header := self.hc.CurrentHeader()
- hash := header.Hash()
- return self.GetTd(hash, header.Number.Uint64()), hash, self.genesisBlock.Hash()
-}
-
// Reset purges the entire blockchain, restoring it to its genesis state.
func (bc *LightChain) Reset() {
bc.ResetWithGenesisBlock(bc.genesisBlock)
@@ -231,6 +210,11 @@ func (bc *LightChain) Genesis() *types.Block {
return bc.genesisBlock
}
+// State returns a new mutable state based on the current HEAD block.
+func (bc *LightChain) State() (*state.StateDB, error) {
+ return nil, errors.New("not implemented, needs client/server interface split")
+}
+
// GetBody retrieves a block body (transactions and uncles) from the database
// or ODR service by hash, caching it if found.
func (self *LightChain) GetBody(ctx context.Context, hash common.Hash) (*types.Body, error) {
@@ -337,7 +321,7 @@ func (self *LightChain) postChainEvents(events []interface{}) {
for _, event := range events {
switch ev := event.(type) {
case core.ChainEvent:
- if self.LastBlockHash() == ev.Hash {
+ if self.CurrentHeader().Hash() == ev.Hash {
self.chainHeadFeed.Send(core.ChainHeadEvent{Block: ev.Block})
}
self.chainFeed.Send(ev)
@@ -400,9 +384,6 @@ func (self *LightChain) InsertHeaderChain(chain []*types.Header, checkFreq int)
// CurrentHeader retrieves the current head header of the canonical chain. The
// header is retrieved from the HeaderChain's internal cache.
func (self *LightChain) CurrentHeader() *types.Header {
- self.mu.RLock()
- defer self.mu.RUnlock()
-
return self.hc.CurrentHeader()
}
@@ -466,8 +447,8 @@ func (self *LightChain) SyncCht(ctx context.Context) bool {
}
headNum := self.CurrentHeader().Number.Uint64()
chtCount, _, _ := self.odr.ChtIndexer().Sections()
- if headNum+1 < chtCount*ChtFrequency {
- num := chtCount*ChtFrequency - 1
+ if headNum+1 < chtCount*CHTFrequencyClient {
+ num := chtCount*CHTFrequencyClient - 1
header, err := GetHeaderByNumber(ctx, self.odr, num)
if header != nil && err == nil {
self.mu.Lock()
diff --git a/light/nodeset.go b/light/nodeset.go
index c530a4fbe..6f25219c1 100644
--- a/light/nodeset.go
+++ b/light/nodeset.go
@@ -1,4 +1,4 @@
-// Copyright 2014 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -22,14 +22,16 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/rlp"
- "github.com/ethereum/go-ethereum/trie"
)
// NodeSet stores a set of trie nodes. It implements trie.Database and can also
// act as a cache for another trie.Database.
type NodeSet struct {
- db map[string][]byte
+ nodes map[string][]byte
+ order []string
+
dataSize int
lock sync.RWMutex
}
@@ -37,7 +39,7 @@ type NodeSet struct {
// NewNodeSet creates an empty node set
func NewNodeSet() *NodeSet {
return &NodeSet{
- db: make(map[string][]byte),
+ nodes: make(map[string][]byte),
}
}
@@ -46,10 +48,15 @@ func (db *NodeSet) Put(key []byte, value []byte) error {
db.lock.Lock()
defer db.lock.Unlock()
- if _, ok := db.db[string(key)]; !ok {
- db.db[string(key)] = common.CopyBytes(value)
- db.dataSize += len(value)
+ if _, ok := db.nodes[string(key)]; ok {
+ return nil
}
+ keystr := string(key)
+
+ db.nodes[keystr] = common.CopyBytes(value)
+ db.order = append(db.order, keystr)
+ db.dataSize += len(value)
+
return nil
}
@@ -58,7 +65,7 @@ func (db *NodeSet) Get(key []byte) ([]byte, error) {
db.lock.RLock()
defer db.lock.RUnlock()
- if entry, ok := db.db[string(key)]; ok {
+ if entry, ok := db.nodes[string(key)]; ok {
return entry, nil
}
return nil, errors.New("not found")
@@ -75,7 +82,7 @@ func (db *NodeSet) KeyCount() int {
db.lock.RLock()
defer db.lock.RUnlock()
- return len(db.db)
+ return len(db.nodes)
}
// DataSize returns the aggregated data size of nodes in the set
@@ -92,27 +99,27 @@ func (db *NodeSet) NodeList() NodeList {
defer db.lock.RUnlock()
var values NodeList
- for _, value := range db.db {
- values = append(values, value)
+ for _, key := range db.order {
+ values = append(values, db.nodes[key])
}
return values
}
// Store writes the contents of the set to the given database
-func (db *NodeSet) Store(target trie.Database) {
+func (db *NodeSet) Store(target ethdb.Putter) {
db.lock.RLock()
defer db.lock.RUnlock()
- for key, value := range db.db {
+ for key, value := range db.nodes {
target.Put([]byte(key), value)
}
}
-// NodeList stores an ordered list of trie nodes. It implements trie.DatabaseWriter.
+// NodeList stores an ordered list of trie nodes. It implements ethdb.Putter.
type NodeList []rlp.RawValue
// Store writes the contents of the list to the given database
-func (n NodeList) Store(db trie.Database) {
+func (n NodeList) Store(db ethdb.Putter) {
for _, node := range n {
db.Put(crypto.Keccak256(node), node)
}
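
NodeSet now remembers insertion order so NodeList() returns proofs in the order they were proven, and both types write into an ethdb.Putter instead of the removed trie.Database interface. A small usage sketch under those assumptions, using only the methods shown in this file's diff:

```go
// Sketch: exercises the ordered NodeSet/NodeList API from this diff
// (assumes the light and ethdb packages at this revision).
func nodeSetExample() {
	set := light.NewNodeSet()
	set.Put([]byte("a"), []byte{0x01}) // first insert
	set.Put([]byte("b"), []byte{0x02}) // second insert
	set.Put([]byte("a"), []byte{0xff}) // duplicate key is ignored, order preserved

	list := set.NodeList() // values come back in insertion order: 0x01, 0x02

	db, _ := ethdb.NewMemDatabase() // any ethdb.Putter works as a target
	list.Store(db)                  // NodeList keys each entry by Keccak256(node)
}
```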
diff --git a/light/odr_test.go b/light/odr_test.go
index e3d07518a..d3f9374fd 100644
--- a/light/odr_test.go
+++ b/light/odr_test.go
@@ -74,7 +74,7 @@ func (odr *testOdr) Retrieve(ctx context.Context, req OdrRequest) error {
case *ReceiptsRequest:
req.Receipts = core.GetBlockReceipts(odr.sdb, req.Hash, core.GetBlockNumber(odr.sdb, req.Hash))
case *TrieRequest:
- t, _ := trie.New(req.Id.Root, odr.sdb)
+ t, _ := trie.New(req.Id.Root, trie.NewDatabase(odr.sdb))
nodes := NewNodeSet()
t.Prove(req.Key, 0, nodes)
req.Proof = nodes
@@ -239,7 +239,7 @@ func testChainOdr(t *testing.T, protocol int, fn odrTestFn) {
)
gspec.MustCommit(ldb)
// Assemble the test environment
- blockchain, _ := core.NewBlockChain(sdb, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
+ blockchain, _ := core.NewBlockChain(sdb, nil, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
gchain, _ := core.GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), sdb, 4, testChainGen)
if _, err := blockchain.InsertChain(gchain); err != nil {
t.Fatal(err)
diff --git a/light/odr_util.go b/light/odr_util.go
index a0eb6303d..d56330e36 100644
--- a/light/odr_util.go
+++ b/light/odr_util.go
@@ -52,23 +52,20 @@ func GetHeaderByNumber(ctx context.Context, odr OdrBackend, number uint64) (*typ
for chtCount > 0 && canonicalHash != sectionHead && canonicalHash != (common.Hash{}) {
chtCount--
if chtCount > 0 {
- sectionHeadNum = chtCount*ChtFrequency - 1
+ sectionHeadNum = chtCount*CHTFrequencyClient - 1
sectionHead = odr.ChtIndexer().SectionHead(chtCount - 1)
canonicalHash = core.GetCanonicalHash(db, sectionHeadNum)
}
}
}
-
- if number >= chtCount*ChtFrequency {
+ if number >= chtCount*CHTFrequencyClient {
return nil, ErrNoTrustedCht
}
-
r := &ChtRequest{ChtRoot: GetChtRoot(db, chtCount-1, sectionHead), ChtNum: chtCount - 1, BlockNum: number}
if err := odr.Retrieve(ctx, r); err != nil {
return nil, err
- } else {
- return r.Header, nil
}
+ return r.Header, nil
}
func GetCanonicalHash(ctx context.Context, odr OdrBackend, number uint64) (common.Hash, error) {
@@ -129,15 +126,50 @@ func GetBlock(ctx context.Context, odr OdrBackend, hash common.Hash, number uint
// GetBlockReceipts retrieves the receipts generated by the transactions included
// in a block given by its hash.
func GetBlockReceipts(ctx context.Context, odr OdrBackend, hash common.Hash, number uint64) (types.Receipts, error) {
+ // Retrieve the potentially incomplete receipts from disk or network
receipts := core.GetBlockReceipts(odr.Database(), hash, number)
- if receipts != nil {
- return receipts, nil
+ if receipts == nil {
+ r := &ReceiptsRequest{Hash: hash, Number: number}
+ if err := odr.Retrieve(ctx, r); err != nil {
+ return nil, err
+ }
+ receipts = r.Receipts
}
- r := &ReceiptsRequest{Hash: hash, Number: number}
- if err := odr.Retrieve(ctx, r); err != nil {
- return nil, err
+ // If the receipts are incomplete, fill the derived fields
+ if len(receipts) > 0 && receipts[0].TxHash == (common.Hash{}) {
+ block, err := GetBlock(ctx, odr, hash, number)
+ if err != nil {
+ return nil, err
+ }
+ genesis := core.GetCanonicalHash(odr.Database(), 0)
+ config, _ := core.GetChainConfig(odr.Database(), genesis)
+
+ if err := core.SetReceiptsData(config, block, receipts); err != nil {
+ return nil, err
+ }
+ core.WriteBlockReceipts(odr.Database(), hash, number, receipts)
+ }
+ return receipts, nil
+}
+
+// GetBlockLogs retrieves the logs generated by the transactions included in a
+// block given by its hash.
+func GetBlockLogs(ctx context.Context, odr OdrBackend, hash common.Hash, number uint64) ([][]*types.Log, error) {
+ // Retrieve the potentially incomplete receipts from disk or network
+ receipts := core.GetBlockReceipts(odr.Database(), hash, number)
+ if receipts == nil {
+ r := &ReceiptsRequest{Hash: hash, Number: number}
+ if err := odr.Retrieve(ctx, r); err != nil {
+ return nil, err
+ }
+ receipts = r.Receipts
+ }
+ // Return the logs without deriving any computed fields on the receipts
+ logs := make([][]*types.Log, len(receipts))
+ for i, receipt := range receipts {
+ logs[i] = receipt.Logs
}
- return r.Receipts, nil
+ return logs, nil
}
// GetBloomBits retrieves a batch of compressed bloomBits vectors belonging to the given bit index and section indexes
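
GetBlockReceipts now back-fills the derived receipt fields (and writes the completed receipts back to the database), while the new GetBlockLogs skips that work because log filtering only needs the raw logs. A hedged usage sketch of the two entry points; the hash, number and backend values are placeholders:

```go
// Sketch: blockHash, blockNumber and odr (a light.OdrBackend) are placeholders.
func fetchReceiptsAndLogs(odr light.OdrBackend, blockHash common.Hash, blockNumber uint64) error {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	receipts, err := light.GetBlockReceipts(ctx, odr, blockHash, blockNumber) // derived fields filled in
	if err != nil {
		return err
	}
	logs, err := light.GetBlockLogs(ctx, odr, blockHash, blockNumber) // cheaper path: raw logs only
	if err != nil {
		return err
	}
	_, _ = receipts, logs
	return nil
}
```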
diff --git a/light/postprocess.go b/light/postprocess.go
index 32dbc102b..384a635f7 100644
--- a/light/postprocess.go
+++ b/light/postprocess.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -19,7 +19,6 @@ package light
import (
"encoding/binary"
"errors"
- "fmt"
"math/big"
"time"
@@ -35,8 +34,14 @@ import (
)
const (
- ChtFrequency = 32768
- ChtV1Frequency = 4096 // as long as we want to retain LES/1 compatibility, servers generate CHTs with the old, higher frequency
+ // CHTFrequencyClient is the block frequency for creating CHTs on the client side.
+ CHTFrequencyClient = 32768
+
+ // CHTFrequencyServer is the block frequency for creating CHTs on the server side.
+ // Eventually this can be merged back with the client version, but that requires a
+ // full database upgrade, so that should be left for a suitable moment.
+ CHTFrequencyServer = 4096
+
HelperTrieConfirmations = 2048 // number of confirmations before a server is expected to have the given HelperTrie available
HelperTrieProcessConfirmations = 256 // number of confirmations before a HelperTrie is generated
)
@@ -52,19 +57,19 @@ type trustedCheckpoint struct {
var (
mainnetCheckpoint = trustedCheckpoint{
- name: "ETH mainnet",
- sectionIdx: 150,
- sectionHead: common.HexToHash("1e2e67f289565cbe7bd4367f7960dbd73a3f7c53439e1047cd7ba331c8109e39"),
- chtRoot: common.HexToHash("f2a6c9ca143d647b44523cc249f1072c8912358ab873a77a5fdc792b8df99e80"),
- bloomTrieRoot: common.HexToHash("c018952fa1513c97857e79fbb9a37acaf8432d5b85e52a78eca7dff5fd5900ee"),
+ name: "mainnet",
+ sectionIdx: 157,
+ sectionHead: common.HexToHash("1963c080887ca7f406c2bb114293eea83e54f783f94df24b447f7e3b6317c747"),
+ chtRoot: common.HexToHash("42abc436567dfb678a38fa6a9f881aa4c8a4cc8eaa2def08359292c3d0bd48ec"),
+ bloomTrieRoot: common.HexToHash("281c9f8fb3cb8b37ae45e9907ef8f3b19cd22c54e297c2d6c09c1db1593dce42"),
}
ropstenCheckpoint = trustedCheckpoint{
- name: "Ropsten testnet",
- sectionIdx: 75,
- sectionHead: common.HexToHash("12e68324f4578ea3e8e7fb3968167686729396c9279287fa1f1a8b51bb2d05b4"),
- chtRoot: common.HexToHash("3e51dc095c69fa654a4cac766e0afff7357515b4b3c3a379c675f810363e54be"),
- bloomTrieRoot: common.HexToHash("33e3a70b33c1d73aa698d496a80615e98ed31fa8f56969876180553b32333339"),
+ name: "ropsten",
+ sectionIdx: 83,
+ sectionHead: common.HexToHash("3ca623586bc0da35f1fc8d9b6b55950f3b1f69be9c6501846a2df672adb61236"),
+ chtRoot: common.HexToHash("8f08ec7783969768c6ef06e5fe3398223cbf4ae2907b676da7b6fe6c7f55b059"),
+ bloomTrieRoot: common.HexToHash("02d86d3c6a87f8f8a92c2a59bbba2132ff6f9f61b0915a5dc28a9d8279219fd0"),
}
)
@@ -100,7 +105,7 @@ func GetChtRoot(db ethdb.Database, sectionIdx uint64, sectionHead common.Hash) c
// GetChtV2Root reads the CHT root associated with the given section from the database
// Note that sectionIdx is specified according to LES/2 CHT section size
func GetChtV2Root(db ethdb.Database, sectionIdx uint64, sectionHead common.Hash) common.Hash {
- return GetChtRoot(db, (sectionIdx+1)*(ChtFrequency/ChtV1Frequency)-1, sectionHead)
+ return GetChtRoot(db, (sectionIdx+1)*(CHTFrequencyClient/CHTFrequencyServer)-1, sectionHead)
}
// StoreChtRoot writes the CHT root associated with the given section into the database
@@ -113,7 +118,8 @@ func StoreChtRoot(db ethdb.Database, sectionIdx uint64, sectionHead, root common
// ChtIndexerBackend implements core.ChainIndexerBackend
type ChtIndexerBackend struct {
- db, cdb ethdb.Database
+ diskdb ethdb.Database
+ triedb *trie.Database
section, sectionSize uint64
lastHash common.Hash
trie *trie.Trie
@@ -121,27 +127,31 @@ type ChtIndexerBackend struct {
// NewChtIndexer creates a CHT chain indexer
func NewChtIndexer(db ethdb.Database, clientMode bool) *core.ChainIndexer {
- cdb := ethdb.NewTable(db, ChtTablePrefix)
- idb := ethdb.NewTable(db, "chtIndex-")
var sectionSize, confirmReq uint64
if clientMode {
- sectionSize = ChtFrequency
+ sectionSize = CHTFrequencyClient
confirmReq = HelperTrieConfirmations
} else {
- sectionSize = ChtV1Frequency
+ sectionSize = CHTFrequencyServer
confirmReq = HelperTrieProcessConfirmations
}
- return core.NewChainIndexer(db, idb, &ChtIndexerBackend{db: db, cdb: cdb, sectionSize: sectionSize}, sectionSize, confirmReq, time.Millisecond*100, "cht")
+ idb := ethdb.NewTable(db, "chtIndex-")
+ backend := &ChtIndexerBackend{
+ diskdb: db,
+ triedb: trie.NewDatabase(ethdb.NewTable(db, ChtTablePrefix)),
+ sectionSize: sectionSize,
+ }
+ return core.NewChainIndexer(db, idb, backend, sectionSize, confirmReq, time.Millisecond*100, "cht")
}
// Reset implements core.ChainIndexerBackend
func (c *ChtIndexerBackend) Reset(section uint64, lastSectionHead common.Hash) error {
var root common.Hash
if section > 0 {
- root = GetChtRoot(c.db, section-1, lastSectionHead)
+ root = GetChtRoot(c.diskdb, section-1, lastSectionHead)
}
var err error
- c.trie, err = trie.New(root, c.cdb)
+ c.trie, err = trie.New(root, c.triedb)
c.section = section
return err
}
@@ -151,7 +161,7 @@ func (c *ChtIndexerBackend) Process(header *types.Header) {
hash, num := header.Hash(), header.Number.Uint64()
c.lastHash = hash
- td := core.GetTd(c.db, hash, num)
+ td := core.GetTd(c.diskdb, hash, num)
if td == nil {
panic(nil)
}
@@ -163,17 +173,16 @@ func (c *ChtIndexerBackend) Process(header *types.Header) {
// Commit implements core.ChainIndexerBackend
func (c *ChtIndexerBackend) Commit() error {
- batch := c.cdb.NewBatch()
- root, err := c.trie.CommitTo(batch)
+ root, err := c.trie.Commit(nil)
if err != nil {
return err
- } else {
- batch.Write()
- if ((c.section+1)*c.sectionSize)%ChtFrequency == 0 {
- log.Info("Storing CHT", "idx", c.section*c.sectionSize/ChtFrequency, "sectionHead", fmt.Sprintf("%064x", c.lastHash), "root", fmt.Sprintf("%064x", root))
- }
- StoreChtRoot(c.db, c.section, c.lastHash, root)
}
+ c.triedb.Commit(root, false)
+
+ if ((c.section+1)*c.sectionSize)%CHTFrequencyClient == 0 {
+ log.Info("Storing CHT", "section", c.section*c.sectionSize/CHTFrequencyClient, "head", c.lastHash, "root", root)
+ }
+ StoreChtRoot(c.diskdb, c.section, c.lastHash, root)
return nil
}
@@ -205,7 +214,8 @@ func StoreBloomTrieRoot(db ethdb.Database, sectionIdx uint64, sectionHead, root
// BloomTrieIndexerBackend implements core.ChainIndexerBackend
type BloomTrieIndexerBackend struct {
- db, cdb ethdb.Database
+ diskdb ethdb.Database
+ triedb *trie.Database
section, parentSectionSize, bloomTrieRatio uint64
trie *trie.Trie
sectionHeads []common.Hash
@@ -213,9 +223,12 @@ type BloomTrieIndexerBackend struct {
// NewBloomTrieIndexer creates a BloomTrie chain indexer
func NewBloomTrieIndexer(db ethdb.Database, clientMode bool) *core.ChainIndexer {
- cdb := ethdb.NewTable(db, BloomTrieTablePrefix)
+ backend := &BloomTrieIndexerBackend{
+ diskdb: db,
+ triedb: trie.NewDatabase(ethdb.NewTable(db, BloomTrieTablePrefix)),
+ }
idb := ethdb.NewTable(db, "bltIndex-")
- backend := &BloomTrieIndexerBackend{db: db, cdb: cdb}
+
var confirmReq uint64
if clientMode {
backend.parentSectionSize = BloomTrieFrequency
@@ -233,10 +246,10 @@ func NewBloomTrieIndexer(db ethdb.Database, clientMode bool) *core.ChainIndexer
func (b *BloomTrieIndexerBackend) Reset(section uint64, lastSectionHead common.Hash) error {
var root common.Hash
if section > 0 {
- root = GetBloomTrieRoot(b.db, section-1, lastSectionHead)
+ root = GetBloomTrieRoot(b.diskdb, section-1, lastSectionHead)
}
var err error
- b.trie, err = trie.New(root, b.cdb)
+ b.trie, err = trie.New(root, b.triedb)
b.section = section
return err
}
@@ -259,7 +272,7 @@ func (b *BloomTrieIndexerBackend) Commit() error {
binary.BigEndian.PutUint64(encKey[2:10], b.section)
var decomp []byte
for j := uint64(0); j < b.bloomTrieRatio; j++ {
- data, err := core.GetBloomBits(b.db, i, b.section*b.bloomTrieRatio+j, b.sectionHeads[j])
+ data, err := core.GetBloomBits(b.diskdb, i, b.section*b.bloomTrieRatio+j, b.sectionHeads[j])
if err != nil {
return err
}
@@ -279,17 +292,15 @@ func (b *BloomTrieIndexerBackend) Commit() error {
b.trie.Delete(encKey[:])
}
}
-
- batch := b.cdb.NewBatch()
- root, err := b.trie.CommitTo(batch)
+ root, err := b.trie.Commit(nil)
if err != nil {
return err
- } else {
- batch.Write()
- sectionHead := b.sectionHeads[b.bloomTrieRatio-1]
- log.Info("Storing BloomTrie", "section", b.section, "sectionHead", fmt.Sprintf("%064x", sectionHead), "root", fmt.Sprintf("%064x", root), "compression ratio", float64(compSize)/float64(decompSize))
- StoreBloomTrieRoot(b.db, b.section, sectionHead, root)
}
+ b.triedb.Commit(root, false)
+
+ sectionHead := b.sectionHeads[b.bloomTrieRatio-1]
+ log.Info("Storing bloom trie", "section", b.section, "head", sectionHead, "root", root, "compression", float64(compSize)/float64(decompSize))
+ StoreBloomTrieRoot(b.diskdb, b.section, sectionHead, root)
return nil
}
diff --git a/light/trie.go b/light/trie.go
index 7a9c86b98..c07e99461 100644
--- a/light/trie.go
+++ b/light/trie.go
@@ -18,12 +18,14 @@ package light
import (
"context"
+ "errors"
"fmt"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/trie"
)
@@ -83,6 +85,10 @@ func (db *odrDatabase) ContractCodeSize(addrHash, codeHash common.Hash) (int, er
return len(code), err
}
+func (db *odrDatabase) TrieDB() *trie.Database {
+ return nil
+}
+
type odrTrie struct {
db *odrDatabase
id *TrieID
@@ -113,11 +119,11 @@ func (t *odrTrie) TryDelete(key []byte) error {
})
}
-func (t *odrTrie) CommitTo(db trie.DatabaseWriter) (common.Hash, error) {
+func (t *odrTrie) Commit(onleaf trie.LeafCallback) (common.Hash, error) {
if t.trie == nil {
return t.id.Root, nil
}
- return t.trie.CommitTo(db)
+ return t.trie.Commit(onleaf)
}
func (t *odrTrie) Hash() common.Hash {
@@ -135,13 +141,17 @@ func (t *odrTrie) GetKey(sha []byte) []byte {
return nil
}
+func (t *odrTrie) Prove(key []byte, fromLevel uint, proofDb ethdb.Putter) error {
+ return errors.New("not implemented, needs client/server interface split")
+}
+
// do tries and retries to execute a function until it returns with no error or
// an error type other than MissingNodeError
func (t *odrTrie) do(key []byte, fn func() error) error {
for {
var err error
if t.trie == nil {
- t.trie, err = trie.New(t.id.Root, t.db.backend.Database())
+ t.trie, err = trie.New(t.id.Root, trie.NewDatabase(t.db.backend.Database()))
}
if err == nil {
err = fn()
@@ -167,7 +177,7 @@ func newNodeIterator(t *odrTrie, startkey []byte) trie.NodeIterator {
// Open the actual non-ODR trie if that hasn't happened yet.
if t.trie == nil {
it.do(func() error {
- t, err := trie.New(t.id.Root, t.db.backend.Database())
+ t, err := trie.New(t.id.Root, trie.NewDatabase(t.db.backend.Database()))
if err == nil {
it.t.trie = t
}
diff --git a/light/trie_test.go b/light/trie_test.go
index d99664718..0d6b2cc1d 100644
--- a/light/trie_test.go
+++ b/light/trie_test.go
@@ -40,7 +40,7 @@ func TestNodeIterator(t *testing.T) {
genesis = gspec.MustCommit(fulldb)
)
gspec.MustCommit(lightdb)
- blockchain, _ := core.NewBlockChain(fulldb, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
+ blockchain, _ := core.NewBlockChain(fulldb, nil, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
gchain, _ := core.GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), fulldb, 4, testChainGen)
if _, err := blockchain.InsertChain(gchain); err != nil {
panic(err)
diff --git a/light/txpool_test.go b/light/txpool_test.go
index b343f79b0..13d7d3ceb 100644
--- a/light/txpool_test.go
+++ b/light/txpool_test.go
@@ -88,7 +88,7 @@ func TestTxPool(t *testing.T) {
)
gspec.MustCommit(ldb)
// Assemble the test environment
- blockchain, _ := core.NewBlockChain(sdb, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
+ blockchain, _ := core.NewBlockChain(sdb, nil, params.TestChainConfig, ethash.NewFullFaker(), vm.Config{})
gchain, _ := core.GenerateChain(params.TestChainConfig, genesis, ethash.NewFaker(), sdb, poolTestBlocks, txPoolTestChainGen)
if _, err := blockchain.InsertChain(gchain); err != nil {
panic(err)
diff --git a/metrics/FORK.md b/metrics/FORK.md
new file mode 100644
index 000000000..b19985bf5
--- /dev/null
+++ b/metrics/FORK.md
@@ -0,0 +1 @@
+This repo has been forked from https://github.com/rcrowley/go-metrics at commit e181e09
diff --git a/vendor/github.com/rcrowley/go-metrics/LICENSE b/metrics/LICENSE
index 363fa9ee7..363fa9ee7 100644
--- a/vendor/github.com/rcrowley/go-metrics/LICENSE
+++ b/metrics/LICENSE
diff --git a/vendor/github.com/rcrowley/go-metrics/README.md b/metrics/README.md
index 2d1a6dcfa..bc2a45a83 100644
--- a/vendor/github.com/rcrowley/go-metrics/README.md
+++ b/metrics/README.md
@@ -42,12 +42,22 @@ t.Update(47)
Register() is not threadsafe. For threadsafe metric registration use
GetOrRegister:
-```
+```go
t := metrics.GetOrRegisterTimer("account.create.latency", nil)
t.Time(func() {})
t.Update(47)
```
+**NOTE:** Be sure to unregister short-lived meters and timers; otherwise they will
+leak memory:
+
+```go
+// Will call Stop() on the Meter to allow for garbage collection
+metrics.Unregister("quux")
+// Or similarly for a Timer that embeds a Meter
+metrics.Unregister("bang")
+```
+
Periodically log every metric in human-readable form to standard error:
```go
@@ -81,12 +91,13 @@ issues [#121](https://github.com/rcrowley/go-metrics/issues/121) and
```go
import "github.com/vrischmann/go-metrics-influxdb"
-go influxdb.Influxdb(metrics.DefaultRegistry, 10e9, &influxdb.Config{
- Host: "127.0.0.1:8086",
- Database: "metrics",
- Username: "test",
- Password: "test",
-})
+go influxdb.InfluxDB(metrics.DefaultRegistry,
+ 10e9,
+ "127.0.0.1:8086",
+ "database-name",
+ "username",
+ "password"
+)
```
Periodically upload every metric to Librato using the [Librato client](https://github.com/mihasya/go-metrics-librato):
@@ -146,8 +157,10 @@ Publishing Metrics
Clients are available for the following destinations:
-* Librato - [https://github.com/mihasya/go-metrics-librato](https://github.com/mihasya/go-metrics-librato)
-* Graphite - [https://github.com/cyberdelia/go-metrics-graphite](https://github.com/cyberdelia/go-metrics-graphite)
-* InfluxDB - [https://github.com/vrischmann/go-metrics-influxdb](https://github.com/vrischmann/go-metrics-influxdb)
-* Ganglia - [https://github.com/appscode/metlia](https://github.com/appscode/metlia)
-* Prometheus - [https://github.com/deathowl/go-metrics-prometheus](https://github.com/deathowl/go-metrics-prometheus)
+* Librato - https://github.com/mihasya/go-metrics-librato
+* Graphite - https://github.com/cyberdelia/go-metrics-graphite
+* InfluxDB - https://github.com/vrischmann/go-metrics-influxdb
+* Ganglia - https://github.com/appscode/metlia
+* Prometheus - https://github.com/deathowl/go-metrics-prometheus
+* DataDog - https://github.com/syntaqx/go-metrics-datadog
+* SignalFX - https://github.com/pascallouisperez/go-metrics-signalfx
diff --git a/vendor/github.com/rcrowley/go-metrics/counter.go b/metrics/counter.go
index bb7b039cb..c7f2b4bd3 100644
--- a/vendor/github.com/rcrowley/go-metrics/counter.go
+++ b/metrics/counter.go
@@ -22,7 +22,7 @@ func GetOrRegisterCounter(name string, r Registry) Counter {
// NewCounter constructs a new StandardCounter.
func NewCounter() Counter {
- if UseNilMetrics {
+ if !Enabled {
return NilCounter{}
}
return &StandardCounter{0}
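
Across the forked metrics package the old UseNilMetrics flag flips polarity: every constructor now checks a positive Enabled switch and returns a no-op Nil* implementation when metrics are off. Under that assumption, collection has to be switched on before any meters are built; a sketch (the metric name is illustrative):

```go
// Sketch: Enabled must be set before constructors run, otherwise they
// hand back the Nil* no-op implementations.
func enableMetricsExample() {
	metrics.Enabled = true

	c := metrics.NewRegisteredCounter("les/example/counter", nil) // illustrative name
	c.Inc(1)
}
```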
diff --git a/metrics/counter_test.go b/metrics/counter_test.go
new file mode 100644
index 000000000..dfb03b4e8
--- /dev/null
+++ b/metrics/counter_test.go
@@ -0,0 +1,77 @@
+package metrics
+
+import "testing"
+
+func BenchmarkCounter(b *testing.B) {
+ c := NewCounter()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ c.Inc(1)
+ }
+}
+
+func TestCounterClear(t *testing.T) {
+ c := NewCounter()
+ c.Inc(1)
+ c.Clear()
+ if count := c.Count(); 0 != count {
+ t.Errorf("c.Count(): 0 != %v\n", count)
+ }
+}
+
+func TestCounterDec1(t *testing.T) {
+ c := NewCounter()
+ c.Dec(1)
+ if count := c.Count(); -1 != count {
+ t.Errorf("c.Count(): -1 != %v\n", count)
+ }
+}
+
+func TestCounterDec2(t *testing.T) {
+ c := NewCounter()
+ c.Dec(2)
+ if count := c.Count(); -2 != count {
+ t.Errorf("c.Count(): -2 != %v\n", count)
+ }
+}
+
+func TestCounterInc1(t *testing.T) {
+ c := NewCounter()
+ c.Inc(1)
+ if count := c.Count(); 1 != count {
+ t.Errorf("c.Count(): 1 != %v\n", count)
+ }
+}
+
+func TestCounterInc2(t *testing.T) {
+ c := NewCounter()
+ c.Inc(2)
+ if count := c.Count(); 2 != count {
+ t.Errorf("c.Count(): 2 != %v\n", count)
+ }
+}
+
+func TestCounterSnapshot(t *testing.T) {
+ c := NewCounter()
+ c.Inc(1)
+ snapshot := c.Snapshot()
+ c.Inc(1)
+ if count := snapshot.Count(); 1 != count {
+ t.Errorf("c.Count(): 1 != %v\n", count)
+ }
+}
+
+func TestCounterZero(t *testing.T) {
+ c := NewCounter()
+ if count := c.Count(); 0 != count {
+ t.Errorf("c.Count(): 0 != %v\n", count)
+ }
+}
+
+func TestGetOrRegisterCounter(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredCounter("foo", r).Inc(47)
+ if c := GetOrRegisterCounter("foo", r); 47 != c.Count() {
+ t.Fatal(c)
+ }
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/debug.go b/metrics/debug.go
index 043ccefab..de4a2739f 100644
--- a/vendor/github.com/rcrowley/go-metrics/debug.go
+++ b/metrics/debug.go
@@ -22,7 +22,7 @@ var (
// Capture new values for the Go garbage collector statistics exported in
// debug.GCStats. This is designed to be called as a goroutine.
func CaptureDebugGCStats(r Registry, d time.Duration) {
- for _ = range time.Tick(d) {
+ for range time.Tick(d) {
CaptureDebugGCStatsOnce(r)
}
}
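
Since CaptureDebugGCStats blocks on its ticker, the intended pattern is to register the debug metrics once and then launch the capture loop in the background; a short usage sketch (registry choice and interval are illustrative):

```go
// Sketch: register the GC metrics, then sample them in the background.
func startGCStats() {
	r := metrics.DefaultRegistry
	metrics.RegisterDebugGCStats(r)
	go metrics.CaptureDebugGCStats(r, 5*time.Second) // interval is illustrative
}
```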
@@ -41,8 +41,8 @@ func CaptureDebugGCStatsOnce(r Registry) {
debug.ReadGCStats(&gcStats)
debugMetrics.ReadGCStats.UpdateSince(t)
- debugMetrics.GCStats.LastGC.Update(int64(gcStats.LastGC.UnixNano()))
- debugMetrics.GCStats.NumGC.Update(int64(gcStats.NumGC))
+ debugMetrics.GCStats.LastGC.Update(gcStats.LastGC.UnixNano())
+ debugMetrics.GCStats.NumGC.Update(gcStats.NumGC)
if lastGC != gcStats.LastGC && 0 < len(gcStats.Pause) {
debugMetrics.GCStats.Pause.Update(int64(gcStats.Pause[0]))
}
diff --git a/metrics/debug_test.go b/metrics/debug_test.go
new file mode 100644
index 000000000..07eb86784
--- /dev/null
+++ b/metrics/debug_test.go
@@ -0,0 +1,48 @@
+package metrics
+
+import (
+ "runtime"
+ "runtime/debug"
+ "testing"
+ "time"
+)
+
+func BenchmarkDebugGCStats(b *testing.B) {
+ r := NewRegistry()
+ RegisterDebugGCStats(r)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ CaptureDebugGCStatsOnce(r)
+ }
+}
+
+func TestDebugGCStatsBlocking(t *testing.T) {
+ if g := runtime.GOMAXPROCS(0); g < 2 {
+ t.Skipf("skipping TestDebugGCMemStatsBlocking with GOMAXPROCS=%d\n", g)
+ return
+ }
+ ch := make(chan int)
+ go testDebugGCStatsBlocking(ch)
+ var gcStats debug.GCStats
+ t0 := time.Now()
+ debug.ReadGCStats(&gcStats)
+ t1 := time.Now()
+ t.Log("i++ during debug.ReadGCStats:", <-ch)
+ go testDebugGCStatsBlocking(ch)
+ d := t1.Sub(t0)
+ t.Log(d)
+ time.Sleep(d)
+ t.Log("i++ during time.Sleep:", <-ch)
+}
+
+func testDebugGCStatsBlocking(ch chan int) {
+ i := 0
+ for {
+ select {
+ case ch <- i:
+ return
+ default:
+ i++
+ }
+ }
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/ewma.go b/metrics/ewma.go
index 694a1d033..3aecd4fa3 100644
--- a/vendor/github.com/rcrowley/go-metrics/ewma.go
+++ b/metrics/ewma.go
@@ -17,7 +17,7 @@ type EWMA interface {
// NewEWMA constructs a new EWMA with the given alpha.
func NewEWMA(alpha float64) EWMA {
- if UseNilMetrics {
+ if !Enabled {
return NilEWMA{}
}
return &StandardEWMA{alpha: alpha}
diff --git a/metrics/ewma_test.go b/metrics/ewma_test.go
new file mode 100644
index 000000000..0430fbd24
--- /dev/null
+++ b/metrics/ewma_test.go
@@ -0,0 +1,225 @@
+package metrics
+
+import "testing"
+
+func BenchmarkEWMA(b *testing.B) {
+ a := NewEWMA1()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ a.Update(1)
+ a.Tick()
+ }
+}
+
+func TestEWMA1(t *testing.T) {
+ a := NewEWMA1()
+ a.Update(3)
+ a.Tick()
+ if rate := a.Rate(); 0.6 != rate {
+ t.Errorf("initial a.Rate(): 0.6 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.22072766470286553 != rate {
+ t.Errorf("1 minute a.Rate(): 0.22072766470286553 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.08120116994196772 != rate {
+ t.Errorf("2 minute a.Rate(): 0.08120116994196772 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.029872241020718428 != rate {
+ t.Errorf("3 minute a.Rate(): 0.029872241020718428 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.01098938333324054 != rate {
+ t.Errorf("4 minute a.Rate(): 0.01098938333324054 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.004042768199451294 != rate {
+ t.Errorf("5 minute a.Rate(): 0.004042768199451294 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.0014872513059998212 != rate {
+ t.Errorf("6 minute a.Rate(): 0.0014872513059998212 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.0005471291793327122 != rate {
+ t.Errorf("7 minute a.Rate(): 0.0005471291793327122 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.00020127757674150815 != rate {
+ t.Errorf("8 minute a.Rate(): 0.00020127757674150815 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 7.404588245200814e-05 != rate {
+ t.Errorf("9 minute a.Rate(): 7.404588245200814e-05 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 2.7239957857491083e-05 != rate {
+ t.Errorf("10 minute a.Rate(): 2.7239957857491083e-05 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 1.0021020474147462e-05 != rate {
+ t.Errorf("11 minute a.Rate(): 1.0021020474147462e-05 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 3.6865274119969525e-06 != rate {
+ t.Errorf("12 minute a.Rate(): 3.6865274119969525e-06 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 1.3561976441886433e-06 != rate {
+ t.Errorf("13 minute a.Rate(): 1.3561976441886433e-06 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 4.989172314621449e-07 != rate {
+ t.Errorf("14 minute a.Rate(): 4.989172314621449e-07 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 1.8354139230109722e-07 != rate {
+ t.Errorf("15 minute a.Rate(): 1.8354139230109722e-07 != %v\n", rate)
+ }
+}
+
+func TestEWMA5(t *testing.T) {
+ a := NewEWMA5()
+ a.Update(3)
+ a.Tick()
+ if rate := a.Rate(); 0.6 != rate {
+ t.Errorf("initial a.Rate(): 0.6 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.49123845184678905 != rate {
+ t.Errorf("1 minute a.Rate(): 0.49123845184678905 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.4021920276213837 != rate {
+ t.Errorf("2 minute a.Rate(): 0.4021920276213837 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.32928698165641596 != rate {
+ t.Errorf("3 minute a.Rate(): 0.32928698165641596 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.269597378470333 != rate {
+ t.Errorf("4 minute a.Rate(): 0.269597378470333 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.2207276647028654 != rate {
+ t.Errorf("5 minute a.Rate(): 0.2207276647028654 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.18071652714732128 != rate {
+ t.Errorf("6 minute a.Rate(): 0.18071652714732128 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.14795817836496392 != rate {
+ t.Errorf("7 minute a.Rate(): 0.14795817836496392 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.12113791079679326 != rate {
+ t.Errorf("8 minute a.Rate(): 0.12113791079679326 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.09917933293295193 != rate {
+ t.Errorf("9 minute a.Rate(): 0.09917933293295193 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.08120116994196763 != rate {
+ t.Errorf("10 minute a.Rate(): 0.08120116994196763 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.06648189501740036 != rate {
+ t.Errorf("11 minute a.Rate(): 0.06648189501740036 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.05443077197364752 != rate {
+ t.Errorf("12 minute a.Rate(): 0.05443077197364752 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.04456414692860035 != rate {
+ t.Errorf("13 minute a.Rate(): 0.04456414692860035 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.03648603757513079 != rate {
+ t.Errorf("14 minute a.Rate(): 0.03648603757513079 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.0298722410207183831020718428 != rate {
+ t.Errorf("15 minute a.Rate(): 0.0298722410207183831020718428 != %v\n", rate)
+ }
+}
+
+func TestEWMA15(t *testing.T) {
+ a := NewEWMA15()
+ a.Update(3)
+ a.Tick()
+ if rate := a.Rate(); 0.6 != rate {
+ t.Errorf("initial a.Rate(): 0.6 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.5613041910189706 != rate {
+ t.Errorf("1 minute a.Rate(): 0.5613041910189706 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.5251039914257684 != rate {
+ t.Errorf("2 minute a.Rate(): 0.5251039914257684 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.4912384518467888184678905 != rate {
+ t.Errorf("3 minute a.Rate(): 0.4912384518467888184678905 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.459557003018789 != rate {
+ t.Errorf("4 minute a.Rate(): 0.459557003018789 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.4299187863442732 != rate {
+ t.Errorf("5 minute a.Rate(): 0.4299187863442732 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.4021920276213831 != rate {
+ t.Errorf("6 minute a.Rate(): 0.4021920276213831 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.37625345116383313 != rate {
+ t.Errorf("7 minute a.Rate(): 0.37625345116383313 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.3519877317060185 != rate {
+ t.Errorf("8 minute a.Rate(): 0.3519877317060185 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.3292869816564153165641596 != rate {
+ t.Errorf("9 minute a.Rate(): 0.3292869816564153165641596 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.3080502714195546 != rate {
+ t.Errorf("10 minute a.Rate(): 0.3080502714195546 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.2881831806538789 != rate {
+ t.Errorf("11 minute a.Rate(): 0.2881831806538789 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.26959737847033216 != rate {
+ t.Errorf("12 minute a.Rate(): 0.26959737847033216 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.2522102307052083 != rate {
+ t.Errorf("13 minute a.Rate(): 0.2522102307052083 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.23594443252115815 != rate {
+ t.Errorf("14 minute a.Rate(): 0.23594443252115815 != %v\n", rate)
+ }
+ elapseMinute(a)
+ if rate := a.Rate(); 0.2207276647028646247028654470286553 != rate {
+ t.Errorf("15 minute a.Rate(): 0.2207276647028646247028654470286553 != %v\n", rate)
+ }
+}
+
+func elapseMinute(a EWMA) {
+ for i := 0; i < 12; i++ {
+ a.Tick()
+ }
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/exp/exp.go b/metrics/exp/exp.go
index 11dd3f898..c19d00a94 100644
--- a/vendor/github.com/rcrowley/go-metrics/exp/exp.go
+++ b/metrics/exp/exp.go
@@ -8,7 +8,7 @@ import (
"net/http"
"sync"
- "github.com/rcrowley/go-metrics"
+ "github.com/ethereum/go-ethereum/metrics"
)
type exp struct {
@@ -97,22 +97,22 @@ func (exp *exp) publishHistogram(name string, metric metrics.Histogram) {
exp.getInt(name + ".count").Set(h.Count())
exp.getFloat(name + ".min").Set(float64(h.Min()))
exp.getFloat(name + ".max").Set(float64(h.Max()))
- exp.getFloat(name + ".mean").Set(float64(h.Mean()))
- exp.getFloat(name + ".std-dev").Set(float64(h.StdDev()))
- exp.getFloat(name + ".50-percentile").Set(float64(ps[0]))
- exp.getFloat(name + ".75-percentile").Set(float64(ps[1]))
- exp.getFloat(name + ".95-percentile").Set(float64(ps[2]))
- exp.getFloat(name + ".99-percentile").Set(float64(ps[3]))
- exp.getFloat(name + ".999-percentile").Set(float64(ps[4]))
+ exp.getFloat(name + ".mean").Set(h.Mean())
+ exp.getFloat(name + ".std-dev").Set(h.StdDev())
+ exp.getFloat(name + ".50-percentile").Set(ps[0])
+ exp.getFloat(name + ".75-percentile").Set(ps[1])
+ exp.getFloat(name + ".95-percentile").Set(ps[2])
+ exp.getFloat(name + ".99-percentile").Set(ps[3])
+ exp.getFloat(name + ".999-percentile").Set(ps[4])
}
func (exp *exp) publishMeter(name string, metric metrics.Meter) {
m := metric.Snapshot()
exp.getInt(name + ".count").Set(m.Count())
- exp.getFloat(name + ".one-minute").Set(float64(m.Rate1()))
- exp.getFloat(name + ".five-minute").Set(float64(m.Rate5()))
- exp.getFloat(name + ".fifteen-minute").Set(float64((m.Rate15())))
- exp.getFloat(name + ".mean").Set(float64(m.RateMean()))
+ exp.getFloat(name + ".one-minute").Set(m.Rate1())
+ exp.getFloat(name + ".five-minute").Set(m.Rate5())
+ exp.getFloat(name + ".fifteen-minute").Set((m.Rate15()))
+ exp.getFloat(name + ".mean").Set(m.RateMean())
}
func (exp *exp) publishTimer(name string, metric metrics.Timer) {
@@ -121,17 +121,17 @@ func (exp *exp) publishTimer(name string, metric metrics.Timer) {
exp.getInt(name + ".count").Set(t.Count())
exp.getFloat(name + ".min").Set(float64(t.Min()))
exp.getFloat(name + ".max").Set(float64(t.Max()))
- exp.getFloat(name + ".mean").Set(float64(t.Mean()))
- exp.getFloat(name + ".std-dev").Set(float64(t.StdDev()))
- exp.getFloat(name + ".50-percentile").Set(float64(ps[0]))
- exp.getFloat(name + ".75-percentile").Set(float64(ps[1]))
- exp.getFloat(name + ".95-percentile").Set(float64(ps[2]))
- exp.getFloat(name + ".99-percentile").Set(float64(ps[3]))
- exp.getFloat(name + ".999-percentile").Set(float64(ps[4]))
- exp.getFloat(name + ".one-minute").Set(float64(t.Rate1()))
- exp.getFloat(name + ".five-minute").Set(float64(t.Rate5()))
- exp.getFloat(name + ".fifteen-minute").Set(float64((t.Rate15())))
- exp.getFloat(name + ".mean-rate").Set(float64(t.RateMean()))
+ exp.getFloat(name + ".mean").Set(t.Mean())
+ exp.getFloat(name + ".std-dev").Set(t.StdDev())
+ exp.getFloat(name + ".50-percentile").Set(ps[0])
+ exp.getFloat(name + ".75-percentile").Set(ps[1])
+ exp.getFloat(name + ".95-percentile").Set(ps[2])
+ exp.getFloat(name + ".99-percentile").Set(ps[3])
+ exp.getFloat(name + ".999-percentile").Set(ps[4])
+ exp.getFloat(name + ".one-minute").Set(t.Rate1())
+ exp.getFloat(name + ".five-minute").Set(t.Rate5())
+ exp.getFloat(name + ".fifteen-minute").Set(t.Rate15())
+ exp.getFloat(name + ".mean-rate").Set(t.RateMean())
}
func (exp *exp) syncToExpvar() {
diff --git a/vendor/github.com/rcrowley/go-metrics/gauge.go b/metrics/gauge.go
index cb57a9388..0fbfdb860 100644
--- a/vendor/github.com/rcrowley/go-metrics/gauge.go
+++ b/metrics/gauge.go
@@ -20,7 +20,7 @@ func GetOrRegisterGauge(name string, r Registry) Gauge {
// NewGauge constructs a new StandardGauge.
func NewGauge() Gauge {
- if UseNilMetrics {
+ if !Enabled {
return NilGauge{}
}
return &StandardGauge{0}
@@ -38,7 +38,7 @@ func NewRegisteredGauge(name string, r Registry) Gauge {
// NewFunctionalGauge constructs a new FunctionalGauge.
func NewFunctionalGauge(f func() int64) Gauge {
- if UseNilMetrics {
+ if !Enabled {
return NilGauge{}
}
return &FunctionalGauge{value: f}
diff --git a/vendor/github.com/rcrowley/go-metrics/gauge_float64.go b/metrics/gauge_float64.go
index 6f93920b2..66819c957 100644
--- a/vendor/github.com/rcrowley/go-metrics/gauge_float64.go
+++ b/metrics/gauge_float64.go
@@ -20,7 +20,7 @@ func GetOrRegisterGaugeFloat64(name string, r Registry) GaugeFloat64 {
// NewGaugeFloat64 constructs a new StandardGaugeFloat64.
func NewGaugeFloat64() GaugeFloat64 {
- if UseNilMetrics {
+ if !Enabled {
return NilGaugeFloat64{}
}
return &StandardGaugeFloat64{
@@ -40,7 +40,7 @@ func NewRegisteredGaugeFloat64(name string, r Registry) GaugeFloat64 {
// NewFunctionalGauge constructs a new FunctionalGauge.
func NewFunctionalGaugeFloat64(f func() float64) GaugeFloat64 {
- if UseNilMetrics {
+ if !Enabled {
return NilGaugeFloat64{}
}
return &FunctionalGaugeFloat64{value: f}
diff --git a/metrics/gauge_float64_test.go b/metrics/gauge_float64_test.go
new file mode 100644
index 000000000..99e62a403
--- /dev/null
+++ b/metrics/gauge_float64_test.go
@@ -0,0 +1,59 @@
+package metrics
+
+import "testing"
+
+func BenchmarkGaugeFloat64(b *testing.B) {
+ g := NewGaugeFloat64()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ g.Update(float64(i))
+ }
+}
+
+func TestGaugeFloat64(t *testing.T) {
+ g := NewGaugeFloat64()
+ g.Update(float64(47.0))
+ if v := g.Value(); float64(47.0) != v {
+ t.Errorf("g.Value(): 47.0 != %v\n", v)
+ }
+}
+
+func TestGaugeFloat64Snapshot(t *testing.T) {
+ g := NewGaugeFloat64()
+ g.Update(float64(47.0))
+ snapshot := g.Snapshot()
+ g.Update(float64(0))
+ if v := snapshot.Value(); float64(47.0) != v {
+ t.Errorf("g.Value(): 47.0 != %v\n", v)
+ }
+}
+
+func TestGetOrRegisterGaugeFloat64(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredGaugeFloat64("foo", r).Update(float64(47.0))
+ t.Logf("registry: %v", r)
+ if g := GetOrRegisterGaugeFloat64("foo", r); float64(47.0) != g.Value() {
+ t.Fatal(g)
+ }
+}
+
+func TestFunctionalGaugeFloat64(t *testing.T) {
+ var counter float64
+ fg := NewFunctionalGaugeFloat64(func() float64 {
+ counter++
+ return counter
+ })
+ fg.Value()
+ fg.Value()
+ if counter != 2 {
+ t.Error("counter != 2")
+ }
+}
+
+func TestGetOrRegisterFunctionalGaugeFloat64(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredFunctionalGaugeFloat64("foo", r, func() float64 { return 47 })
+ if g := GetOrRegisterGaugeFloat64("foo", r); 47 != g.Value() {
+ t.Fatal(g)
+ }
+}
diff --git a/metrics/gauge_test.go b/metrics/gauge_test.go
new file mode 100644
index 000000000..1f2603d33
--- /dev/null
+++ b/metrics/gauge_test.go
@@ -0,0 +1,68 @@
+package metrics
+
+import (
+ "fmt"
+ "testing"
+)
+
+func BenchmarkGauge(b *testing.B) {
+ g := NewGauge()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ g.Update(int64(i))
+ }
+}
+
+func TestGauge(t *testing.T) {
+ g := NewGauge()
+ g.Update(int64(47))
+ if v := g.Value(); 47 != v {
+ t.Errorf("g.Value(): 47 != %v\n", v)
+ }
+}
+
+func TestGaugeSnapshot(t *testing.T) {
+ g := NewGauge()
+ g.Update(int64(47))
+ snapshot := g.Snapshot()
+ g.Update(int64(0))
+ if v := snapshot.Value(); 47 != v {
+ t.Errorf("g.Value(): 47 != %v\n", v)
+ }
+}
+
+func TestGetOrRegisterGauge(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredGauge("foo", r).Update(47)
+ if g := GetOrRegisterGauge("foo", r); 47 != g.Value() {
+ t.Fatal(g)
+ }
+}
+
+func TestFunctionalGauge(t *testing.T) {
+ var counter int64
+ fg := NewFunctionalGauge(func() int64 {
+ counter++
+ return counter
+ })
+ fg.Value()
+ fg.Value()
+ if counter != 2 {
+ t.Error("counter != 2")
+ }
+}
+
+func TestGetOrRegisterFunctionalGauge(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredFunctionalGauge("foo", r, func() int64 { return 47 })
+ if g := GetOrRegisterGauge("foo", r); 47 != g.Value() {
+ t.Fatal(g)
+ }
+}
+
+func ExampleGetOrRegisterGauge() {
+ m := "server.bytes_sent"
+ g := GetOrRegisterGauge(m, nil)
+ g.Update(47)
+ fmt.Println(g.Value()) // Output: 47
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/graphite.go b/metrics/graphite.go
index abd0a7d29..142eec86b 100644
--- a/vendor/github.com/rcrowley/go-metrics/graphite.go
+++ b/metrics/graphite.go
@@ -39,7 +39,7 @@ func Graphite(r Registry, d time.Duration, prefix string, addr *net.TCPAddr) {
// but it takes a GraphiteConfig instead.
func GraphiteWithConfig(c GraphiteConfig) {
log.Printf("WARNING: This go-metrics client has been DEPRECATED! It has been moved to https://github.com/cyberdelia/go-metrics-graphite and will be removed from rcrowley/go-metrics on August 12th 2015")
- for _ = range time.Tick(c.FlushInterval) {
+ for range time.Tick(c.FlushInterval) {
if err := graphite(&c); nil != err {
log.Println(err)
}
diff --git a/metrics/graphite_test.go b/metrics/graphite_test.go
new file mode 100644
index 000000000..c797c781d
--- /dev/null
+++ b/metrics/graphite_test.go
@@ -0,0 +1,22 @@
+package metrics
+
+import (
+ "net"
+ "time"
+)
+
+func ExampleGraphite() {
+ addr, _ := net.ResolveTCPAddr("tcp", ":2003")
+ go Graphite(DefaultRegistry, 1*time.Second, "some.prefix", addr)
+}
+
+func ExampleGraphiteWithConfig() {
+ addr, _ := net.ResolveTCPAddr("tcp", ":2003")
+ go GraphiteWithConfig(GraphiteConfig{
+ Addr: addr,
+ Registry: DefaultRegistry,
+ FlushInterval: 1 * time.Second,
+ DurationUnit: time.Millisecond,
+ Percentiles: []float64{0.5, 0.75, 0.99, 0.999},
+ })
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/healthcheck.go b/metrics/healthcheck.go
index 445131cae..f1ae31e34 100644
--- a/vendor/github.com/rcrowley/go-metrics/healthcheck.go
+++ b/metrics/healthcheck.go
@@ -11,7 +11,7 @@ type Healthcheck interface {
// NewHealthcheck constructs a new Healthcheck which will use the given
// function to update its status.
func NewHealthcheck(f func(Healthcheck)) Healthcheck {
- if UseNilMetrics {
+ if !Enabled {
return NilHealthcheck{}
}
return &StandardHealthcheck{nil, f}
diff --git a/vendor/github.com/rcrowley/go-metrics/histogram.go b/metrics/histogram.go
index dbc837fe4..46f3bbd2f 100644
--- a/vendor/github.com/rcrowley/go-metrics/histogram.go
+++ b/metrics/histogram.go
@@ -28,7 +28,7 @@ func GetOrRegisterHistogram(name string, r Registry, s Sample) Histogram {
// NewHistogram constructs a new StandardHistogram from a Sample.
func NewHistogram(s Sample) Histogram {
- if UseNilMetrics {
+ if !Enabled {
return NilHistogram{}
}
return &StandardHistogram{sample: s}
diff --git a/metrics/histogram_test.go b/metrics/histogram_test.go
new file mode 100644
index 000000000..d7f4f0171
--- /dev/null
+++ b/metrics/histogram_test.go
@@ -0,0 +1,95 @@
+package metrics
+
+import "testing"
+
+func BenchmarkHistogram(b *testing.B) {
+ h := NewHistogram(NewUniformSample(100))
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ h.Update(int64(i))
+ }
+}
+
+func TestGetOrRegisterHistogram(t *testing.T) {
+ r := NewRegistry()
+ s := NewUniformSample(100)
+ NewRegisteredHistogram("foo", r, s).Update(47)
+ if h := GetOrRegisterHistogram("foo", r, s); 1 != h.Count() {
+ t.Fatal(h)
+ }
+}
+
+func TestHistogram10000(t *testing.T) {
+ h := NewHistogram(NewUniformSample(100000))
+ for i := 1; i <= 10000; i++ {
+ h.Update(int64(i))
+ }
+ testHistogram10000(t, h)
+}
+
+func TestHistogramEmpty(t *testing.T) {
+ h := NewHistogram(NewUniformSample(100))
+ if count := h.Count(); 0 != count {
+ t.Errorf("h.Count(): 0 != %v\n", count)
+ }
+ if min := h.Min(); 0 != min {
+ t.Errorf("h.Min(): 0 != %v\n", min)
+ }
+ if max := h.Max(); 0 != max {
+ t.Errorf("h.Max(): 0 != %v\n", max)
+ }
+ if mean := h.Mean(); 0.0 != mean {
+ t.Errorf("h.Mean(): 0.0 != %v\n", mean)
+ }
+ if stdDev := h.StdDev(); 0.0 != stdDev {
+ t.Errorf("h.StdDev(): 0.0 != %v\n", stdDev)
+ }
+ ps := h.Percentiles([]float64{0.5, 0.75, 0.99})
+ if 0.0 != ps[0] {
+ t.Errorf("median: 0.0 != %v\n", ps[0])
+ }
+ if 0.0 != ps[1] {
+ t.Errorf("75th percentile: 0.0 != %v\n", ps[1])
+ }
+ if 0.0 != ps[2] {
+ t.Errorf("99th percentile: 0.0 != %v\n", ps[2])
+ }
+}
+
+func TestHistogramSnapshot(t *testing.T) {
+ h := NewHistogram(NewUniformSample(100000))
+ for i := 1; i <= 10000; i++ {
+ h.Update(int64(i))
+ }
+ snapshot := h.Snapshot()
+ h.Update(0)
+ testHistogram10000(t, snapshot)
+}
+
+func testHistogram10000(t *testing.T, h Histogram) {
+ if count := h.Count(); 10000 != count {
+ t.Errorf("h.Count(): 10000 != %v\n", count)
+ }
+ if min := h.Min(); 1 != min {
+ t.Errorf("h.Min(): 1 != %v\n", min)
+ }
+ if max := h.Max(); 10000 != max {
+ t.Errorf("h.Max(): 10000 != %v\n", max)
+ }
+ if mean := h.Mean(); 5000.5 != mean {
+ t.Errorf("h.Mean(): 5000.5 != %v\n", mean)
+ }
+ if stdDev := h.StdDev(); 2886.751331514372 != stdDev {
+ t.Errorf("h.StdDev(): 2886.751331514372 != %v\n", stdDev)
+ }
+ ps := h.Percentiles([]float64{0.5, 0.75, 0.99})
+ if 5000.5 != ps[0] {
+ t.Errorf("median: 5000.5 != %v\n", ps[0])
+ }
+ if 7500.75 != ps[1] {
+ t.Errorf("75th percentile: 7500.75 != %v\n", ps[1])
+ }
+ if 9900.99 != ps[2] {
+ t.Errorf("99th percentile: 9900.99 != %v\n", ps[2])
+ }
+}
diff --git a/metrics/influxdb/LICENSE b/metrics/influxdb/LICENSE
new file mode 100644
index 000000000..e5bf20cdb
--- /dev/null
+++ b/metrics/influxdb/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2015 Vincent Rischmann
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/metrics/influxdb/README.md b/metrics/influxdb/README.md
new file mode 100644
index 000000000..b76b1a3f9
--- /dev/null
+++ b/metrics/influxdb/README.md
@@ -0,0 +1,30 @@
+go-metrics-influxdb
+===================
+
+This is a reporter for the [go-metrics](https://github.com/rcrowley/go-metrics) library which will post the metrics to [InfluxDB](https://influxdb.com/).
+
+Note
+----
+
+This is only compatible with InfluxDB 0.9+.
+
+Usage
+-----
+
+```go
+import "github.com/vrischmann/go-metrics-influxdb"
+
+go influxdb.InfluxDB(
+ metrics.DefaultRegistry, // metrics registry
+ time.Second * 10, // interval
+ "http://localhost:8086", // the InfluxDB url
+ "mydb", // your InfluxDB database
+ "myuser", // your InfluxDB user
+ "mypassword", // your InfluxDB password
+)
+```
+
+License
+-------
+
+go-metrics-influxdb is licensed under the MIT license. See the LICENSE file for details.
diff --git a/metrics/influxdb/influxdb.go b/metrics/influxdb/influxdb.go
new file mode 100644
index 000000000..d5cb4da66
--- /dev/null
+++ b/metrics/influxdb/influxdb.go
@@ -0,0 +1,227 @@
+package influxdb
+
+import (
+ "fmt"
+ "log"
+ uurl "net/url"
+ "time"
+
+ "github.com/ethereum/go-ethereum/metrics"
+ "github.com/influxdata/influxdb/client"
+)
+
+type reporter struct {
+ reg metrics.Registry
+ interval time.Duration
+
+ url uurl.URL
+ database string
+ username string
+ password string
+ namespace string
+ tags map[string]string
+
+ client *client.Client
+
+ cache map[string]int64
+}
+
+// InfluxDB starts an InfluxDB reporter which will post metrics from the given metrics.Registry at each d interval.
+func InfluxDB(r metrics.Registry, d time.Duration, url, database, username, password, namespace string) {
+ InfluxDBWithTags(r, d, url, database, username, password, namespace, nil)
+}
+
+// InfluxDBWithTags starts an InfluxDB reporter which will post metrics from the given metrics.Registry at each d interval with the specified tags.
+func InfluxDBWithTags(r metrics.Registry, d time.Duration, url, database, username, password, namespace string, tags map[string]string) {
+ u, err := uurl.Parse(url)
+ if err != nil {
+ log.Printf("unable to parse InfluxDB url %s. err=%v", url, err)
+ return
+ }
+
+ rep := &reporter{
+ reg: r,
+ interval: d,
+ url: *u,
+ database: database,
+ username: username,
+ password: password,
+ namespace: namespace,
+ tags: tags,
+ cache: make(map[string]int64),
+ }
+ if err := rep.makeClient(); err != nil {
+ log.Printf("unable to make InfluxDB client. err=%v", err)
+ return
+ }
+
+ rep.run()
+}
+
+func (r *reporter) makeClient() (err error) {
+ r.client, err = client.NewClient(client.Config{
+ URL: r.url,
+ Username: r.username,
+ Password: r.password,
+ })
+
+ return
+}
+
+func (r *reporter) run() {
+ intervalTicker := time.Tick(r.interval)
+ pingTicker := time.Tick(time.Second * 5)
+
+ for {
+ select {
+ case <-intervalTicker:
+ if err := r.send(); err != nil {
+ log.Printf("unable to send to InfluxDB. err=%v", err)
+ }
+ case <-pingTicker:
+ _, _, err := r.client.Ping()
+ if err != nil {
+ log.Printf("got error while sending a ping to InfluxDB, trying to recreate client. err=%v", err)
+
+ if err = r.makeClient(); err != nil {
+ log.Printf("unable to make InfluxDB client. err=%v", err)
+ }
+ }
+ }
+ }
+}
+
+func (r *reporter) send() error {
+ var pts []client.Point
+
+ r.reg.Each(func(name string, i interface{}) {
+ now := time.Now()
+ namespace := r.namespace
+
+ switch metric := i.(type) {
+ case metrics.Counter:
+ v := metric.Count()
+ l := r.cache[name]
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.count", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "value": v - l,
+ },
+ Time: now,
+ })
+ r.cache[name] = v
+ case metrics.Gauge:
+ ms := metric.Snapshot()
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "value": ms.Value(),
+ },
+ Time: now,
+ })
+ case metrics.GaugeFloat64:
+ ms := metric.Snapshot()
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "value": ms.Value(),
+ },
+ Time: now,
+ })
+ case metrics.Histogram:
+ ms := metric.Snapshot()
+ ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.histogram", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "count": ms.Count(),
+ "max": ms.Max(),
+ "mean": ms.Mean(),
+ "min": ms.Min(),
+ "stddev": ms.StdDev(),
+ "variance": ms.Variance(),
+ "p50": ps[0],
+ "p75": ps[1],
+ "p95": ps[2],
+ "p99": ps[3],
+ "p999": ps[4],
+ "p9999": ps[5],
+ },
+ Time: now,
+ })
+ case metrics.Meter:
+ ms := metric.Snapshot()
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.meter", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "count": ms.Count(),
+ "m1": ms.Rate1(),
+ "m5": ms.Rate5(),
+ "m15": ms.Rate15(),
+ "mean": ms.RateMean(),
+ },
+ Time: now,
+ })
+ case metrics.Timer:
+ ms := metric.Snapshot()
+ ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.timer", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "count": ms.Count(),
+ "max": ms.Max(),
+ "mean": ms.Mean(),
+ "min": ms.Min(),
+ "stddev": ms.StdDev(),
+ "variance": ms.Variance(),
+ "p50": ps[0],
+ "p75": ps[1],
+ "p95": ps[2],
+ "p99": ps[3],
+ "p999": ps[4],
+ "p9999": ps[5],
+ "m1": ms.Rate1(),
+ "m5": ms.Rate5(),
+ "m15": ms.Rate15(),
+ "meanrate": ms.RateMean(),
+ },
+ Time: now,
+ })
+ case metrics.ResettingTimer:
+ t := metric.Snapshot()
+
+ if len(t.Values()) > 0 {
+ ps := t.Percentiles([]float64{50, 95, 99})
+ val := t.Values()
+ pts = append(pts, client.Point{
+ Measurement: fmt.Sprintf("%s%s.span", namespace, name),
+ Tags: r.tags,
+ Fields: map[string]interface{}{
+ "count": len(val),
+ "max": val[len(val)-1],
+ "mean": t.Mean(),
+ "min": val[0],
+ "p50": ps[0],
+ "p95": ps[1],
+ "p99": ps[2],
+ },
+ Time: now,
+ })
+ }
+ }
+ })
+
+ bps := client.BatchPoints{
+ Points: pts,
+ Database: r.database,
+ }
+
+ _, err := r.client.Write(bps)
+ return err
+}
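A minimal sketch of starting the reporter defined above. The endpoint, database, credentials and tag values are placeholders, and InfluxDBWithTags blocks on its ticker, so it runs in its own goroutine:

```go
package main

import (
	"time"

	"github.com/ethereum/go-ethereum/metrics"
	"github.com/ethereum/go-ethereum/metrics/influxdb"
)

func main() {
	metrics.Enabled = true
	metrics.GetOrRegisterGauge("system/cpu", metrics.DefaultRegistry).Update(42)

	go influxdb.InfluxDBWithTags(
		metrics.DefaultRegistry,
		10*time.Second,          // flush interval
		"http://localhost:8086", // InfluxDB endpoint (placeholder)
		"metricsdb",             // database (placeholder)
		"user", "password",      // credentials (placeholders)
		"geth.",                 // namespace prefixed to every measurement name
		map[string]string{"host": "node-1"}, // tags attached to every point
	)

	select {} // keep the process alive; the reporter flushes every 10s
}
```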
diff --git a/metrics/init_test.go b/metrics/init_test.go
new file mode 100644
index 000000000..43401e833
--- /dev/null
+++ b/metrics/init_test.go
@@ -0,0 +1,5 @@
+package metrics
+
+func init() {
+ Enabled = true
+}
diff --git a/metrics/json.go b/metrics/json.go
new file mode 100644
index 000000000..2087d8211
--- /dev/null
+++ b/metrics/json.go
@@ -0,0 +1,31 @@
+package metrics
+
+import (
+ "encoding/json"
+ "io"
+ "time"
+)
+
+// MarshalJSON returns a byte slice containing a JSON representation of all
+// the metrics in the Registry.
+func (r *StandardRegistry) MarshalJSON() ([]byte, error) {
+ return json.Marshal(r.GetAll())
+}
+
+// WriteJSON writes metrics from the given registry periodically to the
+// specified io.Writer as JSON.
+func WriteJSON(r Registry, d time.Duration, w io.Writer) {
+ for range time.Tick(d) {
+ WriteJSONOnce(r, w)
+ }
+}
+
+// WriteJSONOnce writes metrics from the given registry to the specified
+// io.Writer as JSON.
+func WriteJSONOnce(r Registry, w io.Writer) {
+ json.NewEncoder(w).Encode(r)
+}
+
+func (p *PrefixedRegistry) MarshalJSON() ([]byte, error) {
+ return json.Marshal(p.GetAll())
+}
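WriteJSONOnce and MarshalJSON above lean on Registry.GetAll (added in registry.go further down). A short sketch of the one-shot variant, with the periodic one noted in a comment:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/ethereum/go-ethereum/metrics"
)

func main() {
	metrics.Enabled = true
	r := metrics.NewRegistry()
	metrics.NewRegisteredCounter("requests", r).Inc(3)

	// One-shot dump of the registry as JSON.
	var buf bytes.Buffer
	metrics.WriteJSONOnce(r, &buf)
	fmt.Print(buf.String()) // {"requests":{"count":3}}

	// Periodic variant (blocks on its ticker, so run it in a goroutine):
	//   go metrics.WriteJSON(r, 5*time.Second, os.Stdout)
}
```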
diff --git a/metrics/json_test.go b/metrics/json_test.go
new file mode 100644
index 000000000..cf70051f7
--- /dev/null
+++ b/metrics/json_test.go
@@ -0,0 +1,28 @@
+package metrics
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+)
+
+func TestRegistryMarshallJSON(t *testing.T) {
+ b := &bytes.Buffer{}
+ enc := json.NewEncoder(b)
+ r := NewRegistry()
+ r.Register("counter", NewCounter())
+ enc.Encode(r)
+ if s := b.String(); "{\"counter\":{\"count\":0}}\n" != s {
+ t.Fatalf(s)
+ }
+}
+
+func TestRegistryWriteJSONOnce(t *testing.T) {
+ r := NewRegistry()
+ r.Register("counter", NewCounter())
+ b := &bytes.Buffer{}
+ WriteJSONOnce(r, b)
+ if s := b.String(); s != "{\"counter\":{\"count\":0}}\n" {
+ t.Fail()
+ }
+}
diff --git a/metrics/librato/client.go b/metrics/librato/client.go
new file mode 100644
index 000000000..8c0c850e3
--- /dev/null
+++ b/metrics/librato/client.go
@@ -0,0 +1,102 @@
+package librato
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+)
+
+const Operations = "operations"
+const OperationsShort = "ops"
+
+type LibratoClient struct {
+ Email, Token string
+}
+
+// property strings
+const (
+ // display attributes
+ Color = "color"
+ DisplayMax = "display_max"
+ DisplayMin = "display_min"
+ DisplayUnitsLong = "display_units_long"
+ DisplayUnitsShort = "display_units_short"
+ DisplayStacked = "display_stacked"
+ DisplayTransform = "display_transform"
+ // special gauge display attributes
+ SummarizeFunction = "summarize_function"
+ Aggregate = "aggregate"
+
+ // metric keys
+ Name = "name"
+ Period = "period"
+ Description = "description"
+ DisplayName = "display_name"
+ Attributes = "attributes"
+
+ // measurement keys
+ MeasureTime = "measure_time"
+ Source = "source"
+ Value = "value"
+
+ // special gauge keys
+ Count = "count"
+ Sum = "sum"
+ Max = "max"
+ Min = "min"
+ SumSquares = "sum_squares"
+
+ // batch keys
+ Counters = "counters"
+ Gauges = "gauges"
+
+ MetricsPostUrl = "https://metrics-api.librato.com/v1/metrics"
+)
+
+type Measurement map[string]interface{}
+type Metric map[string]interface{}
+
+type Batch struct {
+ Gauges []Measurement `json:"gauges,omitempty"`
+ Counters []Measurement `json:"counters,omitempty"`
+ MeasureTime int64 `json:"measure_time"`
+ Source string `json:"source"`
+}
+
+func (self *LibratoClient) PostMetrics(batch Batch) (err error) {
+ var (
+ js []byte
+ req *http.Request
+ resp *http.Response
+ )
+
+ if len(batch.Counters) == 0 && len(batch.Gauges) == 0 {
+ return nil
+ }
+
+ if js, err = json.Marshal(batch); err != nil {
+ return
+ }
+
+ if req, err = http.NewRequest("POST", MetricsPostUrl, bytes.NewBuffer(js)); err != nil {
+ return
+ }
+
+ req.Header.Set("Content-Type", "application/json")
+ req.SetBasicAuth(self.Email, self.Token)
+
+ if resp, err = http.DefaultClient.Do(req); err != nil {
+ return
+ }
+
+ if resp.StatusCode != http.StatusOK {
+ var body []byte
+ if body, err = ioutil.ReadAll(resp.Body); err != nil {
+ body = []byte(fmt.Sprintf("(could not fetch response body for error: %s)", err))
+ }
+ err = fmt.Errorf("Unable to post to Librato: %d %s %s", resp.StatusCode, resp.Status, string(body))
+ }
+ return
+}
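The client above can also be driven directly, without the reporter that follows. A sketch of posting a single gauge measurement; the e-mail, token, source and metric name are placeholders:

```go
package main

import (
	"log"
	"time"

	"github.com/ethereum/go-ethereum/metrics/librato"
)

func main() {
	c := &librato.LibratoClient{Email: "ops@example.com", Token: "api-token"} // placeholders

	batch := librato.Batch{
		MeasureTime: time.Now().Unix(),
		Source:      "node-1",
		Gauges: []librato.Measurement{{
			librato.Name:  "block.import.ms",
			librato.Value: 12.5,
		}},
	}
	// PostMetrics marshals the batch and POSTs it to the Librato API;
	// any non-200 response comes back as an error.
	if err := c.PostMetrics(batch); err != nil {
		log.Fatal(err)
	}
}
```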
diff --git a/metrics/librato/librato.go b/metrics/librato/librato.go
new file mode 100644
index 000000000..f8c8c9ecb
--- /dev/null
+++ b/metrics/librato/librato.go
@@ -0,0 +1,235 @@
+package librato
+
+import (
+ "fmt"
+ "log"
+ "math"
+ "regexp"
+ "time"
+
+ "github.com/ethereum/go-ethereum/metrics"
+)
+
+// a regexp for extracting the unit from time.Duration.String
+var unitRegexp = regexp.MustCompile(`[^\\d]+$`)
+
+// a helper that turns a time.Duration into librato display attributes for timer metrics
+func translateTimerAttributes(d time.Duration) (attrs map[string]interface{}) {
+ attrs = make(map[string]interface{})
+ attrs[DisplayTransform] = fmt.Sprintf("x/%d", int64(d))
+ attrs[DisplayUnitsShort] = string(unitRegexp.Find([]byte(d.String())))
+ return
+}
+
+type Reporter struct {
+ Email, Token string
+ Namespace string
+ Source string
+ Interval time.Duration
+ Registry metrics.Registry
+ Percentiles []float64 // percentiles to report on histogram metrics
+ TimerAttributes map[string]interface{} // units in which timers will be displayed
+ intervalSec int64
+}
+
+func NewReporter(r metrics.Registry, d time.Duration, e string, t string, s string, p []float64, u time.Duration) *Reporter {
+ return &Reporter{e, t, "", s, d, r, p, translateTimerAttributes(u), int64(d / time.Second)}
+}
+
+func Librato(r metrics.Registry, d time.Duration, e string, t string, s string, p []float64, u time.Duration) {
+ NewReporter(r, d, e, t, s, p, u).Run()
+}
+
+func (self *Reporter) Run() {
+ log.Printf("WARNING: This client has been DEPRECATED! It has been moved to https://github.com/mihasya/go-metrics-librato and will be removed from rcrowley/go-metrics on August 5th 2015")
+ ticker := time.Tick(self.Interval)
+ metricsApi := &LibratoClient{self.Email, self.Token}
+ for now := range ticker {
+ var metrics Batch
+ var err error
+ if metrics, err = self.BuildRequest(now, self.Registry); err != nil {
+ log.Printf("ERROR constructing librato request body %s", err)
+ continue
+ }
+ if err := metricsApi.PostMetrics(metrics); err != nil {
+ log.Printf("ERROR sending metrics to librato %s", err)
+ continue
+ }
+ }
+}
+
+// calculate sum of squares from data provided by metrics.Histogram
+// see http://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods
+func sumSquares(s metrics.Sample) float64 {
+ count := float64(s.Count())
+ sumSquared := math.Pow(count*s.Mean(), 2)
+ sumSquares := math.Pow(count*s.StdDev(), 2) + sumSquared/count
+ if math.IsNaN(sumSquares) {
+ return 0.0
+ }
+ return sumSquares
+}
+func sumSquaresTimer(t metrics.Timer) float64 {
+ count := float64(t.Count())
+ sumSquared := math.Pow(count*t.Mean(), 2)
+ sumSquares := math.Pow(count*t.StdDev(), 2) + sumSquared/count
+ if math.IsNaN(sumSquares) {
+ return 0.0
+ }
+ return sumSquares
+}
+
+func (self *Reporter) BuildRequest(now time.Time, r metrics.Registry) (snapshot Batch, err error) {
+ snapshot = Batch{
+ // coerce timestamps to a stepping fn so that they line up in Librato graphs
+ MeasureTime: (now.Unix() / self.intervalSec) * self.intervalSec,
+ Source: self.Source,
+ }
+ snapshot.Gauges = make([]Measurement, 0)
+ snapshot.Counters = make([]Measurement, 0)
+ histogramGaugeCount := 1 + len(self.Percentiles)
+ r.Each(func(name string, metric interface{}) {
+ if self.Namespace != "" {
+ name = fmt.Sprintf("%s.%s", self.Namespace, name)
+ }
+ measurement := Measurement{}
+ measurement[Period] = self.Interval.Seconds()
+ switch m := metric.(type) {
+ case metrics.Counter:
+ if m.Count() > 0 {
+ measurement[Name] = fmt.Sprintf("%s.%s", name, "count")
+ measurement[Value] = float64(m.Count())
+ measurement[Attributes] = map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ }
+ snapshot.Counters = append(snapshot.Counters, measurement)
+ }
+ case metrics.Gauge:
+ measurement[Name] = name
+ measurement[Value] = float64(m.Value())
+ snapshot.Gauges = append(snapshot.Gauges, measurement)
+ case metrics.GaugeFloat64:
+ measurement[Name] = name
+ measurement[Value] = m.Value()
+ snapshot.Gauges = append(snapshot.Gauges, measurement)
+ case metrics.Histogram:
+ if m.Count() > 0 {
+ gauges := make([]Measurement, histogramGaugeCount)
+ s := m.Sample()
+ measurement[Name] = fmt.Sprintf("%s.%s", name, "hist")
+ measurement[Count] = uint64(s.Count())
+ measurement[Max] = float64(s.Max())
+ measurement[Min] = float64(s.Min())
+ measurement[Sum] = float64(s.Sum())
+ measurement[SumSquares] = sumSquares(s)
+ gauges[0] = measurement
+ for i, p := range self.Percentiles {
+ gauges[i+1] = Measurement{
+ Name: fmt.Sprintf("%s.%.2f", measurement[Name], p),
+ Value: s.Percentile(p),
+ Period: measurement[Period],
+ }
+ }
+ snapshot.Gauges = append(snapshot.Gauges, gauges...)
+ }
+ case metrics.Meter:
+ measurement[Name] = name
+ measurement[Value] = float64(m.Count())
+ snapshot.Counters = append(snapshot.Counters, measurement)
+ snapshot.Gauges = append(snapshot.Gauges,
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "1min"),
+ Value: m.Rate1(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "5min"),
+ Value: m.Rate5(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "15min"),
+ Value: m.Rate15(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ )
+ case metrics.Timer:
+ measurement[Name] = name
+ measurement[Value] = float64(m.Count())
+ snapshot.Counters = append(snapshot.Counters, measurement)
+ if m.Count() > 0 {
+ libratoName := fmt.Sprintf("%s.%s", name, "timer.mean")
+ gauges := make([]Measurement, histogramGaugeCount)
+ gauges[0] = Measurement{
+ Name: libratoName,
+ Count: uint64(m.Count()),
+ Sum: m.Mean() * float64(m.Count()),
+ Max: float64(m.Max()),
+ Min: float64(m.Min()),
+ SumSquares: sumSquaresTimer(m),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: self.TimerAttributes,
+ }
+ for i, p := range self.Percentiles {
+ gauges[i+1] = Measurement{
+ Name: fmt.Sprintf("%s.timer.%2.0f", name, p*100),
+ Value: m.Percentile(p),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: self.TimerAttributes,
+ }
+ }
+ snapshot.Gauges = append(snapshot.Gauges, gauges...)
+ snapshot.Gauges = append(snapshot.Gauges,
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "rate.1min"),
+ Value: m.Rate1(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "rate.5min"),
+ Value: m.Rate5(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ Measurement{
+ Name: fmt.Sprintf("%s.%s", name, "rate.15min"),
+ Value: m.Rate15(),
+ Period: int64(self.Interval.Seconds()),
+ Attributes: map[string]interface{}{
+ DisplayUnitsLong: Operations,
+ DisplayUnitsShort: OperationsShort,
+ DisplayMin: "0",
+ },
+ },
+ )
+ }
+ }
+ })
+ return
+}
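And the reporter itself, which wraps the client and flushes a registry on a fixed interval. A sketch with placeholder credentials; Librato blocks on its ticker, so it is started in a goroutine:

```go
package main

import (
	"time"

	"github.com/ethereum/go-ethereum/metrics"
	"github.com/ethereum/go-ethereum/metrics/librato"
)

func main() {
	metrics.Enabled = true
	metrics.GetOrRegisterMeter("requests", metrics.DefaultRegistry).Mark(1)

	go librato.Librato(
		metrics.DefaultRegistry,
		10*time.Second,             // reporting interval
		"ops@example.com",          // Librato account e-mail (placeholder)
		"api-token",                // Librato API token (placeholder)
		"node-1",                   // source
		[]float64{0.5, 0.95, 0.99}, // percentiles reported for histograms/timers
		time.Millisecond,           // display unit for timer values
	)

	select {} // keep the process alive for the example
}
```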
diff --git a/vendor/github.com/rcrowley/go-metrics/log.go b/metrics/log.go
index f8074c045..0c8ea7c97 100644
--- a/vendor/github.com/rcrowley/go-metrics/log.go
+++ b/metrics/log.go
@@ -18,7 +18,7 @@ func LogScaled(r Registry, freq time.Duration, scale time.Duration, l Logger) {
du := float64(scale)
duSuffix := scale.String()[1:]
- for _ = range time.Tick(freq) {
+ for range time.Tick(freq) {
r.Each(func(name string, i interface{}) {
switch metric := i.(type) {
case Counter:
diff --git a/vendor/github.com/rcrowley/go-metrics/memory.md b/metrics/memory.md
index 47454f54b..47454f54b 100644
--- a/vendor/github.com/rcrowley/go-metrics/memory.md
+++ b/metrics/memory.md
diff --git a/vendor/github.com/rcrowley/go-metrics/meter.go b/metrics/meter.go
index 0389ab0b8..82b2141a6 100644
--- a/vendor/github.com/rcrowley/go-metrics/meter.go
+++ b/metrics/meter.go
@@ -15,10 +15,13 @@ type Meter interface {
Rate15() float64
RateMean() float64
Snapshot() Meter
+ Stop()
}
// GetOrRegisterMeter returns an existing Meter or constructs and registers a
// new StandardMeter.
+// Be sure to unregister the meter from the registry once it is no longer
+// needed, so that it can be garbage collected.
func GetOrRegisterMeter(name string, r Registry) Meter {
if nil == r {
r = DefaultRegistry
@@ -27,14 +30,15 @@ func GetOrRegisterMeter(name string, r Registry) Meter {
}
// NewMeter constructs a new StandardMeter and launches a goroutine.
+// Be sure to call Stop() once the meter is no longer needed, so that it can be garbage collected.
func NewMeter() Meter {
- if UseNilMetrics {
+ if !Enabled {
return NilMeter{}
}
m := newStandardMeter()
arbiter.Lock()
defer arbiter.Unlock()
- arbiter.meters = append(arbiter.meters, m)
+ arbiter.meters[m] = struct{}{}
if !arbiter.started {
arbiter.started = true
go arbiter.tick()
@@ -44,6 +48,8 @@ func NewMeter() Meter {
// NewMeter constructs and registers a new StandardMeter and launches a
// goroutine.
+// Be sure to unregister the meter from the registry once it is no longer
+// needed, so that it can be garbage collected.
func NewRegisteredMeter(name string, r Registry) Meter {
c := NewMeter()
if nil == r {
@@ -86,6 +92,9 @@ func (m *MeterSnapshot) RateMean() float64 { return m.rateMean }
// Snapshot returns the snapshot.
func (m *MeterSnapshot) Snapshot() Meter { return m }
+// Stop is a no-op.
+func (m *MeterSnapshot) Stop() {}
+
// NilMeter is a no-op Meter.
type NilMeter struct{}
@@ -110,12 +119,16 @@ func (NilMeter) RateMean() float64 { return 0.0 }
// Snapshot is a no-op.
func (NilMeter) Snapshot() Meter { return NilMeter{} }
+// Stop is a no-op.
+func (NilMeter) Stop() {}
+
// StandardMeter is the standard implementation of a Meter.
type StandardMeter struct {
lock sync.RWMutex
snapshot *MeterSnapshot
a1, a5, a15 EWMA
startTime time.Time
+ stopped bool
}
func newStandardMeter() *StandardMeter {
@@ -128,6 +141,19 @@ func newStandardMeter() *StandardMeter {
}
}
+// Stop stops the meter; Mark() is a no-op once the meter has been stopped.
+func (m *StandardMeter) Stop() {
+ m.lock.Lock()
+ stopped := m.stopped
+ m.stopped = true
+ m.lock.Unlock()
+ if !stopped {
+ arbiter.Lock()
+ delete(arbiter.meters, m)
+ arbiter.Unlock()
+ }
+}
+
// Count returns the number of events recorded.
func (m *StandardMeter) Count() int64 {
m.lock.RLock()
@@ -136,10 +162,13 @@ func (m *StandardMeter) Count() int64 {
return count
}
-// Mark records the occurance of n events.
+// Mark records the occurrence of n events.
func (m *StandardMeter) Mark(n int64) {
m.lock.Lock()
defer m.lock.Unlock()
+ if m.stopped {
+ return
+ }
m.snapshot.count += n
m.a1.Update(n)
m.a5.Update(n)
@@ -205,29 +234,28 @@ func (m *StandardMeter) tick() {
m.updateSnapshot()
}
+// meterArbiter ticks meters every 5s from a single goroutine.
+// Meters are referenced in a set so they can be stopped later.
type meterArbiter struct {
sync.RWMutex
started bool
- meters []*StandardMeter
+ meters map[*StandardMeter]struct{}
ticker *time.Ticker
}
-var arbiter = meterArbiter{ticker: time.NewTicker(5e9)}
+var arbiter = meterArbiter{ticker: time.NewTicker(5e9), meters: make(map[*StandardMeter]struct{})}
// Ticks meters on the scheduled interval
func (ma *meterArbiter) tick() {
- for {
- select {
- case <-ma.ticker.C:
- ma.tickMeters()
- }
+ for range ma.ticker.C {
+ ma.tickMeters()
}
}
func (ma *meterArbiter) tickMeters() {
ma.RLock()
defer ma.RUnlock()
- for _, meter := range ma.meters {
+ for meter := range ma.meters {
meter.tick()
}
}
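The new Stop method and the arbiter's switch from a slice to a set exist so meters can actually be released. A sketch of both lifecycles, registered and standalone, using only the API shown in this patch:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/metrics"
)

func main() {
	metrics.Enabled = true

	// Registered meter: Unregister now calls Stop via the registry's
	// Stoppable hook, removing the meter from the shared tick arbiter.
	r := metrics.NewRegistry()
	m := metrics.NewRegisteredMeter("ingress/packets", r)
	m.Mark(128)
	fmt.Println(m.Count()) // 128
	r.Unregister("ingress/packets")

	// Standalone meter: Stop must be called explicitly; Mark is a no-op
	// once the meter has been stopped.
	s := metrics.NewMeter()
	s.Mark(1)
	s.Stop()
	s.Mark(1)              // ignored
	fmt.Println(s.Count()) // 1
}
```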
diff --git a/metrics/meter_test.go b/metrics/meter_test.go
new file mode 100644
index 000000000..e88922260
--- /dev/null
+++ b/metrics/meter_test.go
@@ -0,0 +1,73 @@
+package metrics
+
+import (
+ "testing"
+ "time"
+)
+
+func BenchmarkMeter(b *testing.B) {
+ m := NewMeter()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ m.Mark(1)
+ }
+}
+
+func TestGetOrRegisterMeter(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredMeter("foo", r).Mark(47)
+ if m := GetOrRegisterMeter("foo", r); 47 != m.Count() {
+ t.Fatal(m)
+ }
+}
+
+func TestMeterDecay(t *testing.T) {
+ ma := meterArbiter{
+ ticker: time.NewTicker(time.Millisecond),
+ meters: make(map[*StandardMeter]struct{}),
+ }
+ m := newStandardMeter()
+ ma.meters[m] = struct{}{}
+ go ma.tick()
+ m.Mark(1)
+ rateMean := m.RateMean()
+ time.Sleep(100 * time.Millisecond)
+ if m.RateMean() >= rateMean {
+ t.Error("m.RateMean() didn't decrease")
+ }
+}
+
+func TestMeterNonzero(t *testing.T) {
+ m := NewMeter()
+ m.Mark(3)
+ if count := m.Count(); 3 != count {
+ t.Errorf("m.Count(): 3 != %v\n", count)
+ }
+}
+
+func TestMeterStop(t *testing.T) {
+ l := len(arbiter.meters)
+ m := NewMeter()
+ if len(arbiter.meters) != l+1 {
+ t.Errorf("arbiter.meters: %d != %d\n", l+1, len(arbiter.meters))
+ }
+ m.Stop()
+ if len(arbiter.meters) != l {
+ t.Errorf("arbiter.meters: %d != %d\n", l, len(arbiter.meters))
+ }
+}
+
+func TestMeterSnapshot(t *testing.T) {
+ m := NewMeter()
+ m.Mark(1)
+ if snapshot := m.Snapshot(); m.RateMean() != snapshot.RateMean() {
+ t.Fatal(snapshot)
+ }
+}
+
+func TestMeterZero(t *testing.T) {
+ m := NewMeter()
+ if count := m.Count(); 0 != count {
+ t.Errorf("m.Count(): 0 != %v\n", count)
+ }
+}
diff --git a/metrics/metrics.go b/metrics/metrics.go
index c82661d80..e24324814 100644
--- a/metrics/metrics.go
+++ b/metrics/metrics.go
@@ -1,20 +1,8 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
+// Go port of Coda Hale's Metrics library
//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
+// <https://github.com/rcrowley/go-metrics>
//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Package metrics provides general system and process level metrics collection.
+// Coda Hale's original work: <https://github.com/codahale/metrics>
package metrics
import (
@@ -24,17 +12,19 @@ import (
"time"
"github.com/ethereum/go-ethereum/log"
- "github.com/rcrowley/go-metrics"
- "github.com/rcrowley/go-metrics/exp"
)
+// Enabled is checked by the constructor functions for all of the
+// standard metrics. If it is false, the metric returned is a stub.
+//
+// This global kill-switch helps quantify the observer effect and makes
+// for less cluttered pprof profiles.
+var Enabled bool = false
+
// MetricsEnabledFlag is the CLI flag name to use to enable metrics collections.
const MetricsEnabledFlag = "metrics"
const DashboardEnabledFlag = "dashboard"
-// Enabled is the flag specifying if metrics are enable or not.
-var Enabled = false
-
// Init enables or disables the metrics system. Since we need this to run before
// any other code gets to create meters and timers, we'll actually do an ugly hack
// and peek into the command line args for the metrics flag.
@@ -45,34 +35,6 @@ func init() {
Enabled = true
}
}
- exp.Exp(metrics.DefaultRegistry)
-}
-
-// NewCounter create a new metrics Counter, either a real one of a NOP stub depending
-// on the metrics flag.
-func NewCounter(name string) metrics.Counter {
- if !Enabled {
- return new(metrics.NilCounter)
- }
- return metrics.GetOrRegisterCounter(name, metrics.DefaultRegistry)
-}
-
-// NewMeter create a new metrics Meter, either a real one of a NOP stub depending
-// on the metrics flag.
-func NewMeter(name string) metrics.Meter {
- if !Enabled {
- return new(metrics.NilMeter)
- }
- return metrics.GetOrRegisterMeter(name, metrics.DefaultRegistry)
-}
-
-// NewTimer create a new metrics Timer, either a real one of a NOP stub depending
-// on the metrics flag.
-func NewTimer(name string) metrics.Timer {
- if !Enabled {
- return new(metrics.NilTimer)
- }
- return metrics.GetOrRegisterTimer(name, metrics.DefaultRegistry)
}
// CollectProcessMetrics periodically collects various metrics about the running
@@ -90,17 +52,17 @@ func CollectProcessMetrics(refresh time.Duration) {
diskstats[i] = new(DiskStats)
}
// Define the various metrics to collect
- memAllocs := metrics.GetOrRegisterMeter("system/memory/allocs", metrics.DefaultRegistry)
- memFrees := metrics.GetOrRegisterMeter("system/memory/frees", metrics.DefaultRegistry)
- memInuse := metrics.GetOrRegisterMeter("system/memory/inuse", metrics.DefaultRegistry)
- memPauses := metrics.GetOrRegisterMeter("system/memory/pauses", metrics.DefaultRegistry)
+ memAllocs := GetOrRegisterMeter("system/memory/allocs", DefaultRegistry)
+ memFrees := GetOrRegisterMeter("system/memory/frees", DefaultRegistry)
+ memInuse := GetOrRegisterMeter("system/memory/inuse", DefaultRegistry)
+ memPauses := GetOrRegisterMeter("system/memory/pauses", DefaultRegistry)
- var diskReads, diskReadBytes, diskWrites, diskWriteBytes metrics.Meter
+ var diskReads, diskReadBytes, diskWrites, diskWriteBytes Meter
if err := ReadDiskStats(diskstats[0]); err == nil {
- diskReads = metrics.GetOrRegisterMeter("system/disk/readcount", metrics.DefaultRegistry)
- diskReadBytes = metrics.GetOrRegisterMeter("system/disk/readdata", metrics.DefaultRegistry)
- diskWrites = metrics.GetOrRegisterMeter("system/disk/writecount", metrics.DefaultRegistry)
- diskWriteBytes = metrics.GetOrRegisterMeter("system/disk/writedata", metrics.DefaultRegistry)
+ diskReads = GetOrRegisterMeter("system/disk/readcount", DefaultRegistry)
+ diskReadBytes = GetOrRegisterMeter("system/disk/readdata", DefaultRegistry)
+ diskWrites = GetOrRegisterMeter("system/disk/writecount", DefaultRegistry)
+ diskWriteBytes = GetOrRegisterMeter("system/disk/writedata", DefaultRegistry)
} else {
log.Debug("Failed to read disk metrics", "err", err)
}
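With the wrapper constructors gone, the Enabled switch is the only thing separating real metrics from nil stubs. A sketch of the effect; geth flips the switch from init() when --metrics is on the command line, here it is set manually:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/metrics"
)

func main() {
	// Default: Enabled is false, so constructors return nil stubs and
	// updates are discarded.
	g := metrics.NewGauge()
	g.Update(10)
	fmt.Println(g.Value()) // 0

	// After enabling, freshly constructed metrics are real. Metrics
	// constructed earlier stay stubs, which is why geth sets the flag in init().
	metrics.Enabled = true
	h := metrics.NewGauge()
	h.Update(10)
	fmt.Println(h.Value()) // 10
}
```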
diff --git a/metrics/metrics_test.go b/metrics/metrics_test.go
new file mode 100644
index 000000000..df36da0ad
--- /dev/null
+++ b/metrics/metrics_test.go
@@ -0,0 +1,125 @@
+package metrics
+
+import (
+ "fmt"
+ "io/ioutil"
+ "log"
+ "sync"
+ "testing"
+ "time"
+)
+
+const FANOUT = 128
+
+// Stop the compiler from complaining during debugging.
+var (
+ _ = ioutil.Discard
+ _ = log.LstdFlags
+)
+
+func BenchmarkMetrics(b *testing.B) {
+ r := NewRegistry()
+ c := NewRegisteredCounter("counter", r)
+ g := NewRegisteredGauge("gauge", r)
+ gf := NewRegisteredGaugeFloat64("gaugefloat64", r)
+ h := NewRegisteredHistogram("histogram", r, NewUniformSample(100))
+ m := NewRegisteredMeter("meter", r)
+ t := NewRegisteredTimer("timer", r)
+ RegisterDebugGCStats(r)
+ RegisterRuntimeMemStats(r)
+ b.ResetTimer()
+ ch := make(chan bool)
+
+ wgD := &sync.WaitGroup{}
+ /*
+ wgD.Add(1)
+ go func() {
+ defer wgD.Done()
+ //log.Println("go CaptureDebugGCStats")
+ for {
+ select {
+ case <-ch:
+ //log.Println("done CaptureDebugGCStats")
+ return
+ default:
+ CaptureDebugGCStatsOnce(r)
+ }
+ }
+ }()
+ //*/
+
+ wgR := &sync.WaitGroup{}
+ //*
+ wgR.Add(1)
+ go func() {
+ defer wgR.Done()
+ //log.Println("go CaptureRuntimeMemStats")
+ for {
+ select {
+ case <-ch:
+ //log.Println("done CaptureRuntimeMemStats")
+ return
+ default:
+ CaptureRuntimeMemStatsOnce(r)
+ }
+ }
+ }()
+ //*/
+
+ wgW := &sync.WaitGroup{}
+ /*
+ wgW.Add(1)
+ go func() {
+ defer wgW.Done()
+ //log.Println("go Write")
+ for {
+ select {
+ case <-ch:
+ //log.Println("done Write")
+ return
+ default:
+ WriteOnce(r, ioutil.Discard)
+ }
+ }
+ }()
+ //*/
+
+ wg := &sync.WaitGroup{}
+ wg.Add(FANOUT)
+ for i := 0; i < FANOUT; i++ {
+ go func(i int) {
+ defer wg.Done()
+ //log.Println("go", i)
+ for i := 0; i < b.N; i++ {
+ c.Inc(1)
+ g.Update(int64(i))
+ gf.Update(float64(i))
+ h.Update(int64(i))
+ m.Mark(1)
+ t.Update(1)
+ }
+ //log.Println("done", i)
+ }(i)
+ }
+ wg.Wait()
+ close(ch)
+ wgD.Wait()
+ wgR.Wait()
+ wgW.Wait()
+}
+
+func Example() {
+ c := NewCounter()
+ Register("money", c)
+ c.Inc(17)
+
+ // Threadsafe registration
+ t := GetOrRegisterTimer("db.get.latency", nil)
+ t.Time(func() { time.Sleep(10 * time.Millisecond) })
+ t.Update(1)
+
+ fmt.Println(c.Count())
+ fmt.Println(t.Min())
+ // Output: 17
+ // 1
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/opentsdb.go b/metrics/opentsdb.go
index 266b6c93d..df7f152ed 100644
--- a/vendor/github.com/rcrowley/go-metrics/opentsdb.go
+++ b/metrics/opentsdb.go
@@ -38,7 +38,7 @@ func OpenTSDB(r Registry, d time.Duration, prefix string, addr *net.TCPAddr) {
// OpenTSDBWithConfig is a blocking exporter function just like OpenTSDB,
// but it takes a OpenTSDBConfig instead.
func OpenTSDBWithConfig(c OpenTSDBConfig) {
- for _ = range time.Tick(c.FlushInterval) {
+ for range time.Tick(c.FlushInterval) {
if err := openTSDB(&c); nil != err {
log.Println(err)
}
diff --git a/metrics/opentsdb_test.go b/metrics/opentsdb_test.go
new file mode 100644
index 000000000..c43728960
--- /dev/null
+++ b/metrics/opentsdb_test.go
@@ -0,0 +1,21 @@
+package metrics
+
+import (
+ "net"
+ "time"
+)
+
+func ExampleOpenTSDB() {
+ addr, _ := net.ResolveTCPAddr("tcp", ":2003")
+ go OpenTSDB(DefaultRegistry, 1*time.Second, "some.prefix", addr)
+}
+
+func ExampleOpenTSDBWithConfig() {
+ addr, _ := net.ResolveTCPAddr("tcp", ":2003")
+ go OpenTSDBWithConfig(OpenTSDBConfig{
+ Addr: addr,
+ Registry: DefaultRegistry,
+ FlushInterval: 1 * time.Second,
+ DurationUnit: time.Millisecond,
+ })
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/registry.go b/metrics/registry.go
index 2bb7a1e7d..cc34c9dfd 100644
--- a/vendor/github.com/rcrowley/go-metrics/registry.go
+++ b/metrics/registry.go
@@ -29,6 +29,9 @@ type Registry interface {
// Get the metric by the given name or nil if none is registered.
Get(string) interface{}
+ // GetAll metrics in the Registry.
+ GetAll() map[string]map[string]interface{}
+
// Gets an existing metric or registers the given one.
// The interface can be the metric to register if not found in registry,
// or a function returning the metric for lazy instantiation.
@@ -109,10 +112,72 @@ func (r *StandardRegistry) RunHealthchecks() {
}
}
+// GetAll metrics in the Registry
+func (r *StandardRegistry) GetAll() map[string]map[string]interface{} {
+ data := make(map[string]map[string]interface{})
+ r.Each(func(name string, i interface{}) {
+ values := make(map[string]interface{})
+ switch metric := i.(type) {
+ case Counter:
+ values["count"] = metric.Count()
+ case Gauge:
+ values["value"] = metric.Value()
+ case GaugeFloat64:
+ values["value"] = metric.Value()
+ case Healthcheck:
+ values["error"] = nil
+ metric.Check()
+ if err := metric.Error(); nil != err {
+ values["error"] = metric.Error().Error()
+ }
+ case Histogram:
+ h := metric.Snapshot()
+ ps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
+ values["count"] = h.Count()
+ values["min"] = h.Min()
+ values["max"] = h.Max()
+ values["mean"] = h.Mean()
+ values["stddev"] = h.StdDev()
+ values["median"] = ps[0]
+ values["75%"] = ps[1]
+ values["95%"] = ps[2]
+ values["99%"] = ps[3]
+ values["99.9%"] = ps[4]
+ case Meter:
+ m := metric.Snapshot()
+ values["count"] = m.Count()
+ values["1m.rate"] = m.Rate1()
+ values["5m.rate"] = m.Rate5()
+ values["15m.rate"] = m.Rate15()
+ values["mean.rate"] = m.RateMean()
+ case Timer:
+ t := metric.Snapshot()
+ ps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
+ values["count"] = t.Count()
+ values["min"] = t.Min()
+ values["max"] = t.Max()
+ values["mean"] = t.Mean()
+ values["stddev"] = t.StdDev()
+ values["median"] = ps[0]
+ values["75%"] = ps[1]
+ values["95%"] = ps[2]
+ values["99%"] = ps[3]
+ values["99.9%"] = ps[4]
+ values["1m.rate"] = t.Rate1()
+ values["5m.rate"] = t.Rate5()
+ values["15m.rate"] = t.Rate15()
+ values["mean.rate"] = t.RateMean()
+ }
+ data[name] = values
+ })
+ return data
+}
+
// Unregister the metric with the given name.
func (r *StandardRegistry) Unregister(name string) {
r.mutex.Lock()
defer r.mutex.Unlock()
+ r.stop(name)
delete(r.metrics, name)
}
@@ -120,7 +185,8 @@ func (r *StandardRegistry) Unregister(name string) {
func (r *StandardRegistry) UnregisterAll() {
r.mutex.Lock()
defer r.mutex.Unlock()
- for name, _ := range r.metrics {
+ for name := range r.metrics {
+ r.stop(name)
delete(r.metrics, name)
}
}
@@ -130,7 +196,7 @@ func (r *StandardRegistry) register(name string, i interface{}) error {
return DuplicateMetric(name)
}
switch i.(type) {
- case Counter, Gauge, GaugeFloat64, Healthcheck, Histogram, Meter, Timer:
+ case Counter, Gauge, GaugeFloat64, Healthcheck, Histogram, Meter, Timer, ResettingTimer:
r.metrics[name] = i
}
return nil
@@ -146,6 +212,19 @@ func (r *StandardRegistry) registered() map[string]interface{} {
return metrics
}
+func (r *StandardRegistry) stop(name string) {
+ if i, ok := r.metrics[name]; ok {
+ if s, ok := i.(Stoppable); ok {
+ s.Stop()
+ }
+ }
+}
+
+// Stoppable defines the metrics that have to be stopped.
+type Stoppable interface {
+ Stop()
+}
+
type PrefixedRegistry struct {
underlying Registry
prefix string
@@ -216,6 +295,11 @@ func (r *PrefixedRegistry) RunHealthchecks() {
r.underlying.RunHealthchecks()
}
+// GetAll metrics in the Registry
+func (r *PrefixedRegistry) GetAll() map[string]map[string]interface{} {
+ return r.underlying.GetAll()
+}
+
// Unregister the metric with the given name. The name will be prefixed.
func (r *PrefixedRegistry) Unregister(name string) {
realName := r.prefix + name
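GetAll is the piece the JSON marshalling above builds on: it flattens every registered metric into a name-to-fields map. A short sketch:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/metrics"
)

func main() {
	metrics.Enabled = true
	r := metrics.NewRegistry()
	metrics.NewRegisteredCounter("txs", r).Inc(7)
	metrics.NewRegisteredGauge("peers", r).Update(25)

	// Every metric type is reduced to a plain map of fields, e.g.
	// "txs" -> map[count:7] and "peers" -> map[value:25].
	for name, fields := range r.GetAll() {
		fmt.Println(name, fields)
	}
}
```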
diff --git a/metrics/registry_test.go b/metrics/registry_test.go
new file mode 100644
index 000000000..a63e485fe
--- /dev/null
+++ b/metrics/registry_test.go
@@ -0,0 +1,305 @@
+package metrics
+
+import (
+ "testing"
+)
+
+func BenchmarkRegistry(b *testing.B) {
+ r := NewRegistry()
+ r.Register("foo", NewCounter())
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ r.Each(func(string, interface{}) {})
+ }
+}
+
+func TestRegistry(t *testing.T) {
+ r := NewRegistry()
+ r.Register("foo", NewCounter())
+ i := 0
+ r.Each(func(name string, iface interface{}) {
+ i++
+ if "foo" != name {
+ t.Fatal(name)
+ }
+ if _, ok := iface.(Counter); !ok {
+ t.Fatal(iface)
+ }
+ })
+ if 1 != i {
+ t.Fatal(i)
+ }
+ r.Unregister("foo")
+ i = 0
+ r.Each(func(string, interface{}) { i++ })
+ if 0 != i {
+ t.Fatal(i)
+ }
+}
+
+func TestRegistryDuplicate(t *testing.T) {
+ r := NewRegistry()
+ if err := r.Register("foo", NewCounter()); nil != err {
+ t.Fatal(err)
+ }
+ if err := r.Register("foo", NewGauge()); nil == err {
+ t.Fatal(err)
+ }
+ i := 0
+ r.Each(func(name string, iface interface{}) {
+ i++
+ if _, ok := iface.(Counter); !ok {
+ t.Fatal(iface)
+ }
+ })
+ if 1 != i {
+ t.Fatal(i)
+ }
+}
+
+func TestRegistryGet(t *testing.T) {
+ r := NewRegistry()
+ r.Register("foo", NewCounter())
+ if count := r.Get("foo").(Counter).Count(); 0 != count {
+ t.Fatal(count)
+ }
+ r.Get("foo").(Counter).Inc(1)
+ if count := r.Get("foo").(Counter).Count(); 1 != count {
+ t.Fatal(count)
+ }
+}
+
+func TestRegistryGetOrRegister(t *testing.T) {
+ r := NewRegistry()
+
+ // First metric wins with GetOrRegister
+ _ = r.GetOrRegister("foo", NewCounter())
+ m := r.GetOrRegister("foo", NewGauge())
+ if _, ok := m.(Counter); !ok {
+ t.Fatal(m)
+ }
+
+ i := 0
+ r.Each(func(name string, iface interface{}) {
+ i++
+ if name != "foo" {
+ t.Fatal(name)
+ }
+ if _, ok := iface.(Counter); !ok {
+ t.Fatal(iface)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestRegistryGetOrRegisterWithLazyInstantiation(t *testing.T) {
+ r := NewRegistry()
+
+ // First metric wins with GetOrRegister
+ _ = r.GetOrRegister("foo", NewCounter)
+ m := r.GetOrRegister("foo", NewGauge)
+ if _, ok := m.(Counter); !ok {
+ t.Fatal(m)
+ }
+
+ i := 0
+ r.Each(func(name string, iface interface{}) {
+ i++
+ if name != "foo" {
+ t.Fatal(name)
+ }
+ if _, ok := iface.(Counter); !ok {
+ t.Fatal(iface)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestRegistryUnregister(t *testing.T) {
+ l := len(arbiter.meters)
+ r := NewRegistry()
+ r.Register("foo", NewCounter())
+ r.Register("bar", NewMeter())
+ r.Register("baz", NewTimer())
+ if len(arbiter.meters) != l+2 {
+ t.Errorf("arbiter.meters: %d != %d\n", l+2, len(arbiter.meters))
+ }
+ r.Unregister("foo")
+ r.Unregister("bar")
+ r.Unregister("baz")
+ if len(arbiter.meters) != l {
+ t.Errorf("arbiter.meters: %d != %d\n", l+2, len(arbiter.meters))
+ }
+}
+
+func TestPrefixedChildRegistryGetOrRegister(t *testing.T) {
+ r := NewRegistry()
+ pr := NewPrefixedChildRegistry(r, "prefix.")
+
+ _ = pr.GetOrRegister("foo", NewCounter())
+
+ i := 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.foo" {
+ t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestPrefixedRegistryGetOrRegister(t *testing.T) {
+ r := NewPrefixedRegistry("prefix.")
+
+ _ = r.GetOrRegister("foo", NewCounter())
+
+ i := 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.foo" {
+ t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestPrefixedRegistryRegister(t *testing.T) {
+ r := NewPrefixedRegistry("prefix.")
+ err := r.Register("foo", NewCounter())
+ c := NewCounter()
+ Register("bar", c)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ i := 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.foo" {
+ t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestPrefixedRegistryUnregister(t *testing.T) {
+ r := NewPrefixedRegistry("prefix.")
+
+ _ = r.Register("foo", NewCounter())
+
+ i := 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.foo" {
+ t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+
+ r.Unregister("foo")
+
+ i = 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ })
+
+ if i != 0 {
+ t.Fatal(i)
+ }
+}
+
+func TestPrefixedRegistryGet(t *testing.T) {
+ pr := NewPrefixedRegistry("prefix.")
+ name := "foo"
+ pr.Register(name, NewCounter())
+
+ fooCounter := pr.Get(name)
+ if fooCounter == nil {
+ t.Fatal(name)
+ }
+}
+
+func TestPrefixedChildRegistryGet(t *testing.T) {
+ r := NewRegistry()
+ pr := NewPrefixedChildRegistry(r, "prefix.")
+ name := "foo"
+ pr.Register(name, NewCounter())
+ fooCounter := pr.Get(name)
+ if fooCounter == nil {
+ t.Fatal(name)
+ }
+}
+
+func TestChildPrefixedRegistryRegister(t *testing.T) {
+ r := NewPrefixedChildRegistry(DefaultRegistry, "prefix.")
+ err := r.Register("foo", NewCounter())
+ c := NewCounter()
+ Register("bar", c)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ i := 0
+ r.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.foo" {
+ t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestChildPrefixedRegistryOfChildRegister(t *testing.T) {
+ r := NewPrefixedChildRegistry(NewRegistry(), "prefix.")
+ r2 := NewPrefixedChildRegistry(r, "prefix2.")
+ err := r.Register("foo2", NewCounter())
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ err = r2.Register("baz", NewCounter())
+ c := NewCounter()
+ Register("bars", c)
+
+ i := 0
+ r2.Each(func(name string, m interface{}) {
+ i++
+ if name != "prefix.prefix2.baz" {
+ //t.Fatal(name)
+ }
+ })
+ if i != 1 {
+ t.Fatal(i)
+ }
+}
+
+func TestWalkRegistries(t *testing.T) {
+ r := NewPrefixedChildRegistry(NewRegistry(), "prefix.")
+ r2 := NewPrefixedChildRegistry(r, "prefix2.")
+ err := r.Register("foo2", NewCounter())
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ err = r2.Register("baz", NewCounter())
+ c := NewCounter()
+ Register("bars", c)
+
+ _, prefix := findPrefix(r2, "")
+ if "prefix.prefix2." != prefix {
+ t.Fatal(prefix)
+ }
+
+}
diff --git a/metrics/resetting_timer.go b/metrics/resetting_timer.go
new file mode 100644
index 000000000..57bcb3134
--- /dev/null
+++ b/metrics/resetting_timer.go
@@ -0,0 +1,237 @@
+package metrics
+
+import (
+ "math"
+ "sort"
+ "sync"
+ "time"
+)
+
+// Initial slice capacity for the values stored in a ResettingTimer
+const InitialResettingTimerSliceCap = 10
+
+// ResettingTimer is used for storing aggregated values for timers, which are reset on every flush interval.
+type ResettingTimer interface {
+ Values() []int64
+ Snapshot() ResettingTimer
+ Percentiles([]float64) []int64
+ Mean() float64
+ Time(func())
+ Update(time.Duration)
+ UpdateSince(time.Time)
+}
+
+// GetOrRegisterResettingTimer returns an existing ResettingTimer or constructs and registers a
+// new StandardResettingTimer.
+func GetOrRegisterResettingTimer(name string, r Registry) ResettingTimer {
+ if nil == r {
+ r = DefaultRegistry
+ }
+ return r.GetOrRegister(name, NewResettingTimer).(ResettingTimer)
+}
+
+// NewRegisteredResettingTimer constructs and registers a new StandardResettingTimer.
+func NewRegisteredResettingTimer(name string, r Registry) ResettingTimer {
+ c := NewResettingTimer()
+ if nil == r {
+ r = DefaultRegistry
+ }
+ r.Register(name, c)
+ return c
+}
+
+// NewResettingTimer constructs a new StandardResettingTimer
+func NewResettingTimer() ResettingTimer {
+ if !Enabled {
+ return NilResettingTimer{}
+ }
+ return &StandardResettingTimer{
+ values: make([]int64, 0, InitialResettingTimerSliceCap),
+ }
+}
+
+// NilResettingTimer is a no-op ResettingTimer.
+type NilResettingTimer struct {
+}
+
+// Values is a no-op.
+func (NilResettingTimer) Values() []int64 { return nil }
+
+// Snapshot is a no-op.
+func (NilResettingTimer) Snapshot() ResettingTimer { return NilResettingTimer{} }
+
+// Time is a no-op.
+func (NilResettingTimer) Time(func()) {}
+
+// Update is a no-op.
+func (NilResettingTimer) Update(time.Duration) {}
+
+// Percentiles panics.
+func (NilResettingTimer) Percentiles([]float64) []int64 {
+ panic("Percentiles called on a NilResettingTimer")
+}
+
+// Mean panics.
+func (NilResettingTimer) Mean() float64 {
+ panic("Mean called on a NilResettingTimer")
+}
+
+// UpdateSince is a no-op.
+func (NilResettingTimer) UpdateSince(time.Time) {}
+
+// StandardResettingTimer is the standard implementation of a ResettingTimer;
+// it stores all recorded durations in a plain slice until it is snapshotted.
+type StandardResettingTimer struct {
+ values []int64
+ mutex sync.Mutex
+}
+
+// Values returns a slice with all measurements.
+func (t *StandardResettingTimer) Values() []int64 {
+ return t.values
+}
+
+// Snapshot resets the timer and returns a read-only copy of its contents.
+func (t *StandardResettingTimer) Snapshot() ResettingTimer {
+ t.mutex.Lock()
+ defer t.mutex.Unlock()
+ currentValues := t.values
+ t.values = make([]int64, 0, InitialResettingTimerSliceCap)
+
+ return &ResettingTimerSnapshot{
+ values: currentValues,
+ }
+}
+
+// Percentiles panics.
+func (t *StandardResettingTimer) Percentiles([]float64) []int64 {
+ panic("Percentiles called on a StandardResettingTimer")
+}
+
+// Mean panics.
+func (t *StandardResettingTimer) Mean() float64 {
+ panic("Mean called on a StandardResettingTimer")
+}
+
+// Record the duration of the execution of the given function.
+func (t *StandardResettingTimer) Time(f func()) {
+ ts := time.Now()
+ f()
+ t.Update(time.Since(ts))
+}
+
+// Record the duration of an event.
+func (t *StandardResettingTimer) Update(d time.Duration) {
+ t.mutex.Lock()
+ defer t.mutex.Unlock()
+ t.values = append(t.values, int64(d))
+}
+
+// Record the duration of an event that started at a time and ends now.
+func (t *StandardResettingTimer) UpdateSince(ts time.Time) {
+ t.mutex.Lock()
+ defer t.mutex.Unlock()
+ t.values = append(t.values, int64(time.Since(ts)))
+}
+
+// ResettingTimerSnapshot is a point-in-time copy of another ResettingTimer.
+type ResettingTimerSnapshot struct {
+ values []int64
+ mean float64
+ thresholdBoundaries []int64
+ calculated bool
+}
+
+// Snapshot returns the snapshot.
+func (t *ResettingTimerSnapshot) Snapshot() ResettingTimer { return t }
+
+// Time panics.
+func (*ResettingTimerSnapshot) Time(func()) {
+ panic("Time called on a ResettingTimerSnapshot")
+}
+
+// Update panics.
+func (*ResettingTimerSnapshot) Update(time.Duration) {
+ panic("Update called on a ResettingTimerSnapshot")
+}
+
+// UpdateSince panics.
+func (*ResettingTimerSnapshot) UpdateSince(time.Time) {
+ panic("UpdateSince called on a ResettingTimerSnapshot")
+}
+
+// Values returns all values from snapshot.
+func (t *ResettingTimerSnapshot) Values() []int64 {
+ return t.values
+}
+
+// Percentiles returns the boundaries for the input percentiles.
+func (t *ResettingTimerSnapshot) Percentiles(percentiles []float64) []int64 {
+ t.calc(percentiles)
+
+ return t.thresholdBoundaries
+}
+
+// Mean returns the mean of the snapshotted values.
+func (t *ResettingTimerSnapshot) Mean() float64 {
+ if !t.calculated {
+ t.calc([]float64{})
+ }
+
+ return t.mean
+}
+
+func (t *ResettingTimerSnapshot) calc(percentiles []float64) {
+ sort.Sort(Int64Slice(t.values))
+
+ count := len(t.values)
+ if count > 0 {
+ min := t.values[0]
+ max := t.values[count-1]
+
+ cumulativeValues := make([]int64, count)
+ cumulativeValues[0] = min
+ for i := 1; i < count; i++ {
+ cumulativeValues[i] = t.values[i] + cumulativeValues[i-1]
+ }
+
+ t.thresholdBoundaries = make([]int64, len(percentiles))
+
+ thresholdBoundary := max
+
+ for i, pct := range percentiles {
+ if count > 1 {
+ var abs float64
+ if pct >= 0 {
+ abs = pct
+ } else {
+ abs = 100 + pct
+ }
+ // poor man's math.Round(x):
+ // math.Floor(x + 0.5)
+ indexOfPerc := int(math.Floor(((abs / 100.0) * float64(count)) + 0.5))
+ if pct >= 0 {
+ indexOfPerc -= 1 // shift to a zero-based index
+ }
+ thresholdBoundary = t.values[indexOfPerc]
+ }
+
+ t.thresholdBoundaries[i] = thresholdBoundary
+ }
+
+ sum := cumulativeValues[count-1]
+ t.mean = float64(sum) / float64(count)
+ } else {
+ t.thresholdBoundaries = make([]int64, len(percentiles))
+ t.mean = 0
+ }
+
+ t.calculated = true
+}
+
+// Int64Slice attaches the methods of sort.Interface to []int64, sorting in increasing order.
+type Int64Slice []int64
+
+func (s Int64Slice) Len() int { return len(s) }
+func (s Int64Slice) Less(i, j int) bool { return s[i] < s[j] }
+func (s Int64Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
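For context, a minimal usage sketch of the resetting timer added above; the metric name is illustrative and a reporting backend would normally be the component calling Snapshot once per flush interval:

    package main

    import (
        "fmt"
        "time"

        "github.com/ethereum/go-ethereum/metrics"
    )

    func main() {
        // NewResettingTimer returns a no-op NilResettingTimer unless metrics
        // collection has been switched on beforehand.
        metrics.Enabled = true

        t := metrics.GetOrRegisterResettingTimer("chain/inserts", nil) // nil selects DefaultRegistry
        for i := 1; i <= 100; i++ {
            t.Update(time.Duration(i) * time.Millisecond)
        }

        // Snapshot hands back the accumulated values and resets the live timer,
        // so the next interval starts from an empty slice.
        snap := t.Snapshot()
        ps := snap.Percentiles([]float64{50, 95, 99})
        fmt.Printf("mean=%.0f p50=%d p95=%d p99=%d\n", snap.Mean(), ps[0], ps[1], ps[2])
    }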
diff --git a/metrics/resetting_timer_test.go b/metrics/resetting_timer_test.go
new file mode 100644
index 000000000..58fd47f35
--- /dev/null
+++ b/metrics/resetting_timer_test.go
@@ -0,0 +1,106 @@
+package metrics
+
+import (
+ "testing"
+ "time"
+)
+
+func TestResettingTimer(t *testing.T) {
+ tests := []struct {
+ values []int64
+ start int
+ end int
+ wantP50 int64
+ wantP95 int64
+ wantP99 int64
+ wantMean float64
+ wantMin int64
+ wantMax int64
+ }{
+ {
+ values: []int64{},
+ start: 1,
+ end: 11,
+ wantP50: 5, wantP95: 10, wantP99: 10,
+ wantMin: 1, wantMax: 10, wantMean: 5.5,
+ },
+ {
+ values: []int64{},
+ start: 1,
+ end: 101,
+ wantP50: 50, wantP95: 95, wantP99: 99,
+ wantMin: 1, wantMax: 100, wantMean: 50.5,
+ },
+ {
+ values: []int64{1},
+ start: 0,
+ end: 0,
+ wantP50: 1, wantP95: 1, wantP99: 1,
+ wantMin: 1, wantMax: 1, wantMean: 1,
+ },
+ {
+ values: []int64{0},
+ start: 0,
+ end: 0,
+ wantP50: 0, wantP95: 0, wantP99: 0,
+ wantMin: 0, wantMax: 0, wantMean: 0,
+ },
+ {
+ values: []int64{},
+ start: 0,
+ end: 0,
+ wantP50: 0, wantP95: 0, wantP99: 0,
+ wantMin: 0, wantMax: 0, wantMean: 0,
+ },
+ {
+ values: []int64{1, 10},
+ start: 0,
+ end: 0,
+ wantP50: 1, wantP95: 10, wantP99: 10,
+ wantMin: 1, wantMax: 10, wantMean: 5.5,
+ },
+ }
+ for ind, tt := range tests {
+ timer := NewResettingTimer()
+
+ for i := tt.start; i < tt.end; i++ {
+ tt.values = append(tt.values, int64(i))
+ }
+
+ for _, v := range tt.values {
+ timer.Update(time.Duration(v))
+ }
+
+ snap := timer.Snapshot()
+
+ ps := snap.Percentiles([]float64{50, 95, 99})
+
+ val := snap.Values()
+
+ if len(val) > 0 {
+ if tt.wantMin != val[0] {
+ t.Fatalf("%d: min: got %d, want %d", ind, val[0], tt.wantMin)
+ }
+
+ if tt.wantMax != val[len(val)-1] {
+ t.Fatalf("%d: max: got %d, want %d", ind, val[len(val)-1], tt.wantMax)
+ }
+ }
+
+ if tt.wantMean != snap.Mean() {
+ t.Fatalf("%d: mean: got %.2f, want %.2f", ind, snap.Mean(), tt.wantMean)
+ }
+
+ if tt.wantP50 != ps[0] {
+ t.Fatalf("%d: p50: got %d, want %d", ind, ps[0], tt.wantP50)
+ }
+
+ if tt.wantP95 != ps[1] {
+ t.Fatalf("%d: p95: got %d, want %d", ind, ps[1], tt.wantP95)
+ }
+
+ if tt.wantP99 != ps[2] {
+ t.Fatalf("%d: p99: got %d, want %d", ind, ps[2], tt.wantP99)
+ }
+ }
+}
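The expected percentiles in the table above follow from the index arithmetic in ResettingTimerSnapshot.calc; a small standalone sketch of that arithmetic for the first test case (ten values 1..10):

    package main

    import (
        "fmt"
        "math"
    )

    // percIndex mirrors the rounding used in ResettingTimerSnapshot.calc for
    // non-negative percentiles: floor(pct/100*count + 0.5), shifted to a
    // zero-based index.
    func percIndex(pct float64, count int) int {
        return int(math.Floor(pct/100*float64(count)+0.5)) - 1
    }

    func main() {
        fmt.Println(percIndex(50, 10)) // 4 -> values[4] = 5  (wantP50)
        fmt.Println(percIndex(95, 10)) // 9 -> values[9] = 10 (wantP95)
        fmt.Println(percIndex(99, 10)) // 9 -> values[9] = 10 (wantP99)
    }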
diff --git a/vendor/github.com/rcrowley/go-metrics/runtime.go b/metrics/runtime.go
index 11c6b785a..9450c479b 100644
--- a/vendor/github.com/rcrowley/go-metrics/runtime.go
+++ b/metrics/runtime.go
@@ -55,7 +55,7 @@ var (
// Capture new values for the Go runtime statistics exported in
// runtime.MemStats. This is designed to be called as a goroutine.
func CaptureRuntimeMemStats(r Registry, d time.Duration) {
- for _ = range time.Tick(d) {
+ for range time.Tick(d) {
CaptureRuntimeMemStatsOnce(r)
}
}
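CaptureRuntimeMemStats blocks on time.Tick, so it is meant to be launched as its own goroutine; a hedged wiring sketch (the five-second interval is arbitrary):

    package main

    import (
        "time"

        "github.com/ethereum/go-ethereum/metrics"
    )

    func main() {
        metrics.Enabled = true

        r := metrics.NewRegistry()
        metrics.RegisterRuntimeMemStats(r)

        // Loops on time.Tick(d) internally, so run it in the background.
        go metrics.CaptureRuntimeMemStats(r, 5*time.Second)

        time.Sleep(11 * time.Second) // let a couple of capture cycles run
    }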
diff --git a/vendor/github.com/rcrowley/go-metrics/runtime_cgo.go b/metrics/runtime_cgo.go
index e3391f4e8..e3391f4e8 100644
--- a/vendor/github.com/rcrowley/go-metrics/runtime_cgo.go
+++ b/metrics/runtime_cgo.go
diff --git a/vendor/github.com/rcrowley/go-metrics/runtime_gccpufraction.go b/metrics/runtime_gccpufraction.go
index ca12c05ba..ca12c05ba 100644
--- a/vendor/github.com/rcrowley/go-metrics/runtime_gccpufraction.go
+++ b/metrics/runtime_gccpufraction.go
diff --git a/vendor/github.com/rcrowley/go-metrics/runtime_no_cgo.go b/metrics/runtime_no_cgo.go
index 616a3b475..616a3b475 100644
--- a/vendor/github.com/rcrowley/go-metrics/runtime_no_cgo.go
+++ b/metrics/runtime_no_cgo.go
diff --git a/vendor/github.com/rcrowley/go-metrics/runtime_no_gccpufraction.go b/metrics/runtime_no_gccpufraction.go
index be96aa6f1..be96aa6f1 100644
--- a/vendor/github.com/rcrowley/go-metrics/runtime_no_gccpufraction.go
+++ b/metrics/runtime_no_gccpufraction.go
diff --git a/metrics/runtime_test.go b/metrics/runtime_test.go
new file mode 100644
index 000000000..ebbfd501a
--- /dev/null
+++ b/metrics/runtime_test.go
@@ -0,0 +1,88 @@
+package metrics
+
+import (
+ "runtime"
+ "testing"
+ "time"
+)
+
+func BenchmarkRuntimeMemStats(b *testing.B) {
+ r := NewRegistry()
+ RegisterRuntimeMemStats(r)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ CaptureRuntimeMemStatsOnce(r)
+ }
+}
+
+func TestRuntimeMemStats(t *testing.T) {
+ r := NewRegistry()
+ RegisterRuntimeMemStats(r)
+ CaptureRuntimeMemStatsOnce(r)
+ zero := runtimeMetrics.MemStats.PauseNs.Count() // Get a "zero" since GC may have run before these tests.
+ runtime.GC()
+ CaptureRuntimeMemStatsOnce(r)
+ if count := runtimeMetrics.MemStats.PauseNs.Count(); 1 != count-zero {
+ t.Fatal(count - zero)
+ }
+ runtime.GC()
+ runtime.GC()
+ CaptureRuntimeMemStatsOnce(r)
+ if count := runtimeMetrics.MemStats.PauseNs.Count(); 3 != count-zero {
+ t.Fatal(count - zero)
+ }
+ for i := 0; i < 256; i++ {
+ runtime.GC()
+ }
+ CaptureRuntimeMemStatsOnce(r)
+ if count := runtimeMetrics.MemStats.PauseNs.Count(); 259 != count-zero {
+ t.Fatal(count - zero)
+ }
+ for i := 0; i < 257; i++ {
+ runtime.GC()
+ }
+ CaptureRuntimeMemStatsOnce(r)
+ if count := runtimeMetrics.MemStats.PauseNs.Count(); 515 != count-zero { // We lost one because there were too many GCs between captures.
+ t.Fatal(count - zero)
+ }
+}
+
+func TestRuntimeMemStatsNumThread(t *testing.T) {
+ r := NewRegistry()
+ RegisterRuntimeMemStats(r)
+ CaptureRuntimeMemStatsOnce(r)
+
+ if value := runtimeMetrics.NumThread.Value(); value < 1 {
+ t.Fatalf("got NumThread: %d, wanted at least 1", value)
+ }
+}
+
+func TestRuntimeMemStatsBlocking(t *testing.T) {
+ if g := runtime.GOMAXPROCS(0); g < 2 {
+ t.Skipf("skipping TestRuntimeMemStatsBlocking with GOMAXPROCS=%d\n", g)
+ }
+ ch := make(chan int)
+ go testRuntimeMemStatsBlocking(ch)
+ var memStats runtime.MemStats
+ t0 := time.Now()
+ runtime.ReadMemStats(&memStats)
+ t1 := time.Now()
+ t.Log("i++ during runtime.ReadMemStats:", <-ch)
+ go testRuntimeMemStatsBlocking(ch)
+ d := t1.Sub(t0)
+ t.Log(d)
+ time.Sleep(d)
+ t.Log("i++ during time.Sleep:", <-ch)
+}
+
+func testRuntimeMemStatsBlocking(ch chan int) {
+ i := 0
+ for {
+ select {
+ case ch <- i:
+ return
+ default:
+ i++
+ }
+ }
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/sample.go b/metrics/sample.go
index fecee5ef6..5c4845a4f 100644
--- a/vendor/github.com/rcrowley/go-metrics/sample.go
+++ b/metrics/sample.go
@@ -46,7 +46,7 @@ type ExpDecaySample struct {
// NewExpDecaySample constructs a new exponentially-decaying sample with the
// given reservoir size and alpha.
func NewExpDecaySample(reservoirSize int, alpha float64) Sample {
- if UseNilMetrics {
+ if !Enabled {
return NilSample{}
}
s := &ExpDecaySample{
@@ -407,7 +407,7 @@ type UniformSample struct {
// NewUniformSample constructs a new uniform sample with the given reservoir
// size.
func NewUniformSample(reservoirSize int) Sample {
- if UseNilMetrics {
+ if !Enabled {
return NilSample{}
}
return &UniformSample{
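The switch from UseNilMetrics to the package-level Enabled flag keeps the original pattern: with metrics disabled, constructors hand back no-op nil implementations so instrumented call sites stay cheap. A small sketch of the behaviour:

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/metrics"
    )

    func main() {
        // Disabled: NewUniformSample returns a NilSample that discards updates.
        metrics.Enabled = false
        s := metrics.NewUniformSample(100)
        s.Update(42)
        fmt.Println(s.Count()) // 0

        // Enabled: a real reservoir sample is constructed.
        metrics.Enabled = true
        s = metrics.NewUniformSample(100)
        s.Update(42)
        fmt.Println(s.Count()) // 1
    }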
diff --git a/metrics/sample_test.go b/metrics/sample_test.go
new file mode 100644
index 000000000..d60e99c5b
--- /dev/null
+++ b/metrics/sample_test.go
@@ -0,0 +1,363 @@
+package metrics
+
+import (
+ "math/rand"
+ "runtime"
+ "testing"
+ "time"
+)
+
+// Benchmark{Compute,Copy}{1000,1000000} demonstrate that, even for relatively
+// expensive computations like Variance, the cost of copying the Sample, as
+// approximated by a make and copy, is much greater than the cost of the
+// computation for small samples and only slightly less for large samples.
+func BenchmarkCompute1000(b *testing.B) {
+ s := make([]int64, 1000)
+ for i := 0; i < len(s); i++ {
+ s[i] = int64(i)
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ SampleVariance(s)
+ }
+}
+func BenchmarkCompute1000000(b *testing.B) {
+ s := make([]int64, 1000000)
+ for i := 0; i < len(s); i++ {
+ s[i] = int64(i)
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ SampleVariance(s)
+ }
+}
+func BenchmarkCopy1000(b *testing.B) {
+ s := make([]int64, 1000)
+ for i := 0; i < len(s); i++ {
+ s[i] = int64(i)
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ sCopy := make([]int64, len(s))
+ copy(sCopy, s)
+ }
+}
+func BenchmarkCopy1000000(b *testing.B) {
+ s := make([]int64, 1000000)
+ for i := 0; i < len(s); i++ {
+ s[i] = int64(i)
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ sCopy := make([]int64, len(s))
+ copy(sCopy, s)
+ }
+}
+
+func BenchmarkExpDecaySample257(b *testing.B) {
+ benchmarkSample(b, NewExpDecaySample(257, 0.015))
+}
+
+func BenchmarkExpDecaySample514(b *testing.B) {
+ benchmarkSample(b, NewExpDecaySample(514, 0.015))
+}
+
+func BenchmarkExpDecaySample1028(b *testing.B) {
+ benchmarkSample(b, NewExpDecaySample(1028, 0.015))
+}
+
+func BenchmarkUniformSample257(b *testing.B) {
+ benchmarkSample(b, NewUniformSample(257))
+}
+
+func BenchmarkUniformSample514(b *testing.B) {
+ benchmarkSample(b, NewUniformSample(514))
+}
+
+func BenchmarkUniformSample1028(b *testing.B) {
+ benchmarkSample(b, NewUniformSample(1028))
+}
+
+func TestExpDecaySample10(t *testing.T) {
+ rand.Seed(1)
+ s := NewExpDecaySample(100, 0.99)
+ for i := 0; i < 10; i++ {
+ s.Update(int64(i))
+ }
+ if size := s.Count(); 10 != size {
+ t.Errorf("s.Count(): 10 != %v\n", size)
+ }
+ if size := s.Size(); 10 != size {
+ t.Errorf("s.Size(): 10 != %v\n", size)
+ }
+ if l := len(s.Values()); 10 != l {
+ t.Errorf("len(s.Values()): 10 != %v\n", l)
+ }
+ for _, v := range s.Values() {
+ if v > 10 || v < 0 {
+ t.Errorf("out of range [0, 10): %v\n", v)
+ }
+ }
+}
+
+func TestExpDecaySample100(t *testing.T) {
+ rand.Seed(1)
+ s := NewExpDecaySample(1000, 0.01)
+ for i := 0; i < 100; i++ {
+ s.Update(int64(i))
+ }
+ if size := s.Count(); 100 != size {
+ t.Errorf("s.Count(): 100 != %v\n", size)
+ }
+ if size := s.Size(); 100 != size {
+ t.Errorf("s.Size(): 100 != %v\n", size)
+ }
+ if l := len(s.Values()); 100 != l {
+ t.Errorf("len(s.Values()): 100 != %v\n", l)
+ }
+ for _, v := range s.Values() {
+ if v > 100 || v < 0 {
+ t.Errorf("out of range [0, 100): %v\n", v)
+ }
+ }
+}
+
+func TestExpDecaySample1000(t *testing.T) {
+ rand.Seed(1)
+ s := NewExpDecaySample(100, 0.99)
+ for i := 0; i < 1000; i++ {
+ s.Update(int64(i))
+ }
+ if size := s.Count(); 1000 != size {
+ t.Errorf("s.Count(): 1000 != %v\n", size)
+ }
+ if size := s.Size(); 100 != size {
+ t.Errorf("s.Size(): 100 != %v\n", size)
+ }
+ if l := len(s.Values()); 100 != l {
+ t.Errorf("len(s.Values()): 100 != %v\n", l)
+ }
+ for _, v := range s.Values() {
+ if v > 1000 || v < 0 {
+ t.Errorf("out of range [0, 1000): %v\n", v)
+ }
+ }
+}
+
+// This test makes sure that the sample's priority is not amplified by using
+// nanosecond duration since start rather than second duration since start.
+// The priority becomes +Inf quickly after starting if this is done,
+// effectively freezing the set of samples until a rescale step happens.
+func TestExpDecaySampleNanosecondRegression(t *testing.T) {
+ rand.Seed(1)
+ s := NewExpDecaySample(100, 0.99)
+ for i := 0; i < 100; i++ {
+ s.Update(10)
+ }
+ time.Sleep(1 * time.Millisecond)
+ for i := 0; i < 100; i++ {
+ s.Update(20)
+ }
+ v := s.Values()
+ avg := float64(0)
+ for i := 0; i < len(v); i++ {
+ avg += float64(v[i])
+ }
+ avg /= float64(len(v))
+ if avg > 16 || avg < 14 {
+ t.Errorf("out of range [14, 16]: %v\n", avg)
+ }
+}
+
+func TestExpDecaySampleRescale(t *testing.T) {
+ s := NewExpDecaySample(2, 0.001).(*ExpDecaySample)
+ s.update(time.Now(), 1)
+ s.update(time.Now().Add(time.Hour+time.Microsecond), 1)
+ for _, v := range s.values.Values() {
+ if v.k == 0.0 {
+ t.Fatal("v.k == 0.0")
+ }
+ }
+}
+
+func TestExpDecaySampleSnapshot(t *testing.T) {
+ now := time.Now()
+ rand.Seed(1)
+ s := NewExpDecaySample(100, 0.99)
+ for i := 1; i <= 10000; i++ {
+ s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i))
+ }
+ snapshot := s.Snapshot()
+ s.Update(1)
+ testExpDecaySampleStatistics(t, snapshot)
+}
+
+func TestExpDecaySampleStatistics(t *testing.T) {
+ now := time.Now()
+ rand.Seed(1)
+ s := NewExpDecaySample(100, 0.99)
+ for i := 1; i <= 10000; i++ {
+ s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i))
+ }
+ testExpDecaySampleStatistics(t, s)
+}
+
+func TestUniformSample(t *testing.T) {
+ rand.Seed(1)
+ s := NewUniformSample(100)
+ for i := 0; i < 1000; i++ {
+ s.Update(int64(i))
+ }
+ if size := s.Count(); 1000 != size {
+ t.Errorf("s.Count(): 1000 != %v\n", size)
+ }
+ if size := s.Size(); 100 != size {
+ t.Errorf("s.Size(): 100 != %v\n", size)
+ }
+ if l := len(s.Values()); 100 != l {
+ t.Errorf("len(s.Values()): 100 != %v\n", l)
+ }
+ for _, v := range s.Values() {
+ if v > 1000 || v < 0 {
+ t.Errorf("out of range [0, 100): %v\n", v)
+ }
+ }
+}
+
+func TestUniformSampleIncludesTail(t *testing.T) {
+ rand.Seed(1)
+ s := NewUniformSample(100)
+ max := 100
+ for i := 0; i < max; i++ {
+ s.Update(int64(i))
+ }
+ v := s.Values()
+ sum := 0
+ exp := (max - 1) * max / 2
+ for i := 0; i < len(v); i++ {
+ sum += int(v[i])
+ }
+ if exp != sum {
+ t.Errorf("sum: %v != %v\n", exp, sum)
+ }
+}
+
+func TestUniformSampleSnapshot(t *testing.T) {
+ s := NewUniformSample(100)
+ for i := 1; i <= 10000; i++ {
+ s.Update(int64(i))
+ }
+ snapshot := s.Snapshot()
+ s.Update(1)
+ testUniformSampleStatistics(t, snapshot)
+}
+
+func TestUniformSampleStatistics(t *testing.T) {
+ rand.Seed(1)
+ s := NewUniformSample(100)
+ for i := 1; i <= 10000; i++ {
+ s.Update(int64(i))
+ }
+ testUniformSampleStatistics(t, s)
+}
+
+func benchmarkSample(b *testing.B, s Sample) {
+ var memStats runtime.MemStats
+ runtime.ReadMemStats(&memStats)
+ pauseTotalNs := memStats.PauseTotalNs
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ s.Update(1)
+ }
+ b.StopTimer()
+ runtime.GC()
+ runtime.ReadMemStats(&memStats)
+ b.Logf("GC cost: %d ns/op", int(memStats.PauseTotalNs-pauseTotalNs)/b.N)
+}
+
+func testExpDecaySampleStatistics(t *testing.T, s Sample) {
+ if count := s.Count(); 10000 != count {
+ t.Errorf("s.Count(): 10000 != %v\n", count)
+ }
+ if min := s.Min(); 107 != min {
+ t.Errorf("s.Min(): 107 != %v\n", min)
+ }
+ if max := s.Max(); 10000 != max {
+ t.Errorf("s.Max(): 10000 != %v\n", max)
+ }
+ if mean := s.Mean(); 4965.98 != mean {
+ t.Errorf("s.Mean(): 4965.98 != %v\n", mean)
+ }
+ if stdDev := s.StdDev(); 2959.825156930727 != stdDev {
+ t.Errorf("s.StdDev(): 2959.825156930727 != %v\n", stdDev)
+ }
+ ps := s.Percentiles([]float64{0.5, 0.75, 0.99})
+ if 4615 != ps[0] {
+ t.Errorf("median: 4615 != %v\n", ps[0])
+ }
+ if 7672 != ps[1] {
+ t.Errorf("75th percentile: 7672 != %v\n", ps[1])
+ }
+ if 9998.99 != ps[2] {
+ t.Errorf("99th percentile: 9998.99 != %v\n", ps[2])
+ }
+}
+
+func testUniformSampleStatistics(t *testing.T, s Sample) {
+ if count := s.Count(); 10000 != count {
+ t.Errorf("s.Count(): 10000 != %v\n", count)
+ }
+ if min := s.Min(); 37 != min {
+ t.Errorf("s.Min(): 37 != %v\n", min)
+ }
+ if max := s.Max(); 9989 != max {
+ t.Errorf("s.Max(): 9989 != %v\n", max)
+ }
+ if mean := s.Mean(); 4748.14 != mean {
+ t.Errorf("s.Mean(): 4748.14 != %v\n", mean)
+ }
+ if stdDev := s.StdDev(); 2826.684117548333 != stdDev {
+ t.Errorf("s.StdDev(): 2826.684117548333 != %v\n", stdDev)
+ }
+ ps := s.Percentiles([]float64{0.5, 0.75, 0.99})
+ if 4599 != ps[0] {
+ t.Errorf("median: 4599 != %v\n", ps[0])
+ }
+ if 7380.5 != ps[1] {
+ t.Errorf("75th percentile: 7380.5 != %v\n", ps[1])
+ }
+ if 9986.429999999998 != ps[2] {
+ t.Errorf("99th percentile: 9986.429999999998 != %v\n", ps[2])
+ }
+}
+
+// TestUniformSampleConcurrentUpdateCount would expose data race problems with
+// concurrent Update and Count calls on Sample when the test is run with the
+// -race flag.
+func TestUniformSampleConcurrentUpdateCount(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in short mode")
+ }
+ s := NewUniformSample(100)
+ for i := 0; i < 100; i++ {
+ s.Update(int64(i))
+ }
+ quit := make(chan struct{})
+ go func() {
+ t := time.NewTicker(10 * time.Millisecond)
+ for {
+ select {
+ case <-t.C:
+ s.Update(rand.Int63())
+ case <-quit:
+ t.Stop()
+ return
+ }
+ }
+ }()
+ for i := 0; i < 1000; i++ {
+ s.Count()
+ time.Sleep(5 * time.Millisecond)
+ }
+ quit <- struct{}{}
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/syslog.go b/metrics/syslog.go
index 693f19085..a0ed4b1b2 100644
--- a/vendor/github.com/rcrowley/go-metrics/syslog.go
+++ b/metrics/syslog.go
@@ -11,7 +11,7 @@ import (
// Output each metric in the given registry to syslog periodically using
// the given syslogger.
func Syslog(r Registry, d time.Duration, w *syslog.Writer) {
- for _ = range time.Tick(d) {
+ for range time.Tick(d) {
r.Each(func(name string, i interface{}) {
switch metric := i.(type) {
case Counter:
diff --git a/vendor/github.com/rcrowley/go-metrics/timer.go b/metrics/timer.go
index 17db8f8d2..89e22208f 100644
--- a/vendor/github.com/rcrowley/go-metrics/timer.go
+++ b/metrics/timer.go
@@ -19,6 +19,7 @@ type Timer interface {
RateMean() float64
Snapshot() Timer
StdDev() float64
+ Stop()
Sum() int64
Time(func())
Update(time.Duration)
@@ -28,6 +29,8 @@ type Timer interface {
// GetOrRegisterTimer returns an existing Timer or constructs and registers a
// new StandardTimer.
+// Be sure to unregister the timer from the registry once it is of no use to
+// allow for garbage collection.
func GetOrRegisterTimer(name string, r Registry) Timer {
if nil == r {
r = DefaultRegistry
@@ -36,8 +39,9 @@ func GetOrRegisterTimer(name string, r Registry) Timer {
}
// NewCustomTimer constructs a new StandardTimer from a Histogram and a Meter.
+// Be sure to call Stop() once the timer is of no use to allow for garbage collection.
func NewCustomTimer(h Histogram, m Meter) Timer {
- if UseNilMetrics {
+ if !Enabled {
return NilTimer{}
}
return &StandardTimer{
@@ -47,6 +51,8 @@ func NewCustomTimer(h Histogram, m Meter) Timer {
}
// NewRegisteredTimer constructs and registers a new StandardTimer.
+// Be sure to unregister the timer from the registry once it is of no use to
+// allow for garbage collection.
func NewRegisteredTimer(name string, r Registry) Timer {
c := NewTimer()
if nil == r {
@@ -58,8 +64,9 @@ func NewRegisteredTimer(name string, r Registry) Timer {
// NewTimer constructs a new StandardTimer using an exponentially-decaying
// sample with the same reservoir size and alpha as UNIX load averages.
+// Be sure to call Stop() once the timer is of no use to allow for garbage collection.
func NewTimer() Timer {
- if UseNilMetrics {
+ if !Enabled {
return NilTimer{}
}
return &StandardTimer{
@@ -112,6 +119,9 @@ func (NilTimer) Snapshot() Timer { return NilTimer{} }
// StdDev is a no-op.
func (NilTimer) StdDev() float64 { return 0.0 }
+// Stop is a no-op.
+func (NilTimer) Stop() {}
+
// Sum is a no-op.
func (NilTimer) Sum() int64 { return 0 }
@@ -201,6 +211,11 @@ func (t *StandardTimer) StdDev() float64 {
return t.histogram.StdDev()
}
+// Stop stops the meter.
+func (t *StandardTimer) Stop() {
+ t.meter.Stop()
+}
+
// Sum returns the sum in the sample.
func (t *StandardTimer) Sum() int64 {
return t.histogram.Sum()
@@ -288,6 +303,9 @@ func (t *TimerSnapshot) Snapshot() Timer { return t }
// was taken.
func (t *TimerSnapshot) StdDev() float64 { return t.histogram.StdDev() }
+// Stop is a no-op.
+func (t *TimerSnapshot) Stop() {}
+
// Sum returns the sum at the time the snapshot was taken.
func (t *TimerSnapshot) Sum() int64 { return t.histogram.Sum() }
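The new Stop method exists because every StandardTimer owns a meter that stays referenced by the global meter arbiter until it is stopped (see TestTimerStop below). A hedged sketch of both lifecycles; the metric name is illustrative:

    package main

    import (
        "time"

        "github.com/ethereum/go-ethereum/metrics"
    )

    func main() {
        metrics.Enabled = true

        // Unregistered timer: stop it yourself once it is of no further use.
        t := metrics.NewTimer()
        defer t.Stop()
        t.Time(func() { time.Sleep(10 * time.Millisecond) })

        // Registered timer: unregister it from its registry instead, as the
        // comments above advise.
        r := metrics.NewRegistry()
        rt := metrics.NewRegisteredTimer("db/commit", r)
        rt.Update(3 * time.Millisecond)
        r.Unregister("db/commit")
    }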
diff --git a/metrics/timer_test.go b/metrics/timer_test.go
new file mode 100644
index 000000000..c1f0ff938
--- /dev/null
+++ b/metrics/timer_test.go
@@ -0,0 +1,101 @@
+package metrics
+
+import (
+ "fmt"
+ "math"
+ "testing"
+ "time"
+)
+
+func BenchmarkTimer(b *testing.B) {
+ tm := NewTimer()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ tm.Update(1)
+ }
+}
+
+func TestGetOrRegisterTimer(t *testing.T) {
+ r := NewRegistry()
+ NewRegisteredTimer("foo", r).Update(47)
+ if tm := GetOrRegisterTimer("foo", r); 1 != tm.Count() {
+ t.Fatal(tm)
+ }
+}
+
+func TestTimerExtremes(t *testing.T) {
+ tm := NewTimer()
+ tm.Update(math.MaxInt64)
+ tm.Update(0)
+ if stdDev := tm.StdDev(); 4.611686018427388e+18 != stdDev {
+ t.Errorf("tm.StdDev(): 4.611686018427388e+18 != %v\n", stdDev)
+ }
+}
+
+func TestTimerStop(t *testing.T) {
+ l := len(arbiter.meters)
+ tm := NewTimer()
+ if len(arbiter.meters) != l+1 {
+ t.Errorf("arbiter.meters: %d != %d\n", l+1, len(arbiter.meters))
+ }
+ tm.Stop()
+ if len(arbiter.meters) != l {
+ t.Errorf("arbiter.meters: %d != %d\n", l, len(arbiter.meters))
+ }
+}
+
+func TestTimerFunc(t *testing.T) {
+ tm := NewTimer()
+ tm.Time(func() { time.Sleep(50e6) })
+ if max := tm.Max(); 35e6 > max || max > 95e6 {
+ t.Errorf("tm.Max(): 35e6 > %v || %v > 95e6\n", max, max)
+ }
+}
+
+func TestTimerZero(t *testing.T) {
+ tm := NewTimer()
+ if count := tm.Count(); 0 != count {
+ t.Errorf("tm.Count(): 0 != %v\n", count)
+ }
+ if min := tm.Min(); 0 != min {
+ t.Errorf("tm.Min(): 0 != %v\n", min)
+ }
+ if max := tm.Max(); 0 != max {
+ t.Errorf("tm.Max(): 0 != %v\n", max)
+ }
+ if mean := tm.Mean(); 0.0 != mean {
+ t.Errorf("tm.Mean(): 0.0 != %v\n", mean)
+ }
+ if stdDev := tm.StdDev(); 0.0 != stdDev {
+ t.Errorf("tm.StdDev(): 0.0 != %v\n", stdDev)
+ }
+ ps := tm.Percentiles([]float64{0.5, 0.75, 0.99})
+ if 0.0 != ps[0] {
+ t.Errorf("median: 0.0 != %v\n", ps[0])
+ }
+ if 0.0 != ps[1] {
+ t.Errorf("75th percentile: 0.0 != %v\n", ps[1])
+ }
+ if 0.0 != ps[2] {
+ t.Errorf("99th percentile: 0.0 != %v\n", ps[2])
+ }
+ if rate1 := tm.Rate1(); 0.0 != rate1 {
+ t.Errorf("tm.Rate1(): 0.0 != %v\n", rate1)
+ }
+ if rate5 := tm.Rate5(); 0.0 != rate5 {
+ t.Errorf("tm.Rate5(): 0.0 != %v\n", rate5)
+ }
+ if rate15 := tm.Rate15(); 0.0 != rate15 {
+ t.Errorf("tm.Rate15(): 0.0 != %v\n", rate15)
+ }
+ if rateMean := tm.RateMean(); 0.0 != rateMean {
+ t.Errorf("tm.RateMean(): 0.0 != %v\n", rateMean)
+ }
+}
+
+func ExampleGetOrRegisterTimer() {
+ m := "account.create.latency"
+ t := GetOrRegisterTimer(m, nil)
+ t.Update(47)
+ fmt.Println(t.Max()) // Output: 47
+}
diff --git a/vendor/github.com/rcrowley/go-metrics/validate.sh b/metrics/validate.sh
index f6499982e..c4ae91e64 100755
--- a/vendor/github.com/rcrowley/go-metrics/validate.sh
+++ b/metrics/validate.sh
@@ -7,4 +7,4 @@ GOFMT_LINES=`gofmt -l . | wc -l | xargs`
test $GOFMT_LINES -eq 0 || echo "gofmt needs to be run, ${GOFMT_LINES} files have issues"
# run the tests for the root package
-go test .
+go test -race .
diff --git a/vendor/github.com/rcrowley/go-metrics/writer.go b/metrics/writer.go
index 091e971d2..88521a80d 100644
--- a/vendor/github.com/rcrowley/go-metrics/writer.go
+++ b/metrics/writer.go
@@ -10,7 +10,7 @@ import (
// Write sorts writes each metric in the given registry periodically to the
// given io.Writer.
func Write(r Registry, d time.Duration, w io.Writer) {
- for _ = range time.Tick(d) {
+ for range time.Tick(d) {
WriteOnce(r, w)
}
}
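Like the other periodic helpers touched in this patch, Write blocks on time.Tick and is intended to run as a goroutine; a minimal sketch dumping the default registry to stderr (interval and counter name are illustrative):

    package main

    import (
        "os"
        "time"

        "github.com/ethereum/go-ethereum/metrics"
    )

    func main() {
        metrics.Enabled = true

        c := metrics.GetOrRegisterCounter("example/hits", nil)
        c.Inc(1)

        go metrics.Write(metrics.DefaultRegistry, 2*time.Second, os.Stderr)
        time.Sleep(5 * time.Second)
    }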
diff --git a/metrics/writer_test.go b/metrics/writer_test.go
new file mode 100644
index 000000000..1aacc2871
--- /dev/null
+++ b/metrics/writer_test.go
@@ -0,0 +1,22 @@
+package metrics
+
+import (
+ "sort"
+ "testing"
+)
+
+func TestMetricsSorting(t *testing.T) {
+ var namedMetrics = namedMetricSlice{
+ {name: "zzz"},
+ {name: "bbb"},
+ {name: "fff"},
+ {name: "ggg"},
+ }
+
+ sort.Sort(namedMetrics)
+ for i, name := range []string{"bbb", "fff", "ggg", "zzz"} {
+ if namedMetrics[i].name != name {
+ t.Fail()
+ }
+ }
+}
diff --git a/miner/miner.go b/miner/miner.go
index fec0a40f5..d9256e978 100644
--- a/miner/miner.go
+++ b/miner/miner.go
@@ -105,8 +105,7 @@ out:
func (self *Miner) Start(coinbase common.Address) {
atomic.StoreInt32(&self.shouldStart, 1)
- self.worker.setEtherbase(coinbase)
- self.coinbase = coinbase
+ self.SetEtherbase(coinbase)
if atomic.LoadInt32(&self.canStart) == 0 {
log.Info("Network syncing, will start miner afterwards")
diff --git a/miner/worker.go b/miner/worker.go
index 1520277e1..15395ae0b 100644
--- a/miner/worker.go
+++ b/miner/worker.go
@@ -309,7 +309,7 @@ func (self *worker) wait() {
for _, log := range work.state.Logs() {
log.BlockHash = block.Hash()
}
- stat, err := self.chain.WriteBlockAndState(block, work.receipts, work.state)
+ stat, err := self.chain.WriteBlockWithState(block, work.receipts, work.state)
if err != nil {
log.Error("Failed writing block to chain", "err", err)
continue
diff --git a/mobile/android_test.go b/mobile/android_test.go
index 345e009b4..3d3bd66d0 100644
--- a/mobile/android_test.go
+++ b/mobile/android_test.go
@@ -72,7 +72,7 @@ public class AndroidTest extends InstrumentationTestCase {
Transaction tx = new Transaction(
1, new Address("0x0000000000000000000000000000000000000000"),
- new BigInt(0), new BigInt(0), new BigInt(1), null); // Random empty transaction
+ new BigInt(0), 0, new BigInt(1), null); // Random empty transaction
BigInt chain = new BigInt(1); // Chain identifier of the main net
// Sign a transaction with a single authorization
@@ -164,12 +164,17 @@ func TestAndroid(t *testing.T) {
t.Skip("command gradle not found, skipping")
}
if sdk := os.Getenv("ANDROID_HOME"); sdk == "" {
- t.Skip("ANDROID_HOME environment var not set, skipping")
+ // Android SDK not explicitly given, try to auto-resolve
+ autopath := filepath.Join(os.Getenv("HOME"), "Android", "Sdk")
+ if _, err := os.Stat(autopath); err != nil {
+ t.Skip("ANDROID_HOME environment var not set, skipping")
+ }
+ os.Setenv("ANDROID_HOME", autopath)
}
if _, err := exec.Command("which", "gomobile").CombinedOutput(); err != nil {
t.Log("gomobile missing, installing it...")
- if _, err := exec.Command("go", "install", "golang.org/x/mobile/cmd/gomobile").CombinedOutput(); err != nil {
- t.Fatalf("install failed: %v", err)
+ if out, err := exec.Command("go", "get", "golang.org/x/mobile/cmd/gomobile").CombinedOutput(); err != nil {
+ t.Fatalf("install failed: %v\n%s", err, string(out))
}
t.Log("initializing gomobile...")
start := time.Now()
@@ -239,7 +244,7 @@ const gradleConfig = `buildscript {
jcenter()
}
dependencies {
- classpath 'com.android.tools.build:gradle:1.5.0'
+ classpath 'com.android.tools.build:gradle:2.2.3'
}
}
allprojects {
diff --git a/mobile/bind.go b/mobile/bind.go
index 1e861d0bc..d6e621a25 100644
--- a/mobile/bind.go
+++ b/mobile/bind.go
@@ -138,7 +138,7 @@ func BindContract(address *Address, abiJSON string, client *EthereumClient) (con
return nil, err
}
return &BoundContract{
- contract: bind.NewBoundContract(address.address, parsed, client.client, client.client),
+ contract: bind.NewBoundContract(address.address, parsed, client.client, client.client, client.client),
address: address.address,
}, nil
}
@@ -154,12 +154,20 @@ func (c *BoundContract) GetDeployer() *Transaction {
// Call invokes the (constant) contract method with params as input values and
// sets the output to result.
func (c *BoundContract) Call(opts *CallOpts, out *Interfaces, method string, args *Interfaces) error {
- results := make([]interface{}, len(out.objects))
- copy(results, out.objects)
- if err := c.contract.Call(&opts.opts, &results, method, args.objects...); err != nil {
- return err
+ if len(out.objects) == 1 {
+ result := out.objects[0]
+ if err := c.contract.Call(&opts.opts, result, method, args.objects...); err != nil {
+ return err
+ }
+ out.objects[0] = result
+ } else {
+ results := make([]interface{}, len(out.objects))
+ copy(results, out.objects)
+ if err := c.contract.Call(&opts.opts, &results, method, args.objects...); err != nil {
+ return err
+ }
+ copy(out.objects, results)
}
- copy(out.objects, results)
return nil
}
diff --git a/node/api.go b/node/api.go
index 1b04b7093..a3b8bc0bb 100644
--- a/node/api.go
+++ b/node/api.go
@@ -24,10 +24,10 @@ import (
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/p2p/discover"
"github.com/ethereum/go-ethereum/rpc"
- "github.com/rcrowley/go-metrics"
)
// PrivateAdminAPI is the collection of administrative API methods exposed only
@@ -114,7 +114,7 @@ func (api *PrivateAdminAPI) PeerEvents(ctx context.Context) (*rpc.Subscription,
}
// StartRPC starts the HTTP RPC API server.
-func (api *PrivateAdminAPI) StartRPC(host *string, port *int, cors *string, apis *string) (bool, error) {
+func (api *PrivateAdminAPI) StartRPC(host *string, port *int, cors *string, apis *string, vhosts *string) (bool, error) {
api.node.lock.Lock()
defer api.node.lock.Unlock()
@@ -141,6 +141,14 @@ func (api *PrivateAdminAPI) StartRPC(host *string, port *int, cors *string, apis
}
}
+ allowedVHosts := api.node.config.HTTPVirtualHosts
+ if vhosts != nil {
+ allowedVHosts = nil
+ for _, vhost := range strings.Split(*vhosts, ",") {
+ allowedVHosts = append(allowedVHosts, strings.TrimSpace(vhost))
+ }
+ }
+
modules := api.node.httpWhitelist
if apis != nil {
modules = nil
@@ -149,7 +157,7 @@ func (api *PrivateAdminAPI) StartRPC(host *string, port *int, cors *string, apis
}
}
- if err := api.node.startHTTP(fmt.Sprintf("%s:%d", *host, *port), api.node.rpcAPIs, modules, allowedOrigins); err != nil {
+ if err := api.node.startHTTP(fmt.Sprintf("%s:%d", *host, *port), api.node.rpcAPIs, modules, allowedOrigins, allowedVHosts); err != nil {
return false, err
}
return true, nil
@@ -300,6 +308,11 @@ func (api *PublicDebugAPI) Metrics(raw bool) (map[string]interface{}, error) {
// Fill the counter with the metric details, formatting if requested
if raw {
switch metric := metric.(type) {
+ case metrics.Counter:
+ root[name] = map[string]interface{}{
+ "Overall": float64(metric.Count()),
+ }
+
case metrics.Meter:
root[name] = map[string]interface{}{
"AvgRate01Min": metric.Rate1(),
@@ -330,6 +343,11 @@ func (api *PublicDebugAPI) Metrics(raw bool) (map[string]interface{}, error) {
}
} else {
switch metric := metric.(type) {
+ case metrics.Counter:
+ root[name] = map[string]interface{}{
+ "Overall": float64(metric.Count()),
+ }
+
case metrics.Meter:
root[name] = map[string]interface{}{
"Avg01Min": format(metric.Rate1()*60, metric.Rate1()),
diff --git a/node/config.go b/node/config.go
index 7a0c1688e..dda24583e 100644
--- a/node/config.go
+++ b/node/config.go
@@ -105,6 +105,15 @@ type Config struct {
// useless for custom HTTP clients.
HTTPCors []string `toml:",omitempty"`
+ // HTTPVirtualHosts is the list of virtual hostnames which are allowed on incoming requests.
+ // This is by default {'localhost'}. Using this prevents attacks like
+ // DNS rebinding, which bypasses the same-origin policy by simply masquerading as being within the same
+ // origin. These attacks do not utilize CORS, since they are not cross-domain.
+ // By explicitly checking the Host header, the server will not allow requests
+ // made against the server with a malicious host domain.
+ // Requests using an IP address directly are not affected.
+ HTTPVirtualHosts []string `toml:",omitempty"`
+
// HTTPModules is a list of API modules to expose via the HTTP RPC interface.
// If the module list is empty, all RPC API endpoints designated public will be
// exposed.
@@ -137,7 +146,7 @@ type Config struct {
WSExposeAll bool `toml:",omitempty"`
// Logger is a custom logger to use with the p2p.Server.
- Logger log.Logger
+ Logger log.Logger `toml:",omitempty"`
}
// IPCEndpoint resolves an IPC endpoint based on a configured value, taking into
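A hedged sketch of setting the new field when embedding a node programmatically; the second hostname is an example only, "localhost" being the default:

    package main

    import (
        "log"

        "github.com/ethereum/go-ethereum/node"
    )

    func main() {
        cfg := node.DefaultConfig
        cfg.HTTPHost = "0.0.0.0"
        cfg.HTTPVirtualHosts = []string{"localhost", "geth.example.org"}

        stack, err := node.New(&cfg)
        if err != nil {
            log.Fatal(err)
        }
        if err := stack.Start(); err != nil {
            log.Fatal(err)
        }
        defer stack.Stop()
    }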
diff --git a/node/defaults.go b/node/defaults.go
index d4e148683..887560580 100644
--- a/node/defaults.go
+++ b/node/defaults.go
@@ -35,11 +35,12 @@ const (
// DefaultConfig contains reasonable default settings.
var DefaultConfig = Config{
- DataDir: DefaultDataDir(),
- HTTPPort: DefaultHTTPPort,
- HTTPModules: []string{"net", "web3"},
- WSPort: DefaultWSPort,
- WSModules: []string{"net", "web3"},
+ DataDir: DefaultDataDir(),
+ HTTPPort: DefaultHTTPPort,
+ HTTPModules: []string{"net", "web3"},
+ HTTPVirtualHosts: []string{"localhost"},
+ WSPort: DefaultWSPort,
+ WSModules: []string{"net", "web3"},
P2P: p2p.Config{
ListenAddr: ":30303",
MaxPeers: 25,
diff --git a/node/node.go b/node/node.go
index ff7258033..b02aecfad 100644
--- a/node/node.go
+++ b/node/node.go
@@ -263,7 +263,7 @@ func (n *Node) startRPC(services map[reflect.Type]Service) error {
n.stopInProc()
return err
}
- if err := n.startHTTP(n.httpEndpoint, apis, n.config.HTTPModules, n.config.HTTPCors); err != nil {
+ if err := n.startHTTP(n.httpEndpoint, apis, n.config.HTTPModules, n.config.HTTPCors, n.config.HTTPVirtualHosts); err != nil {
n.stopIPC()
n.stopInProc()
return err
@@ -287,7 +287,7 @@ func (n *Node) startInProc(apis []rpc.API) error {
if err := handler.RegisterName(api.Namespace, api.Service); err != nil {
return err
}
- n.log.Debug(fmt.Sprintf("InProc registered %T under '%s'", api.Service, api.Namespace))
+ n.log.Debug("InProc registered", "service", api.Service, "namespace", api.Namespace)
}
n.inprocHandler = handler
return nil
@@ -313,7 +313,7 @@ func (n *Node) startIPC(apis []rpc.API) error {
if err := handler.RegisterName(api.Namespace, api.Service); err != nil {
return err
}
- n.log.Debug(fmt.Sprintf("IPC registered %T under '%s'", api.Service, api.Namespace))
+ n.log.Debug("IPC registered", "service", api.Service, "namespace", api.Namespace)
}
// All APIs registered, start the IPC listener
var (
@@ -324,7 +324,7 @@ func (n *Node) startIPC(apis []rpc.API) error {
return err
}
go func() {
- n.log.Info(fmt.Sprintf("IPC endpoint opened: %s", n.ipcEndpoint))
+ n.log.Info("IPC endpoint opened", "url", n.ipcEndpoint)
for {
conn, err := listener.Accept()
@@ -337,7 +337,7 @@ func (n *Node) startIPC(apis []rpc.API) error {
return
}
// Not closed, just some error; report and continue
- n.log.Error(fmt.Sprintf("IPC accept failed: %v", err))
+ n.log.Error("IPC accept failed", "err", err)
continue
}
go handler.ServeCodec(rpc.NewJSONCodec(conn), rpc.OptionMethodInvocation|rpc.OptionSubscriptions)
@@ -356,7 +356,7 @@ func (n *Node) stopIPC() {
n.ipcListener.Close()
n.ipcListener = nil
- n.log.Info(fmt.Sprintf("IPC endpoint closed: %s", n.ipcEndpoint))
+ n.log.Info("IPC endpoint closed", "endpoint", n.ipcEndpoint)
}
if n.ipcHandler != nil {
n.ipcHandler.Stop()
@@ -365,7 +365,7 @@ func (n *Node) stopIPC() {
}
// startHTTP initializes and starts the HTTP RPC endpoint.
-func (n *Node) startHTTP(endpoint string, apis []rpc.API, modules []string, cors []string) error {
+func (n *Node) startHTTP(endpoint string, apis []rpc.API, modules []string, cors []string, vhosts []string) error {
// Short circuit if the HTTP endpoint isn't being exposed
if endpoint == "" {
return nil
@@ -382,7 +382,7 @@ func (n *Node) startHTTP(endpoint string, apis []rpc.API, modules []string, cors
if err := handler.RegisterName(api.Namespace, api.Service); err != nil {
return err
}
- n.log.Debug(fmt.Sprintf("HTTP registered %T under '%s'", api.Service, api.Namespace))
+ n.log.Debug("HTTP registered", "service", api.Service, "namespace", api.Namespace)
}
}
// All APIs registered, start the HTTP listener
@@ -393,9 +393,8 @@ func (n *Node) startHTTP(endpoint string, apis []rpc.API, modules []string, cors
if listener, err = net.Listen("tcp", endpoint); err != nil {
return err
}
- go rpc.NewHTTPServer(cors, handler).Serve(listener)
- n.log.Info(fmt.Sprintf("HTTP endpoint opened: http://%s", endpoint))
-
+ go rpc.NewHTTPServer(cors, vhosts, handler).Serve(listener)
+ n.log.Info("HTTP endpoint opened", "url", fmt.Sprintf("http://%s", endpoint), "cors", strings.Join(cors, ","), "vhosts", strings.Join(vhosts, ","))
// All listeners booted successfully
n.httpEndpoint = endpoint
n.httpListener = listener
@@ -410,7 +409,7 @@ func (n *Node) stopHTTP() {
n.httpListener.Close()
n.httpListener = nil
- n.log.Info(fmt.Sprintf("HTTP endpoint closed: http://%s", n.httpEndpoint))
+ n.log.Info("HTTP endpoint closed", "url", fmt.Sprintf("http://%s", n.httpEndpoint))
}
if n.httpHandler != nil {
n.httpHandler.Stop()
@@ -436,7 +435,7 @@ func (n *Node) startWS(endpoint string, apis []rpc.API, modules []string, wsOrig
if err := handler.RegisterName(api.Namespace, api.Service); err != nil {
return err
}
- n.log.Debug(fmt.Sprintf("WebSocket registered %T under '%s'", api.Service, api.Namespace))
+ n.log.Debug("WebSocket registered", "service", api.Service, "namespace", api.Namespace)
}
}
// All APIs registered, start the HTTP listener
@@ -448,7 +447,7 @@ func (n *Node) startWS(endpoint string, apis []rpc.API, modules []string, wsOrig
return err
}
go rpc.NewWSServer(wsOrigins, handler).Serve(listener)
- n.log.Info(fmt.Sprintf("WebSocket endpoint opened: ws://%s", listener.Addr()))
+ n.log.Info("WebSocket endpoint opened", "url", fmt.Sprintf("ws://%s", listener.Addr()))
// All listeners booted successfully
n.wsEndpoint = endpoint
@@ -464,7 +463,7 @@ func (n *Node) stopWS() {
n.wsListener.Close()
n.wsListener = nil
- n.log.Info(fmt.Sprintf("WebSocket endpoint closed: ws://%s", n.wsEndpoint))
+ n.log.Info("WebSocket endpoint closed", "url", fmt.Sprintf("ws://%s", n.wsEndpoint))
}
if n.wsHandler != nil {
n.wsHandler.Stop()
diff --git a/p2p/dial.go b/p2p/dial.go
index f5ff2c211..d8feceb9f 100644
--- a/p2p/dial.go
+++ b/p2p/dial.go
@@ -154,6 +154,9 @@ func (s *dialstate) addStatic(n *discover.Node) {
func (s *dialstate) removeStatic(n *discover.Node) {
// This removes a task so future attempts to connect will not be made.
delete(s.static, n.ID)
+ // This also removes the previous dial timestamp so that an application
+ // can force the server to reconnect to the chosen peer immediately.
+ s.hist.remove(n.ID)
}
func (s *dialstate) newTasks(nRunning int, peers map[discover.NodeID]*Peer, now time.Time) []task {
@@ -390,6 +393,16 @@ func (h dialHistory) min() pastDial {
}
func (h *dialHistory) add(id discover.NodeID, exp time.Time) {
heap.Push(h, pastDial{id, exp})
+
+}
+func (h *dialHistory) remove(id discover.NodeID) bool {
+ for i, v := range *h {
+ if v.id == id {
+ heap.Remove(h, i)
+ return true
+ }
+ }
+ return false
}
func (h dialHistory) contains(id discover.NodeID) bool {
for _, v := range h {
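The practical effect of clearing the dial history is that an application can bounce a static peer and have the server redial it at once instead of waiting out the 30-second history window; a hedged sketch using the public server API:

    package peerutil

    import (
        "github.com/ethereum/go-ethereum/p2p"
        "github.com/ethereum/go-ethereum/p2p/discover"
    )

    // RedialStatic drops and re-adds a static peer. RemovePeer reaches
    // dialstate.removeStatic, which now also clears the node's dial-history
    // entry, so AddPeer schedules a fresh dial on the next task pass.
    func RedialStatic(srv *p2p.Server, n *discover.Node) {
        srv.RemovePeer(n)
        srv.AddPeer(n)
    }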
diff --git a/p2p/dial_test.go b/p2p/dial_test.go
index ad18ef9ab..2a7941fc6 100644
--- a/p2p/dial_test.go
+++ b/p2p/dial_test.go
@@ -515,6 +515,50 @@ func TestDialStateStaticDial(t *testing.T) {
})
}
+// This test checks that static peers will be redialed immediately if they were re-added to a static list.
+func TestDialStaticAfterReset(t *testing.T) {
+ wantStatic := []*discover.Node{
+ {ID: uintID(1)},
+ {ID: uintID(2)},
+ }
+
+ rounds := []round{
+ // Static dials are launched for the nodes that aren't yet connected.
+ {
+ peers: nil,
+ new: []task{
+ &dialTask{flags: staticDialedConn, dest: &discover.Node{ID: uintID(1)}},
+ &dialTask{flags: staticDialedConn, dest: &discover.Node{ID: uintID(2)}},
+ },
+ },
+ // No new dial tasks, all peers are connected.
+ {
+ peers: []*Peer{
+ {rw: &conn{flags: staticDialedConn, id: uintID(1)}},
+ {rw: &conn{flags: staticDialedConn, id: uintID(2)}},
+ },
+ done: []task{
+ &dialTask{flags: staticDialedConn, dest: &discover.Node{ID: uintID(1)}},
+ &dialTask{flags: staticDialedConn, dest: &discover.Node{ID: uintID(2)}},
+ },
+ new: []task{
+ &waitExpireTask{Duration: 30 * time.Second},
+ },
+ },
+ }
+ dTest := dialtest{
+ init: newDialState(wantStatic, nil, fakeTable{}, 0, nil),
+ rounds: rounds,
+ }
+ runDialTest(t, dTest)
+ for _, n := range wantStatic {
+ dTest.init.removeStatic(n)
+ dTest.init.addStatic(n)
+ }
+ // Without removing the nodes first, they would still be considered recently dialed.
+ runDialTest(t, dTest)
+}
+
// This test checks that past dials are not retried for some time.
func TestDialStateCache(t *testing.T) {
wantStatic := []*discover.Node{
diff --git a/p2p/discover/database.go b/p2p/discover/database.go
index b136609f2..6f98de9b4 100644
--- a/p2p/discover/database.go
+++ b/p2p/discover/database.go
@@ -257,7 +257,7 @@ func (db *nodeDB) expireNodes() error {
}
// Skip the node if not expired yet (and not self)
if !bytes.Equal(id[:], db.self[:]) {
- if seen := db.lastPong(id); seen.After(threshold) {
+ if seen := db.bondTime(id); seen.After(threshold) {
continue
}
}
@@ -278,13 +278,18 @@ func (db *nodeDB) updateLastPing(id NodeID, instance time.Time) error {
return db.storeInt64(makeKey(id, nodeDBDiscoverPing), instance.Unix())
}
-// lastPong retrieves the time of the last successful contact from remote node.
-func (db *nodeDB) lastPong(id NodeID) time.Time {
+// bondTime retrieves the time of the last successful pong from remote node.
+func (db *nodeDB) bondTime(id NodeID) time.Time {
return time.Unix(db.fetchInt64(makeKey(id, nodeDBDiscoverPong)), 0)
}
-// updateLastPong updates the last time a remote node successfully contacted.
-func (db *nodeDB) updateLastPong(id NodeID, instance time.Time) error {
+// hasBond reports whether the given node is considered bonded.
+func (db *nodeDB) hasBond(id NodeID) bool {
+ return time.Since(db.bondTime(id)) < nodeDBNodeExpiration
+}
+
+// updateBondTime updates the last pong time of a node.
+func (db *nodeDB) updateBondTime(id NodeID, instance time.Time) error {
return db.storeInt64(makeKey(id, nodeDBDiscoverPong), instance.Unix())
}
@@ -327,7 +332,7 @@ seek:
if n.ID == db.self {
continue seek
}
- if now.Sub(db.lastPong(n.ID)) > maxAge {
+ if now.Sub(db.bondTime(n.ID)) > maxAge {
continue seek
}
for i := range nodes {
diff --git a/p2p/discover/database_test.go b/p2p/discover/database_test.go
index be972fd2c..c4fa44d09 100644
--- a/p2p/discover/database_test.go
+++ b/p2p/discover/database_test.go
@@ -125,13 +125,13 @@ func TestNodeDBFetchStore(t *testing.T) {
t.Errorf("ping: value mismatch: have %v, want %v", stored, inst)
}
// Check fetch/store operations on a node pong object
- if stored := db.lastPong(node.ID); stored.Unix() != 0 {
+ if stored := db.bondTime(node.ID); stored.Unix() != 0 {
t.Errorf("pong: non-existing object: %v", stored)
}
- if err := db.updateLastPong(node.ID, inst); err != nil {
+ if err := db.updateBondTime(node.ID, inst); err != nil {
t.Errorf("pong: failed to update: %v", err)
}
- if stored := db.lastPong(node.ID); stored.Unix() != inst.Unix() {
+ if stored := db.bondTime(node.ID); stored.Unix() != inst.Unix() {
t.Errorf("pong: value mismatch: have %v, want %v", stored, inst)
}
// Check fetch/store operations on a node findnode-failure object
@@ -224,8 +224,8 @@ func TestNodeDBSeedQuery(t *testing.T) {
if err := db.updateNode(seed.node); err != nil {
t.Fatalf("node %d: failed to insert: %v", i, err)
}
- if err := db.updateLastPong(seed.node.ID, seed.pong); err != nil {
- t.Fatalf("node %d: failed to insert lastPong: %v", i, err)
+ if err := db.updateBondTime(seed.node.ID, seed.pong); err != nil {
+ t.Fatalf("node %d: failed to insert bondTime: %v", i, err)
}
}
@@ -332,8 +332,8 @@ func TestNodeDBExpiration(t *testing.T) {
if err := db.updateNode(seed.node); err != nil {
t.Fatalf("node %d: failed to insert: %v", i, err)
}
- if err := db.updateLastPong(seed.node.ID, seed.pong); err != nil {
- t.Fatalf("node %d: failed to update pong: %v", i, err)
+ if err := db.updateBondTime(seed.node.ID, seed.pong); err != nil {
+ t.Fatalf("node %d: failed to update bondTime: %v", i, err)
}
}
// Expire some of them, and check the rest
@@ -365,8 +365,8 @@ func TestNodeDBSelfExpiration(t *testing.T) {
if err := db.updateNode(seed.node); err != nil {
t.Fatalf("node %d: failed to insert: %v", i, err)
}
- if err := db.updateLastPong(seed.node.ID, seed.pong); err != nil {
- t.Fatalf("node %d: failed to update pong: %v", i, err)
+ if err := db.updateBondTime(seed.node.ID, seed.pong); err != nil {
+ t.Fatalf("node %d: failed to update bondTime: %v", i, err)
}
}
// Expire the nodes and make sure self has been evacuated too
diff --git a/p2p/discover/node.go b/p2p/discover/node.go
index fc928a91a..3b0c84115 100644
--- a/p2p/discover/node.go
+++ b/p2p/discover/node.go
@@ -29,6 +29,7 @@ import (
"regexp"
"strconv"
"strings"
+ "time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
@@ -51,9 +52,8 @@ type Node struct {
// with ID.
sha common.Hash
- // whether this node is currently being pinged in order to replace
- // it in a bucket
- contested bool
+ // Time when the node was added to the table.
+ addedAt time.Time
}
// NewNode creates a new node. It is mostly meant to be used for
diff --git a/p2p/discover/table.go b/p2p/discover/table.go
index ec4eb94ad..6509326e6 100644
--- a/p2p/discover/table.go
+++ b/p2p/discover/table.go
@@ -23,10 +23,11 @@
package discover
import (
- "crypto/rand"
+ crand "crypto/rand"
"encoding/binary"
"errors"
"fmt"
+ mrand "math/rand"
"net"
"sort"
"sync"
@@ -35,29 +36,45 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/p2p/netutil"
)
const (
- alpha = 3 // Kademlia concurrency factor
- bucketSize = 16 // Kademlia bucket size
- hashBits = len(common.Hash{}) * 8
- nBuckets = hashBits + 1 // Number of buckets
-
- maxBondingPingPongs = 16
- maxFindnodeFailures = 5
-
- autoRefreshInterval = 1 * time.Hour
- seedCount = 30
- seedMaxAge = 5 * 24 * time.Hour
+ alpha = 3 // Kademlia concurrency factor
+ bucketSize = 16 // Kademlia bucket size
+ maxReplacements = 10 // Size of per-bucket replacement list
+
+ // We keep buckets for the upper 1/15 of distances because
+ // it's very unlikely we'll ever encounter a node that's closer.
+ hashBits = len(common.Hash{}) * 8
+ nBuckets = hashBits / 15 // Number of buckets
+ bucketMinDistance = hashBits - nBuckets // Log distance of closest bucket
+
+ // IP address limits.
+ bucketIPLimit, bucketSubnet = 2, 24 // at most 2 addresses from the same /24
+ tableIPLimit, tableSubnet = 10, 24
+
+ maxBondingPingPongs = 16 // Limit on the number of concurrent ping/pong interactions
+ maxFindnodeFailures = 5 // Nodes exceeding this limit are dropped
+
+ refreshInterval = 30 * time.Minute
+ revalidateInterval = 10 * time.Second
+ copyNodesInterval = 30 * time.Second
+ seedMinTableTime = 5 * time.Minute
+ seedCount = 30
+ seedMaxAge = 5 * 24 * time.Hour
)
type Table struct {
- mutex sync.Mutex // protects buckets, their content, and nursery
+ mutex sync.Mutex // protects buckets, bucket content, nursery, rand
buckets [nBuckets]*bucket // index of known nodes by distance
nursery []*Node // bootstrap nodes
- db *nodeDB // database of known nodes
+ rand *mrand.Rand // source of randomness, periodically reseeded
+ ips netutil.DistinctNetSet
+ db *nodeDB // database of known nodes
refreshReq chan chan struct{}
+ initDone chan struct{}
closeReq chan struct{}
closed chan struct{}
@@ -89,9 +106,13 @@ type transport interface {
// bucket contains nodes, ordered by their last activity. the entry
// that was most recently active is the first element in entries.
-type bucket struct{ entries []*Node }
+type bucket struct {
+ entries []*Node // live entries, sorted by time of last contact
+ replacements []*Node // recently seen nodes to be used if revalidation fails
+ ips netutil.DistinctNetSet
+}
-func newTable(t transport, ourID NodeID, ourAddr *net.UDPAddr, nodeDBPath string) (*Table, error) {
+func newTable(t transport, ourID NodeID, ourAddr *net.UDPAddr, nodeDBPath string, bootnodes []*Node) (*Table, error) {
// If no node database was given, use an in-memory one
db, err := newNodeDB(nodeDBPath, Version, ourID)
if err != nil {
@@ -104,19 +125,42 @@ func newTable(t transport, ourID NodeID, ourAddr *net.UDPAddr, nodeDBPath string
bonding: make(map[NodeID]*bondproc),
bondslots: make(chan struct{}, maxBondingPingPongs),
refreshReq: make(chan chan struct{}),
+ initDone: make(chan struct{}),
closeReq: make(chan struct{}),
closed: make(chan struct{}),
+ rand: mrand.New(mrand.NewSource(0)),
+ ips: netutil.DistinctNetSet{Subnet: tableSubnet, Limit: tableIPLimit},
+ }
+ if err := tab.setFallbackNodes(bootnodes); err != nil {
+ return nil, err
}
for i := 0; i < cap(tab.bondslots); i++ {
tab.bondslots <- struct{}{}
}
for i := range tab.buckets {
- tab.buckets[i] = new(bucket)
+ tab.buckets[i] = &bucket{
+ ips: netutil.DistinctNetSet{Subnet: bucketSubnet, Limit: bucketIPLimit},
+ }
}
- go tab.refreshLoop()
+ tab.seedRand()
+ tab.loadSeedNodes(false)
+ // Start the background expiration goroutine after loading seeds so that the search for
+ // seed nodes also considers older nodes that would otherwise be removed by the
+ // expiration.
+ tab.db.ensureExpirer()
+ go tab.loop()
return tab, nil
}
+func (tab *Table) seedRand() {
+ var b [8]byte
+ crand.Read(b[:])
+
+ tab.mutex.Lock()
+ tab.rand.Seed(int64(binary.BigEndian.Uint64(b[:])))
+ tab.mutex.Unlock()
+}
+
// Self returns the local node.
// The returned node should not be modified by the caller.
func (tab *Table) Self() *Node {
@@ -127,9 +171,12 @@ func (tab *Table) Self() *Node {
// table. It will not write the same node more than once. The nodes in
// the slice are copies and can be modified by the caller.
func (tab *Table) ReadRandomNodes(buf []*Node) (n int) {
+ if !tab.isInitDone() {
+ return 0
+ }
tab.mutex.Lock()
defer tab.mutex.Unlock()
- // TODO: tree-based buckets would help here
+
// Find all non-empty buckets and get a fresh slice of their entries.
var buckets [][]*Node
for _, b := range tab.buckets {
@@ -141,8 +188,8 @@ func (tab *Table) ReadRandomNodes(buf []*Node) (n int) {
return 0
}
// Shuffle the buckets.
- for i := uint32(len(buckets)) - 1; i > 0; i-- {
- j := randUint(i)
+ for i := len(buckets) - 1; i > 0; i-- {
+ j := tab.rand.Intn(len(buckets))
buckets[i], buckets[j] = buckets[j], buckets[i]
}
// Move head of each bucket into buf, removing buckets that become empty.
@@ -161,15 +208,6 @@ func (tab *Table) ReadRandomNodes(buf []*Node) (n int) {
return i + 1
}
-func randUint(max uint32) uint32 {
- if max == 0 {
- return 0
- }
- var b [4]byte
- rand.Read(b[:])
- return binary.BigEndian.Uint32(b[:]) % max
-}
-
// Close terminates the network listener and flushes the node database.
func (tab *Table) Close() {
select {
@@ -180,16 +218,15 @@ func (tab *Table) Close() {
}
}
-// SetFallbackNodes sets the initial points of contact. These nodes
+// setFallbackNodes sets the initial points of contact. These nodes
// are used to connect to the network if the table is empty and there
// are no known nodes in the database.
-func (tab *Table) SetFallbackNodes(nodes []*Node) error {
+func (tab *Table) setFallbackNodes(nodes []*Node) error {
for _, n := range nodes {
if err := n.validateComplete(); err != nil {
return fmt.Errorf("bad bootstrap/fallback node %q (%v)", n, err)
}
}
- tab.mutex.Lock()
tab.nursery = make([]*Node, 0, len(nodes))
for _, n := range nodes {
cpy := *n
@@ -198,11 +235,19 @@ func (tab *Table) SetFallbackNodes(nodes []*Node) error {
cpy.sha = crypto.Keccak256Hash(n.ID[:])
tab.nursery = append(tab.nursery, &cpy)
}
- tab.mutex.Unlock()
- tab.refresh()
return nil
}
+// isInitDone returns whether the table's initial seeding procedure has completed.
+func (tab *Table) isInitDone() bool {
+ select {
+ case <-tab.initDone:
+ return true
+ default:
+ return false
+ }
+}
+
// Resolve searches for a specific node with the given ID.
// It returns nil if the node could not be found.
func (tab *Table) Resolve(targetID NodeID) *Node {
@@ -314,33 +359,49 @@ func (tab *Table) refresh() <-chan struct{} {
return done
}
-// refreshLoop schedules doRefresh runs and coordinates shutdown.
-func (tab *Table) refreshLoop() {
+// loop schedules refresh, revalidate runs and coordinates shutdown.
+func (tab *Table) loop() {
var (
- timer = time.NewTicker(autoRefreshInterval)
- waiting []chan struct{} // accumulates waiting callers while doRefresh runs
- done chan struct{} // where doRefresh reports completion
+ revalidate = time.NewTimer(tab.nextRevalidateTime())
+ refresh = time.NewTicker(refreshInterval)
+ copyNodes = time.NewTicker(copyNodesInterval)
+ revalidateDone = make(chan struct{})
+ refreshDone = make(chan struct{}) // where doRefresh reports completion
+ waiting = []chan struct{}{tab.initDone} // holds waiting callers while doRefresh runs
)
+ defer refresh.Stop()
+ defer revalidate.Stop()
+ defer copyNodes.Stop()
+
+ // Start initial refresh.
+ go tab.doRefresh(refreshDone)
+
loop:
for {
select {
- case <-timer.C:
- if done == nil {
- done = make(chan struct{})
- go tab.doRefresh(done)
+ case <-refresh.C:
+ tab.seedRand()
+ if refreshDone == nil {
+ refreshDone = make(chan struct{})
+ go tab.doRefresh(refreshDone)
}
case req := <-tab.refreshReq:
waiting = append(waiting, req)
- if done == nil {
- done = make(chan struct{})
- go tab.doRefresh(done)
+ if refreshDone == nil {
+ refreshDone = make(chan struct{})
+ go tab.doRefresh(refreshDone)
}
- case <-done:
+ case <-refreshDone:
for _, ch := range waiting {
close(ch)
}
- waiting = nil
- done = nil
+ waiting, refreshDone = nil, nil
+ case <-revalidate.C:
+ go tab.doRevalidate(revalidateDone)
+ case <-revalidateDone:
+ revalidate.Reset(tab.nextRevalidateTime())
+ case <-copyNodes.C:
+ go tab.copyBondedNodes()
case <-tab.closeReq:
break loop
}
@@ -349,8 +410,8 @@ loop:
if tab.net != nil {
tab.net.close()
}
- if done != nil {
- <-done
+ if refreshDone != nil {
+ <-refreshDone
}
for _, ch := range waiting {
close(ch)
@@ -365,38 +426,109 @@ loop:
func (tab *Table) doRefresh(done chan struct{}) {
defer close(done)
+ // Load nodes from the database and insert
+ // them. This should yield a few previously seen nodes that are
+ // (hopefully) still alive.
+ tab.loadSeedNodes(true)
+
+ // Run self lookup to discover new neighbor nodes.
+ tab.lookup(tab.self.ID, false)
+
// The Kademlia paper specifies that the bucket refresh should
// perform a lookup in the least recently used bucket. We cannot
// adhere to this because the findnode target is a 512bit value
// (not hash-sized) and it is not easily possible to generate a
// sha3 preimage that falls into a chosen bucket.
- // We perform a lookup with a random target instead.
- var target NodeID
- rand.Read(target[:])
- result := tab.lookup(target, false)
- if len(result) > 0 {
- return
+ // We perform a few lookups with a random target instead.
+ for i := 0; i < 3; i++ {
+ var target NodeID
+ crand.Read(target[:])
+ tab.lookup(target, false)
}
+}
- // The table is empty. Load nodes from the database and insert
- // them. This should yield a few previously seen nodes that are
- // (hopefully) still alive.
+func (tab *Table) loadSeedNodes(bond bool) {
seeds := tab.db.querySeeds(seedCount, seedMaxAge)
- seeds = tab.bondall(append(seeds, tab.nursery...))
+ seeds = append(seeds, tab.nursery...)
+ if bond {
+ seeds = tab.bondall(seeds)
+ }
+ for i := range seeds {
+ seed := seeds[i]
+ age := log.Lazy{Fn: func() interface{} { return time.Since(tab.db.bondTime(seed.ID)) }}
+ log.Debug("Found seed node in database", "id", seed.ID, "addr", seed.addr(), "age", age)
+ tab.add(seed)
+ }
+}
- if len(seeds) == 0 {
- log.Debug("No discv4 seed nodes found")
+// doRevalidate checks that the last node in a random bucket is still live
+// and replaces or deletes the node if it isn't.
+func (tab *Table) doRevalidate(done chan<- struct{}) {
+ defer func() { done <- struct{}{} }()
+
+ last, bi := tab.nodeToRevalidate()
+ if last == nil {
+ // No non-empty bucket found.
+ return
+ }
+
+ // Ping the selected node and wait for a pong.
+ err := tab.ping(last.ID, last.addr())
+
+ tab.mutex.Lock()
+ defer tab.mutex.Unlock()
+ b := tab.buckets[bi]
+ if err == nil {
+ // The node responded, move it to the front.
+ log.Debug("Revalidated node", "b", bi, "id", last.ID)
+ b.bump(last)
+ return
}
- for _, n := range seeds {
- age := log.Lazy{Fn: func() time.Duration { return time.Since(tab.db.lastPong(n.ID)) }}
- log.Trace("Found seed node in database", "id", n.ID, "addr", n.addr(), "age", age)
+ // No reply received, pick a replacement or delete the node if there aren't
+ // any replacements.
+ if r := tab.replace(b, last); r != nil {
+ log.Debug("Replaced dead node", "b", bi, "id", last.ID, "ip", last.IP, "r", r.ID, "rip", r.IP)
+ } else {
+ log.Debug("Removed dead node", "b", bi, "id", last.ID, "ip", last.IP)
}
+}
+
+// nodeToRevalidate returns the last node in a random, non-empty bucket.
+func (tab *Table) nodeToRevalidate() (n *Node, bi int) {
tab.mutex.Lock()
- tab.stuff(seeds)
- tab.mutex.Unlock()
+ defer tab.mutex.Unlock()
- // Finally, do a self lookup to fill up the buckets.
- tab.lookup(tab.self.ID, false)
+ for _, bi = range tab.rand.Perm(len(tab.buckets)) {
+ b := tab.buckets[bi]
+ if len(b.entries) > 0 {
+ last := b.entries[len(b.entries)-1]
+ return last, bi
+ }
+ }
+ return nil, 0
+}
+
+func (tab *Table) nextRevalidateTime() time.Duration {
+ tab.mutex.Lock()
+ defer tab.mutex.Unlock()
+
+ return time.Duration(tab.rand.Int63n(int64(revalidateInterval)))
+}
+
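
nextRevalidateTime draws a fresh, uniformly random delay before each liveness check, so revalidation of bucket tails is spread out rather than running on a fixed cadence. A standalone sketch of the timer/reset pattern used by the loop; the interval constant is an illustrative stand-in for the real one defined elsewhere in table.go:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // Illustrative stand-in for the real revalidateInterval constant.
    const revalidateInterval = 100 * time.Millisecond

    // nextDelay returns a uniformly random duration in [0, revalidateInterval),
    // mirroring nextRevalidateTime.
    func nextDelay() time.Duration {
        return time.Duration(rand.Int63n(int64(revalidateInterval)))
    }

    func main() {
        revalidate := time.NewTimer(nextDelay())
        defer revalidate.Stop()
        done := make(chan struct{}, 1)

        for i := 0; i < 4; i++ {
            select {
            case <-revalidate.C:
                go func() {
                    // ... ping the last node of a random, non-empty bucket here ...
                    done <- struct{}{}
                }()
            case <-done:
                // Re-arm with a fresh random delay once the check has finished.
                revalidate.Reset(nextDelay())
                fmt.Println("revalidation round finished")
            }
        }
    }
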
+// copyBondedNodes adds nodes from the table to the database if they have been in the table
+// longer than seedMinTableTime.
+func (tab *Table) copyBondedNodes() {
+ tab.mutex.Lock()
+ defer tab.mutex.Unlock()
+
+ now := time.Now()
+ for _, b := range tab.buckets {
+ for _, n := range b.entries {
+ if now.Sub(n.addedAt) >= seedMinTableTime {
+ tab.db.updateNode(n)
+ }
+ }
+ }
}
// closest returns the n nodes in the table that are closest to the
@@ -459,15 +591,14 @@ func (tab *Table) bond(pinged bool, id NodeID, addr *net.UDPAddr, tcpPort uint16
if id == tab.self.ID {
return nil, errors.New("is self")
}
- // Retrieve a previously known node and any recent findnode failures
- node, fails := tab.db.node(id), 0
- if node != nil {
- fails = tab.db.findFails(id)
+ if pinged && !tab.isInitDone() {
+ return nil, errors.New("still initializing")
}
- // If the node is unknown (non-bonded) or failed (remotely unknown), bond from scratch
+ // Start bonding if we haven't seen this node for a while or if it failed findnode too often.
+ node, fails := tab.db.node(id), tab.db.findFails(id)
+ age := time.Since(tab.db.bondTime(id))
var result error
- age := time.Since(tab.db.lastPong(id))
- if node == nil || fails > 0 || age > nodeDBNodeExpiration {
+ if fails > 0 || age > nodeDBNodeExpiration {
log.Trace("Starting bonding ping/pong", "id", id, "known", node != nil, "failcount", fails, "age", age)
tab.bondmu.Lock()
@@ -494,10 +625,10 @@ func (tab *Table) bond(pinged bool, id NodeID, addr *net.UDPAddr, tcpPort uint16
node = w.n
}
}
+ // Add the node to the table even if the bonding ping/pong
+ // fails. It will be replaced quickly if it continues to be
+ // unresponsive.
if node != nil {
- // Add the node to the table even if the bonding ping/pong
- // fails. It will be relaced quickly if it continues to be
- // unresponsive.
tab.add(node)
tab.db.updateFindFails(id, 0)
}
@@ -522,7 +653,6 @@ func (tab *Table) pingpong(w *bondproc, pinged bool, id NodeID, addr *net.UDPAdd
}
// Bonding succeeded, update the node database.
w.n = NewNode(id, addr.IP, uint16(addr.Port), tcpPort)
- tab.db.updateNode(w.n)
close(w.done)
}
@@ -533,17 +663,19 @@ func (tab *Table) ping(id NodeID, addr *net.UDPAddr) error {
if err := tab.net.ping(id, addr); err != nil {
return err
}
- tab.db.updateLastPong(id, time.Now())
-
- // Start the background expiration goroutine after the first
- // successful communication. Subsequent calls have no effect if it
- // is already running. We do this here instead of somewhere else
- // so that the search for seed nodes also considers older nodes
- // that would otherwise be removed by the expiration.
- tab.db.ensureExpirer()
+ tab.db.updateBondTime(id, time.Now())
return nil
}
+// bucket returns the bucket for the given node ID hash.
+func (tab *Table) bucket(sha common.Hash) *bucket {
+ d := logdist(tab.self.sha, sha)
+ if d <= bucketMinDistance {
+ return tab.buckets[0]
+ }
+ return tab.buckets[d-bucketMinDistance-1]
+}
+
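
The new bucket helper maps a node's logarithmic distance to a bucket index: every distance up to bucketMinDistance is collapsed into bucket 0, while the largest (and, for random node IDs, far more common) distances each map to their own bucket. A hedged illustration of that mapping, assuming the usual constants of 256-bit hashes and 17 buckets; the real values are defined elsewhere in table.go and may differ:

    package main

    import "fmt"

    // Assumed values, for illustration only.
    const (
        hashBits          = 256
        nBuckets          = 17
        bucketMinDistance = hashBits - nBuckets // 239
    )

    // bucketIndex mirrors Table.bucket above.
    func bucketIndex(logdist int) int {
        if logdist <= bucketMinDistance {
            return 0
        }
        return logdist - bucketMinDistance - 1
    }

    func main() {
        for _, d := range []int{10, 239, 240, 250, 256} {
            fmt.Printf("logdist %3d -> bucket %d\n", d, bucketIndex(d))
        }
        // logdist  10 -> bucket 0
        // logdist 239 -> bucket 0
        // logdist 240 -> bucket 0
        // logdist 250 -> bucket 10
        // logdist 256 -> bucket 16
    }
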
// add attempts to add the given node to its corresponding bucket. If the
// bucket has space available, adding the node succeeds immediately.
// Otherwise, the node is added if the least recently active node in
@@ -551,57 +683,29 @@ func (tab *Table) ping(id NodeID, addr *net.UDPAddr) error {
//
// The caller must not hold tab.mutex.
func (tab *Table) add(new *Node) {
- b := tab.buckets[logdist(tab.self.sha, new.sha)]
tab.mutex.Lock()
defer tab.mutex.Unlock()
- if b.bump(new) {
- return
- }
- var oldest *Node
- if len(b.entries) == bucketSize {
- oldest = b.entries[bucketSize-1]
- if oldest.contested {
- // The node is already being replaced, don't attempt
- // to replace it.
- return
- }
- oldest.contested = true
- // Let go of the mutex so other goroutines can access
- // the table while we ping the least recently active node.
- tab.mutex.Unlock()
- err := tab.ping(oldest.ID, oldest.addr())
- tab.mutex.Lock()
- oldest.contested = false
- if err == nil {
- // The node responded, don't replace it.
- return
- }
- }
- added := b.replace(new, oldest)
- if added && tab.nodeAddedHook != nil {
- tab.nodeAddedHook(new)
+
+ b := tab.bucket(new.sha)
+ if !tab.bumpOrAdd(b, new) {
+ // Node is not in table. Add it to the replacement list.
+ tab.addReplacement(b, new)
}
}
// stuff adds nodes to the end of their corresponding bucket
-// if the bucket is not full. The caller must hold tab.mutex.
+// if the bucket is not full. The caller must not hold tab.mutex.
func (tab *Table) stuff(nodes []*Node) {
-outer:
+ tab.mutex.Lock()
+ defer tab.mutex.Unlock()
+
for _, n := range nodes {
if n.ID == tab.self.ID {
continue // don't add self
}
- bucket := tab.buckets[logdist(tab.self.sha, n.sha)]
- for i := range bucket.entries {
- if bucket.entries[i].ID == n.ID {
- continue outer // already in bucket
- }
- }
- if len(bucket.entries) < bucketSize {
- bucket.entries = append(bucket.entries, n)
- if tab.nodeAddedHook != nil {
- tab.nodeAddedHook(n)
- }
+ b := tab.bucket(n.sha)
+ if len(b.entries) < bucketSize {
+ tab.bumpOrAdd(b, n)
}
}
}
@@ -611,36 +715,72 @@ outer:
func (tab *Table) delete(node *Node) {
tab.mutex.Lock()
defer tab.mutex.Unlock()
- bucket := tab.buckets[logdist(tab.self.sha, node.sha)]
- for i := range bucket.entries {
- if bucket.entries[i].ID == node.ID {
- bucket.entries = append(bucket.entries[:i], bucket.entries[i+1:]...)
- return
- }
- }
+
+ tab.deleteInBucket(tab.bucket(node.sha), node)
}
-func (b *bucket) replace(n *Node, last *Node) bool {
- // Don't add if b already contains n.
- for i := range b.entries {
- if b.entries[i].ID == n.ID {
- return false
- }
+func (tab *Table) addIP(b *bucket, ip net.IP) bool {
+ if netutil.IsLAN(ip) {
+ return true
}
- // Replace last if it is still the last entry or just add n if b
- // isn't full. If is no longer the last entry, it has either been
- // replaced with someone else or became active.
- if len(b.entries) == bucketSize && (last == nil || b.entries[bucketSize-1].ID != last.ID) {
+ if !tab.ips.Add(ip) {
+ log.Debug("IP exceeds table limit", "ip", ip)
return false
}
- if len(b.entries) < bucketSize {
- b.entries = append(b.entries, nil)
+ if !b.ips.Add(ip) {
+ log.Debug("IP exceeds bucket limit", "ip", ip)
+ tab.ips.Remove(ip)
+ return false
}
- copy(b.entries[1:], b.entries)
- b.entries[0] = n
return true
}
+func (tab *Table) removeIP(b *bucket, ip net.IP) {
+ if netutil.IsLAN(ip) {
+ return
+ }
+ tab.ips.Remove(ip)
+ b.ips.Remove(ip)
+}
+
+func (tab *Table) addReplacement(b *bucket, n *Node) {
+ for _, e := range b.replacements {
+ if e.ID == n.ID {
+ return // already in list
+ }
+ }
+ if !tab.addIP(b, n.IP) {
+ return
+ }
+ var removed *Node
+ b.replacements, removed = pushNode(b.replacements, n, maxReplacements)
+ if removed != nil {
+ tab.removeIP(b, removed.IP)
+ }
+}
+
+// replace picks a random node from the bucket's replacement list and swaps it in for
+// 'last' if 'last' is still the last entry in the bucket. If 'last' is no longer the
+// last entry, it has either been replaced with someone else or become active.
+func (tab *Table) replace(b *bucket, last *Node) *Node {
+ if len(b.entries) == 0 || b.entries[len(b.entries)-1].ID != last.ID {
+ // Entry has moved, don't replace it.
+ return nil
+ }
+ // Still the last entry.
+ if len(b.replacements) == 0 {
+ tab.deleteInBucket(b, last)
+ return nil
+ }
+ r := b.replacements[tab.rand.Intn(len(b.replacements))]
+ b.replacements = deleteNode(b.replacements, r)
+ b.entries[len(b.entries)-1] = r
+ tab.removeIP(b, last.IP)
+ return r
+}
+
+// bump moves the given node to the front of the bucket entry list
+// if it is contained in that list.
func (b *bucket) bump(n *Node) bool {
for i := range b.entries {
if b.entries[i].ID == n.ID {
@@ -653,6 +793,50 @@ func (b *bucket) bump(n *Node) bool {
return false
}
+// bumpOrAdd moves n to the front of the bucket entry list or adds it if the list isn't
+// full. The return value is true if n is in the bucket.
+func (tab *Table) bumpOrAdd(b *bucket, n *Node) bool {
+ if b.bump(n) {
+ return true
+ }
+ if len(b.entries) >= bucketSize || !tab.addIP(b, n.IP) {
+ return false
+ }
+ b.entries, _ = pushNode(b.entries, n, bucketSize)
+ b.replacements = deleteNode(b.replacements, n)
+ n.addedAt = time.Now()
+ if tab.nodeAddedHook != nil {
+ tab.nodeAddedHook(n)
+ }
+ return true
+}
+
+func (tab *Table) deleteInBucket(b *bucket, n *Node) {
+ b.entries = deleteNode(b.entries, n)
+ tab.removeIP(b, n.IP)
+}
+
+// pushNode adds n to the front of list, keeping at most max items.
+func pushNode(list []*Node, n *Node, max int) ([]*Node, *Node) {
+ if len(list) < max {
+ list = append(list, nil)
+ }
+ removed := list[len(list)-1]
+ copy(list[1:], list)
+ list[0] = n
+ return list, removed
+}
+
+// deleteNode removes n from list.
+func deleteNode(list []*Node, n *Node) []*Node {
+ for i := range list {
+ if list[i].ID == n.ID {
+ return append(list[:i], list[i+1:]...)
+ }
+ }
+ return list
+}
+
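
pushNode and deleteNode are the small list helpers the bucket code builds on: new entries go to the front, and once the list has reached max the previous last entry falls off and is returned to the caller. A quick, self-contained demonstration with a stand-in node type (the real helpers operate on *Node):

    package main

    import "fmt"

    type node struct{ ID string }

    // pushNode adds n to the front of list, keeping at most max items.
    func pushNode(list []*node, n *node, max int) ([]*node, *node) {
        if len(list) < max {
            list = append(list, nil)
        }
        removed := list[len(list)-1]
        copy(list[1:], list)
        list[0] = n
        return list, removed
    }

    // deleteNode removes n from list.
    func deleteNode(list []*node, n *node) []*node {
        for i := range list {
            if list[i].ID == n.ID {
                return append(list[:i], list[i+1:]...)
            }
        }
        return list
    }

    func main() {
        var list []*node
        for _, id := range []string{"a", "b", "c"} {
            list, _ = pushNode(list, &node{id}, 3)
        }
        // The list is full, so pushing "d" evicts the oldest entry "a".
        var evicted *node
        list, evicted = pushNode(list, &node{"d"}, 3)
        fmt.Println("evicted:", evicted.ID) // evicted: a
        list = deleteNode(list, &node{"c"})
        for _, n := range list {
            fmt.Print(n.ID, " ") // d b
        }
        fmt.Println()
    }
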
// nodesByDistance is a list of nodes, ordered by
// distance to target.
type nodesByDistance struct {
diff --git a/p2p/discover/table_test.go b/p2p/discover/table_test.go
index 1037cc609..3ce48d299 100644
--- a/p2p/discover/table_test.go
+++ b/p2p/discover/table_test.go
@@ -20,6 +20,7 @@ import (
"crypto/ecdsa"
"fmt"
"math/rand"
+ "sync"
"net"
"reflect"
@@ -32,60 +33,65 @@ import (
)
func TestTable_pingReplace(t *testing.T) {
- doit := func(newNodeIsResponding, lastInBucketIsResponding bool) {
- transport := newPingRecorder()
- tab, _ := newTable(transport, NodeID{}, &net.UDPAddr{}, "")
- defer tab.Close()
- pingSender := NewNode(MustHexID("a502af0f59b2aab7746995408c79e9ca312d2793cc997e44fc55eda62f0150bbb8c59a6f9269ba3a081518b62699ee807c7c19c20125ddfccca872608af9e370"), net.IP{}, 99, 99)
+ run := func(newNodeResponding, lastInBucketResponding bool) {
+ name := fmt.Sprintf("newNodeResponding=%t/lastInBucketResponding=%t", newNodeResponding, lastInBucketResponding)
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+ testPingReplace(t, newNodeResponding, lastInBucketResponding)
+ })
+ }
- // fill up the sender's bucket.
- last := fillBucket(tab, 253)
+ run(true, true)
+ run(false, true)
+ run(true, false)
+ run(false, false)
+}
- // this call to bond should replace the last node
- // in its bucket if the node is not responding.
- transport.responding[last.ID] = lastInBucketIsResponding
- transport.responding[pingSender.ID] = newNodeIsResponding
- tab.bond(true, pingSender.ID, &net.UDPAddr{}, 0)
+func testPingReplace(t *testing.T, newNodeIsResponding, lastInBucketIsResponding bool) {
+ transport := newPingRecorder()
+ tab, _ := newTable(transport, NodeID{}, &net.UDPAddr{}, "", nil)
+ defer tab.Close()
- // first ping goes to sender (bonding pingback)
- if !transport.pinged[pingSender.ID] {
- t.Error("table did not ping back sender")
- }
- if newNodeIsResponding {
- // second ping goes to oldest node in bucket
- // to see whether it is still alive.
- if !transport.pinged[last.ID] {
- t.Error("table did not ping last node in bucket")
- }
- }
+ // Wait for init so bond is accepted.
+ <-tab.initDone
- tab.mutex.Lock()
- defer tab.mutex.Unlock()
- if l := len(tab.buckets[253].entries); l != bucketSize {
- t.Errorf("wrong bucket size after bond: got %d, want %d", l, bucketSize)
- }
+ // fill up the sender's bucket.
+ pingSender := NewNode(MustHexID("a502af0f59b2aab7746995408c79e9ca312d2793cc997e44fc55eda62f0150bbb8c59a6f9269ba3a081518b62699ee807c7c19c20125ddfccca872608af9e370"), net.IP{}, 99, 99)
+ last := fillBucket(tab, pingSender)
- if lastInBucketIsResponding || !newNodeIsResponding {
- if !contains(tab.buckets[253].entries, last.ID) {
- t.Error("last entry was removed")
- }
- if contains(tab.buckets[253].entries, pingSender.ID) {
- t.Error("new entry was added")
- }
- } else {
- if contains(tab.buckets[253].entries, last.ID) {
- t.Error("last entry was not removed")
- }
- if !contains(tab.buckets[253].entries, pingSender.ID) {
- t.Error("new entry was not added")
- }
- }
+ // this call to bond should replace the last node
+ // in its bucket if the node is not responding.
+ transport.dead[last.ID] = !lastInBucketIsResponding
+ transport.dead[pingSender.ID] = !newNodeIsResponding
+ tab.bond(true, pingSender.ID, &net.UDPAddr{}, 0)
+ tab.doRevalidate(make(chan struct{}, 1))
+
+ // first ping goes to sender (bonding pingback)
+ if !transport.pinged[pingSender.ID] {
+ t.Error("table did not ping back sender")
+ }
+ if !transport.pinged[last.ID] {
+ // second ping goes to oldest node in bucket
+ // to see whether it is still alive.
+ t.Error("table did not ping last node in bucket")
}
- doit(true, true)
- doit(false, true)
- doit(true, false)
- doit(false, false)
+ tab.mutex.Lock()
+ defer tab.mutex.Unlock()
+ wantSize := bucketSize
+ if !lastInBucketIsResponding && !newNodeIsResponding {
+ wantSize--
+ }
+ if l := len(tab.bucket(pingSender.sha).entries); l != wantSize {
+ t.Errorf("wrong bucket size after bond: got %d, want %d", l, wantSize)
+ }
+ if found := contains(tab.bucket(pingSender.sha).entries, last.ID); found != lastInBucketIsResponding {
+ t.Errorf("last entry found: %t, want: %t", found, lastInBucketIsResponding)
+ }
+ wantNewEntry := newNodeIsResponding && !lastInBucketIsResponding
+ if found := contains(tab.bucket(pingSender.sha).entries, pingSender.ID); found != wantNewEntry {
+ t.Errorf("new entry found: %t, want: %t", found, wantNewEntry)
+ }
}
func TestBucket_bumpNoDuplicates(t *testing.T) {
@@ -130,11 +136,45 @@ func TestBucket_bumpNoDuplicates(t *testing.T) {
}
}
+// This checks that the table-wide IP limit is applied correctly.
+func TestTable_IPLimit(t *testing.T) {
+ transport := newPingRecorder()
+ tab, _ := newTable(transport, NodeID{}, &net.UDPAddr{}, "", nil)
+ defer tab.Close()
+
+ for i := 0; i < tableIPLimit+1; i++ {
+ n := nodeAtDistance(tab.self.sha, i)
+ n.IP = net.IP{172, 0, 1, byte(i)}
+ tab.add(n)
+ }
+ if tab.len() > tableIPLimit {
+ t.Errorf("too many nodes in table")
+ }
+}
+
+// This checks that the per-bucket IP limit is applied correctly.
+func TestTable_BucketIPLimit(t *testing.T) {
+ transport := newPingRecorder()
+ tab, _ := newTable(transport, NodeID{}, &net.UDPAddr{}, "", nil)
+ defer tab.Close()
+
+ d := 3
+ for i := 0; i < bucketIPLimit+1; i++ {
+ n := nodeAtDistance(tab.self.sha, d)
+ n.IP = net.IP{172, 0, 1, byte(i)}
+ tab.add(n)
+ }
+ if tab.len() > bucketIPLimit {
+ t.Errorf("too many nodes in table")
+ }
+}
+
// fillBucket inserts nodes into the given bucket until
// it is full. The nodes' IDs don't correspond to their
// hashes.
-func fillBucket(tab *Table, ld int) (last *Node) {
- b := tab.buckets[ld]
+func fillBucket(tab *Table, n *Node) (last *Node) {
+ ld := logdist(tab.self.sha, n.sha)
+ b := tab.bucket(n.sha)
for len(b.entries) < bucketSize {
b.entries = append(b.entries, nodeAtDistance(tab.self.sha, ld))
}
@@ -146,30 +186,39 @@ func fillBucket(tab *Table, ld int) (last *Node) {
func nodeAtDistance(base common.Hash, ld int) (n *Node) {
n = new(Node)
n.sha = hashAtDistance(base, ld)
- n.IP = net.IP{10, 0, 2, byte(ld)}
+ n.IP = net.IP{byte(ld), 0, 2, byte(ld)}
copy(n.ID[:], n.sha[:]) // ensure the node still has a unique ID
return n
}
-type pingRecorder struct{ responding, pinged map[NodeID]bool }
+type pingRecorder struct {
+ mu sync.Mutex
+ dead, pinged map[NodeID]bool
+}
func newPingRecorder() *pingRecorder {
- return &pingRecorder{make(map[NodeID]bool), make(map[NodeID]bool)}
+ return &pingRecorder{
+ dead: make(map[NodeID]bool),
+ pinged: make(map[NodeID]bool),
+ }
}
func (t *pingRecorder) findnode(toid NodeID, toaddr *net.UDPAddr, target NodeID) ([]*Node, error) {
- panic("findnode called on pingRecorder")
+ return nil, nil
}
func (t *pingRecorder) close() {}
func (t *pingRecorder) waitping(from NodeID) error {
return nil // remote always pings
}
func (t *pingRecorder) ping(toid NodeID, toaddr *net.UDPAddr) error {
+ t.mu.Lock()
+ defer t.mu.Unlock()
+
t.pinged[toid] = true
- if t.responding[toid] {
- return nil
- } else {
+ if t.dead[toid] {
return errTimeout
+ } else {
+ return nil
}
}
@@ -178,7 +227,8 @@ func TestTable_closest(t *testing.T) {
test := func(test *closeTest) bool {
// for any node table, Target and N
- tab, _ := newTable(nil, test.Self, &net.UDPAddr{}, "")
+ transport := newPingRecorder()
+ tab, _ := newTable(transport, test.Self, &net.UDPAddr{}, "", nil)
defer tab.Close()
tab.stuff(test.All)
@@ -237,8 +287,11 @@ func TestTable_ReadRandomNodesGetAll(t *testing.T) {
},
}
test := func(buf []*Node) bool {
- tab, _ := newTable(nil, NodeID{}, &net.UDPAddr{}, "")
+ transport := newPingRecorder()
+ tab, _ := newTable(transport, NodeID{}, &net.UDPAddr{}, "", nil)
defer tab.Close()
+ <-tab.initDone
+
for i := 0; i < len(buf); i++ {
ld := cfg.Rand.Intn(len(tab.buckets))
tab.stuff([]*Node{nodeAtDistance(tab.self.sha, ld)})
@@ -280,7 +333,7 @@ func (*closeTest) Generate(rand *rand.Rand, size int) reflect.Value {
func TestTable_Lookup(t *testing.T) {
self := nodeAtDistance(common.Hash{}, 0)
- tab, _ := newTable(lookupTestnet, self.ID, &net.UDPAddr{}, "")
+ tab, _ := newTable(lookupTestnet, self.ID, &net.UDPAddr{}, "", nil)
defer tab.Close()
// lookup on empty table returns no nodes
diff --git a/p2p/discover/udp.go b/p2p/discover/udp.go
index 60436952d..524c6e498 100644
--- a/p2p/discover/udp.go
+++ b/p2p/discover/udp.go
@@ -216,9 +216,22 @@ type ReadPacket struct {
Addr *net.UDPAddr
}
+// Config holds Table-related settings.
+type Config struct {
+ // These settings are required and configure the UDP listener:
+ PrivateKey *ecdsa.PrivateKey
+
+ // These settings are optional:
+ AnnounceAddr *net.UDPAddr // local address announced in the DHT
+ NodeDBPath string // if set, the node database is stored at this filesystem location
+ NetRestrict *netutil.Netlist // network whitelist
+ Bootnodes []*Node // list of bootstrap nodes
+ Unhandled chan<- ReadPacket // unhandled packets are sent on this channel
+}
+
// ListenUDP returns a new table that listens for UDP packets on laddr.
-func ListenUDP(priv *ecdsa.PrivateKey, conn conn, realaddr *net.UDPAddr, unhandled chan ReadPacket, nodeDBPath string, netrestrict *netutil.Netlist) (*Table, error) {
- tab, _, err := newUDP(priv, conn, realaddr, unhandled, nodeDBPath, netrestrict)
+func ListenUDP(c conn, cfg Config) (*Table, error) {
+ tab, _, err := newUDP(c, cfg)
if err != nil {
return nil, err
}
@@ -226,25 +239,29 @@ func ListenUDP(priv *ecdsa.PrivateKey, conn conn, realaddr *net.UDPAddr, unhandl
return tab, nil
}
-func newUDP(priv *ecdsa.PrivateKey, c conn, realaddr *net.UDPAddr, unhandled chan ReadPacket, nodeDBPath string, netrestrict *netutil.Netlist) (*Table, *udp, error) {
+func newUDP(c conn, cfg Config) (*Table, *udp, error) {
udp := &udp{
conn: c,
- priv: priv,
- netrestrict: netrestrict,
+ priv: cfg.PrivateKey,
+ netrestrict: cfg.NetRestrict,
closing: make(chan struct{}),
gotreply: make(chan reply),
addpending: make(chan *pending),
}
+ realaddr := c.LocalAddr().(*net.UDPAddr)
+ if cfg.AnnounceAddr != nil {
+ realaddr = cfg.AnnounceAddr
+ }
// TODO: separate TCP port
udp.ourEndpoint = makeEndpoint(realaddr, uint16(realaddr.Port))
- tab, err := newTable(udp, PubkeyID(&priv.PublicKey), realaddr, nodeDBPath)
+ tab, err := newTable(udp, PubkeyID(&cfg.PrivateKey.PublicKey), realaddr, cfg.NodeDBPath, cfg.Bootnodes)
if err != nil {
return nil, nil, err
}
udp.Table = tab
go udp.loop()
- go udp.readLoop(unhandled)
+ go udp.readLoop(cfg.Unhandled)
return udp.Table, udp, nil
}
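
With the new Config struct, a caller bundles the private key and the optional settings into one value instead of passing a long positional argument list to ListenUDP. A hypothetical caller might look like the sketch below; it assumes a *net.UDPConn satisfies the package's unexported conn interface (as it does for the existing callers) and omits error handling for brevity:

    package main

    import (
        "log"
        "net"

        "github.com/ethereum/go-ethereum/crypto"
        "github.com/ethereum/go-ethereum/p2p/discover"
    )

    func main() {
        addr, _ := net.ResolveUDPAddr("udp", ":30303")
        conn, _ := net.ListenUDP("udp", addr)
        key, _ := crypto.GenerateKey()

        cfg := discover.Config{
            PrivateKey: key,
            // AnnounceAddr, NodeDBPath, NetRestrict, Bootnodes and Unhandled
            // are optional and may be left at their zero values.
        }
        tab, err := discover.ListenUDP(conn, cfg)
        if err != nil {
            log.Fatal(err)
        }
        defer tab.Close()
    }
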
@@ -256,14 +273,20 @@ func (t *udp) close() {
// ping sends a ping message to the given node and waits for a reply.
func (t *udp) ping(toid NodeID, toaddr *net.UDPAddr) error {
- // TODO: maybe check for ReplyTo field in callback to measure RTT
- errc := t.pending(toid, pongPacket, func(interface{}) bool { return true })
- t.send(toaddr, pingPacket, &ping{
+ req := &ping{
Version: Version,
From: t.ourEndpoint,
To: makeEndpoint(toaddr, 0), // TODO: maybe use known TCP port from DB
Expiration: uint64(time.Now().Add(expiration).Unix()),
+ }
+ packet, hash, err := encodePacket(t.priv, pingPacket, req)
+ if err != nil {
+ return err
+ }
+ errc := t.pending(toid, pongPacket, func(p interface{}) bool {
+ return bytes.Equal(p.(*pong).ReplyTok, hash)
})
+ t.write(toaddr, req.name(), packet)
return <-errc
}
@@ -447,40 +470,45 @@ func init() {
}
}
-func (t *udp) send(toaddr *net.UDPAddr, ptype byte, req packet) error {
- packet, err := encodePacket(t.priv, ptype, req)
+func (t *udp) send(toaddr *net.UDPAddr, ptype byte, req packet) ([]byte, error) {
+ packet, hash, err := encodePacket(t.priv, ptype, req)
if err != nil {
- return err
+ return hash, err
}
- _, err = t.conn.WriteToUDP(packet, toaddr)
- log.Trace(">> "+req.name(), "addr", toaddr, "err", err)
+ return hash, t.write(toaddr, req.name(), packet)
+}
+
+func (t *udp) write(toaddr *net.UDPAddr, what string, packet []byte) error {
+ _, err := t.conn.WriteToUDP(packet, toaddr)
+ log.Trace(">> "+what, "addr", toaddr, "err", err)
return err
}
-func encodePacket(priv *ecdsa.PrivateKey, ptype byte, req interface{}) ([]byte, error) {
+func encodePacket(priv *ecdsa.PrivateKey, ptype byte, req interface{}) (packet, hash []byte, err error) {
b := new(bytes.Buffer)
b.Write(headSpace)
b.WriteByte(ptype)
if err := rlp.Encode(b, req); err != nil {
log.Error("Can't encode discv4 packet", "err", err)
- return nil, err
+ return nil, nil, err
}
- packet := b.Bytes()
+ packet = b.Bytes()
sig, err := crypto.Sign(crypto.Keccak256(packet[headSize:]), priv)
if err != nil {
log.Error("Can't sign discv4 packet", "err", err)
- return nil, err
+ return nil, nil, err
}
copy(packet[macSize:], sig)
// add the hash to the front. Note: this doesn't protect the
// packet in any way. Our public key will be part of this hash in
// the future.
- copy(packet, crypto.Keccak256(packet[macSize:]))
- return packet, nil
+ hash = crypto.Keccak256(packet[macSize:])
+ copy(packet, hash)
+ return packet, hash, nil
}
// readLoop runs in its own goroutine. it handles incoming UDP packets.
-func (t *udp) readLoop(unhandled chan ReadPacket) {
+func (t *udp) readLoop(unhandled chan<- ReadPacket) {
defer t.conn.Close()
if unhandled != nil {
defer close(unhandled)
@@ -585,7 +613,7 @@ func (req *findnode) handle(t *udp, from *net.UDPAddr, fromID NodeID, mac []byte
if expired(req.Expiration) {
return errExpired
}
- if t.db.node(fromID) == nil {
+ if !t.db.hasBond(fromID) {
// No bond exists, we don't process the packet. This prevents
// an attack vector where the discovery protocol could be used
// to amplify traffic in a DDOS attack. A malicious actor
@@ -601,18 +629,22 @@ func (req *findnode) handle(t *udp, from *net.UDPAddr, fromID NodeID, mac []byte
t.mutex.Unlock()
p := neighbors{Expiration: uint64(time.Now().Add(expiration).Unix())}
+ var sent bool
// Send neighbors in chunks with at most maxNeighbors per packet
// to stay below the 1280 byte limit.
- for i, n := range closest {
- if netutil.CheckRelayIP(from.IP, n.IP) != nil {
- continue
+ for _, n := range closest {
+ if netutil.CheckRelayIP(from.IP, n.IP) == nil {
+ p.Nodes = append(p.Nodes, nodeToRPC(n))
}
- p.Nodes = append(p.Nodes, nodeToRPC(n))
- if len(p.Nodes) == maxNeighbors || i == len(closest)-1 {
+ if len(p.Nodes) == maxNeighbors {
t.send(from, neighborsPacket, &p)
p.Nodes = p.Nodes[:0]
+ sent = true
}
}
+ if len(p.Nodes) > 0 || !sent {
+ t.send(from, neighborsPacket, &p)
+ }
return nil
}
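
The rewritten findnode handler batches neighbors into packets of at most maxNeighbors, flushes any remainder after the loop, and always sends at least one (possibly empty) reply. The same batching shape, isolated into a tiny generic sketch:

    package main

    import "fmt"

    // sendInChunks delivers items in batches of at most max and always emits at
    // least one (possibly empty) batch.
    func sendInChunks(items []string, max int, send func(batch []string)) {
        var (
            batch []string
            sent  bool
        )
        for _, it := range items {
            batch = append(batch, it)
            if len(batch) == max {
                send(batch)
                batch = batch[:0]
                sent = true
            }
        }
        if len(batch) > 0 || !sent {
            send(batch)
        }
    }

    func main() {
        sendInChunks([]string{"a", "b", "c", "d", "e"}, 2, func(b []string) {
            fmt.Println("packet:", b)
        })
        // packet: [a b]
        // packet: [c d]
        // packet: [e]
    }
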
diff --git a/p2p/discover/udp_test.go b/p2p/discover/udp_test.go
index b81caf839..db9804f7b 100644
--- a/p2p/discover/udp_test.go
+++ b/p2p/discover/udp_test.go
@@ -70,14 +70,15 @@ func newUDPTest(t *testing.T) *udpTest {
remotekey: newkey(),
remoteaddr: &net.UDPAddr{IP: net.IP{10, 0, 1, 99}, Port: 30303},
}
- realaddr := test.pipe.LocalAddr().(*net.UDPAddr)
- test.table, test.udp, _ = newUDP(test.localkey, test.pipe, realaddr, nil, "", nil)
+ test.table, test.udp, _ = newUDP(test.pipe, Config{PrivateKey: test.localkey})
+ // Wait for initial refresh so the table doesn't send unexpected findnode.
+ <-test.table.initDone
return test
}
// handles a packet as if it had been sent to the transport.
func (test *udpTest) packetIn(wantError error, ptype byte, data packet) error {
- enc, err := encodePacket(test.remotekey, ptype, data)
+ enc, _, err := encodePacket(test.remotekey, ptype, data)
if err != nil {
return test.errorf("packet (%d) encode error: %v", ptype, err)
}
@@ -90,19 +91,19 @@ func (test *udpTest) packetIn(wantError error, ptype byte, data packet) error {
// waits for a packet to be sent by the transport.
// validate should have type func(*udpTest, X) error, where X is a packet type.
-func (test *udpTest) waitPacketOut(validate interface{}) error {
+func (test *udpTest) waitPacketOut(validate interface{}) ([]byte, error) {
dgram := test.pipe.waitPacketOut()
- p, _, _, err := decodePacket(dgram)
+ p, _, hash, err := decodePacket(dgram)
if err != nil {
- return test.errorf("sent packet decode error: %v", err)
+ return hash, test.errorf("sent packet decode error: %v", err)
}
fn := reflect.ValueOf(validate)
exptype := fn.Type().In(0)
if reflect.TypeOf(p) != exptype {
- return test.errorf("sent packet type mismatch, got: %v, want: %v", reflect.TypeOf(p), exptype)
+ return hash, test.errorf("sent packet type mismatch, got: %v, want: %v", reflect.TypeOf(p), exptype)
}
fn.Call([]reflect.Value{reflect.ValueOf(p)})
- return nil
+ return hash, nil
}
func (test *udpTest) errorf(format string, args ...interface{}) error {
@@ -246,12 +247,8 @@ func TestUDP_findnode(t *testing.T) {
// ensure there's a bond with the test node,
// findnode won't be accepted otherwise.
- test.table.db.updateNode(NewNode(
- PubkeyID(&test.remotekey.PublicKey),
- test.remoteaddr.IP,
- uint16(test.remoteaddr.Port),
- 99,
- ))
+ test.table.db.updateBondTime(PubkeyID(&test.remotekey.PublicKey), time.Now())
+
// check that closest neighbors are returned.
test.packetIn(nil, findnodePacket, &findnode{Target: testTarget, Expiration: futureExp})
expected := test.table.closest(targetHash, bucketSize)
@@ -351,7 +348,7 @@ func TestUDP_successfulPing(t *testing.T) {
})
// remote is unknown, the table pings back.
- test.waitPacketOut(func(p *ping) error {
+ hash, _ := test.waitPacketOut(func(p *ping) error {
if !reflect.DeepEqual(p.From, test.udp.ourEndpoint) {
t.Errorf("got ping.From %v, want %v", p.From, test.udp.ourEndpoint)
}
@@ -365,7 +362,7 @@ func TestUDP_successfulPing(t *testing.T) {
}
return nil
})
- test.packetIn(nil, pongPacket, &pong{Expiration: futureExp})
+ test.packetIn(nil, pongPacket, &pong{ReplyTok: hash, Expiration: futureExp})
// the node should be added to the table shortly after getting the
// pong packet.
diff --git a/p2p/discv5/net.go b/p2p/discv5/net.go
index f9baf126f..52c677b62 100644
--- a/p2p/discv5/net.go
+++ b/p2p/discv5/net.go
@@ -565,11 +565,8 @@ loop:
if lookupChn := searchInfo[res.target.topic].lookupChn; lookupChn != nil {
lookupChn <- net.ticketStore.radius[res.target.topic].converged
}
- net.ticketStore.searchLookupDone(res.target, res.nodes, func(n *Node) []byte {
- net.ping(n, n.addr())
- return n.pingEcho
- }, func(n *Node, topic Topic) []byte {
- if n.state == known {
+ net.ticketStore.searchLookupDone(res.target, res.nodes, func(n *Node, topic Topic) []byte {
+ if n.state != nil && n.state.canQuery {
return net.conn.send(n, topicQueryPacket, topicQuery{Topic: topic}) // TODO: set expiration
} else {
if n.state == unknown {
@@ -633,15 +630,20 @@ loop:
}
net.refreshResp <- refreshDone
case <-refreshDone:
- log.Trace("<-net.refreshDone")
- refreshDone = nil
- list := searchReqWhenRefreshDone
- searchReqWhenRefreshDone = nil
- go func() {
- for _, req := range list {
- net.topicSearchReq <- req
- }
- }()
+ log.Trace("<-net.refreshDone", "table size", net.tab.count)
+ if net.tab.count != 0 {
+ refreshDone = nil
+ list := searchReqWhenRefreshDone
+ searchReqWhenRefreshDone = nil
+ go func() {
+ for _, req := range list {
+ net.topicSearchReq <- req
+ }
+ }()
+ } else {
+ refreshDone = make(chan struct{})
+ net.refresh(refreshDone)
+ }
}
}
log.Trace("loop stopped")
@@ -751,7 +753,15 @@ func (net *Network) internNodeFromNeighbours(sender *net.UDPAddr, rn rpcNode) (n
return n, err
}
if !n.IP.Equal(rn.IP) || n.UDP != rn.UDP || n.TCP != rn.TCP {
- err = fmt.Errorf("metadata mismatch: got %v, want %v", rn, n)
+ if n.state == known {
+ // reject address change if node is known by us
+ err = fmt.Errorf("metadata mismatch: got %v, want %v", rn, n)
+ } else {
+ // accept otherwise; this will be handled nicer with signed ENRs
+ n.IP = rn.IP
+ n.UDP = rn.UDP
+ n.TCP = rn.TCP
+ }
}
return n, err
}
diff --git a/p2p/discv5/ticket.go b/p2p/discv5/ticket.go
index 1ecef37e4..b3d1ac4ba 100644
--- a/p2p/discv5/ticket.go
+++ b/p2p/discv5/ticket.go
@@ -420,11 +420,14 @@ func (s *ticketStore) nextRegisterableTicket() (*ticketRef, time.Duration) {
func (s *ticketStore) removeTicketRef(ref ticketRef) {
log.Trace("Removing discovery ticket reference", "node", ref.t.node.ID, "serial", ref.t.serial)
+ // Make nextRegisterableTicket return the next available ticket.
+ s.nextTicketCached = nil
+
topic := ref.topic()
tickets := s.tickets[topic]
if tickets == nil {
- log.Warn("Removing tickets from unknown topic", "topic", topic)
+ log.Trace("Removing tickets from unknown topic", "topic", topic)
return
}
bucket := timeBucket(ref.t.regTime[ref.idx] / mclock.AbsTime(ticketTimeBucketLen))
@@ -450,9 +453,6 @@ func (s *ticketStore) removeTicketRef(ref ticketRef) {
delete(s.nodes, ref.t.node)
delete(s.nodeLastReq, ref.t.node)
}
-
- // Make nextRegisterableTicket return the next available ticket.
- s.nextTicketCached = nil
}
type lookupInfo struct {
@@ -494,13 +494,13 @@ func (s *ticketStore) registerLookupDone(lookup lookupInfo, nodes []*Node, ping
}
}
-func (s *ticketStore) searchLookupDone(lookup lookupInfo, nodes []*Node, ping func(n *Node) []byte, query func(n *Node, topic Topic) []byte) {
+func (s *ticketStore) searchLookupDone(lookup lookupInfo, nodes []*Node, query func(n *Node, topic Topic) []byte) {
now := mclock.Now()
for i, n := range nodes {
if i == 0 || (binary.BigEndian.Uint64(n.sha[:8])^binary.BigEndian.Uint64(lookup.target[:8])) < s.radius[lookup.topic].minRadius {
if lookup.radiusLookup {
if lastReq, ok := s.nodeLastReq[n]; !ok || time.Duration(now-lastReq.time) > radiusTC {
- s.nodeLastReq[n] = reqInfo{pingHash: ping(n), lookup: lookup, time: now}
+ s.nodeLastReq[n] = reqInfo{pingHash: nil, lookup: lookup, time: now}
}
} // else {
if s.canQueryTopic(n, lookup.topic) {
diff --git a/p2p/discv5/udp.go b/p2p/discv5/udp.go
index 543771817..6ce72d2c1 100644
--- a/p2p/discv5/udp.go
+++ b/p2p/discv5/udp.go
@@ -49,7 +49,7 @@ var (
// Timeouts
const (
respTimeout = 500 * time.Millisecond
- sendTimeout = 500 * time.Millisecond
+ queryDelay = 1000 * time.Millisecond
expiration = 20 * time.Second
ntpFailureThreshold = 32 // Continuous timeouts after which to check NTP
@@ -318,20 +318,20 @@ func (t *udp) sendTopicRegister(remote *Node, topics []Topic, idx int, pong []by
func (t *udp) sendTopicNodes(remote *Node, queryHash common.Hash, nodes []*Node) {
p := topicNodes{Echo: queryHash}
- if len(nodes) == 0 {
- t.sendPacket(remote.ID, remote.addr(), byte(topicNodesPacket), p)
- return
- }
- for i, result := range nodes {
- if netutil.CheckRelayIP(remote.IP, result.IP) != nil {
- continue
+ var sent bool
+ for _, result := range nodes {
+ if result.IP.Equal(t.net.tab.self.IP) || netutil.CheckRelayIP(remote.IP, result.IP) == nil {
+ p.Nodes = append(p.Nodes, nodeToRPC(result))
}
- p.Nodes = append(p.Nodes, nodeToRPC(result))
- if len(p.Nodes) == maxTopicNodes || i == len(nodes)-1 {
+ if len(p.Nodes) == maxTopicNodes {
t.sendPacket(remote.ID, remote.addr(), byte(topicNodesPacket), p)
p.Nodes = p.Nodes[:0]
+ sent = true
}
}
+ if !sent || len(p.Nodes) > 0 {
+ t.sendPacket(remote.ID, remote.addr(), byte(topicNodesPacket), p)
+ }
}
func (t *udp) sendPacket(toid NodeID, toaddr *net.UDPAddr, ptype byte, req interface{}) (hash []byte, err error) {
diff --git a/p2p/message.go b/p2p/message.go
index 5690494bf..50b419970 100644
--- a/p2p/message.go
+++ b/p2p/message.go
@@ -22,8 +22,6 @@ import (
"fmt"
"io"
"io/ioutil"
- "net"
- "sync"
"sync/atomic"
"time"
@@ -112,30 +110,6 @@ func SendItems(w MsgWriter, msgcode uint64, elems ...interface{}) error {
return Send(w, msgcode, elems)
}
-// netWrapper wraps a MsgReadWriter with locks around
-// ReadMsg/WriteMsg and applies read/write deadlines.
-type netWrapper struct {
- rmu, wmu sync.Mutex
-
- rtimeout, wtimeout time.Duration
- conn net.Conn
- wrapped MsgReadWriter
-}
-
-func (rw *netWrapper) ReadMsg() (Msg, error) {
- rw.rmu.Lock()
- defer rw.rmu.Unlock()
- rw.conn.SetReadDeadline(time.Now().Add(rw.rtimeout))
- return rw.wrapped.ReadMsg()
-}
-
-func (rw *netWrapper) WriteMsg(msg Msg) error {
- rw.wmu.Lock()
- defer rw.wmu.Unlock()
- rw.conn.SetWriteDeadline(time.Now().Add(rw.wtimeout))
- return rw.wrapped.WriteMsg(msg)
-}
-
// eofSignal wraps a reader with eof signaling. the eof channel is
// closed when the wrapped reader returns an error or when count bytes
// have been read.
diff --git a/p2p/metrics.go b/p2p/metrics.go
index 98b61901d..4cbff90ac 100644
--- a/p2p/metrics.go
+++ b/p2p/metrics.go
@@ -25,10 +25,10 @@ import (
)
var (
- ingressConnectMeter = metrics.NewMeter("p2p/InboundConnects")
- ingressTrafficMeter = metrics.NewMeter("p2p/InboundTraffic")
- egressConnectMeter = metrics.NewMeter("p2p/OutboundConnects")
- egressTrafficMeter = metrics.NewMeter("p2p/OutboundTraffic")
+ ingressConnectMeter = metrics.NewRegisteredMeter("p2p/InboundConnects", nil)
+ ingressTrafficMeter = metrics.NewRegisteredMeter("p2p/InboundTraffic", nil)
+ egressConnectMeter = metrics.NewRegisteredMeter("p2p/OutboundConnects", nil)
+ egressTrafficMeter = metrics.NewRegisteredMeter("p2p/OutboundTraffic", nil)
)
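
Assuming the metrics package here follows the usual go-metrics API, NewRegisteredMeter creates the meter and registers it under the given name in one step, with nil selecting the default registry. A minimal usage sketch:

    package main

    import "github.com/ethereum/go-ethereum/metrics"

    // One meter per traffic direction, registered at package init time.
    var inboundTraffic = metrics.NewRegisteredMeter("example/InboundTraffic", nil)

    func recordRead(n int) {
        inboundTraffic.Mark(int64(n)) // accumulate n bytes of inbound traffic
    }

    func main() {
        recordRead(1024)
    }
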
// meteredConn is a wrapper around a network TCP connection that meters both the
diff --git a/p2p/netutil/net.go b/p2p/netutil/net.go
index f6005afd2..656abb682 100644
--- a/p2p/netutil/net.go
+++ b/p2p/netutil/net.go
@@ -18,8 +18,11 @@
package netutil
import (
+ "bytes"
"errors"
+ "fmt"
"net"
+ "sort"
"strings"
)
@@ -189,3 +192,131 @@ func CheckRelayIP(sender, addr net.IP) error {
}
return nil
}
+
+// SameNet reports whether two IP addresses have an equal prefix of the given bit length.
+func SameNet(bits uint, ip, other net.IP) bool {
+ ip4, other4 := ip.To4(), other.To4()
+ switch {
+ case (ip4 == nil) != (other4 == nil):
+ return false
+ case ip4 != nil:
+ return sameNet(bits, ip4, other4)
+ default:
+ return sameNet(bits, ip.To16(), other.To16())
+ }
+}
+
+func sameNet(bits uint, ip, other net.IP) bool {
+ nb := int(bits / 8)
+ mask := ^byte(0xFF >> (bits % 8))
+ if mask != 0 && nb < len(ip) && ip[nb]&mask != other[nb]&mask {
+ return false
+ }
+ return nb <= len(ip) && bytes.Equal(ip[:nb], other[:nb])
+}
+
+// DistinctNetSet tracks IPs, ensuring that at most N of them
+// fall into the same network range.
+type DistinctNetSet struct {
+ Subnet uint // number of common prefix bits
+ Limit uint // maximum number of IPs in each subnet
+
+ members map[string]uint
+ buf net.IP
+}
+
+// Add adds an IP address to the set. It returns false (and doesn't add the IP) if the
+// number of existing IPs in the defined range has reached the limit.
+func (s *DistinctNetSet) Add(ip net.IP) bool {
+ key := s.key(ip)
+ n := s.members[string(key)]
+ if n < s.Limit {
+ s.members[string(key)] = n + 1
+ return true
+ }
+ return false
+}
+
+// Remove removes an IP from the set.
+func (s *DistinctNetSet) Remove(ip net.IP) {
+ key := s.key(ip)
+ if n, ok := s.members[string(key)]; ok {
+ if n == 1 {
+ delete(s.members, string(key))
+ } else {
+ s.members[string(key)] = n - 1
+ }
+ }
+}
+
+// Contains reports whether the given IP is contained in the set.
+func (s DistinctNetSet) Contains(ip net.IP) bool {
+ key := s.key(ip)
+ _, ok := s.members[string(key)]
+ return ok
+}
+
+// Len returns the number of tracked IPs.
+func (s DistinctNetSet) Len() int {
+ n := uint(0)
+ for _, i := range s.members {
+ n += i
+ }
+ return int(n)
+}
+
+// key encodes the map key for an address into a temporary buffer.
+//
+// The first byte of key is '4' or '6' to distinguish IPv4/IPv6 address types.
+// The remainder of the key is the IP, truncated to the number of bits.
+func (s *DistinctNetSet) key(ip net.IP) net.IP {
+ // Lazily initialize storage.
+ if s.members == nil {
+ s.members = make(map[string]uint)
+ s.buf = make(net.IP, 17)
+ }
+ // Canonicalize ip and bits.
+ typ := byte('6')
+ if ip4 := ip.To4(); ip4 != nil {
+ typ, ip = '4', ip4
+ }
+ bits := s.Subnet
+ if bits > uint(len(ip)*8) {
+ bits = uint(len(ip) * 8)
+ }
+ // Encode the prefix into s.buf.
+ nb := int(bits / 8)
+ mask := ^byte(0xFF >> (bits % 8))
+ s.buf[0] = typ
+ buf := append(s.buf[:1], ip[:nb]...)
+ if nb < len(ip) && mask != 0 {
+ buf = append(buf, ip[nb]&mask)
+ }
+ return buf
+}
+
+// String implements fmt.Stringer
+func (s DistinctNetSet) String() string {
+ var buf bytes.Buffer
+ buf.WriteString("{")
+ keys := make([]string, 0, len(s.members))
+ for k := range s.members {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for i, k := range keys {
+ var ip net.IP
+ if k[0] == '4' {
+ ip = make(net.IP, 4)
+ } else {
+ ip = make(net.IP, 16)
+ }
+ copy(ip, k[1:])
+ fmt.Fprintf(&buf, "%v×%d", ip, s.members[k])
+ if i != len(keys)-1 {
+ buf.WriteString(" ")
+ }
+ }
+ buf.WriteString("}")
+ return buf.String()
+}
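
DistinctNetSet is what the IP-limit helpers in the discover table (addIP/removeIP earlier in this diff) rely on to cap how many entries may come from the same subnet, both table-wide and per bucket. A brief usage sketch with arbitrary Subnet/Limit values:

    package main

    import (
        "fmt"
        "net"

        "github.com/ethereum/go-ethereum/p2p/netutil"
    )

    func main() {
        // Allow at most 2 addresses per /24.
        set := netutil.DistinctNetSet{Subnet: 24, Limit: 2}

        fmt.Println(set.Add(net.ParseIP("203.0.113.1")))  // true
        fmt.Println(set.Add(net.ParseIP("203.0.113.2")))  // true
        fmt.Println(set.Add(net.ParseIP("203.0.113.3")))  // false, the /24 is full
        fmt.Println(set.Add(net.ParseIP("198.51.100.1"))) // true, different /24

        set.Remove(net.ParseIP("203.0.113.1"))
        fmt.Println(set.Add(net.ParseIP("203.0.113.3"))) // true again
        fmt.Println(set.Len())                           // 3
    }
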
diff --git a/p2p/netutil/net_test.go b/p2p/netutil/net_test.go
index 1ee1fcb4d..3a6aa081f 100644
--- a/p2p/netutil/net_test.go
+++ b/p2p/netutil/net_test.go
@@ -17,9 +17,11 @@
package netutil
import (
+ "fmt"
"net"
"reflect"
"testing"
+ "testing/quick"
"github.com/davecgh/go-spew/spew"
)
@@ -171,3 +173,90 @@ func BenchmarkCheckRelayIP(b *testing.B) {
CheckRelayIP(sender, addr)
}
}
+
+func TestSameNet(t *testing.T) {
+ tests := []struct {
+ ip, other string
+ bits uint
+ want bool
+ }{
+ {"0.0.0.0", "0.0.0.0", 32, true},
+ {"0.0.0.0", "0.0.0.1", 0, true},
+ {"0.0.0.0", "0.0.0.1", 31, true},
+ {"0.0.0.0", "0.0.0.1", 32, false},
+ {"0.33.0.1", "0.34.0.2", 8, true},
+ {"0.33.0.1", "0.34.0.2", 13, true},
+ {"0.33.0.1", "0.34.0.2", 15, false},
+ }
+
+ for _, test := range tests {
+ if ok := SameNet(test.bits, parseIP(test.ip), parseIP(test.other)); ok != test.want {
+ t.Errorf("SameNet(%d, %s, %s) == %t, want %t", test.bits, test.ip, test.other, ok, test.want)
+ }
+ }
+}
+
+func ExampleSameNet() {
+ // This returns true because the IPs are in the same /24 network:
+ fmt.Println(SameNet(24, net.IP{127, 0, 0, 1}, net.IP{127, 0, 0, 3}))
+ // This call returns false:
+ fmt.Println(SameNet(24, net.IP{127, 3, 0, 1}, net.IP{127, 5, 0, 3}))
+ // Output:
+ // true
+ // false
+}
+
+func TestDistinctNetSet(t *testing.T) {
+ ops := []struct {
+ add, remove string
+ fails bool
+ }{
+ {add: "127.0.0.1"},
+ {add: "127.0.0.2"},
+ {add: "127.0.0.3", fails: true},
+ {add: "127.32.0.1"},
+ {add: "127.32.0.2"},
+ {add: "127.32.0.3", fails: true},
+ {add: "127.33.0.1", fails: true},
+ {add: "127.34.0.1"},
+ {add: "127.34.0.2"},
+ {add: "127.34.0.3", fails: true},
+ // Make room for an address, then add again.
+ {remove: "127.0.0.1"},
+ {add: "127.0.0.3"},
+ {add: "127.0.0.3", fails: true},
+ }
+
+ set := DistinctNetSet{Subnet: 15, Limit: 2}
+ for _, op := range ops {
+ var desc string
+ if op.add != "" {
+ desc = fmt.Sprintf("Add(%s)", op.add)
+ if ok := set.Add(parseIP(op.add)); ok != !op.fails {
+ t.Errorf("%s == %t, want %t", desc, ok, !op.fails)
+ }
+ } else {
+ desc = fmt.Sprintf("Remove(%s)", op.remove)
+ set.Remove(parseIP(op.remove))
+ }
+ t.Logf("%s: %v", desc, set)
+ }
+}
+
+func TestDistinctNetSetAddRemove(t *testing.T) {
+ cfg := &quick.Config{}
+ fn := func(ips []net.IP) bool {
+ s := DistinctNetSet{Limit: 3, Subnet: 2}
+ for _, ip := range ips {
+ s.Add(ip)
+ }
+ for _, ip := range ips {
+ s.Remove(ip)
+ }
+ return s.Len() == 0
+ }
+
+ if err := quick.Check(fn, cfg); err != nil {
+ t.Fatal(err)
+ }
+}
diff --git a/p2p/peer.go b/p2p/peer.go
index bad1c8c8b..477d8c219 100644
--- a/p2p/peer.go
+++ b/p2p/peer.go
@@ -419,6 +419,9 @@ type PeerInfo struct {
Network struct {
LocalAddress string `json:"localAddress"` // Local endpoint of the TCP data connection
RemoteAddress string `json:"remoteAddress"` // Remote endpoint of the TCP data connection
+ Inbound bool `json:"inbound"`
+ Trusted bool `json:"trusted"`
+ Static bool `json:"static"`
} `json:"network"`
Protocols map[string]interface{} `json:"protocols"` // Sub-protocol specific metadata fields
}
@@ -439,6 +442,9 @@ func (p *Peer) Info() *PeerInfo {
}
info.Network.LocalAddress = p.LocalAddr().String()
info.Network.RemoteAddress = p.RemoteAddr().String()
+ info.Network.Inbound = p.rw.is(inboundConn)
+ info.Network.Trusted = p.rw.is(trustedConn)
+ info.Network.Static = p.rw.is(staticDialedConn)
// Gather all the running protocol infos
for _, proto := range p.running {
diff --git a/p2p/protocols/protocol.go b/p2p/protocols/protocol.go
new file mode 100644
index 000000000..9914c9958
--- /dev/null
+++ b/p2p/protocols/protocol.go
@@ -0,0 +1,311 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+/*
+Package protocols is an extension to p2p. It offers a user-friendly, simple way to define
+devp2p subprotocols by abstracting away code that is commonly shared by protocols.
+
+* automates assignment of code indexes to messages
+* automates RLP decoding/encoding based on reflection
+* provides the forever loop to read incoming messages
+* standardises error handling related to communication
+* standardises handshake negotiation
+* TODO: automatic generation of wire protocol specification for peers
+
+*/
+package protocols
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "sync"
+
+ "github.com/ethereum/go-ethereum/p2p"
+)
+
+// error codes used by this protocol scheme
+const (
+ ErrMsgTooLong = iota
+ ErrDecode
+ ErrWrite
+ ErrInvalidMsgCode
+ ErrInvalidMsgType
+ ErrHandshake
+ ErrNoHandler
+ ErrHandler
+)
+
+// error description strings associated with the codes
+var errorToString = map[int]string{
+ ErrMsgTooLong: "Message too long",
+ ErrDecode: "Invalid message (RLP error)",
+ ErrWrite: "Error sending message",
+ ErrInvalidMsgCode: "Invalid message code",
+ ErrInvalidMsgType: "Invalid message type",
+ ErrHandshake: "Handshake error",
+ ErrNoHandler: "No handler registered error",
+ ErrHandler: "Message handler error",
+}
+
+/*
+Error implements the standard go error interface.
+Use:
+
+ errorf(code, format, params ...interface{})
+
+Prints as:
+
+ <description>: <details>
+
+where description is given by code in errorToString
+and details is fmt.Sprintf(format, params...)
+
+exported field Code can be checked
+*/
+type Error struct {
+ Code int
+ message string
+ format string
+ params []interface{}
+}
+
+func (e Error) Error() (message string) {
+ if len(e.message) == 0 {
+ name, ok := errorToString[e.Code]
+ if !ok {
+ panic("invalid message code")
+ }
+ e.message = name
+ if e.format != "" {
+ e.message += ": " + fmt.Sprintf(e.format, e.params...)
+ }
+ }
+ return e.message
+}
+
+func errorf(code int, format string, params ...interface{}) *Error {
+ return &Error{
+ Code: code,
+ format: format,
+ params: params,
+ }
+}
+
+// Spec is a protocol specification including its name and version as well as
+// the types of messages which are exchanged
+type Spec struct {
+ // Name is the name of the protocol, often a three-letter word
+ Name string
+
+ // Version is the version number of the protocol
+ Version uint
+
+ // MaxMsgSize is the maximum accepted length of the message payload
+ MaxMsgSize uint32
+
+ // Messages is a list of message data types which this protocol uses, with
+ // each message type being sent with its array index as the code (so
+ // [&foo{}, &bar{}, &baz{}] would send foo, bar and baz with codes
+ // 0, 1 and 2 respectively)
+ // each message must have a single unique data type
+ Messages []interface{}
+
+ initOnce sync.Once
+ codes map[reflect.Type]uint64
+ types map[uint64]reflect.Type
+}
+
+func (s *Spec) init() {
+ s.initOnce.Do(func() {
+ s.codes = make(map[reflect.Type]uint64, len(s.Messages))
+ s.types = make(map[uint64]reflect.Type, len(s.Messages))
+ for i, msg := range s.Messages {
+ code := uint64(i)
+ typ := reflect.TypeOf(msg)
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ }
+ s.codes[typ] = code
+ s.types[code] = typ
+ }
+ })
+}
+
+// Length returns the number of message types in the protocol
+func (s *Spec) Length() uint64 {
+ return uint64(len(s.Messages))
+}
+
+// GetCode returns the message code of a type; the boolean second return value is
+// false if the message type is not found
+func (s *Spec) GetCode(msg interface{}) (uint64, bool) {
+ s.init()
+ typ := reflect.TypeOf(msg)
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ }
+ code, ok := s.codes[typ]
+ return code, ok
+}
+
+// NewMsg constructs a new message of the type registered for the given code
+func (s *Spec) NewMsg(code uint64) (interface{}, bool) {
+ s.init()
+ typ, ok := s.types[code]
+ if !ok {
+ return nil, false
+ }
+ return reflect.New(typ).Interface(), true
+}
+
+// Peer represents a protocol instance running on a peer connection
+// with a remote peer
+type Peer struct {
+ *p2p.Peer // the p2p.Peer object representing the remote
+ rw p2p.MsgReadWriter // p2p.MsgReadWriter to send messages to and read messages from
+ spec *Spec
+}
+
+// NewPeer constructs a new peer
+// this constructor is called by the p2p.Protocol#Run function
+// the first two arguments are the arguments passed to p2p.Protocol.Run function
+// the third argument is the Spec describing the protocol
+func NewPeer(p *p2p.Peer, rw p2p.MsgReadWriter, spec *Spec) *Peer {
+ return &Peer{
+ Peer: p,
+ rw: rw,
+ spec: spec,
+ }
+}
+
+// Run starts the forever loop that handles incoming messages
+// called within the p2p.Protocol#Run function
+// the handler argument is a function which is called for each message received
+// from the remote peer, a returned error causes the loop to exit
+// resulting in disconnection
+func (p *Peer) Run(handler func(msg interface{}) error) error {
+ for {
+ if err := p.handleIncoming(handler); err != nil {
+ return err
+ }
+ }
+}
+
+// Drop disconnects a peer.
+// TODO: may need to implement protocol drop only? don't want to kick off the peer
+// if they are useful for other protocols
+func (p *Peer) Drop(err error) {
+ p.Disconnect(p2p.DiscSubprotocolError)
+}
+
+// Send takes a message, encodes it in RLP, finds the right message code and sends the
+// message off to the peer
+// this low level call will be wrapped by libraries providing routed or broadcast sends
+// but often just used to forward and push messages to directly connected peers
+func (p *Peer) Send(msg interface{}) error {
+ code, found := p.spec.GetCode(msg)
+ if !found {
+ return errorf(ErrInvalidMsgType, "%v", code)
+ }
+ return p2p.Send(p.rw, code, msg)
+}
+
+// handleIncoming(code)
+// is called each cycle of the main forever loop that dispatches incoming messages
+// if this returns an error the loop returns and the peer is disconnected with the error
+// this generic handler
+// * checks message size,
+// * checks for out-of-range message codes,
+// * handles decoding with reflection,
+// * calls handlers as callbacks
+func (p *Peer) handleIncoming(handle func(msg interface{}) error) error {
+ msg, err := p.rw.ReadMsg()
+ if err != nil {
+ return err
+ }
+ // make sure that the payload has been fully consumed
+ defer msg.Discard()
+
+ if msg.Size > p.spec.MaxMsgSize {
+ return errorf(ErrMsgTooLong, "%v > %v", msg.Size, p.spec.MaxMsgSize)
+ }
+
+ val, ok := p.spec.NewMsg(msg.Code)
+ if !ok {
+ return errorf(ErrInvalidMsgCode, "%v", msg.Code)
+ }
+ if err := msg.Decode(val); err != nil {
+ return errorf(ErrDecode, "<= %v: %v", msg, err)
+ }
+
+ // call the registered handler callbacks
+ // a registered callback takes the decoded message as an interface argument,
+ // which the handler is supposed to cast to the appropriate type
+ // it is entirely safe not to check the cast in the handler since the handler is
+ // chosen based on the proper type in the first place
+ if err := handle(val); err != nil {
+ return errorf(ErrHandler, "(msg code %v): %v", msg.Code, err)
+ }
+ return nil
+}
+
+// Handshake negotiates a handshake on the peer connection
+// * arguments
+// * context
+// * the local handshake to be sent to the remote peer
+// * function to be called on the remote handshake (can be nil)
+// * expects a remote handshake back of the same type
+// * the dialing peer needs to send the handshake first and then waits for remote
+// * the listening peer waits for the remote handshake and then sends it
+// returns the remote handshake and an error
+func (p *Peer) Handshake(ctx context.Context, hs interface{}, verify func(interface{}) error) (rhs interface{}, err error) {
+ if _, ok := p.spec.GetCode(hs); !ok {
+ return nil, errorf(ErrHandshake, "unknown handshake message type: %T", hs)
+ }
+ errc := make(chan error, 2)
+ handle := func(msg interface{}) error {
+ rhs = msg
+ if verify != nil {
+ return verify(rhs)
+ }
+ return nil
+ }
+ send := func() { errc <- p.Send(hs) }
+ receive := func() { errc <- p.handleIncoming(handle) }
+
+ go func() {
+ if p.Inbound() {
+ receive()
+ send()
+ } else {
+ send()
+ receive()
+ }
+ }()
+
+ for i := 0; i < 2; i++ {
+ select {
+ case err = <-errc:
+ case <-ctx.Done():
+ err = ctx.Err()
+ }
+ if err != nil {
+ return nil, errorf(ErrHandshake, err.Error())
+ }
+ }
+ return rhs, nil
+}
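
A protocol built on this package defines a Spec listing its message types and wraps each p2p connection in a Peer that drives the handler loop; the test file below exercises the full pattern. A condensed, hedged sketch with placeholder message types:

    package main

    import (
        "fmt"

        "github.com/ethereum/go-ethereum/p2p"
        "github.com/ethereum/go-ethereum/p2p/protocols"
    )

    // Placeholder messages; their index in Spec.Messages becomes the wire code.
    type ping struct{ Seq uint }
    type pong struct{ Seq uint }

    var spec = &protocols.Spec{
        Name:       "demo",
        Version:    1,
        MaxMsgSize: 1024,
        Messages:   []interface{}{ping{}, pong{}}, // ping = code 0, pong = code 1
    }

    // run would be used as the Run field of a p2p.Protocol.
    func run(p *p2p.Peer, rw p2p.MsgReadWriter) error {
        peer := protocols.NewPeer(p, rw, spec)
        return peer.Run(func(msg interface{}) error {
            switch msg := msg.(type) {
            case *ping:
                return peer.Send(&pong{Seq: msg.Seq})
            case *pong:
                return nil
            default:
                return fmt.Errorf("unexpected message type %T", msg)
            }
        })
    }

    func main() {
        code, ok := spec.GetCode(&pong{})
        fmt.Println(code, ok) // 1 true
    }
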
diff --git a/p2p/protocols/protocol_test.go b/p2p/protocols/protocol_test.go
new file mode 100644
index 000000000..053f537a6
--- /dev/null
+++ b/p2p/protocols/protocol_test.go
@@ -0,0 +1,389 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package protocols
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/ethereum/go-ethereum/p2p"
+ "github.com/ethereum/go-ethereum/p2p/discover"
+ "github.com/ethereum/go-ethereum/p2p/simulations/adapters"
+ p2ptest "github.com/ethereum/go-ethereum/p2p/testing"
+)
+
+// handshake message type
+type hs0 struct {
+ C uint
+}
+
+// message to kill/drop the peer with nodeID
+type kill struct {
+ C discover.NodeID
+}
+
+// message to drop connection
+type drop struct {
+}
+
// protoHandshake represents module-independent aspects of the protocol and is
// the first message peers send and receive as part of the initial exchange
+type protoHandshake struct {
+ Version uint // local and remote peer should have identical version
+ NetworkID string // local and remote peer should have identical network id
+}
+
+// checkProtoHandshake verifies local and remote protoHandshakes match
+func checkProtoHandshake(testVersion uint, testNetworkID string) func(interface{}) error {
+ return func(rhs interface{}) error {
+ remote := rhs.(*protoHandshake)
+ if remote.NetworkID != testNetworkID {
+ return fmt.Errorf("%s (!= %s)", remote.NetworkID, testNetworkID)
+ }
+
+ if remote.Version != testVersion {
+ return fmt.Errorf("%d (!= %d)", remote.Version, testVersion)
+ }
+ return nil
+ }
+}
+
+// newProtocol sets up a protocol
+// the run function here demonstrates a typical protocol using peerPool, handshake
+// and messages registered to handlers
+func newProtocol(pp *p2ptest.TestPeerPool) func(*p2p.Peer, p2p.MsgReadWriter) error {
+ spec := &Spec{
+ Name: "test",
+ Version: 42,
+ MaxMsgSize: 10 * 1024,
+ Messages: []interface{}{
+ protoHandshake{},
+ hs0{},
+ kill{},
+ drop{},
+ },
+ }
+ return func(p *p2p.Peer, rw p2p.MsgReadWriter) error {
+ peer := NewPeer(p, rw, spec)
+
+ // initiate one-off protohandshake and check validity
+ ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+ defer cancel()
+ phs := &protoHandshake{42, "420"}
+ hsCheck := checkProtoHandshake(phs.Version, phs.NetworkID)
+ _, err := peer.Handshake(ctx, phs, hsCheck)
+ if err != nil {
+ return err
+ }
+
+ lhs := &hs0{42}
+ // module handshake demonstrating a simple repeatable exchange of same-type message
+ hs, err := peer.Handshake(ctx, lhs, nil)
+ if err != nil {
+ return err
+ }
+
+ if rmhs := hs.(*hs0); rmhs.C > lhs.C {
+ return fmt.Errorf("handshake mismatch remote %v > local %v", rmhs.C, lhs.C)
+ }
+
+ handle := func(msg interface{}) error {
+ switch msg := msg.(type) {
+
+ case *protoHandshake:
+ return errors.New("duplicate handshake")
+
+ case *hs0:
+ rhs := msg
+ if rhs.C > lhs.C {
+ return fmt.Errorf("handshake mismatch remote %v > local %v", rhs.C, lhs.C)
+ }
+ lhs.C += rhs.C
+ return peer.Send(lhs)
+
+ case *kill:
+ // demonstrates use of peerPool, killing another peer connection as a response to a message
+ id := msg.C
+ pp.Get(id).Drop(errors.New("killed"))
+ return nil
+
+ case *drop:
+ // for testing we can trigger self induced disconnect upon receiving drop message
+ return errors.New("dropped")
+
+ default:
+ return fmt.Errorf("unknown message type: %T", msg)
+ }
+ }
+
+ pp.Add(peer)
+ defer pp.Remove(peer)
+ return peer.Run(handle)
+ }
+}
+
+func protocolTester(t *testing.T, pp *p2ptest.TestPeerPool) *p2ptest.ProtocolTester {
+ conf := adapters.RandomNodeConfig()
+ return p2ptest.NewProtocolTester(t, conf.ID, 2, newProtocol(pp))
+}
+
+func protoHandshakeExchange(id discover.NodeID, proto *protoHandshake) []p2ptest.Exchange {
+
+ return []p2ptest.Exchange{
+ {
+ Expects: []p2ptest.Expect{
+ {
+ Code: 0,
+ Msg: &protoHandshake{42, "420"},
+ Peer: id,
+ },
+ },
+ },
+ {
+ Triggers: []p2ptest.Trigger{
+ {
+ Code: 0,
+ Msg: proto,
+ Peer: id,
+ },
+ },
+ },
+ }
+}
+
+func runProtoHandshake(t *testing.T, proto *protoHandshake, errs ...error) {
+ pp := p2ptest.NewTestPeerPool()
+ s := protocolTester(t, pp)
+ // TODO: make this more than one handshake
+ id := s.IDs[0]
+ if err := s.TestExchanges(protoHandshakeExchange(id, proto)...); err != nil {
+ t.Fatal(err)
+ }
+ var disconnects []*p2ptest.Disconnect
+ for i, err := range errs {
+ disconnects = append(disconnects, &p2ptest.Disconnect{Peer: s.IDs[i], Error: err})
+ }
+ if err := s.TestDisconnected(disconnects...); err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestProtoHandshakeVersionMismatch(t *testing.T) {
+ runProtoHandshake(t, &protoHandshake{41, "420"}, errorf(ErrHandshake, errorf(ErrHandler, "(msg code 0): 41 (!= 42)").Error()))
+}
+
+func TestProtoHandshakeNetworkIDMismatch(t *testing.T) {
+ runProtoHandshake(t, &protoHandshake{42, "421"}, errorf(ErrHandshake, errorf(ErrHandler, "(msg code 0): 421 (!= 420)").Error()))
+}
+
+func TestProtoHandshakeSuccess(t *testing.T) {
+ runProtoHandshake(t, &protoHandshake{42, "420"})
+}
+
+func moduleHandshakeExchange(id discover.NodeID, resp uint) []p2ptest.Exchange {
+
+ return []p2ptest.Exchange{
+ {
+ Expects: []p2ptest.Expect{
+ {
+ Code: 1,
+ Msg: &hs0{42},
+ Peer: id,
+ },
+ },
+ },
+ {
+ Triggers: []p2ptest.Trigger{
+ {
+ Code: 1,
+ Msg: &hs0{resp},
+ Peer: id,
+ },
+ },
+ },
+ }
+}
+
+func runModuleHandshake(t *testing.T, resp uint, errs ...error) {
+ pp := p2ptest.NewTestPeerPool()
+ s := protocolTester(t, pp)
+ id := s.IDs[0]
+ if err := s.TestExchanges(protoHandshakeExchange(id, &protoHandshake{42, "420"})...); err != nil {
+ t.Fatal(err)
+ }
+ if err := s.TestExchanges(moduleHandshakeExchange(id, resp)...); err != nil {
+ t.Fatal(err)
+ }
+ var disconnects []*p2ptest.Disconnect
+ for i, err := range errs {
+ disconnects = append(disconnects, &p2ptest.Disconnect{Peer: s.IDs[i], Error: err})
+ }
+ if err := s.TestDisconnected(disconnects...); err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestModuleHandshakeError(t *testing.T) {
+ runModuleHandshake(t, 43, fmt.Errorf("handshake mismatch remote 43 > local 42"))
+}
+
+func TestModuleHandshakeSuccess(t *testing.T) {
+ runModuleHandshake(t, 42)
+}
+
+// testing complex interactions over multiple peers, relaying, dropping
+func testMultiPeerSetup(a, b discover.NodeID) []p2ptest.Exchange {
+
+ return []p2ptest.Exchange{
+ {
+ Label: "primary handshake",
+ Expects: []p2ptest.Expect{
+ {
+ Code: 0,
+ Msg: &protoHandshake{42, "420"},
+ Peer: a,
+ },
+ {
+ Code: 0,
+ Msg: &protoHandshake{42, "420"},
+ Peer: b,
+ },
+ },
+ },
+ {
+ Label: "module handshake",
+ Triggers: []p2ptest.Trigger{
+ {
+ Code: 0,
+ Msg: &protoHandshake{42, "420"},
+ Peer: a,
+ },
+ {
+ Code: 0,
+ Msg: &protoHandshake{42, "420"},
+ Peer: b,
+ },
+ },
+ Expects: []p2ptest.Expect{
+ {
+ Code: 1,
+ Msg: &hs0{42},
+ Peer: a,
+ },
+ {
+ Code: 1,
+ Msg: &hs0{42},
+ Peer: b,
+ },
+ },
+ },
+
+ {Label: "alternative module handshake", Triggers: []p2ptest.Trigger{{Code: 1, Msg: &hs0{41}, Peer: a},
+ {Code: 1, Msg: &hs0{41}, Peer: b}}},
+ {Label: "repeated module handshake", Triggers: []p2ptest.Trigger{{Code: 1, Msg: &hs0{1}, Peer: a}}},
+ {Label: "receiving repeated module handshake", Expects: []p2ptest.Expect{{Code: 1, Msg: &hs0{43}, Peer: a}}}}
+}
+
+func runMultiplePeers(t *testing.T, peer int, errs ...error) {
+ pp := p2ptest.NewTestPeerPool()
+ s := protocolTester(t, pp)
+
+ if err := s.TestExchanges(testMultiPeerSetup(s.IDs[0], s.IDs[1])...); err != nil {
+ t.Fatal(err)
+ }
+ // after some exchanges of messages, we can test state changes
+ // here this is simply demonstrated by the peerPool
+ // after the handshake negotiations peers must be added to the pool
+ // time.Sleep(1)
+ tick := time.NewTicker(10 * time.Millisecond)
+ timeout := time.NewTimer(1 * time.Second)
+WAIT:
+ for {
+ select {
+ case <-tick.C:
+ if pp.Has(s.IDs[0]) {
+ break WAIT
+ }
+ case <-timeout.C:
+ t.Fatal("timeout")
+ }
+ }
+ if !pp.Has(s.IDs[1]) {
+ t.Fatalf("missing peer test-1: %v (%v)", pp, s.IDs)
+ }
+
+ // peer 0 sends kill request for peer with index <peer>
+ err := s.TestExchanges(p2ptest.Exchange{
+ Triggers: []p2ptest.Trigger{
+ {
+ Code: 2,
+ Msg: &kill{s.IDs[peer]},
+ Peer: s.IDs[0],
+ },
+ },
+ })
+
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // the peer not killed sends a drop request
+ err = s.TestExchanges(p2ptest.Exchange{
+ Triggers: []p2ptest.Trigger{
+ {
+ Code: 3,
+ Msg: &drop{},
+ Peer: s.IDs[(peer+1)%2],
+ },
+ },
+ })
+
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // check the actual disconnect errors on the individual peers
+ var disconnects []*p2ptest.Disconnect
+ for i, err := range errs {
+ disconnects = append(disconnects, &p2ptest.Disconnect{Peer: s.IDs[i], Error: err})
+ }
+ if err := s.TestDisconnected(disconnects...); err != nil {
+ t.Fatal(err)
+ }
+ // test if disconnected peers have been removed from peerPool
+ if pp.Has(s.IDs[peer]) {
+ t.Fatalf("peer test-%v not dropped: %v (%v)", peer, pp, s.IDs)
+ }
+
+}
+
+func TestMultiplePeersDropSelf(t *testing.T) {
+ runMultiplePeers(t, 0,
+ fmt.Errorf("subprotocol error"),
+ fmt.Errorf("Message handler error: (msg code 3): dropped"),
+ )
+}
+
+func TestMultiplePeersDropOther(t *testing.T) {
+ runMultiplePeers(t, 1,
+ fmt.Errorf("Message handler error: (msg code 3): dropped"),
+ fmt.Errorf("subprotocol error"),
+ )
+}
diff --git a/p2p/rlpx.go b/p2p/rlpx.go
index 24037ecc1..1889edac9 100644
--- a/p2p/rlpx.go
+++ b/p2p/rlpx.go
@@ -108,17 +108,19 @@ func (t *rlpx) close(err error) {
// Tell the remote end why we're disconnecting if possible.
if t.rw != nil {
if r, ok := err.(DiscReason); ok && r != DiscNetworkError {
- t.fd.SetWriteDeadline(time.Now().Add(discWriteTimeout))
- SendItems(t.rw, discMsg, r)
+ // rlpx tries to send the DiscReason to the disconnected peer.
+ // If the connection is a net.Pipe (in-memory simulation)
+ // it hangs forever, since net.Pipe does not implement
+ // a write deadline. Because of this, only try to send
+ // the disconnect reason message if setting the write deadline succeeds.
+ if err := t.fd.SetWriteDeadline(time.Now().Add(discWriteTimeout)); err == nil {
+ SendItems(t.rw, discMsg, r)
+ }
}
}
t.fd.Close()
}
-// doEncHandshake runs the protocol handshake using authenticated
-// messages. the protocol handshake is the first authenticated message
-// and also verifies whether the encryption handshake 'worked' and the
-// remote side actually provided the right public key.
func (t *rlpx) doProtoHandshake(our *protoHandshake) (their *protoHandshake, err error) {
// Writing our handshake happens concurrently, we prefer
// returning the handshake read error. If the remote side
@@ -169,6 +171,10 @@ func readProtocolHandshake(rw MsgReader, our *protoHandshake) (*protoHandshake,
return &hs, nil
}
+// doEncHandshake runs the protocol handshake using authenticated
+// messages. the protocol handshake is the first authenticated message
+// and also verifies whether the encryption handshake 'worked' and the
+// remote side actually provided the right public key.
func (t *rlpx) doEncHandshake(prv *ecdsa.PrivateKey, dial *discover.Node) (discover.NodeID, error) {
var (
sec secrets
diff --git a/p2p/rlpx_test.go b/p2p/rlpx_test.go
index f4cefa650..bca460402 100644
--- a/p2p/rlpx_test.go
+++ b/p2p/rlpx_test.go
@@ -156,14 +156,18 @@ func TestProtocolHandshake(t *testing.T) {
node1 = &discover.Node{ID: discover.PubkeyID(&prv1.PublicKey), IP: net.IP{5, 6, 7, 8}, TCP: 44}
hs1 = &protoHandshake{Version: 3, ID: node1.ID, Caps: []Cap{{"c", 1}, {"d", 3}}}
- fd0, fd1 = net.Pipe()
- wg sync.WaitGroup
+ wg sync.WaitGroup
)
+ fd0, fd1, err := tcpPipe()
+ if err != nil {
+ t.Fatal(err)
+ }
+
wg.Add(2)
go func() {
defer wg.Done()
- defer fd1.Close()
+ defer fd0.Close()
rlpx := newRLPX(fd0)
remid, err := rlpx.doEncHandshake(prv0, node1)
if err != nil {
@@ -597,3 +601,31 @@ func TestHandshakeForwardCompatibility(t *testing.T) {
t.Errorf("ingress-mac('foo') mismatch:\ngot %x\nwant %x", fooIngressHash, wantFooIngressHash)
}
}
+
+// tcpPipe creates an in-process, full-duplex pipe based on a localhost TCP socket
+func tcpPipe() (net.Conn, net.Conn, error) {
+ l, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return nil, nil, err
+ }
+ defer l.Close()
+
+ var aconn net.Conn
+ aerr := make(chan error, 1)
+ go func() {
+ var err error
+ aconn, err = l.Accept()
+ aerr <- err
+ }()
+
+ dconn, err := net.Dial("tcp", l.Addr().String())
+ if err != nil {
+ <-aerr
+ return nil, nil, err
+ }
+ if err := <-aerr; err != nil {
+ dconn.Close()
+ return nil, nil, err
+ }
+ return aconn, dconn, nil
+}
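
The tcpPipe helper exists because rlpx.close now sets a write deadline before sending the disconnect reason, which net.Pipe (used previously) does not implement, as the comment in rlpx.close notes. A minimal illustrative test, assuming it lives next to tcpPipe in package p2p (not part of the commit):

func TestTCPPipeSupportsDeadlines(t *testing.T) {
	a, b, err := tcpPipe()
	if err != nil {
		t.Fatal(err)
	}
	defer a.Close()
	defer b.Close()
	// A TCP-backed connection accepts write deadlines, so SendItems in
	// rlpx.close cannot block forever the way it could on net.Pipe.
	if err := a.SetWriteDeadline(time.Now().Add(time.Second)); err != nil {
		t.Fatalf("write deadline rejected: %v", err)
	}
}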
diff --git a/p2p/server.go b/p2p/server.go
index 2cff94ea5..c41d1dc15 100644
--- a/p2p/server.go
+++ b/p2p/server.go
@@ -36,15 +36,12 @@ import (
)
const (
- defaultDialTimeout = 15 * time.Second
- refreshPeersInterval = 30 * time.Second
- staticPeerCheckInterval = 15 * time.Second
+ defaultDialTimeout = 15 * time.Second
- // Maximum number of concurrently handshaking inbound connections.
- maxAcceptConns = 50
-
- // Maximum number of concurrently dialing outbound connections.
- maxActiveDialTasks = 16
+ // Connectivity defaults.
+ maxActiveDialTasks = 16
+ defaultMaxPendingPeers = 50
+ defaultDialRatio = 3
// Maximum time allowed for reading a complete message.
// This is effectively the amount of time a connection can be idle.
@@ -70,6 +67,11 @@ type Config struct {
// Zero defaults to preset values.
MaxPendingPeers int `toml:",omitempty"`
+ // DialRatio controls the ratio of inbound to dialed connections.
+ // Example: a DialRatio of 2 allows 1/2 of connections to be dialed.
+ // Setting DialRatio to zero defaults it to 3.
+ DialRatio int `toml:",omitempty"`
+
// NoDiscovery can be used to disable the peer discovery mechanism.
// Disabling is useful for protocol debugging (manual topology).
NoDiscovery bool
@@ -138,7 +140,7 @@ type Config struct {
EnableMsgEvents bool
// Logger is a custom logger to use with the p2p.Server.
- Logger log.Logger
+ Logger log.Logger `toml:",omitempty"`
}
// Server manages all peer connections.
@@ -427,7 +429,6 @@ func (srv *Server) Start() (err error) {
if err != nil {
return err
}
-
realaddr = conn.LocalAddr().(*net.UDPAddr)
if srv.NAT != nil {
if !realaddr.IP.IsLoopback() {
@@ -447,11 +448,16 @@ func (srv *Server) Start() (err error) {
// node table
if !srv.NoDiscovery {
- ntab, err := discover.ListenUDP(srv.PrivateKey, conn, realaddr, unhandled, srv.NodeDatabase, srv.NetRestrict)
- if err != nil {
- return err
+ cfg := discover.Config{
+ PrivateKey: srv.PrivateKey,
+ AnnounceAddr: realaddr,
+ NodeDBPath: srv.NodeDatabase,
+ NetRestrict: srv.NetRestrict,
+ Bootnodes: srv.BootstrapNodes,
+ Unhandled: unhandled,
}
- if err := ntab.SetFallbackNodes(srv.BootstrapNodes); err != nil {
+ ntab, err := discover.ListenUDP(conn, cfg)
+ if err != nil {
return err
}
srv.ntab = ntab
@@ -476,10 +482,7 @@ func (srv *Server) Start() (err error) {
srv.DiscV5 = ntab
}
- dynPeers := (srv.MaxPeers + 1) / 2
- if srv.NoDiscovery {
- dynPeers = 0
- }
+ dynPeers := srv.maxDialedConns()
dialer := newDialState(srv.StaticNodes, srv.BootstrapNodes, srv.ntab, dynPeers, srv.NetRestrict)
// handshake
@@ -536,6 +539,7 @@ func (srv *Server) run(dialstate dialer) {
defer srv.loopWG.Done()
var (
peers = make(map[discover.NodeID]*Peer)
+ inboundCount = 0
trusted = make(map[discover.NodeID]bool, len(srv.TrustedNodes))
taskdone = make(chan task, maxActiveDialTasks)
runningTasks []task
@@ -621,14 +625,14 @@ running:
}
// TODO: track in-progress inbound node IDs (pre-Peer) to avoid dialing them.
select {
- case c.cont <- srv.encHandshakeChecks(peers, c):
+ case c.cont <- srv.encHandshakeChecks(peers, inboundCount, c):
case <-srv.quit:
break running
}
case c := <-srv.addpeer:
// At this point the connection is past the protocol handshake.
// Its capabilities are known and the remote identity is verified.
- err := srv.protoHandshakeChecks(peers, c)
+ err := srv.protoHandshakeChecks(peers, inboundCount, c)
if err == nil {
// The handshakes are done and it passed all checks.
p := newPeer(c, srv.Protocols)
@@ -639,8 +643,11 @@ running:
}
name := truncateName(c.name)
srv.log.Debug("Adding p2p peer", "name", name, "addr", c.fd.RemoteAddr(), "peers", len(peers)+1)
- peers[c.id] = p
go srv.runPeer(p)
+ peers[c.id] = p
+ if p.Inbound() {
+ inboundCount++
+ }
}
// The dialer logic relies on the assumption that
// dial tasks complete after the peer has been added or
@@ -655,6 +662,9 @@ running:
d := common.PrettyDuration(mclock.Now() - pd.created)
pd.log.Debug("Removing p2p peer", "duration", d, "peers", len(peers)-1, "req", pd.requested, "err", pd.err)
delete(peers, pd.ID())
+ if pd.Inbound() {
+ inboundCount--
+ }
}
}
@@ -681,20 +691,22 @@ running:
}
}
-func (srv *Server) protoHandshakeChecks(peers map[discover.NodeID]*Peer, c *conn) error {
+func (srv *Server) protoHandshakeChecks(peers map[discover.NodeID]*Peer, inboundCount int, c *conn) error {
// Drop connections with no matching protocols.
if len(srv.Protocols) > 0 && countMatchingProtocols(srv.Protocols, c.caps) == 0 {
return DiscUselessPeer
}
// Repeat the encryption handshake checks because the
// peer set might have changed between the handshakes.
- return srv.encHandshakeChecks(peers, c)
+ return srv.encHandshakeChecks(peers, inboundCount, c)
}
-func (srv *Server) encHandshakeChecks(peers map[discover.NodeID]*Peer, c *conn) error {
+func (srv *Server) encHandshakeChecks(peers map[discover.NodeID]*Peer, inboundCount int, c *conn) error {
switch {
case !c.is(trustedConn|staticDialedConn) && len(peers) >= srv.MaxPeers:
return DiscTooManyPeers
+ case !c.is(trustedConn) && c.is(inboundConn) && inboundCount >= srv.maxInboundConns():
+ return DiscTooManyPeers
case peers[c.id] != nil:
return DiscAlreadyConnected
case c.id == srv.Self().ID:
@@ -704,6 +716,21 @@ func (srv *Server) encHandshakeChecks(peers map[discover.NodeID]*Peer, c *conn)
}
}
+func (srv *Server) maxInboundConns() int {
+ return srv.MaxPeers - srv.maxDialedConns()
+}
+
+func (srv *Server) maxDialedConns() int {
+ if srv.NoDiscovery || srv.NoDial {
+ return 0
+ }
+ r := srv.DialRatio
+ if r == 0 {
+ r = defaultDialRatio
+ }
+ return srv.MaxPeers / r
+}
+
type tempError interface {
Temporary() bool
}
@@ -714,10 +741,7 @@ func (srv *Server) listenLoop() {
defer srv.loopWG.Done()
srv.log.Info("RLPx listener up", "self", srv.makeSelf(srv.listener, srv.ntab))
- // This channel acts as a semaphore limiting
- // active inbound connections that are lingering pre-handshake.
- // If all slots are taken, no further connections are accepted.
- tokens := maxAcceptConns
+ tokens := defaultMaxPendingPeers
if srv.MaxPendingPeers > 0 {
tokens = srv.MaxPendingPeers
}
@@ -758,9 +782,6 @@ func (srv *Server) listenLoop() {
fd = newMeteredConn(fd, true)
srv.log.Trace("Accepted connection", "addr", fd.RemoteAddr())
-
- // Spawn the handler. It will give the slot back when the connection
- // has been established.
go func() {
srv.SetupConn(fd, inboundConn, nil)
slots <- struct{}{}
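
To make the new peer-slot split concrete, here is a tiny arithmetic sketch of maxDialedConns and maxInboundConns with assumed values (illustrative only, not part of the commit):

package main

import "fmt"

func main() {
	maxPeers := 25
	dialRatio := 3                 // the default when Config.DialRatio is zero
	dialed := maxPeers / dialRatio // integer division: 8 outbound dial slots
	inbound := maxPeers - dialed   // 17 slots remain for inbound peers
	fmt.Println(dialed, inbound)   // prints: 8 17
}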
diff --git a/p2p/simulations/adapters/state.go b/p2p/simulations/adapters/state.go
index 8b1dfef90..0d4ecfb0f 100644
--- a/p2p/simulations/adapters/state.go
+++ b/p2p/simulations/adapters/state.go
@@ -13,6 +13,7 @@
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
package adapters
type SimStateStore struct {
diff --git a/p2p/testing/peerpool.go b/p2p/testing/peerpool.go
new file mode 100644
index 000000000..45c6e6142
--- /dev/null
+++ b/p2p/testing/peerpool.go
@@ -0,0 +1,67 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package testing
+
+import (
+ "fmt"
+ "sync"
+
+ "github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/p2p/discover"
+)
+
+type TestPeer interface {
+ ID() discover.NodeID
+ Drop(error)
+}
+
+// TestPeerPool is an example peerPool to demonstrate registration of peer connections
+type TestPeerPool struct {
+ lock sync.Mutex
+ peers map[discover.NodeID]TestPeer
+}
+
+func NewTestPeerPool() *TestPeerPool {
+ return &TestPeerPool{peers: make(map[discover.NodeID]TestPeer)}
+}
+
+func (self *TestPeerPool) Add(p TestPeer) {
+ self.lock.Lock()
+ defer self.lock.Unlock()
+ log.Trace(fmt.Sprintf("pp add peer %v", p.ID()))
+ self.peers[p.ID()] = p
+
+}
+
+func (self *TestPeerPool) Remove(p TestPeer) {
+ self.lock.Lock()
+ defer self.lock.Unlock()
+ delete(self.peers, p.ID())
+}
+
+func (self *TestPeerPool) Has(id discover.NodeID) bool {
+ self.lock.Lock()
+ defer self.lock.Unlock()
+ _, ok := self.peers[id]
+ return ok
+}
+
+func (self *TestPeerPool) Get(id discover.NodeID) TestPeer {
+ self.lock.Lock()
+ defer self.lock.Unlock()
+ return self.peers[id]
+}
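
A hedged usage sketch of the pool; fakePeer is invented for illustration and assumed to live in the same package (an errors import is also assumed), only the TestPeerPool API comes from this file:

// fakePeer is a hypothetical TestPeer implementation used only for illustration.
type fakePeer struct{ id discover.NodeID }

func (p *fakePeer) ID() discover.NodeID { return p.id }
func (p *fakePeer) Drop(err error)      {}

func examplePoolUsage() {
	pp := NewTestPeerPool()
	p := &fakePeer{}
	pp.Add(p)
	if pp.Has(p.ID()) {
		// Get returns the registered peer, which can then be dropped remotely
		pp.Get(p.ID()).Drop(errors.New("killed"))
	}
	pp.Remove(p)
}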
diff --git a/p2p/testing/protocolsession.go b/p2p/testing/protocolsession.go
new file mode 100644
index 000000000..361285f06
--- /dev/null
+++ b/p2p/testing/protocolsession.go
@@ -0,0 +1,280 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package testing
+
+import (
+ "errors"
+ "fmt"
+ "sync"
+ "time"
+
+ "github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/p2p"
+ "github.com/ethereum/go-ethereum/p2p/discover"
+ "github.com/ethereum/go-ethereum/p2p/simulations/adapters"
+)
+
+var errTimedOut = errors.New("timed out")
+
+// ProtocolSession is a quasi simulation of a pivot node running
+// a service and a number of dummy peers that can send (trigger) or
+// receive (expect) messages
+type ProtocolSession struct {
+ Server *p2p.Server
+ IDs []discover.NodeID
+ adapter *adapters.SimAdapter
+ events chan *p2p.PeerEvent
+}
+
+// Exchange is the basic unit of protocol tests.
+// The triggers and expects in the arrays are run immediately and asynchronously,
+// thus one cannot have multiple expects for the SAME peer with DIFFERENT message types,
+// because it is unpredictable which expect will receive which message
+// (with expect #1 and #2, messages might be sent #2 and #1, and both expects will complain about a wrong message code).
+// An exchange is defined on a session.
+type Exchange struct {
+ Label string
+ Triggers []Trigger
+ Expects []Expect
+ Timeout time.Duration
+}
+
+// Trigger is part of the exchange, incoming message for the pivot node
+// sent by a peer
+type Trigger struct {
+ Msg interface{} // type of message to be sent
+ Code uint64 // code of the message to send
+ Peer discover.NodeID // the peer to send the message to
+ Timeout time.Duration // timeout duration for the sending
+}
+
+// Expect is part of an exchange, outgoing message from the pivot node
+// received by a peer
+type Expect struct {
+ Msg interface{} // type of message to expect
+ Code uint64 // code of the expected message
+ Peer discover.NodeID // the peer that expects the message
+ Timeout time.Duration // timeout duration for receiving
+}
+
+// Disconnect represents a disconnect event, used and checked by TestDisconnected
+type Disconnect struct {
+ Peer discover.NodeID // disconnected peer
+ Error error // disconnect reason
+}
+
+// trigger sends messages from peers
+func (self *ProtocolSession) trigger(trig Trigger) error {
+ simNode, ok := self.adapter.GetNode(trig.Peer)
+ if !ok {
+ return fmt.Errorf("trigger: peer %v does not exist (1- %v)", trig.Peer, len(self.IDs))
+ }
+ mockNode, ok := simNode.Services()[0].(*mockNode)
+ if !ok {
+ return fmt.Errorf("trigger: peer %v is not a mock", trig.Peer)
+ }
+
+ errc := make(chan error)
+
+ go func() {
+ errc <- mockNode.Trigger(&trig)
+ }()
+
+ t := trig.Timeout
+ if t == time.Duration(0) {
+ t = 1000 * time.Millisecond
+ }
+ select {
+ case err := <-errc:
+ return err
+ case <-time.After(t):
+ return fmt.Errorf("timout expecting %v to send to peer %v", trig.Msg, trig.Peer)
+ }
+}
+
+// expect checks an expectation of a message sent out by the pivot node
+func (self *ProtocolSession) expect(exps []Expect) error {
+ // construct a map of expectations for each node
+ peerExpects := make(map[discover.NodeID][]Expect)
+ for _, exp := range exps {
+ if exp.Msg == nil {
+ return errors.New("no message to expect")
+ }
+ peerExpects[exp.Peer] = append(peerExpects[exp.Peer], exp)
+ }
+
+ // construct a map of mockNodes for each node
+ mockNodes := make(map[discover.NodeID]*mockNode)
+ for nodeID := range peerExpects {
+ simNode, ok := self.adapter.GetNode(nodeID)
+ if !ok {
+ return fmt.Errorf("trigger: peer %v does not exist (1- %v)", nodeID, len(self.IDs))
+ }
+ mockNode, ok := simNode.Services()[0].(*mockNode)
+ if !ok {
+ return fmt.Errorf("trigger: peer %v is not a mock", nodeID)
+ }
+ mockNodes[nodeID] = mockNode
+ }
+
+ // the done channel cancels all created goroutines when the function returns
+ done := make(chan struct{})
+ defer close(done)
+ // errc catches the first error from the expectation goroutines
+ errc := make(chan error)
+
+ wg := &sync.WaitGroup{}
+ wg.Add(len(mockNodes))
+ for nodeID, mockNode := range mockNodes {
+ nodeID := nodeID
+ mockNode := mockNode
+ go func() {
+ defer wg.Done()
+
+ // Sum all Expect timeouts to give the maximum
+ // time for all expectations to finish.
+ // mockNode.Expect checks all received messages against
+ // a list of expected messages, and the timeout for each
+ // of them cannot be checked separately.
+ var t time.Duration
+ for _, exp := range peerExpects[nodeID] {
+ if exp.Timeout == time.Duration(0) {
+ t += 2000 * time.Millisecond
+ } else {
+ t += exp.Timeout
+ }
+ }
+ alarm := time.NewTimer(t)
+ defer alarm.Stop()
+
+ // expectErrc is used to check if the error returned
+ // from mockNode.Expect is not nil and to send it to
+ // errc only in that case.
+ // The done channel is closed when the expect function returns.
+ expectErrc := make(chan error)
+ go func() {
+ select {
+ case expectErrc <- mockNode.Expect(peerExpects[nodeID]...):
+ case <-done:
+ case <-alarm.C:
+ }
+ }()
+
+ select {
+ case err := <-expectErrc:
+ if err != nil {
+ select {
+ case errc <- err:
+ case <-done:
+ case <-alarm.C:
+ errc <- errTimedOut
+ }
+ }
+ case <-done:
+ case <-alarm.C:
+ errc <- errTimedOut
+ }
+
+ }()
+ }
+
+ go func() {
+ wg.Wait()
+ // close errc when all goroutines finish to return a nil err from errc
+ close(errc)
+ }()
+
+ return <-errc
+}
+
+// TestExchanges tests a series of exchanges against the session
+func (self *ProtocolSession) TestExchanges(exchanges ...Exchange) error {
+ for i, e := range exchanges {
+ if err := self.testExchange(e); err != nil {
+ return fmt.Errorf("exchange #%d %q: %v", i, e.Label, err)
+ }
+ log.Trace(fmt.Sprintf("exchange #%d %q: run successfully", i, e.Label))
+ }
+ return nil
+}
+
+// testExchange tests a single Exchange.
+// Default timeout value is 2 seconds.
+func (self *ProtocolSession) testExchange(e Exchange) error {
+ errc := make(chan error)
+ done := make(chan struct{})
+ defer close(done)
+
+ go func() {
+ for _, trig := range e.Triggers {
+ err := self.trigger(trig)
+ if err != nil {
+ errc <- err
+ return
+ }
+ }
+
+ select {
+ case errc <- self.expect(e.Expects):
+ case <-done:
+ }
+ }()
+
+ // time out globally or finish when all expectations are satisfied
+ t := e.Timeout
+ if t == 0 {
+ t = 2000 * time.Millisecond
+ }
+ alarm := time.NewTimer(t)
+ select {
+ case err := <-errc:
+ return err
+ case <-alarm.C:
+ return errTimedOut
+ }
+}
+
+// TestDisconnected tests the disconnections given as arguments.
+// The disconnect structs describe which disconnect error is expected on which peer.
+func (self *ProtocolSession) TestDisconnected(disconnects ...*Disconnect) error {
+ expects := make(map[discover.NodeID]error)
+ for _, disconnect := range disconnects {
+ expects[disconnect.Peer] = disconnect.Error
+ }
+
+ timeout := time.After(time.Second)
+ for len(expects) > 0 {
+ select {
+ case event := <-self.events:
+ if event.Type != p2p.PeerEventTypeDrop {
+ continue
+ }
+ expectErr, ok := expects[event.Peer]
+ if !ok {
+ continue
+ }
+
+ if !(expectErr == nil && event.Error == "" || expectErr != nil && expectErr.Error() == event.Error) {
+ return fmt.Errorf("unexpected error on peer %v. expected '%v', got '%v'", event.Peer, expectErr, event.Error)
+ }
+ delete(expects, event.Peer)
+ case <-timeout:
+ return fmt.Errorf("timed out waiting for peers to disconnect")
+ }
+ }
+ return nil
+}
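
To make the timeout rules concrete, here is a hedged sketch of an Exchange literal; ping, pong and id are placeholders, only the Exchange, Trigger and Expect types come from this file. If Timeout is left zero, testExchange falls back to 2 seconds, and each Expect without its own Timeout adds 2 seconds to the per-peer alarm in expect:

exc := Exchange{
	Label:   "ping/pong",
	Timeout: 500 * time.Millisecond, // overrides the 2s default for the whole exchange
	Triggers: []Trigger{
		{Code: 1, Msg: &ping{}, Peer: id, Timeout: 100 * time.Millisecond},
	},
	Expects: []Expect{
		{Code: 2, Msg: &pong{}, Peer: id}, // no Timeout: contributes the 2s default to the alarm
	},
}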
diff --git a/p2p/testing/protocoltester.go b/p2p/testing/protocoltester.go
new file mode 100644
index 000000000..a797412d6
--- /dev/null
+++ b/p2p/testing/protocoltester.go
@@ -0,0 +1,269 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+/*
+The p2p/testing package provides a unit test scheme to check simple
+protocol message exchanges with one pivot node and a number of dummy peers.
+The pivot test node runs a node.Service, the dummy peers run a mock node
+that can be used to send and receive messages.
+*/
+
+package testing
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "strings"
+ "sync"
+ "testing"
+
+ "github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/node"
+ "github.com/ethereum/go-ethereum/p2p"
+ "github.com/ethereum/go-ethereum/p2p/discover"
+ "github.com/ethereum/go-ethereum/p2p/simulations"
+ "github.com/ethereum/go-ethereum/p2p/simulations/adapters"
+ "github.com/ethereum/go-ethereum/rlp"
+ "github.com/ethereum/go-ethereum/rpc"
+)
+
+// ProtocolTester is the tester environment used for unit testing protocol
+// message exchanges. It uses the p2p/simulations framework
+type ProtocolTester struct {
+ *ProtocolSession
+ network *simulations.Network
+}
+
+// NewProtocolTester constructs a new ProtocolTester.
+// It takes as arguments the pivot node id, the number of dummy peers and the
+// protocol run function called on a peer connection by the p2p server.
+func NewProtocolTester(t *testing.T, id discover.NodeID, n int, run func(*p2p.Peer, p2p.MsgReadWriter) error) *ProtocolTester {
+ services := adapters.Services{
+ "test": func(ctx *adapters.ServiceContext) (node.Service, error) {
+ return &testNode{run}, nil
+ },
+ "mock": func(ctx *adapters.ServiceContext) (node.Service, error) {
+ return newMockNode(), nil
+ },
+ }
+ adapter := adapters.NewSimAdapter(services)
+ net := simulations.NewNetwork(adapter, &simulations.NetworkConfig{})
+ if _, err := net.NewNodeWithConfig(&adapters.NodeConfig{
+ ID: id,
+ EnableMsgEvents: true,
+ Services: []string{"test"},
+ }); err != nil {
+ panic(err.Error())
+ }
+ if err := net.Start(id); err != nil {
+ panic(err.Error())
+ }
+
+ node := net.GetNode(id).Node.(*adapters.SimNode)
+ peers := make([]*adapters.NodeConfig, n)
+ peerIDs := make([]discover.NodeID, n)
+ for i := 0; i < n; i++ {
+ peers[i] = adapters.RandomNodeConfig()
+ peers[i].Services = []string{"mock"}
+ peerIDs[i] = peers[i].ID
+ }
+ events := make(chan *p2p.PeerEvent, 1000)
+ node.SubscribeEvents(events)
+ ps := &ProtocolSession{
+ Server: node.Server(),
+ IDs: peerIDs,
+ adapter: adapter,
+ events: events,
+ }
+ self := &ProtocolTester{
+ ProtocolSession: ps,
+ network: net,
+ }
+
+ self.Connect(id, peers...)
+
+ return self
+}
+
+// Stop stops the p2p server
+func (self *ProtocolTester) Stop() error {
+ self.Server.Stop()
+ return nil
+}
+
+// Connect brings up the remote peer node and connects it using the
+// p2p/simulations network connection with the in-memory network adapter
+func (self *ProtocolTester) Connect(selfID discover.NodeID, peers ...*adapters.NodeConfig) {
+ for _, peer := range peers {
+ log.Trace(fmt.Sprintf("start node %v", peer.ID))
+ if _, err := self.network.NewNodeWithConfig(peer); err != nil {
+ panic(fmt.Sprintf("error starting peer %v: %v", peer.ID, err))
+ }
+ if err := self.network.Start(peer.ID); err != nil {
+ panic(fmt.Sprintf("error starting peer %v: %v", peer.ID, err))
+ }
+ log.Trace(fmt.Sprintf("connect to %v", peer.ID))
+ if err := self.network.Connect(selfID, peer.ID); err != nil {
+ panic(fmt.Sprintf("error connecting to peer %v: %v", peer.ID, err))
+ }
+ }
+
+}
+
+// testNode wraps a protocol run function and implements the node.Service
+// interface
+type testNode struct {
+ run func(*p2p.Peer, p2p.MsgReadWriter) error
+}
+
+func (t *testNode) Protocols() []p2p.Protocol {
+ return []p2p.Protocol{{
+ Length: 100,
+ Run: t.run,
+ }}
+}
+
+func (t *testNode) APIs() []rpc.API {
+ return nil
+}
+
+func (t *testNode) Start(server *p2p.Server) error {
+ return nil
+}
+
+func (t *testNode) Stop() error {
+ return nil
+}
+
+// mockNode is a testNode which doesn't actually run a protocol, instead
+// exposing channels so that tests can manually trigger and expect certain
+// messages
+type mockNode struct {
+ testNode
+
+ trigger chan *Trigger
+ expect chan []Expect
+ err chan error
+ stop chan struct{}
+ stopOnce sync.Once
+}
+
+func newMockNode() *mockNode {
+ mock := &mockNode{
+ trigger: make(chan *Trigger),
+ expect: make(chan []Expect),
+ err: make(chan error),
+ stop: make(chan struct{}),
+ }
+ mock.testNode.run = mock.Run
+ return mock
+}
+
+// Run is a protocol run function which just loops waiting for tests to
+// instruct it to either trigger or expect a message from the peer
+func (m *mockNode) Run(peer *p2p.Peer, rw p2p.MsgReadWriter) error {
+ for {
+ select {
+ case trig := <-m.trigger:
+ m.err <- p2p.Send(rw, trig.Code, trig.Msg)
+ case exps := <-m.expect:
+ m.err <- expectMsgs(rw, exps)
+ case <-m.stop:
+ return nil
+ }
+ }
+}
+
+func (m *mockNode) Trigger(trig *Trigger) error {
+ m.trigger <- trig
+ return <-m.err
+}
+
+func (m *mockNode) Expect(exp ...Expect) error {
+ m.expect <- exp
+ return <-m.err
+}
+
+func (m *mockNode) Stop() error {
+ m.stopOnce.Do(func() { close(m.stop) })
+ return nil
+}
+
+func expectMsgs(rw p2p.MsgReadWriter, exps []Expect) error {
+ matched := make([]bool, len(exps))
+ for {
+ msg, err := rw.ReadMsg()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return err
+ }
+ actualContent, err := ioutil.ReadAll(msg.Payload)
+ if err != nil {
+ return err
+ }
+ var found bool
+ for i, exp := range exps {
+ if exp.Code == msg.Code && bytes.Equal(actualContent, mustEncodeMsg(exp.Msg)) {
+ if matched[i] {
+ return fmt.Errorf("message #%d received two times", i)
+ }
+ matched[i] = true
+ found = true
+ break
+ }
+ }
+ if !found {
+ expected := make([]string, 0)
+ for i, exp := range exps {
+ if matched[i] {
+ continue
+ }
+ expected = append(expected, fmt.Sprintf("code %d payload %x", exp.Code, mustEncodeMsg(exp.Msg)))
+ }
+ return fmt.Errorf("unexpected message code %d payload %x, expected %s", msg.Code, actualContent, strings.Join(expected, " or "))
+ }
+ done := true
+ for _, m := range matched {
+ if !m {
+ done = false
+ break
+ }
+ }
+ if done {
+ return nil
+ }
+ }
+ for i, m := range matched {
+ if !m {
+ return fmt.Errorf("expected message #%d not received", i)
+ }
+ }
+ return nil
+}
+
+// mustEncodeMsg uses rlp to encode a message.
+// In case of error it panics.
+func mustEncodeMsg(msg interface{}) []byte {
+ contentEnc, err := rlp.EncodeToBytes(msg)
+ if err != nil {
+ panic("content encode error: " + err.Error())
+ }
+ return contentEnc
+}
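
A hedged end-to-end sketch of the tester; the run function and the "hello" payload are invented for illustration, the rest is the API defined above (assumed to compile in this package alongside the usual p2p and adapters imports):

func TestHelloProtocol(t *testing.T) {
	conf := adapters.RandomNodeConfig()
	run := func(p *p2p.Peer, rw p2p.MsgReadWriter) error {
		// the pivot greets every connecting peer once, then idles
		if err := p2p.Send(rw, 0, "hello"); err != nil {
			return err
		}
		select {} // keep the connection open for the duration of the test
	}
	s := NewProtocolTester(t, conf.ID, 2, run)
	defer s.Stop()

	if err := s.TestExchanges(Exchange{
		Expects: []Expect{{Code: 0, Msg: "hello", Peer: s.IDs[0]}},
	}); err != nil {
		t.Fatal(err)
	}
}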
diff --git a/params/bootnodes.go b/params/bootnodes.go
index 849b56920..c7190ae67 100644
--- a/params/bootnodes.go
+++ b/params/bootnodes.go
@@ -33,8 +33,10 @@ var MainnetBootnodes = []string{
// TestnetBootnodes are the enode URLs of the P2P bootstrap nodes running on the
// Ropsten test network.
var TestnetBootnodes = []string{
- "enode://6ce05930c72abc632c58e2e4324f7c7ea478cec0ed4fa2528982cf34483094e9cbc9216e7aa349691242576d552a2a56aaeae426c5303ded677ce455ba1acd9d@13.84.180.240:30303", // US-TX
- "enode://20c9ad97c081d63397d7b685a412227a40e23c8bdc6688c6f37e97cfbc22d2b4d1db1510d8f61e6a8866ad7f0e17c02b14182d37ea7c3c8b9c2683aeb6b733a1@52.169.14.227:30303", // IE
+ "enode://30b7ab30a01c124a6cceca36863ece12c4f5fa68e3ba9b0b51407ccc002eeed3b3102d20a88f1c1d3c3154e2449317b8ef95090e77b312d5cc39354f86d5d606@52.176.7.10:30303", // US-Azure geth
+ "enode://865a63255b3bb68023b6bffd5095118fcc13e79dcf014fe4e47e065c350c7cc72af2e53eff895f11ba1bbb6a2b33271c1116ee870f266618eadfc2e78aa7349c@52.176.100.77:30303", // US-Azure parity
+ "enode://6332792c4a00e3e4ee0926ed89e0d27ef985424d97b6a45bf0f23e51f0dcb5e66b875777506458aea7af6f9e4ffb69f43f3778ee73c81ed9d34c51c4b16b0b0f@52.232.243.152:30303", // Parity
+ "enode://94c15d1b9e2fe7ce56e458b9a3b672ef11894ddedd0c6f247e0f1d3487f52b66208fb4aeb8179fce6e3a749ea93ed147c37976d67af557508d199d9594c35f09@192.81.208.223:30303", // @gpip
}
// RinkebyBootnodes are the enode URLs of the P2P bootstrap nodes running on the
@@ -45,17 +47,10 @@ var RinkebyBootnodes = []string{
"enode://b6b28890b006743680c52e64e0d16db57f28124885595fa03a562be1d2bf0f3a1da297d56b13da25fb992888fd556d4c1a27b1f39d531bde7de1921c90061cc6@159.89.28.211:30303", // AKASHA
}
-// RinkebyV5Bootnodes are the enode URLs of the P2P bootstrap nodes running on the
-// Rinkeby test network for the experimental RLPx v5 topic-discovery network.
-var RinkebyV5Bootnodes = []string{
- "enode://a24ac7c5484ef4ed0c5eb2d36620ba4e4aa13b8c84684e1b4aab0cebea2ae45cb4d375b77eab56516d34bfbd3c1a833fc51296ff084b770b94fb9028c4d25ccf@52.169.42.101:30303?discport=30304", // IE
- "enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@52.3.158.184:30303?discport=30304", // INFURA
- "enode://b6b28890b006743680c52e64e0d16db57f28124885595fa03a562be1d2bf0f3a1da297d56b13da25fb992888fd556d4c1a27b1f39d531bde7de1921c90061cc6@159.89.28.211:30303?discport=30304", // AKASHA
-}
-
// DiscoveryV5Bootnodes are the enode URLs of the P2P bootstrap nodes for the
// experimental RLPx v5 topic-discovery network.
var DiscoveryV5Bootnodes = []string{
+ "enode://06051a5573c81934c9554ef2898eb13b33a34b94cf36b202b69fde139ca17a85051979867720d4bdae4323d4943ddf9aeeb6643633aa656e0be843659795007a@35.177.226.168:30303",
"enode://0cc5f5ffb5d9098c8b8c62325f3797f56509bff942704687b6530992ac706e2cb946b90a34f1f19548cd3c7baccbcaea354531e5983c7d1bc0dee16ce4b6440b@40.118.3.223:30304",
"enode://1c7a64d76c0334b0418c004af2f67c50e36a3be60b5e4790bdac0439d21603469a85fad36f2473c9a80eb043ae60936df905fa28f1ff614c3e5dc34f15dcd2dc@40.118.3.223:30306",
"enode://85c85d7143ae8bb96924f2b54f1b3e70d8c4d367af305325d30a61385a432f247d2c75c45c6b4a60335060d072d7f5b35dd1d4c45f76941f62a4f83b6e75daaf@40.118.3.223:30307",
diff --git a/params/config.go b/params/config.go
index 345f6394a..dc02c7ca3 100644
--- a/params/config.go
+++ b/params/config.go
@@ -31,46 +31,46 @@ var (
var (
// MainnetChainConfig is the chain parameters to run a node on the main network.
MainnetChainConfig = &ChainConfig{
- ChainId: big.NewInt(1),
- HomesteadBlock: big.NewInt(1150000),
- DAOForkBlock: big.NewInt(1920000),
- DAOForkSupport: true,
- EIP150Block: big.NewInt(2463000),
- EIP150Hash: common.HexToHash("0x2086799aeebeae135c246c65021c82b4e15a2c451340993aacfd2751886514f0"),
- EIP155Block: big.NewInt(2675000),
- EIP158Block: big.NewInt(2675000),
- ByzantiumBlock: big.NewInt(4370000),
-
- Ethash: new(EthashConfig),
+ ChainId: big.NewInt(1),
+ HomesteadBlock: big.NewInt(1150000),
+ DAOForkBlock: big.NewInt(1920000),
+ DAOForkSupport: true,
+ EIP150Block: big.NewInt(2463000),
+ EIP150Hash: common.HexToHash("0x2086799aeebeae135c246c65021c82b4e15a2c451340993aacfd2751886514f0"),
+ EIP155Block: big.NewInt(2675000),
+ EIP158Block: big.NewInt(2675000),
+ ByzantiumBlock: big.NewInt(4370000),
+ ConstantinopleBlock: nil,
+ Ethash: new(EthashConfig),
}
// TestnetChainConfig contains the chain parameters to run a node on the Ropsten test network.
TestnetChainConfig = &ChainConfig{
- ChainId: big.NewInt(3),
- HomesteadBlock: big.NewInt(0),
- DAOForkBlock: nil,
- DAOForkSupport: true,
- EIP150Block: big.NewInt(0),
- EIP150Hash: common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"),
- EIP155Block: big.NewInt(10),
- EIP158Block: big.NewInt(10),
- ByzantiumBlock: big.NewInt(1700000),
-
- Ethash: new(EthashConfig),
+ ChainId: big.NewInt(3),
+ HomesteadBlock: big.NewInt(0),
+ DAOForkBlock: nil,
+ DAOForkSupport: true,
+ EIP150Block: big.NewInt(0),
+ EIP150Hash: common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"),
+ EIP155Block: big.NewInt(10),
+ EIP158Block: big.NewInt(10),
+ ByzantiumBlock: big.NewInt(1700000),
+ ConstantinopleBlock: nil,
+ Ethash: new(EthashConfig),
}
// RinkebyChainConfig contains the chain parameters to run a node on the Rinkeby test network.
RinkebyChainConfig = &ChainConfig{
- ChainId: big.NewInt(4),
- HomesteadBlock: big.NewInt(1),
- DAOForkBlock: nil,
- DAOForkSupport: true,
- EIP150Block: big.NewInt(2),
- EIP150Hash: common.HexToHash("0x9b095b36c15eaf13044373aef8ee0bd3a382a5abb92e402afa44b8249c3a90e9"),
- EIP155Block: big.NewInt(3),
- EIP158Block: big.NewInt(3),
- ByzantiumBlock: big.NewInt(1035301),
-
+ ChainId: big.NewInt(4),
+ HomesteadBlock: big.NewInt(1),
+ DAOForkBlock: nil,
+ DAOForkSupport: true,
+ EIP150Block: big.NewInt(2),
+ EIP150Hash: common.HexToHash("0x9b095b36c15eaf13044373aef8ee0bd3a382a5abb92e402afa44b8249c3a90e9"),
+ EIP155Block: big.NewInt(3),
+ EIP158Block: big.NewInt(3),
+ ByzantiumBlock: big.NewInt(1035301),
+ ConstantinopleBlock: nil,
Clique: &CliqueConfig{
Period: 15,
Epoch: 30000,
@@ -82,16 +82,16 @@ var (
//
// This configuration is intentionally not using keyed fields to force anyone
// adding flags to the config to also have to set these fields.
- AllEthashProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), new(EthashConfig), nil}
+ AllEthashProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, new(EthashConfig), nil}
// AllCliqueProtocolChanges contains every protocol change (EIPs) introduced
// and accepted by the Ethereum core developers into the Clique consensus.
//
// This configuration is intentionally not using keyed fields to force anyone
// adding flags to the config to also have to set these fields.
- AllCliqueProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, &CliqueConfig{Period: 0, Epoch: 30000}}
+ AllCliqueProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, &CliqueConfig{Period: 0, Epoch: 30000}}
- TestChainConfig = &ChainConfig{big.NewInt(1), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), new(EthashConfig), nil}
+ TestChainConfig = &ChainConfig{big.NewInt(1), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, new(EthashConfig), nil}
TestRules = TestChainConfig.Rules(new(big.Int))
)
@@ -115,7 +115,8 @@ type ChainConfig struct {
EIP155Block *big.Int `json:"eip155Block,omitempty"` // EIP155 HF block
EIP158Block *big.Int `json:"eip158Block,omitempty"` // EIP158 HF block
- ByzantiumBlock *big.Int `json:"byzantiumBlock,omitempty"` // Byzantium switch block (nil = no fork, 0 = already on byzantium)
+ ByzantiumBlock *big.Int `json:"byzantiumBlock,omitempty"` // Byzantium switch block (nil = no fork, 0 = already on byzantium)
+ ConstantinopleBlock *big.Int `json:"constantinopleBlock,omitempty"` // Constantinople switch block (nil = no fork, 0 = already activated)
// Various consensus engines
Ethash *EthashConfig `json:"ethash,omitempty"`
@@ -152,7 +153,7 @@ func (c *ChainConfig) String() string {
default:
engine = "unknown"
}
- return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Engine: %v}",
+ return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Constantinople: %v Engine: %v}",
c.ChainId,
c.HomesteadBlock,
c.DAOForkBlock,
@@ -161,6 +162,7 @@ func (c *ChainConfig) String() string {
c.EIP155Block,
c.EIP158Block,
c.ByzantiumBlock,
+ c.ConstantinopleBlock,
engine,
)
}
@@ -191,6 +193,10 @@ func (c *ChainConfig) IsByzantium(num *big.Int) bool {
return isForked(c.ByzantiumBlock, num)
}
+func (c *ChainConfig) IsConstantinople(num *big.Int) bool {
+ return isForked(c.ConstantinopleBlock, num)
+}
+
// GasTable returns the gas table corresponding to the current phase (homestead or homestead reprice).
//
// The returned GasTable's fields shouldn't, under any circumstances, be changed.
@@ -251,6 +257,9 @@ func (c *ChainConfig) checkCompatible(newcfg *ChainConfig, head *big.Int) *Confi
if isForkIncompatible(c.ByzantiumBlock, newcfg.ByzantiumBlock, head) {
return newCompatError("Byzantium fork block", c.ByzantiumBlock, newcfg.ByzantiumBlock)
}
+ if isForkIncompatible(c.ConstantinopleBlock, newcfg.ConstantinopleBlock, head) {
+ return newCompatError("Constantinople fork block", c.ConstantinopleBlock, newcfg.ConstantinopleBlock)
+ }
return nil
}
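
A small hedged sketch of querying the new fork switch; the block number is illustrative and, since ConstantinopleBlock is nil on every preset config in this change, the second check stays false:

cfg := params.TestnetChainConfig
num := big.NewInt(1700000)
byz := cfg.IsByzantium(num)       // true: Ropsten activates Byzantium at block 1,700,000
cons := cfg.IsConstantinople(num) // false: ConstantinopleBlock is nil, fork not scheduled
fmt.Println(byz, cons)            // prints: true false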
diff --git a/params/version.go b/params/version.go
index 32d4a2e23..921d07599 100644
--- a/params/version.go
+++ b/params/version.go
@@ -23,7 +23,7 @@ import (
const (
VersionMajor = 1 // Major version component of the current release
VersionMinor = 8 // Minor version component of the current release
- VersionPatch = 0 // Patch version component of the current release
+ VersionPatch = 3 // Patch version component of the current release
VersionMeta = "unstable" // Version metadata to append to the version string
)
diff --git a/rpc/http.go b/rpc/http.go
index d61b0e470..9805d69b6 100644
--- a/rpc/http.go
+++ b/rpc/http.go
@@ -27,6 +27,7 @@ import (
"mime"
"net"
"net/http"
+ "strings"
"sync"
"time"
@@ -34,8 +35,8 @@ import (
)
const (
- contentType = "application/json"
- maxHTTPRequestContentLength = 1024 * 128
+ contentType = "application/json"
+ maxRequestContentLength = 1024 * 128
)
var nullAddr, _ = net.ResolveTCPAddr("tcp", "127.0.0.1:0")
@@ -65,8 +66,9 @@ func (hc *httpConn) Close() error {
return nil
}
-// DialHTTP creates a new RPC clients that connection to an RPC server over HTTP.
-func DialHTTP(endpoint string) (*Client, error) {
+// DialHTTPWithClient creates a new RPC client that connects to an RPC server over HTTP
+// using the provided HTTP Client.
+func DialHTTPWithClient(endpoint string, client *http.Client) (*Client, error) {
req, err := http.NewRequest(http.MethodPost, endpoint, nil)
if err != nil {
return nil, err
@@ -76,10 +78,15 @@ func DialHTTP(endpoint string) (*Client, error) {
initctx := context.Background()
return newClient(initctx, func(context.Context) (net.Conn, error) {
- return &httpConn{client: new(http.Client), req: req, closed: make(chan struct{})}, nil
+ return &httpConn{client: client, req: req, closed: make(chan struct{})}, nil
})
}
+// DialHTTP creates a new RPC client that connects to an RPC server over HTTP.
+func DialHTTP(endpoint string) (*Client, error) {
+ return DialHTTPWithClient(endpoint, new(http.Client))
+}
+
func (c *Client) sendHTTP(ctx context.Context, op *requestOp, msg interface{}) error {
hc := c.writeConn.(*httpConn)
respBody, err := hc.doRequest(ctx, msg)
@@ -142,8 +149,11 @@ func (t *httpReadWriteNopCloser) Close() error {
// NewHTTPServer creates a new HTTP RPC server around an API provider.
//
// Deprecated: Server implements http.Handler
-func NewHTTPServer(cors []string, srv *Server) *http.Server {
- return &http.Server{Handler: newCorsHandler(srv, cors)}
+func NewHTTPServer(cors []string, vhosts []string, srv *Server) *http.Server {
+ // Wrap the CORS-handler within a host-handler
+ handler := newCorsHandler(srv, cors)
+ handler = newVHostHandler(vhosts, handler)
+ return &http.Server{Handler: handler}
}
// ServeHTTP serves JSON-RPC requests over HTTP.
@@ -172,8 +182,8 @@ func validateRequest(r *http.Request) (int, error) {
if r.Method == http.MethodPut || r.Method == http.MethodDelete {
return http.StatusMethodNotAllowed, errors.New("method not allowed")
}
- if r.ContentLength > maxHTTPRequestContentLength {
- err := fmt.Errorf("content length too large (%d>%d)", r.ContentLength, maxHTTPRequestContentLength)
+ if r.ContentLength > maxRequestContentLength {
+ err := fmt.Errorf("content length too large (%d>%d)", r.ContentLength, maxRequestContentLength)
return http.StatusRequestEntityTooLarge, err
}
mt, _, err := mime.ParseMediaType(r.Header.Get("content-type"))
@@ -189,7 +199,6 @@ func newCorsHandler(srv *Server, allowedOrigins []string) http.Handler {
if len(allowedOrigins) == 0 {
return srv
}
-
c := cors.New(cors.Options{
AllowedOrigins: allowedOrigins,
AllowedMethods: []string{http.MethodPost, http.MethodGet},
@@ -198,3 +207,50 @@ func newCorsHandler(srv *Server, allowedOrigins []string) http.Handler {
})
return c.Handler(srv)
}
+
+// virtualHostHandler is a handler which validates the Host-header of incoming requests.
+// The virtualHostHandler can prevent DNS rebinding attacks, which do not utilize CORS headers,
+// since they make in-domain requests against the RPC API. Instead, we can inspect the Host header
+// to see which domain was used, and validate that against a whitelist.
+type virtualHostHandler struct {
+ vhosts map[string]struct{}
+ next http.Handler
+}
+
+// ServeHTTP serves JSON-RPC requests over HTTP, implements http.Handler
+func (h *virtualHostHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ // if r.Host is not set, we can continue serving since a browser would set the Host header
+ if r.Host == "" {
+ h.next.ServeHTTP(w, r)
+ return
+ }
+ host, _, err := net.SplitHostPort(r.Host)
+ if err != nil {
+ // Either invalid (too many colons) or no port specified
+ host = r.Host
+ }
+ if ipAddr := net.ParseIP(host); ipAddr != nil {
+ // It's an IP address, we can serve that
+ h.next.ServeHTTP(w, r)
+ return
+
+ }
+ // Not an IP address, but a hostname; we need to validate it
+ if _, exist := h.vhosts["*"]; exist {
+ h.next.ServeHTTP(w, r)
+ return
+ }
+ if _, exist := h.vhosts[host]; exist {
+ h.next.ServeHTTP(w, r)
+ return
+ }
+ http.Error(w, "invalid host specified", http.StatusForbidden)
+}
+
+func newVHostHandler(vhosts []string, next http.Handler) http.Handler {
+ vhostMap := make(map[string]struct{})
+ for _, allowedHost := range vhosts {
+ vhostMap[strings.ToLower(allowedHost)] = struct{}{}
+ }
+ return &virtualHostHandler{vhostMap, next}
+}
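
A hedged caller-side sketch of the extended constructor; the address and origin are placeholders, only the new vhosts parameter comes from this change:

package main

import (
	"log"

	"github.com/ethereum/go-ethereum/rpc"
)

func main() {
	srv := rpc.NewServer()
	// Only requests whose Host header is "localhost" (or an IP literal, which
	// virtualHostHandler always allows) reach the RPC server; other hostnames get a 403.
	httpSrv := rpc.NewHTTPServer([]string{"http://localhost:8080"}, []string{"localhost"}, srv)
	httpSrv.Addr = "127.0.0.1:8545"
	log.Fatal(httpSrv.ListenAndServe())
}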
diff --git a/rpc/http_test.go b/rpc/http_test.go
index aed84f683..b3f694d8a 100644
--- a/rpc/http_test.go
+++ b/rpc/http_test.go
@@ -32,7 +32,7 @@ func TestHTTPErrorResponseWithPut(t *testing.T) {
}
func TestHTTPErrorResponseWithMaxContentLength(t *testing.T) {
- body := make([]rune, maxHTTPRequestContentLength+1)
+ body := make([]rune, maxRequestContentLength+1)
testHTTPErrorResponse(t,
http.MethodPost, contentType, string(body), http.StatusRequestEntityTooLarge)
}
diff --git a/rpc/json.go b/rpc/json.go
index 2e7fd599e..837011f51 100644
--- a/rpc/json.go
+++ b/rpc/json.go
@@ -76,13 +76,13 @@ type jsonNotification struct {
// jsonCodec reads and writes JSON-RPC messages to the underlying connection. It
// also has support for parsing arguments and serializing (result) objects.
type jsonCodec struct {
- closer sync.Once // close closed channel once
- closed chan interface{} // closed on Close
- decMu sync.Mutex // guards d
- d *json.Decoder // decodes incoming requests
- encMu sync.Mutex // guards e
- e *json.Encoder // encodes responses
- rw io.ReadWriteCloser // connection
+ closer sync.Once // close closed channel once
+ closed chan interface{} // closed on Close
+ decMu sync.Mutex // guards the decoder
+ decode func(v interface{}) error // decoder to allow multiple transports
+ encMu sync.Mutex // guards the encoder
+ encode func(v interface{}) error // encoder to allow multiple transports
+ rw io.ReadWriteCloser // connection
}
func (err *jsonError) Error() string {
@@ -96,11 +96,29 @@ func (err *jsonError) ErrorCode() int {
return err.Code
}
-// NewJSONCodec creates a new RPC server codec with support for JSON-RPC 2.0
+// NewCodec creates a new RPC server codec with support for JSON-RPC 2.0 based
+// on explicitly given encoding and decoding methods.
+func NewCodec(rwc io.ReadWriteCloser, encode, decode func(v interface{}) error) ServerCodec {
+ return &jsonCodec{
+ closed: make(chan interface{}),
+ encode: encode,
+ decode: decode,
+ rw: rwc,
+ }
+}
+
+// NewJSONCodec creates a new RPC server codec with support for JSON-RPC 2.0.
func NewJSONCodec(rwc io.ReadWriteCloser) ServerCodec {
- d := json.NewDecoder(rwc)
- d.UseNumber()
- return &jsonCodec{closed: make(chan interface{}), d: d, e: json.NewEncoder(rwc), rw: rwc}
+ enc := json.NewEncoder(rwc)
+ dec := json.NewDecoder(rwc)
+ dec.UseNumber()
+
+ return &jsonCodec{
+ closed: make(chan interface{}),
+ encode: enc.Encode,
+ decode: dec.Decode,
+ rw: rwc,
+ }
}
// isBatch returns true when the first non-whitespace characters is '['
@@ -123,14 +141,12 @@ func (c *jsonCodec) ReadRequestHeaders() ([]rpcRequest, bool, Error) {
defer c.decMu.Unlock()
var incomingMsg json.RawMessage
- if err := c.d.Decode(&incomingMsg); err != nil {
+ if err := c.decode(&incomingMsg); err != nil {
return nil, false, &invalidRequestError{err.Error()}
}
-
if isBatch(incomingMsg) {
return parseBatchRequest(incomingMsg)
}
-
return parseRequest(incomingMsg)
}
@@ -338,7 +354,7 @@ func (c *jsonCodec) Write(res interface{}) error {
c.encMu.Lock()
defer c.encMu.Unlock()
- return c.e.Encode(res)
+ return c.encode(res)
}
// Close the underlying connection
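
For reference, a hedged sketch of NewCodec with caller-supplied encode/decode functions; it mirrors what NewJSONCodec does internally and what the websocket handler further below does with its size-limited codec (io and encoding/json imports assumed):

func newPlainCodec(rwc io.ReadWriteCloser) rpc.ServerCodec {
	enc := json.NewEncoder(rwc)
	dec := json.NewDecoder(rwc)
	dec.UseNumber() // keep numbers as json.Number, exactly as NewJSONCodec does
	return rpc.NewCodec(rwc, enc.Encode, dec.Decode)
}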
diff --git a/rpc/server.go b/rpc/server.go
index 30c288349..11373b504 100644
--- a/rpc/server.go
+++ b/rpc/server.go
@@ -421,7 +421,7 @@ func (s *Server) readRequest(codec ServerCodec) ([]*serverRequest, bool, Error)
}
}
} else {
- requests[i] = &serverRequest{id: r.id, err: &methodNotFoundError{r.method, r.method}}
+ requests[i] = &serverRequest{id: r.id, err: &methodNotFoundError{r.service, r.method}}
}
continue
}
diff --git a/rpc/types_test.go b/rpc/types_test.go
index 30cef9b22..68b6d3c54 100644
--- a/rpc/types_test.go
+++ b/rpc/types_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/rpc/websocket.go b/rpc/websocket.go
index 4214fc86a..a6e1cec28 100644
--- a/rpc/websocket.go
+++ b/rpc/websocket.go
@@ -17,8 +17,10 @@
package rpc
import (
+ "bytes"
"context"
"crypto/tls"
+ "encoding/json"
"fmt"
"net"
"net/http"
@@ -32,6 +34,23 @@ import (
"gopkg.in/fatih/set.v0"
)
+// websocketJSONCodec is a custom JSON codec with payload size enforcement and
+// special number parsing.
+var websocketJSONCodec = websocket.Codec{
+ // Marshal is the stock JSON marshaller used by the websocket library too.
+ Marshal: func(v interface{}) ([]byte, byte, error) {
+ msg, err := json.Marshal(v)
+ return msg, websocket.TextFrame, err
+ },
+ // Unmarshal is a specialized unmarshaller to properly convert numbers.
+ Unmarshal: func(msg []byte, payloadType byte, v interface{}) error {
+ dec := json.NewDecoder(bytes.NewReader(msg))
+ dec.UseNumber()
+
+ return dec.Decode(v)
+ },
+}
+
// WebsocketHandler returns a handler that serves JSON-RPC to WebSocket connections.
//
// allowedOrigins should be a comma-separated list of allowed origin URLs.
@@ -40,7 +59,16 @@ func (srv *Server) WebsocketHandler(allowedOrigins []string) http.Handler {
return websocket.Server{
Handshake: wsHandshakeValidator(allowedOrigins),
Handler: func(conn *websocket.Conn) {
- srv.ServeCodec(NewJSONCodec(conn), OptionMethodInvocation|OptionSubscriptions)
+ // Create a custom encode/decode pair to enforce payload size and number encoding
+ conn.MaxPayloadBytes = maxRequestContentLength
+
+ encoder := func(v interface{}) error {
+ return websocketJSONCodec.Send(conn, v)
+ }
+ decoder := func(v interface{}) error {
+ return websocketJSONCodec.Receive(conn, v)
+ }
+ srv.ServeCodec(NewCodec(conn, encoder, decoder), OptionMethodInvocation|OptionSubscriptions)
},
}
}
diff --git a/swarm/api/api.go b/swarm/api/api.go
index 8c4bca2ec..0cf12fdbe 100644
--- a/swarm/api/api.go
+++ b/swarm/api/api.go
@@ -20,6 +20,7 @@ import (
"fmt"
"io"
"net/http"
+ "path"
"regexp"
"strings"
"sync"
@@ -31,15 +32,110 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/swarm/storage"
)
var hashMatcher = regexp.MustCompile("^[0-9A-Fa-f]{64}")
+// setup metrics
+var (
+ apiResolveCount = metrics.NewRegisteredCounter("api.resolve.count", nil)
+ apiResolveFail = metrics.NewRegisteredCounter("api.resolve.fail", nil)
+ apiPutCount = metrics.NewRegisteredCounter("api.put.count", nil)
+ apiPutFail = metrics.NewRegisteredCounter("api.put.fail", nil)
+ apiGetCount = metrics.NewRegisteredCounter("api.get.count", nil)
+ apiGetNotFound = metrics.NewRegisteredCounter("api.get.notfound", nil)
+ apiGetHttp300 = metrics.NewRegisteredCounter("api.get.http.300", nil)
+ apiModifyCount = metrics.NewRegisteredCounter("api.modify.count", nil)
+ apiModifyFail = metrics.NewRegisteredCounter("api.modify.fail", nil)
+ apiAddFileCount = metrics.NewRegisteredCounter("api.addfile.count", nil)
+ apiAddFileFail = metrics.NewRegisteredCounter("api.addfile.fail", nil)
+ apiRmFileCount = metrics.NewRegisteredCounter("api.removefile.count", nil)
+ apiRmFileFail = metrics.NewRegisteredCounter("api.removefile.fail", nil)
+ apiAppendFileCount = metrics.NewRegisteredCounter("api.appendfile.count", nil)
+ apiAppendFileFail = metrics.NewRegisteredCounter("api.appendfile.fail", nil)
+)
+
type Resolver interface {
Resolve(string) (common.Hash, error)
}
+// NoResolverError is returned by MultiResolver.Resolve if no resolver
+// can be found for the address.
+type NoResolverError struct {
+ TLD string
+}
+
+func NewNoResolverError(tld string) *NoResolverError {
+ return &NoResolverError{TLD: tld}
+}
+
+func (e *NoResolverError) Error() string {
+ if e.TLD == "" {
+ return "no ENS resolver"
+ }
+ return fmt.Sprintf("no ENS endpoint configured to resolve .%s TLD names", e.TLD)
+}
+
+// MultiResolver is used to resolve URL addresses based on their TLDs.
+// Each TLD can have multiple resolvers, and the resolution from the
+// first one in the sequence will be returned.
+type MultiResolver struct {
+ resolvers map[string][]Resolver
+}
+
+// MultiResolverOption sets options for MultiResolver and is used as
+// arguments for its constructor.
+type MultiResolverOption func(*MultiResolver)
+
+// MultiResolverOptionWithResolver adds a Resolver to a list of resolvers
+// for a specific TLD. If TLD is an empty string, the resolver will be added
+// to the list of default resolvers, the ones that will be used for resolution
+// of addresses which do not have their TLD resolver specified.
+func MultiResolverOptionWithResolver(r Resolver, tld string) MultiResolverOption {
+ return func(m *MultiResolver) {
+ m.resolvers[tld] = append(m.resolvers[tld], r)
+ }
+}
+
+// NewMultiResolver creates a new instance of MultiResolver.
+func NewMultiResolver(opts ...MultiResolverOption) (m *MultiResolver) {
+ m = &MultiResolver{
+ resolvers: make(map[string][]Resolver),
+ }
+ for _, o := range opts {
+ o(m)
+ }
+ return m
+}
+
+// Resolve resolves an address by choosing a Resolver based on its TLD.
+// If there is more than one Resolver for the default case or for a specific TLD,
+// the Hash from the first one that does not return an error
+// will be returned.
+func (m MultiResolver) Resolve(addr string) (h common.Hash, err error) {
+ rs := m.resolvers[""]
+ tld := path.Ext(addr)
+ if tld != "" {
+ tld = tld[1:]
+ rstld, ok := m.resolvers[tld]
+ if ok {
+ rs = rstld
+ }
+ }
+ if rs == nil {
+ return h, NewNoResolverError(tld)
+ }
+ for _, r := range rs {
+ h, err = r.Resolve(addr)
+ if err == nil {
+ return
+ }
+ }
+ return
+}
+
/*
Api implements webserver/file system related content storage and retrieval
on top of the dpa
@@ -79,6 +175,7 @@ type ErrResolve error
// DNS Resolver
func (self *Api) Resolve(uri *URI) (storage.Key, error) {
+ apiResolveCount.Inc(1)
log.Trace(fmt.Sprintf("Resolving : %v", uri.Addr))
// if the URI is immutable, check if the address is a hash
@@ -93,6 +190,7 @@ func (self *Api) Resolve(uri *URI) (storage.Key, error) {
// if DNS is not configured, check if the address is a hash
if self.dns == nil {
if !isHash {
+ apiResolveFail.Inc(1)
return nil, fmt.Errorf("no DNS to resolve name: %q", uri.Addr)
}
return common.Hex2Bytes(uri.Addr), nil
@@ -103,6 +201,7 @@ func (self *Api) Resolve(uri *URI) (storage.Key, error) {
if err == nil {
return resolved[:], nil
} else if !isHash {
+ apiResolveFail.Inc(1)
return nil, err
}
return common.Hex2Bytes(uri.Addr), nil
@@ -110,16 +209,19 @@ func (self *Api) Resolve(uri *URI) (storage.Key, error) {
// Put provides singleton manifest creation on top of dpa store
func (self *Api) Put(content, contentType string) (storage.Key, error) {
+ apiPutCount.Inc(1)
r := strings.NewReader(content)
wg := &sync.WaitGroup{}
key, err := self.dpa.Store(r, int64(len(content)), wg, nil)
if err != nil {
+ apiPutFail.Inc(1)
return nil, err
}
manifest := fmt.Sprintf(`{"entries":[{"hash":"%v","contentType":"%s"}]}`, key, contentType)
r = strings.NewReader(manifest)
key, err = self.dpa.Store(r, int64(len(manifest)), wg, nil)
if err != nil {
+ apiPutFail.Inc(1)
return nil, err
}
wg.Wait()
@@ -130,8 +232,10 @@ func (self *Api) Put(content, contentType string) (storage.Key, error) {
// to resolve basePath to content using dpa retrieve
// it returns a section reader, mimeType, status and an error
func (self *Api) Get(key storage.Key, path string) (reader storage.LazySectionReader, mimeType string, status int, err error) {
+ apiGetCount.Inc(1)
trie, err := loadManifest(self.dpa, key, nil)
if err != nil {
+ apiGetNotFound.Inc(1)
status = http.StatusNotFound
log.Warn(fmt.Sprintf("loadManifestTrie error: %v", err))
return
@@ -145,6 +249,7 @@ func (self *Api) Get(key storage.Key, path string) (reader storage.LazySectionRe
key = common.Hex2Bytes(entry.Hash)
status = entry.Status
if status == http.StatusMultipleChoices {
+ apiGetHttp300.Inc(1)
return
} else {
mimeType = entry.ContentType
@@ -153,6 +258,7 @@ func (self *Api) Get(key storage.Key, path string) (reader storage.LazySectionRe
}
} else {
status = http.StatusNotFound
+ apiGetNotFound.Inc(1)
err = fmt.Errorf("manifest entry for '%s' not found", path)
log.Warn(fmt.Sprintf("%v", err))
}
@@ -160,9 +266,11 @@ func (self *Api) Get(key storage.Key, path string) (reader storage.LazySectionRe
}
func (self *Api) Modify(key storage.Key, path, contentHash, contentType string) (storage.Key, error) {
+ apiModifyCount.Inc(1)
quitC := make(chan bool)
trie, err := loadManifest(self.dpa, key, quitC)
if err != nil {
+ apiModifyFail.Inc(1)
return nil, err
}
if contentHash != "" {
@@ -177,19 +285,23 @@ func (self *Api) Modify(key storage.Key, path, contentHash, contentType string)
}
if err := trie.recalcAndStore(); err != nil {
+ apiModifyFail.Inc(1)
return nil, err
}
return trie.hash, nil
}
func (self *Api) AddFile(mhash, path, fname string, content []byte, nameresolver bool) (storage.Key, string, error) {
+ apiAddFileCount.Inc(1)
uri, err := Parse("bzz:/" + mhash)
if err != nil {
+ apiAddFileFail.Inc(1)
return nil, "", err
}
mkey, err := self.Resolve(uri)
if err != nil {
+ apiAddFileFail.Inc(1)
return nil, "", err
}
@@ -208,16 +320,19 @@ func (self *Api) AddFile(mhash, path, fname string, content []byte, nameresolver
mw, err := self.NewManifestWriter(mkey, nil)
if err != nil {
+ apiAddFileFail.Inc(1)
return nil, "", err
}
fkey, err := mw.AddEntry(bytes.NewReader(content), entry)
if err != nil {
+ apiAddFileFail.Inc(1)
return nil, "", err
}
newMkey, err := mw.Store()
if err != nil {
+ apiAddFileFail.Inc(1)
return nil, "", err
}
@@ -227,13 +342,16 @@ func (self *Api) AddFile(mhash, path, fname string, content []byte, nameresolver
}
func (self *Api) RemoveFile(mhash, path, fname string, nameresolver bool) (string, error) {
+ apiRmFileCount.Inc(1)
uri, err := Parse("bzz:/" + mhash)
if err != nil {
+ apiRmFileFail.Inc(1)
return "", err
}
mkey, err := self.Resolve(uri)
if err != nil {
+ apiRmFileFail.Inc(1)
return "", err
}
@@ -244,16 +362,19 @@ func (self *Api) RemoveFile(mhash, path, fname string, nameresolver bool) (strin
mw, err := self.NewManifestWriter(mkey, nil)
if err != nil {
+ apiRmFileFail.Inc(1)
return "", err
}
err = mw.RemoveEntry(filepath.Join(path, fname))
if err != nil {
+ apiRmFileFail.Inc(1)
return "", err
}
newMkey, err := mw.Store()
if err != nil {
+ apiRmFileFail.Inc(1)
return "", err
}
@@ -262,6 +383,7 @@ func (self *Api) RemoveFile(mhash, path, fname string, nameresolver bool) (strin
}
func (self *Api) AppendFile(mhash, path, fname string, existingSize int64, content []byte, oldKey storage.Key, offset int64, addSize int64, nameresolver bool) (storage.Key, string, error) {
+ apiAppendFileCount.Inc(1)
buffSize := offset + addSize
if buffSize < existingSize {
@@ -290,10 +412,12 @@ func (self *Api) AppendFile(mhash, path, fname string, existingSize int64, conte
uri, err := Parse("bzz:/" + mhash)
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
mkey, err := self.Resolve(uri)
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
@@ -304,11 +428,13 @@ func (self *Api) AppendFile(mhash, path, fname string, existingSize int64, conte
mw, err := self.NewManifestWriter(mkey, nil)
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
err = mw.RemoveEntry(filepath.Join(path, fname))
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
@@ -322,11 +448,13 @@ func (self *Api) AppendFile(mhash, path, fname string, existingSize int64, conte
fkey, err := mw.AddEntry(io.Reader(combinedReader), entry)
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
newMkey, err := mw.Store()
if err != nil {
+ apiAppendFileFail.Inc(1)
return nil, "", err
}
@@ -336,6 +464,7 @@ func (self *Api) AppendFile(mhash, path, fname string, existingSize int64, conte
}
func (self *Api) BuildDirectoryTree(mhash string, nameresolver bool) (key storage.Key, manifestEntryMap map[string]*manifestTrieEntry, err error) {
+
uri, err := Parse("bzz:/" + mhash)
if err != nil {
return nil, nil, err
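For orientation, here is a minimal sketch (not part of the change) of how the MultiResolver added in api.go above could be wired up. fixedResolver is a made-up stand-in for a real ENS-backed Resolver and exists only to keep the example self-contained.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/swarm/api"
)

// fixedResolver always resolves to the same hash; illustrative only.
type fixedResolver common.Hash

func (f fixedResolver) Resolve(string) (common.Hash, error) {
	return common.Hash(f), nil
}

func main() {
	mr := api.NewMultiResolver(
		// default resolver: used when no TLD-specific resolver is registered
		api.MultiResolverOptionWithResolver(fixedResolver(common.HexToHash("0x22")), ""),
		// resolver used only for names ending in ".test"
		api.MultiResolverOptionWithResolver(fixedResolver(common.HexToHash("0x11")), "test"),
	)

	// no resolver is registered for "eth", so this falls back to the default one
	hash, err := mr.Resolve("theswarm.eth")
	if err != nil {
		fmt.Println("resolve failed:", err)
		return
	}
	fmt.Println("resolved to", hash.Hex())
}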
diff --git a/swarm/api/api_test.go b/swarm/api/api_test.go
index e673f76c4..4ee26bd8a 100644
--- a/swarm/api/api_test.go
+++ b/swarm/api/api_test.go
@@ -237,3 +237,128 @@ func TestAPIResolve(t *testing.T) {
})
}
}
+
+func TestMultiResolver(t *testing.T) {
+ doesntResolve := newTestResolver("")
+
+ ethAddr := "swarm.eth"
+ ethHash := "0x2222222222222222222222222222222222222222222222222222222222222222"
+ ethResolve := newTestResolver(ethHash)
+
+ testAddr := "swarm.test"
+ testHash := "0x1111111111111111111111111111111111111111111111111111111111111111"
+ testResolve := newTestResolver(testHash)
+
+ tests := []struct {
+ desc string
+ r Resolver
+ addr string
+ result string
+ err error
+ }{
+ {
+ desc: "No resolvers, returns error",
+ r: NewMultiResolver(),
+ err: NewNoResolverError(""),
+ },
+ {
+ desc: "One default resolver, returns resolved address",
+ r: NewMultiResolver(MultiResolverOptionWithResolver(ethResolve, "")),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "Two default resolvers, returns resolved address",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(ethResolve, ""),
+ MultiResolverOptionWithResolver(ethResolve, ""),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "Two default resolvers, first doesn't resolve, returns resolved address",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(doesntResolve, ""),
+ MultiResolverOptionWithResolver(ethResolve, ""),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "Default resolver doesn't resolve, tld resolver resolve, returns resolved address",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(doesntResolve, ""),
+ MultiResolverOptionWithResolver(ethResolve, "eth"),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "Three TLD resolvers, third resolves, returns resolved address",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(doesntResolve, "eth"),
+ MultiResolverOptionWithResolver(doesntResolve, "eth"),
+ MultiResolverOptionWithResolver(ethResolve, "eth"),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "One TLD resolver doesn't resolve, returns error",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(doesntResolve, ""),
+ MultiResolverOptionWithResolver(ethResolve, "eth"),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ },
+ {
+ desc: "One defautl and one TLD resolver, all doesn't resolve, returns error",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(doesntResolve, ""),
+ MultiResolverOptionWithResolver(doesntResolve, "eth"),
+ ),
+ addr: ethAddr,
+ result: ethHash,
+ err: errors.New(`DNS name not found: "swarm.eth"`),
+ },
+ {
+ desc: "Two TLD resolvers, both resolve, returns resolved address",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(ethResolve, "eth"),
+ MultiResolverOptionWithResolver(testResolve, "test"),
+ ),
+ addr: testAddr,
+ result: testHash,
+ },
+ {
+ desc: "One TLD resolver, no default resolver, returns error for different TLD",
+ r: NewMultiResolver(
+ MultiResolverOptionWithResolver(ethResolve, "eth"),
+ ),
+ addr: testAddr,
+ err: NewNoResolverError("test"),
+ },
+ }
+ for _, x := range tests {
+ t.Run(x.desc, func(t *testing.T) {
+ res, err := x.r.Resolve(x.addr)
+ if err == nil {
+ if x.err != nil {
+ t.Fatalf("expected error %q, got result %q", x.err, res.Hex())
+ }
+ if res.Hex() != x.result {
+ t.Fatalf("expected result %q, got %q", x.result, res.Hex())
+ }
+ } else {
+ if x.err == nil {
+ t.Fatalf("expected no error, got %q", err)
+ }
+ if err.Error() != x.err.Error() {
+ t.Fatalf("expected error %q, got %q", x.err, err)
+ }
+ }
+ })
+ }
+}
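The table-driven test above depends on a newTestResolver helper defined earlier in api_test.go, outside this diff. Assuming it behaves the way the cases imply (resolving to a fixed hash, and failing with a DNS-not-found error when constructed with an empty string), it would look roughly like the sketch below; treat it as an illustration, not the actual helper.

// Illustrative stand-in for the helper that lives in the unshown part of api_test.go.
type testResolver struct {
	hash *common.Hash
}

func newTestResolver(addr string) *testResolver {
	r := &testResolver{}
	if addr != "" {
		hash := common.HexToHash(addr)
		r.hash = &hash
	}
	return r
}

func (t *testResolver) Resolve(addr string) (common.Hash, error) {
	if t.hash == nil {
		// matches the error expected by the "neither resolves" case above
		return common.Hash{}, fmt.Errorf("DNS name not found: %q", addr)
	}
	return *t.hash, nil
}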
diff --git a/swarm/api/config.go b/swarm/api/config.go
index 140c938ae..6b224140a 100644
--- a/swarm/api/config.go
+++ b/swarm/api/config.go
@@ -48,7 +48,7 @@ type Config struct {
*network.SyncParams
Contract common.Address
EnsRoot common.Address
- EnsApi string
+ EnsAPIs []string
Path string
ListenAddr string
Port string
@@ -75,7 +75,7 @@ func NewDefaultConfig() (self *Config) {
ListenAddr: DefaultHTTPListenAddr,
Port: DefaultHTTPPort,
Path: node.DefaultDataDir(),
- EnsApi: node.DefaultIPCEndpoint("geth"),
+ EnsAPIs: nil,
EnsRoot: ens.TestNetAddress,
NetworkId: network.NetworkId,
SwapEnabled: false,
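Since EnsApi (a single endpoint string) becomes EnsAPIs (a slice), a node can now be pointed at several ENS endpoints. A rough sketch of populating the new field follows; the endpoint values are placeholders, and how each entry is parsed (plain endpoint, TLD-qualified, and so on) is decided by the code that consumes EnsAPIs, not by this file.

cfg := api.NewDefaultConfig()
// Placeholder endpoints for illustration only.
cfg.EnsAPIs = []string{
	"/home/user/.ethereum/geth.ipc",
	"ws://127.0.0.1:8546",
}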
diff --git a/swarm/api/http/error.go b/swarm/api/http/error.go
index dbd97182f..9a65412cf 100644
--- a/swarm/api/http/error.go
+++ b/swarm/api/http/error.go
@@ -29,11 +29,19 @@ import (
"time"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/swarm/api"
)
//templateMap holds a mapping of an HTTP error code to a template
var templateMap map[int]*template.Template
+var caseErrors []CaseError
+
+//metrics variables
+var (
+ htmlCounter = metrics.NewRegisteredCounter("api.http.errorpage.html.count", nil)
+ jsonCounter = metrics.NewRegisteredCounter("api.http.errorpage.json.count", nil)
+)
//parameters needed for formatting the correct HTML page
type ErrorParams struct {
@@ -44,6 +52,13 @@ type ErrorParams struct {
Details template.HTML
}
+//CaseError is a custom error case struct used to store a validator and
+//additional error info to display with client responses.
+type CaseError struct {
+ Validator func(*Request) bool
+ Msg func(*Request) string
+}
+
//we init the error handling right on boot time, so lookup and http response is fast
func init() {
initErrHandling()
@@ -67,6 +82,29 @@ func initErrHandling() {
//assign formatted HTML to the code
templateMap[code] = template.Must(template.New(fmt.Sprintf("%d", code)).Parse(tname))
}
+
+ caseErrors = []CaseError{
+ {
+ Validator: func(r *Request) bool { return r.uri != nil && r.uri.Addr != "" && strings.HasPrefix(r.uri.Addr, "0x") },
+ Msg: func(r *Request) string {
+ uriCopy := r.uri
+ uriCopy.Addr = strings.TrimPrefix(uriCopy.Addr, "0x")
+ return fmt.Sprintf(`The requested hash seems to be prefixed with '0x'. You will be redirected to the correct URL within 5 seconds.<br/>
+ Please click <a href='%[1]s'>here</a> if your browser does not redirect you.<script>setTimeout("location.href='%[1]s';",5000);</script>`, "/"+uriCopy.String())
+ },
+ }}
+}
+
+//ValidateCaseErrors runs the request object through the registered validators
+//and returns additional error information to display when a condition is met
+func ValidateCaseErrors(r *Request) string {
+ for _, err := range caseErrors {
+ if err.Validator(r) {
+ return err.Msg(r)
+ }
+ }
+
+ return ""
}
//ShowMultipleChoices is used when a user requests a resource in a manifest which results
@@ -75,10 +113,10 @@ func initErrHandling() {
//For example, if the user requests bzz:/<hash>/read and that manifest contains entries
//"readme.md" and "readinglist.txt", a HTML page is returned with this two links.
//This only applies if the manifest has no default entry
-func ShowMultipleChoices(w http.ResponseWriter, r *http.Request, list api.ManifestList) {
+func ShowMultipleChoices(w http.ResponseWriter, r *Request, list api.ManifestList) {
msg := ""
if list.Entries == nil {
- ShowError(w, r, "Internal Server Error", http.StatusInternalServerError)
+ ShowError(w, r, "Could not resolve", http.StatusInternalServerError)
return
}
//make links relative
@@ -95,7 +133,7 @@ func ShowMultipleChoices(w http.ResponseWriter, r *http.Request, list api.Manife
//create clickable link for each entry
msg += "<a href='" + base + e.Path + "'>" + e.Path + "</a><br/>"
}
- respond(w, r, &ErrorParams{
+ respond(w, &r.Request, &ErrorParams{
Code: http.StatusMultipleChoices,
Details: template.HTML(msg),
Timestamp: time.Now().Format(time.RFC1123),
@@ -108,13 +146,15 @@ func ShowMultipleChoices(w http.ResponseWriter, r *http.Request, list api.Manife
//The function just takes a string message which will be displayed in the error page.
//The code is used to evaluate which template will be displayed
//(and return the correct HTTP status code)
-func ShowError(w http.ResponseWriter, r *http.Request, msg string, code int) {
+func ShowError(w http.ResponseWriter, r *Request, msg string, code int) {
+ additionalMessage := ValidateCaseErrors(r)
if code == http.StatusInternalServerError {
log.Error(msg)
}
- respond(w, r, &ErrorParams{
+ respond(w, &r.Request, &ErrorParams{
Code: code,
Msg: msg,
+ Details: template.HTML(additionalMessage),
Timestamp: time.Now().Format(time.RFC1123),
template: getTemplate(code),
})
@@ -132,6 +172,7 @@ func respond(w http.ResponseWriter, r *http.Request, params *ErrorParams) {
//return a HTML page
func respondHtml(w http.ResponseWriter, params *ErrorParams) {
+ htmlCounter.Inc(1)
err := params.template.Execute(w, params)
if err != nil {
log.Error(err.Error())
@@ -140,6 +181,7 @@ func respondHtml(w http.ResponseWriter, params *ErrorParams) {
//return JSON
func respondJson(w http.ResponseWriter, params *ErrorParams) {
+ jsonCounter.Inc(1)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(params)
}
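To illustrate the CaseError mechanism added to error.go above: each case is just a predicate on the parsed request plus a message formatter, checked in order by ValidateCaseErrors. A hypothetical extra case (a sketch only, not part of the change) would be appended inside initErrHandling like this:

// Hypothetical additional case: flag addresses that are neither a full
// 64-character content hash nor a dotted name. Purely illustrative.
caseErrors = append(caseErrors, CaseError{
	Validator: func(r *Request) bool {
		return r.uri != nil && r.uri.Addr != "" &&
			len(r.uri.Addr) != 64 && !strings.Contains(r.uri.Addr, ".")
	},
	Msg: func(r *Request) string {
		return fmt.Sprintf("'%s' is neither a 64-character content hash nor a registered name.", r.uri.Addr)
	},
})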
diff --git a/swarm/api/http/error_templates.go b/swarm/api/http/error_templates.go
index 2c20ba8f9..cc9b996ba 100644
--- a/swarm/api/http/error_templates.go
+++ b/swarm/api/http/error_templates.go
@@ -37,7 +37,7 @@ func GetGenericErrorPage() string {
<meta http-equiv="X-UA-Compatible" ww="chrome=1">
<meta name="description" content="Ethereum/Swarm error page">
<meta property="og:url" content="https://swarm-gateways.net/bzz:/theswarm.eth">
-
+ <link rel="shortcut icon" type="image/x-icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTduKTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0Hua
Jx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2gqli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+
uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg=="/>
<style>
body, div, header, footer {
@@ -89,8 +89,8 @@ func GetGenericErrorPage() string {
display: block;
margin: 0 auto;
/* width: 50%; */
- min-height: 60vh;
- max-height: 60vh;
+ min-height: 60vh;
+ max-height: 60vh;
padding: 50px 20px;
opacity: 0.6;
background-color: #A9F5BF;
@@ -168,6 +168,11 @@ func GetGenericErrorPage() string {
{{.Msg}}
</td>
</tr>
+ <tr>
+ <td class="value">
+ {{.Details}}
+ </td>
+ </tr>
<tr>
<td class="key">
@@ -212,7 +217,7 @@ func GetNotFoundErrorPage() string {
<meta http-equiv="X-UA-Compatible" ww="chrome=1">
<meta name="description" content="Ethereum/Swarm error page">
<meta property="og:url" content="https://swarm-gateways.net/bzz:/theswarm.eth">
-
+ <link rel="shortcut icon" type="image/x-icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTduKTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0Hua
Jx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2gqli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+
uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg=="/>
<style>
body, div, header, footer {
@@ -264,8 +269,8 @@ func GetNotFoundErrorPage() string {
display: block;
margin: 0 auto;
/* width: 50%; */
- min-height: 60vh;
- max-height: 60vh;
+ min-height: 60vh;
+ max-height: 60vh;
padding: 50px 20px;
opacity: 0.6;
background-color: #A9F5BF;
@@ -329,7 +334,7 @@ func GetNotFoundErrorPage() string {
<table>
<thead>
<td style="height: 150px; font-size: 1.3em; color: black; font-weight: bold">
- Unfortunately, the resource you were trying to access could not be found on swarm.
+ Unfortunately, the resource you were trying to access could not be found on swarm.
</td>
</thead>
<tbody>
@@ -342,6 +347,12 @@ func GetNotFoundErrorPage() string {
{{.Msg}}
</td>
</tr>
+ <tr>
+ <td class="value">
+ {{.Details}}
+ </td>
+ </tr>
+
<tr>
<td class="key">
@@ -388,7 +399,7 @@ func GetMultipleChoicesErrorPage() string {
<meta http-equiv="X-UA-Compatible" ww="chrome=1">
<meta name="description" content="Ethereum/Swarm multiple options page">
<meta property="og:url" content="https://swarm-gateways.net/bzz:/theswarm.eth">
-
+ <link rel="shortcut icon" type="image/x-icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTduKTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0Hua
Jx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2gqli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+
uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg=="/>
<style>
body, div, header, footer {
@@ -440,8 +451,8 @@ func GetMultipleChoicesErrorPage() string {
display: block;
margin: 0 auto;
/* width: 50%; */
- min-height: 60vh;
- max-height: 60vh;
+ min-height: 60vh;
+ max-height: 60vh;
padding: 50px 20px;
opacity: 0.6;
background-color: #A9F5BF;
diff --git a/swarm/api/http/error_test.go b/swarm/api/http/error_test.go
index ed52bafbd..dc545722e 100644
--- a/swarm/api/http/error_test.go
+++ b/swarm/api/http/error_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -18,12 +18,13 @@ package http_test
import (
"encoding/json"
- "golang.org/x/net/html"
"io/ioutil"
"net/http"
"strings"
"testing"
+ "golang.org/x/net/html"
+
"github.com/ethereum/go-ethereum/swarm/testutil"
)
@@ -96,8 +97,37 @@ func Test500Page(t *testing.T) {
defer resp.Body.Close()
respbody, err = ioutil.ReadAll(resp.Body)
- if resp.StatusCode != 500 || !strings.Contains(string(respbody), "500") {
- t.Fatalf("Invalid Status Code received, expected 500, got %d", resp.StatusCode)
+ if resp.StatusCode != 404 {
+ t.Fatalf("Invalid Status Code received, expected 404, got %d", resp.StatusCode)
+ }
+
+ _, err = html.Parse(strings.NewReader(string(respbody)))
+ if err != nil {
+ t.Fatalf("HTML validation failed for error page returned!")
+ }
+}
+func Test500PageWith0xHashPrefix(t *testing.T) {
+ srv := testutil.NewTestSwarmServer(t)
+ defer srv.Close()
+
+ var resp *http.Response
+ var respbody []byte
+
+ url := srv.URL + "/bzz:/0xthisShouldFailWith500CodeAndAHelpfulMessage"
+ resp, err := http.Get(url)
+
+ if err != nil {
+ t.Fatalf("Request failed: %v", err)
+ }
+ defer resp.Body.Close()
+ respbody, err = ioutil.ReadAll(resp.Body)
+
+ if resp.StatusCode != 404 {
+ t.Fatalf("Invalid Status Code received, expected 404, got %d", resp.StatusCode)
+ }
+
+ if !strings.Contains(string(respbody), "The requested hash seems to be prefixed with") {
+ t.Fatalf("Did not receive the expected error message")
}
_, err = html.Parse(strings.NewReader(string(respbody)))
@@ -127,8 +157,8 @@ func TestJsonResponse(t *testing.T) {
defer resp.Body.Close()
respbody, err = ioutil.ReadAll(resp.Body)
- if resp.StatusCode != 500 {
- t.Fatalf("Invalid Status Code received, expected 500, got %d", resp.StatusCode)
+ if resp.StatusCode != 404 {
+ t.Fatalf("Invalid Status Code received, expected 404, got %d", resp.StatusCode)
}
if !isJSON(string(respbody)) {
diff --git a/swarm/api/http/server.go b/swarm/api/http/server.go
index 74341899d..b8e7436cf 100644
--- a/swarm/api/http/server.go
+++ b/swarm/api/http/server.go
@@ -37,11 +37,35 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/swarm/api"
"github.com/ethereum/go-ethereum/swarm/storage"
"github.com/rs/cors"
)
+//setup metrics
+var (
+ postRawCount = metrics.NewRegisteredCounter("api.http.post.raw.count", nil)
+ postRawFail = metrics.NewRegisteredCounter("api.http.post.raw.fail", nil)
+ postFilesCount = metrics.NewRegisteredCounter("api.http.post.files.count", nil)
+ postFilesFail = metrics.NewRegisteredCounter("api.http.post.files.fail", nil)
+ deleteCount = metrics.NewRegisteredCounter("api.http.delete.count", nil)
+ deleteFail = metrics.NewRegisteredCounter("api.http.delete.fail", nil)
+ getCount = metrics.NewRegisteredCounter("api.http.get.count", nil)
+ getFail = metrics.NewRegisteredCounter("api.http.get.fail", nil)
+ getFileCount = metrics.NewRegisteredCounter("api.http.get.file.count", nil)
+ getFileNotFound = metrics.NewRegisteredCounter("api.http.get.file.notfound", nil)
+ getFileFail = metrics.NewRegisteredCounter("api.http.get.file.fail", nil)
+ getFilesCount = metrics.NewRegisteredCounter("api.http.get.files.count", nil)
+ getFilesFail = metrics.NewRegisteredCounter("api.http.get.files.fail", nil)
+ getListCount = metrics.NewRegisteredCounter("api.http.get.list.count", nil)
+ getListFail = metrics.NewRegisteredCounter("api.http.get.list.fail", nil)
+ requestCount = metrics.NewRegisteredCounter("http.request.count", nil)
+ htmlRequestCount = metrics.NewRegisteredCounter("http.request.html.count", nil)
+ jsonRequestCount = metrics.NewRegisteredCounter("http.request.json.count", nil)
+ requestTimer = metrics.NewRegisteredResettingTimer("http.request.time", nil)
+)
+
// ServerConfig is the basic configuration needed for the HTTP server and also
// includes CORS settings.
type ServerConfig struct {
@@ -89,18 +113,22 @@ type Request struct {
// HandlePostRaw handles a POST request to a raw bzz-raw:/ URI, stores the request
// body in swarm and returns the resulting storage key as a text/plain response
func (s *Server) HandlePostRaw(w http.ResponseWriter, r *Request) {
+ postRawCount.Inc(1)
if r.uri.Path != "" {
+ postRawFail.Inc(1)
s.BadRequest(w, r, "raw POST request cannot contain a path")
return
}
if r.Header.Get("Content-Length") == "" {
+ postRawFail.Inc(1)
s.BadRequest(w, r, "missing Content-Length header in request")
return
}
key, err := s.api.Store(r.Body, r.ContentLength, nil)
if err != nil {
+ postRawFail.Inc(1)
s.Error(w, r, err)
return
}
@@ -117,8 +145,10 @@ func (s *Server) HandlePostRaw(w http.ResponseWriter, r *Request) {
// existing manifest or to a new manifest under <path> and returns the
// resulting manifest hash as a text/plain response
func (s *Server) HandlePostFiles(w http.ResponseWriter, r *Request) {
+ postFilesCount.Inc(1)
contentType, params, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
+ postFilesFail.Inc(1)
s.BadRequest(w, r, err.Error())
return
}
@@ -127,12 +157,14 @@ func (s *Server) HandlePostFiles(w http.ResponseWriter, r *Request) {
if r.uri.Addr != "" {
key, err = s.api.Resolve(r.uri)
if err != nil {
+ postFilesFail.Inc(1)
s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
} else {
key, err = s.api.NewManifest()
if err != nil {
+ postFilesFail.Inc(1)
s.Error(w, r, err)
return
}
@@ -152,6 +184,7 @@ func (s *Server) HandlePostFiles(w http.ResponseWriter, r *Request) {
}
})
if err != nil {
+ postFilesFail.Inc(1)
s.Error(w, r, fmt.Errorf("error creating manifest: %s", err))
return
}
@@ -270,8 +303,10 @@ func (s *Server) handleDirectUpload(req *Request, mw *api.ManifestWriter) error
// <path> from <manifest> and returns the resulting manifest hash as a
// text/plain response
func (s *Server) HandleDelete(w http.ResponseWriter, r *Request) {
+ deleteCount.Inc(1)
key, err := s.api.Resolve(r.uri)
if err != nil {
+ deleteFail.Inc(1)
s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
@@ -281,6 +316,7 @@ func (s *Server) HandleDelete(w http.ResponseWriter, r *Request) {
return mw.RemoveEntry(r.uri.Path)
})
if err != nil {
+ deleteFail.Inc(1)
s.Error(w, r, fmt.Errorf("error updating manifest: %s", err))
return
}
@@ -296,9 +332,11 @@ func (s *Server) HandleDelete(w http.ResponseWriter, r *Request) {
// - bzz-hash://<key> and responds with the hash of the content stored
// at the given storage key as a text/plain response
func (s *Server) HandleGet(w http.ResponseWriter, r *Request) {
+ getCount.Inc(1)
key, err := s.api.Resolve(r.uri)
if err != nil {
- s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
+ getFail.Inc(1)
+ s.NotFound(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
@@ -307,6 +345,7 @@ func (s *Server) HandleGet(w http.ResponseWriter, r *Request) {
if r.uri.Path != "" {
walker, err := s.api.NewManifestWalker(key, nil)
if err != nil {
+ getFail.Inc(1)
s.BadRequest(w, r, fmt.Sprintf("%s is not a manifest", key))
return
}
@@ -335,6 +374,7 @@ func (s *Server) HandleGet(w http.ResponseWriter, r *Request) {
return api.SkipManifest
})
if entry == nil {
+ getFail.Inc(1)
s.NotFound(w, r, fmt.Errorf("Manifest entry could not be loaded"))
return
}
@@ -344,12 +384,13 @@ func (s *Server) HandleGet(w http.ResponseWriter, r *Request) {
// check the root chunk exists by retrieving the file's size
reader := s.api.Retrieve(key)
if _, err := reader.Size(nil); err != nil {
+ getFail.Inc(1)
s.NotFound(w, r, fmt.Errorf("Root chunk not found %s: %s", key, err))
return
}
switch {
- case r.uri.Raw():
+ case r.uri.Raw() || r.uri.DeprecatedRaw():
// allow the request to overwrite the content type using a query
// parameter
contentType := "application/octet-stream"
@@ -370,19 +411,23 @@ func (s *Server) HandleGet(w http.ResponseWriter, r *Request) {
// header of "application/x-tar" and returns a tar stream of all files
// contained in the manifest
func (s *Server) HandleGetFiles(w http.ResponseWriter, r *Request) {
+ getFilesCount.Inc(1)
if r.uri.Path != "" {
+ getFilesFail.Inc(1)
s.BadRequest(w, r, "files request cannot contain a path")
return
}
key, err := s.api.Resolve(r.uri)
if err != nil {
- s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
+ getFilesFail.Inc(1)
+ s.NotFound(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
walker, err := s.api.NewManifestWalker(key, nil)
if err != nil {
+ getFilesFail.Inc(1)
s.Error(w, r, err)
return
}
@@ -430,6 +475,7 @@ func (s *Server) HandleGetFiles(w http.ResponseWriter, r *Request) {
return nil
})
if err != nil {
+ getFilesFail.Inc(1)
s.logError("error generating tar stream: %s", err)
}
}
@@ -438,6 +484,7 @@ func (s *Server) HandleGetFiles(w http.ResponseWriter, r *Request) {
// a list of all files contained in <manifest> under <path> grouped into
// common prefixes using "/" as a delimiter
func (s *Server) HandleGetList(w http.ResponseWriter, r *Request) {
+ getListCount.Inc(1)
// ensure the root path has a trailing slash so that relative URLs work
if r.uri.Path == "" && !strings.HasSuffix(r.URL.Path, "/") {
http.Redirect(w, &r.Request, r.URL.Path+"/", http.StatusMovedPermanently)
@@ -446,13 +493,15 @@ func (s *Server) HandleGetList(w http.ResponseWriter, r *Request) {
key, err := s.api.Resolve(r.uri)
if err != nil {
- s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
+ getListFail.Inc(1)
+ s.NotFound(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
list, err := s.getManifestList(key, r.uri.Path)
if err != nil {
+ getListFail.Inc(1)
s.Error(w, r, err)
return
}
@@ -470,6 +519,7 @@ func (s *Server) HandleGetList(w http.ResponseWriter, r *Request) {
List: &list,
})
if err != nil {
+ getListFail.Inc(1)
s.logError("error rendering list HTML: %s", err)
}
return
@@ -538,6 +588,7 @@ func (s *Server) getManifestList(key storage.Key, prefix string) (list api.Manif
// HandleGetFile handles a GET request to bzz://<manifest>/<path> and responds
// with the content of the file at <path> from the given <manifest>
func (s *Server) HandleGetFile(w http.ResponseWriter, r *Request) {
+ getFileCount.Inc(1)
// ensure the root path has a trailing slash so that relative URLs work
if r.uri.Path == "" && !strings.HasSuffix(r.URL.Path, "/") {
http.Redirect(w, &r.Request, r.URL.Path+"/", http.StatusMovedPermanently)
@@ -546,7 +597,8 @@ func (s *Server) HandleGetFile(w http.ResponseWriter, r *Request) {
key, err := s.api.Resolve(r.uri)
if err != nil {
- s.Error(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
+ getFileFail.Inc(1)
+ s.NotFound(w, r, fmt.Errorf("error resolving %s: %s", r.uri.Addr, err))
return
}
@@ -554,8 +606,10 @@ func (s *Server) HandleGetFile(w http.ResponseWriter, r *Request) {
if err != nil {
switch status {
case http.StatusNotFound:
+ getFileNotFound.Inc(1)
s.NotFound(w, r, err)
default:
+ getFileFail.Inc(1)
s.Error(w, r, err)
}
return
@@ -567,18 +621,20 @@ func (s *Server) HandleGetFile(w http.ResponseWriter, r *Request) {
list, err := s.getManifestList(key, r.uri.Path)
if err != nil {
+ getFileFail.Inc(1)
s.Error(w, r, err)
return
}
s.logDebug(fmt.Sprintf("Multiple choices! --> %v", list))
//show a nice page links to available entries
- ShowMultipleChoices(w, &r.Request, list)
+ ShowMultipleChoices(w, r, list)
return
}
// check the root chunk exists by retrieving the file's size
if _, err := reader.Size(nil); err != nil {
+ getFileNotFound.Inc(1)
s.NotFound(w, r, fmt.Errorf("File not found %s: %s", r.uri, err))
return
}
@@ -589,8 +645,30 @@ func (s *Server) HandleGetFile(w http.ResponseWriter, r *Request) {
}
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ if metrics.Enabled {
+ //The request count and request timer increments already check metrics.Enabled
+ //themselves. Nevertheless, we introduce the if here because we only look into
+ //the header to tell what request type it is (json/html), so let's take
+ //advantage of it and group all metrics-related work here.
+ requestCount.Inc(1)
+ defer requestTimer.UpdateSince(time.Now())
+ if r.Header.Get("Accept") == "application/json" {
+ jsonRequestCount.Inc(1)
+ } else {
+ htmlRequestCount.Inc(1)
+ }
+ }
s.logDebug("HTTP %s request URL: '%s', Host: '%s', Path: '%s', Referer: '%s', Accept: '%s'", r.Method, r.RequestURI, r.URL.Host, r.URL.Path, r.Referer(), r.Header.Get("Accept"))
+ if r.RequestURI == "/" && strings.Contains(r.Header.Get("Accept"), "text/html") {
+
+ err := landingPageTemplate.Execute(w, nil)
+ if err != nil {
+ s.logError("error rendering landing page: %s", err)
+ }
+ return
+ }
+
uri, err := api.Parse(strings.TrimLeft(r.URL.Path, "/"))
req := &Request{Request: *r, uri: uri}
if err != nil {
@@ -615,7 +693,7 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// strictly a traditional PUT request which replaces content
// at a URI, and POST is more ubiquitous)
if uri.Raw() || uri.DeprecatedRaw() {
- ShowError(w, r, fmt.Sprintf("No PUT to %s allowed.", uri), http.StatusBadRequest)
+ ShowError(w, req, fmt.Sprintf("No PUT to %s allowed.", uri), http.StatusBadRequest)
return
} else {
s.HandlePostFiles(w, req)
@@ -623,7 +701,7 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
case "DELETE":
if uri.Raw() || uri.DeprecatedRaw() {
- ShowError(w, r, fmt.Sprintf("No DELETE to %s allowed.", uri), http.StatusBadRequest)
+ ShowError(w, req, fmt.Sprintf("No DELETE to %s allowed.", uri), http.StatusBadRequest)
return
}
s.HandleDelete(w, req)
@@ -647,7 +725,7 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
s.HandleGetFile(w, req)
default:
- ShowError(w, r, fmt.Sprintf("Method "+r.Method+" is not supported.", uri), http.StatusMethodNotAllowed)
+ ShowError(w, req, fmt.Sprintf("Method %s is not supported.", r.Method), http.StatusMethodNotAllowed)
}
}
@@ -679,13 +757,13 @@ func (s *Server) logError(format string, v ...interface{}) {
}
func (s *Server) BadRequest(w http.ResponseWriter, r *Request, reason string) {
- ShowError(w, &r.Request, fmt.Sprintf("Bad request %s %s: %s", r.Method, r.uri, reason), http.StatusBadRequest)
+ ShowError(w, r, fmt.Sprintf("Bad request %s %s: %s", r.Request.Method, r.uri, reason), http.StatusBadRequest)
}
func (s *Server) Error(w http.ResponseWriter, r *Request, err error) {
- ShowError(w, &r.Request, fmt.Sprintf("Error serving %s %s: %s", r.Method, r.uri, err), http.StatusInternalServerError)
+ ShowError(w, r, fmt.Sprintf("Error serving %s %s: %s", r.Request.Method, r.uri, err), http.StatusInternalServerError)
}
func (s *Server) NotFound(w http.ResponseWriter, r *Request, err error) {
- ShowError(w, &r.Request, fmt.Sprintf("NOT FOUND error serving %s %s: %s", r.Method, r.uri, err), http.StatusNotFound)
+ ShowError(w, r, fmt.Sprintf("NOT FOUND error serving %s %s: %s", r.Request.Method, r.uri, err), http.StatusNotFound)
}
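
For readers unfamiliar with the counter and timer calls added above, the following standalone sketch shows the same pattern with the go-ethereum metrics package; the metric names, handler and port are placeholders, not the real swarm ones.

package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/ethereum/go-ethereum/metrics"
)

var (
	// registered once at package level, mirroring getFileCount / getFileFail above;
	// the "demo.*" names are placeholders
	demoCount = metrics.NewRegisteredCounter("demo.request.count", nil)
	demoFail  = metrics.NewRegisteredCounter("demo.request.fail", nil)
	demoTimer = metrics.NewRegisteredTimer("demo.request.time", nil)
)

func handleDemo(w http.ResponseWriter, r *http.Request) {
	// when metrics.Enabled is false (it is typically switched on by the --metrics
	// flag), the registry hands out nil counters and these calls are no-ops
	demoCount.Inc(1)
	defer demoTimer.UpdateSince(time.Now())

	if r.URL.Query().Get("fail") != "" {
		demoFail.Inc(1) // count the failure branch separately, as the handlers above do
		http.Error(w, "simulated failure", http.StatusInternalServerError)
		return
	}
	fmt.Fprintln(w, "ok")
}

func main() {
	http.HandleFunc("/demo", handleDemo)
	http.ListenAndServe(":8080", nil)
}
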
diff --git a/swarm/api/http/server_test.go b/swarm/api/http/server_test.go
index 305d5cf7d..b2efc0ea1 100644
--- a/swarm/api/http/server_test.go
+++ b/swarm/api/http/server_test.go
@@ -144,17 +144,17 @@ func TestBzzGetPath(t *testing.T) {
{
path: "/",
json: `{"common_prefixes":["a/"]}`,
- html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n <title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\t<tr>\n\t <td><a href=\"a/\">a/</a></td>\n\t <td>DIR</td>\n\t <td>-</td>\n\t</tr>\n \n\n \n </table>\n <hr>\n</body>\n",
+ html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n\t\t<link rel=\"shortcut icon\" type=\"image/x-icon\" href=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTdu
KTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0HuaJx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2g
qli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg==\"/>\n\t<title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\t<tr>\n\t <td><a href=\"a/\">a/</a></td>\n\t <td>DIR</td>\n\t <td>-</td>\n\t</tr>\n \n\n \n </table>\n <hr>\n</body>\n",
},
{
path: "/a/",
json: `{"common_prefixes":["a/b/"],"entries":[{"hash":"011b4d03dd8c01f1049143cf9c4c817e4b167f1d1b83e5c6f0f10d89ba1e7bce","path":"a/a","mod_time":"0001-01-01T00:00:00Z"}]}`,
- html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n <title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\t<tr>\n\t <td><a href=\"b/\">b/</a></td>\n\t <td>DIR</td>\n\t <td>-</td>\n\t</tr>\n \n\n \n\t<tr>\n\t <td><a href=\"a\">a</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n </table>\n <hr>\n</body>\n",
+ html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n\t\t<link rel=\"shortcut icon\" type=\"image/x-icon\" href=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTdu
KTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0HuaJx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2g
qli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg==\"/>\n\t<title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\t<tr>\n\t <td><a href=\"b/\">b/</a></td>\n\t <td>DIR</td>\n\t <td>-</td>\n\t</tr>\n \n\n \n\t<tr>\n\t <td><a href=\"a\">a</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n </table>\n <hr>\n</body>\n",
},
{
path: "/a/b/",
json: `{"entries":[{"hash":"011b4d03dd8c01f1049143cf9c4c817e4b167f1d1b83e5c6f0f10d89ba1e7bce","path":"a/b/b","mod_time":"0001-01-01T00:00:00Z"},{"hash":"011b4d03dd8c01f1049143cf9c4c817e4b167f1d1b83e5c6f0f10d89ba1e7bce","path":"a/b/c","mod_time":"0001-01-01T00:00:00Z"}]}`,
- html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n <title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/b/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/b/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\n \n\t<tr>\n\t <td><a href=\"b\">b</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n\t<tr>\n\t <td><a href=\"c\">c</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n </table>\n <hr>\n</body>\n",
+ html: "<!DOCTYPE html>\n<html>\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n\t\t<link rel=\"shortcut icon\" type=\"image/x-icon\" href=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTdu
KTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0HuaJx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2g
qli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg==\"/>\n\t<title>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/b/</title>\n</head>\n\n<body>\n <h1>Swarm index of bzz:/262e5c08c03c2789b6daef487dfa14b4d132f5340d781a3ecb1d5122ab65640c/a/b/</h1>\n <hr>\n <table>\n <thead>\n <tr>\n\t<th>Path</th>\n\t<th>Type</th>\n\t<th>Size</th>\n </tr>\n </thead>\n\n <tbody>\n \n\n \n\t<tr>\n\t <td><a href=\"b\">b</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n\t<tr>\n\t <td><a href=\"c\">c</a></td>\n\t <td></td>\n\t <td>0</td>\n\t</tr>\n \n </table>\n <hr>\n</body>\n",
},
{
path: "/x",
diff --git a/swarm/api/http/templates.go b/swarm/api/http/templates.go
index 53ce7b5a2..cd9d21289 100644
--- a/swarm/api/http/templates.go
+++ b/swarm/api/http/templates.go
@@ -34,7 +34,8 @@ var htmlListTemplate = template.Must(template.New("html-list").Funcs(template.Fu
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
- <title>Swarm index of {{ .URI }}</title>
+ <link rel="shortcut icon" type="image/x-icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHsAAAB5CAYAAAAZD150AAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KTMInWQAAFzFJREFUeAHtnXuwJFV9x7vnzrKwIC95w/ImYFkxlZgiGswCIiCGxPhHCEIwmIemUqYwJkYrJmXlUZRa+IpAWYYEk4qJD4gJGAmoiBIxMQFRQCkXdhdCeL+fYdk7nc+ne87dvnPn0TPTPdNzmd/W2X6fc76/7/n9zjm/PrcniuYy18BcA3MNzDUw18BcA3MNzDUw18BcA3MNrGYNLERroyMAeNQ0QcbTLPxFUfaOOx4Wbd12ThRHp0TJ4s5RFF8RtVoXg/2BSeOfk12lxtc0fjva1jibItZEDf4li3u3i3sqipOPRIvRp6ssvjPvhc4Tq/y4Ab71pEXS1oqw7hw1mz8XJc2LoiQ6iTJaaTkxth0l69jXwNayOZ10AvubSQ+RrFOl8mKy7EOiRuN3UfjrUfItUaN1SbQt+lqp2m02T4payS9HSfxK8k1IGdEWkln2Xuzldd7keBunrsO1f4r9b5Mqk3zBlRUy9Ywb0TtQ6O9Tjx1JkvA8SSXfiJK5Fj1MGkf2jBYWzo9a8Y+TiQRaxnLpTnb+HuvzDerzLk4+mb9Q1v5qduPrUPvxURx/DiWeicK2W1lKdCT2Q7n+VqzuOei5m+Nnh1TsPtGahbOiaOFCrHn/vs8ud+PdbrWLOZr6vIX6PEF97uH4uW43jnpudVp2Mzo1ajV+AwPbgGJUYr4/1Or+jxSwu90R5f6QQdPnaBK603zD4LCLNJsnkusfcUXXrKfoL4MtOzxvfXegPrdGjeRjlHEVx4PrE57us11tlr0b/fKlKIq+OTqsjXulS80sO5Dtbdugfnf4Py5qxGenxEeppbez6LJpte6LFuIWlvhT3J/Pq8vNnBps2eE562t97A608G+xnZMdtMN2H+z3LSj+n9g3cNGN4Nzt9tddCZK0ncnnrChuHBElySaOH8k/mNtf5Pr3onWtL0WthfWUuA/XGGX3KHsw2ZYtqc9Rs4dIW2m4mxjwXd8+z2Y8mXXLXoDk38Eaz4c7+2VJzLvsXtrpRXa43+svh/TTIf1QSP0Rx0+Ei8u2L0RPM6i6hn77e9SFhsJzGWnLG9wgshvpeOEJnn+6nX+D8u+ck51p43hIvhyS38DhHiQJKiqDyDYfG42WeiAk3oO93uLJ3tK6P0pa1zK/vj5aSI6FtF15Zjvhvcle5F69xzMkywxdAk+US7aDgVmSbMTaaFwM0VdS8QNIQTll4TA/SdKd3kvS0oqWgRveeku0uO2NjM4v5TmDJda52/MSqyXfy3YrqXJxEDArcjB92Dvh4RdIjoC1hLJFUp6BAPMOBHQjalC5SdR6wfj3FVFjDV1Bcg698S7ka172y0+3y5DwiRncxAoC1OjSiN6LJTNQSd5MJruQnDqVLbrTB0mPkXEgetwy7oH0T2LppzOrv4bMnif/B0hPsS/pozSkketUZ8s2KPIq3OH5ONWjQVgFwSpO69KajVpVpfzHom3b3hctNHajwR5OOUbyLGt7n85B1VJPspvRydFi/Hba/gYUoFLKJlqPpmXx9ikdBRcZsHH7WLIjZT1F472TXHaFZ6dqO5Gsx0SkbmTvS7/8caYbG1CMJGt1Vciz5O5UKuRflUV31t1yJPdx2jDlp2MPw6wTsfA6ke1U47Pg1mVXqfwXyP1Bypg2dgiOXcDwKJgPZPsSknGPyoiv0wAtZlz6CERsAXB4v1tV/apsTFS/sIgP7xLfTdrMPhbfc6pWONNeN1alzF7lDTqfuTkHS3F0FzcbbHixCG/cUtLvAHBZs4Fluqsb2fnK+XpBS6fVp4Mo+7q6WGS+nmXvE8SJf0imDxKGlfTS3Pq0+61BipJc+9j7gOzI1bCoS3tKUwB51U3st9cwar+Kgeq/sR8GkWPXs+5kbwcYpy/yXWQg2U5brPtqI905yEPRttbfgm0jqVSZHbID7CyUeRc078kpo2lrSFWSbt6lWRd5dYreizdcLI1K4m9F2xa/zHEleGaP7Kzftj933ZgDuZcwc9W9q7QylWRevLeKd+d/lgMnt3I84M0XdwwnNlQCRvHVkOwiBaZh1ckskh20kfXnCQpKI1Opa9fFlxWRWsvSpoPIex1pbxIWxyvVVuv9lGFAZlxp0ohc8PgZMqpkgWFnBWeZ7DwWB3H/g/IMTGjlvoceRbDmBuu/kj2waBf0x9o2/ysvYN1n8ELmtbzFugDHrrsddlWqgy/z3Bw1WeGyNbrNjCclFl4XIagSn0Fl7IdHEQc3W0nh9aQvG2zMna7dt1sheBHK8R5IiPdlcwBbYtc5iZdWwNh3MyuITyFtIN7F0qTCrp0GGB8TLbQuixaTf+HJ+3MlTGQ3a7MTKWpgIQ0W8H2Bu1B4KaIFaeW7k/I4bRBbOBcauv0yHiE+iPvDOS7nJE7oV00rRFd8O57gPK78YMXV5Sesg3GNKgd7y0vsOKpzUKWjqkMfZoM43Xu20CGz3nw2TuOSxqGQfVhPovP3r9x3FejReKSv8gLnQ1w+ZuUtS2csf2pEW4vuLXmpfhPdGdeNd6us1sRy33RlyHPs69bXsIjwWQjGktN+WXff6eo5lZPMjfda4xZI/Ele5JxK3jSc5Cae7uYJcplOfjfv3iZf+vISy3bjy3PffkQ/2zgaeosP4nq78e25bt9Tp48S/TqZba1i+6tlNL5d1Sv3Mut23VcUP8hlRu4t57YOxPbhWNL7W/bKPHud0fpd9eJfiNSui1ztZKtw5rCxUyTduKTuQMK1J3eRHoBwR+Au8ldGIT00JgM8z5KH/XIt9VrLSqn1EgSlx/eQj4v7guS7LfdZAJjcDcf3YYjHQLVTrmEJd9WLixTDc/kyQrm12K42slU0wQ8jXLELIIq+FtW138zgah8oO5DkXN+8AoHsLhPPO4WzITkQqy3B+VqvFrKDsiE4NmDie2DPhfN5zL32mZm0Hua
Jx0l7ECo9hBs7FwSanyQb3rRfLtqYuHX6slrIJmoW/y/qDNOjYUjOs+Bz9rkP82UG30IdSjogd4MNgYHekoxazlIGk9yZdbIZdMX2l1pz+YqPkzvo0/kbrnivNskzZcmdDWkWyXaEjZtOp1H2mVpi+URnmlI//M1XYhRuDclpmttefTmX6iuzSDZz5XTwFbRaFdGd+dtFmCTbvrzqcimiXJkFslWq1mu/bFDEgdE0Fe1oX9K1cufs06wLxReXupPdLShSB+Xqxp1yOeqX8MHxdW6attSZbIMi9pX2y4HgsJ223kL5DtgC6evYr9OLpVDHpW3dyNaS80ERLahuBC8pL7cj6TZKrVz3rl5rV+86kU28Or6Tce56FOUaL5VVO4VRp15iXXXriqtlbLS1knq5nVZyDV8KJDqVuAhAK69CxGw4tIrpkx+9uZFlR39A/s7/ayV1tZyXQvovwcdPoC0XEYbIWBnKY+qU8KYrDXWWkZ+NR690G/H1L2DP3ykj0yryqCvZAavfUTkNZ34cJ3SRZVhjmWQzKIs3QvKlNMfvUj8Ha7WVupMdFHdU1Gy8DapdQDgu4WWRjbdJ/oEIwGXUyThA7aUOfbarPwdZhMt8voJr541T+rUCnxmV9HH6bAe0z7DG7N9x3H9BDXTZReqxJ/e5eGKqMk2y/bq+X0F6P256X1T2X2iiv4W0WndA+k2QzouP5Cjux0oLKTuv5FHIdrC4EyRfB8kfpcR/5dgR9yDxC4zvaWPcv42xzPHHoPKXXZ+OG1+ITuNN0gXUZD+SS3lcAcr3OpN30/ddx3ERa/GjtOfy3CvS5/mvoIzgxvkcxkLrI4wa/HuvYpJ+GTn+MDeLUc/lHyeI8b1g/BrHRTByW3kyabJfA0HvAOepQHDAZTDClm7SehzwXM3fP13E/rdJReRl5MmH5ZbWbJtnPylCtnrBZcd3UcVroOlyjgflG8rky8bpLxbkMeqxnHdnHiJinXmrdSHHN4SHJrGdFNmHo4A/h+RXA8p1XvmAQyA74NXNYu3xDSiEZ9Lf0AjXem13IHZ1DKtLzsBeDuYmX5b0kkFk2zXwfPIZSP4q+w/3yqjjvBj/lOd+lvNiFFeQQHY4Dhj/A4x/xslN4UKV26rJXktbfifEkbA9NNEFTCfZ+Vt4JrkYm9LSi/SRMf35aTxzOvdLWjfpRzZ/MpT8J+V9nAcZFxSSHcB4HvB+j7u13G4YO8nOZyzGT1LmJzhZBGP+2aH2bWFVyDpeCZxJoOFvUMIbKSBvyZ3l6R5NPST268M/jxr96sIWbgohye73J8lGCLsBK/Odc7d14WLehRRI8djI1818uf9CrPmzHA+aHXALXc5C9Cs8dwkY38RxP4yW1Q/jBjCeXhijpY8g5Vv2QgS58btR5eHUR0X2AZnWuJ9lB0iZxSTRj/i884dxkFeHCwO2B0H6L+JT7D7CGCFv2by4iLew3uxTUHUL9xQhWVR4jvgPwXgkzxTB2M+yA4SAcSMYLxgCY3h+4LYssq3oelr5BSj2BPbz/dWgShQhO58HFht/k77uPZzcQgoWmr+nc//HCMqcy517cWEtj2jxkJ98Hkv+R/YHNUjzU1fraTwf4rmT2B8GYxGyLSMIf3AYXY+HEuNmUpH6hWd7bssg24HJuSjgNynFl/j9BkfdKjIs2eZhf4wVJn+HGiRroycHyAI/rgZJrdfQUB7jub/nfteWF5FD2xh/i5vtHobFOCzZ1mkUjD7XU8YjuxH9CQ3+zeSuxdj6ilhZZ2VGIds8rPsC/7P6M+HzF9EHOS5S/s7cZzSrmLU0/IUffiQm4lMbo2MchWyKW4bxi9T4A+06eG1oGYXsHZiBnsgPlv0lpRkGHMaddavgqGTn86LvxUobvFrcFn2dfZU7jqwB4/FgdIRsQx4X46hk5zEEjAaeruXC0BiHI7tJf9xqvB0Dej2FhQFPvkKj7JdBtuU6bqA/jr/CgOuvUch1nhxaMoxvA+NpPFsWxjLIFop8OWbhj//Tn5e0YReWomTzoyiNC1CAo1qiXMO3qj41KovsUISjY+ar8U30zX/M/uZwYcB2PzAS3kyDIrr6oS2nT/5lkR2KCBhvBuP7OLkpXOi39aF+sgv28i5G2Q6C1pO8v0i/2C/Pzmv2ncX6z84nux9bP0KdRNLi+NexhRY1vpXjXvNgMZ7HvZ9Pn8meLRuj+VWB0RnQW8Ho92O+3wcjlzK3kO50+Y8fMIk/TTYncc3RZ9kKCEWWbdkhX7d6Lqcx32EQ5yDrCU/mZB3WfAnQ3sA559hVYSzbsnMQUoy8keObaklyJheezF/M79vP9ZImLd7R7kZueJrU795eeUzzvET7c02bqLnTGLufTmlynegZwZpsdeisYtycctUd4xJm3V0vCa7HD8r5VWBbjNOPfs/0ymvS531l+hiFOsWSdLH0slrdqxiZwqXLgWcHo/H77GsPASNQektR4uwVGPTwNipJvyvmWx2nAr0U2LvE6q4I2H7ZLyG4htu6ea6orHqMRclWYZnisq8D8is26apP56BKHUgPrTwMhIYhOkOxyjEOQ3ZQiFtXXegmn4TmfdkaQhxFuTw2tvglhEfJpcypkpUSo685/Vnj/djOPMZRyVYZkmvfeC/KcF66G8ntJKzcsh09+xkqlzVV2dD8UmLAuDtlBdKrxlk6xnHIBncq9nUq3PfNKsI3SuZbhTJUgJ+Q1OLCdLBKoikmlU6MDuIc4c8UxjLIztQh8Iz0zewZM9cKypzK2BdryT3nkaEiFW4Dxi2ziLFMsoOOtQL70KdRiIS7KmTUyJtWK8mOsJ3rG6suswGR3UiSx2j35Z8olYXRGUUl3qoKstWelXXu+hDbxyH9pWzzS4E4HCiSGn5G0SibUgeis5psx/gwJxzE6c2ckobZQLiv39YGorcyshcwVkK0laiKbPMOEoIyRrBCfx6u9do6ElaJDsIqA9+r8BHOZ0GZjHRnJ0X0OnGMRSo1AvYVj4QBzl1YgG7PxOvIZVaQeYOspRvA0UJmgWiqmYp1NTxbW4yTIlttZMRlLsv+XLduUCYQ6odeJTnMl8N5Ts2MDML4BBiduUwF4yTJzjOmC7OfykjPwpsqYBYJzuPK79cO4zTIDoQyuuZ7ZrEx9sRRu7H2adQnT1BZ+3mMfg+VgVjiAG6qGCetXJXA9Cm+n63ujBFo4opUpxuORh2dGpgZdRrDo1OXLhjTvysXo8lgjJgnjnGSZBttYoTd8+uEXpdw31ipDK0gWAi7MyEOKokx9MQoiED6xDFWTbZk2RfTP6df8y8aFPE9tEqRcFOdRYySTAg3foStYdwi8QAxqg9nJRPBWCXZAlYBD7KVOKWIErwvNJKgEK2gqli05Y0q4rEhP8B2FIw2koAxkF5FvD3FVxXZAI/vowRDnOO6Yj2D+Ui2/XnRBsOtlUrZGJ12aumVYSyTbEnVhT0Gv8bGbaHjEk0WqZiP/bmkB9c+8QEOZecxgrPUwE/A6JglWHmpGMsgOxCKu05J1jUp4Xx2VM7/NiAblBYg6br3SYhYLFuMklx1UKQSjOOSrQKcL+
uy7bOqIJhsV4jlGje3zNCfr7ippBPTxhhcu93YWDIK2YFQ3E1qyaFfDufHqtAQD1ueXsT5ujh0fWUO4sy/DhhtbPbnYhsL47BkqwBaWmrJKtmKTJpkilwh9ue6Vvu4cQc44nHwdS/bumC0TmNjHIZslZkPGNSBZKq0JDa8cYMyAaOvV+vSkJcAtus0MsZBZDvN0VWGEbb9R91IpkorJMxdHcTp+vqJeCTWhuwsImCsO85hMKb4B5Ad21c4FXB0qNRdAVkts3raSB3EBfLCtc4trjrexMlZxzgw/mAf10ue5w/FbuWP+w6EYt89D8ysV0YDztvHjj3S7FKGDRPC+XBdI/kAe3d2uUeMt08AY4iDd6nCWKfaXim+u43xjn65FbHUBmPdV/FH+K+D9CPITEspS3Cf6as/CS9LxMQSqJiG2voivfjXOdbK+0nAeBIYj+TGkjG68DJxTX1ZEjD6jfOA0fFGXylCdsiAT080j4uS1tmc0CMMzDw82GdbNtl6H9aD8aG87MsLuvFhxE+IvJofU/9VHioRY6lki5HGyIfyFtOGXBjjMGQHpe3EVwTPobBXcEKLHGQ14blu27LIVgH8kUJyI7W5kH1XwYwjYoTwFCNeYlyMpZAdMPJFiRQjL5mGk1HIDiUchUJORCHHcsJ8RiF9XLJVwAIkX4tFXkl7vy1UrqTtkW2MP0N+Y2Aci2wxNtsYrxgH4zhkq09d3SHtr/zvx77hy2FkHLKZVjH4Wmx9kAI3kcroVrrVXYwHtzHuz/4IGEcmm2ljvKWNcTNlO8ceWcYle3vBzeapGPfJzFgZjKQCkQNlWLLb9XWRQOufoffygSWUeUOzeQrlngLG3cjWuhTEOBTZASNz/hTjZWVBKI/srEZ74PY2MEA6ETW4wC7MXXvVdxiytWQV8CVIvpoMeQM1Fdm9jfG1xTEWJpspqG/VUozXgO6BMhGWTbZ1M8/dUMibaPgb2O/neoqQbX7+ZMNVjAr+in0HJkUsitsqE+u0axvj8ewPwDiQbPPjQz/Jl8F4Cfu+Ri0do4VUKUfT151Fte3P7fs6AXDcc57twIT+Me2XL2L/dlIdxV8mOruN0YhkF4w9yZ4oxqrJlhy/VXRstNg4GZs/hmNjukEh3ci2TmuJat2KO7sCm/kmx8MOinhkohIwGngSo3PfHMYVZOcxXtnGWGYgpyv4SZAdCjZg8dPtgIUvJ5yqdZJtS+drDsknUMA32LdhzJKI8ZVgJA6RvoBpY1xGthiZORD4yYIiE8M4SbIDaX5o7lws4GWcIGacunEDF3yJKflvqP4o+1rGLMtOYPw1ML4cEASeEkfvvl8Qo4Gfj6X7/DdJmQbZAZ9BmRNQxOsIwX4XBVzBhR+Ei6tkG4IyjNxb38fqxwqKzLpOHLTtRRrwqnWmYYpx71WOcaYJmld+roG5BuYamGtgroG5BuYamJoG/h/ff6XOIB4wOAAAAABJRU5ErkJggg=="/>
+ <title>Swarm index of {{ .URI }}</title>
</head>
<body>
@@ -69,3 +70,146 @@ var htmlListTemplate = template.Must(template.New("html-list").Funcs(template.Fu
<hr>
</body>
`[1:]))
+
+var landingPageTemplate = template.Must(template.New("landingPage").Parse(`
+<html>
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0">
+ <meta http-equiv="X-UA-Compatible" ww="chrome=1">
+ <meta name="description" content="Ethereum/Swarm Landing page">
+ <meta property="og:url" content="https://swarm-gateways.net/bzz:/theswarm.eth">
+ <style>
+
+ body, div, header, footer {
+ margin: 0;
+ padding: 0;
+ }
+
+ body {
+ overflow: hidden;
+ }
+
+ .container {
+ min-width: 100%;
+ min-height: 100%;
+ max-height: 100%;
+ }
+
+ header {
+ display: flex;
+ align-items: center;
+ background-color: #ffa500;
+ /* height: 20vh; */
+ padding: 5px;
+ }
+
+ .header-left, .header-right {
+ width: 20%;
+ }
+
+ .header-left {
+ padding-left: 40px;
+ float: left;
+ }
+
+ .header-right {
+ padding-right: 40px;
+ float: right;
+ }
+
+ .page-title {
+ /* margin-top: 4.5vh; */
+ text-align: center;
+ float: left;
+ width: 60%;
+ color: white;
+ }
+
+ content-body {
+ display: block;
+ margin: 0 auto;
+ text-align: center;
+ /* width: 50%; */
+ min-height: 60vh;
+ max-height: 60vh;
+ padding: 50px 20px;
+ opacity: 0.6;
+ background-color: #A9F5BF;
+ }
+
+ table {
+ font-size: 1.2em;
+ margin: 0 auto;
+ }
+
+ tr {
+ height: 60px;
+ }
+
+ td {
+ text-align: center;
+ }
+
+ .key {
+ color: #111;
+ font-weight: bold;
+ width: 200px;
+ }
+
+ .value {
+ color: red;
+ font-weight: bold
+ }
+
+ footer {
+ height: 20vh;
+ background-color: #ffa500;
+ font-size: 1em;
+ text-align: center;
+ padding: 20px;
+ }
+
+ </style>
+ <title>Swarm :: Welcome to Swarm</title>
+ </head>
+ <body>
+
+
+ <header>
+ <div class="header-left">
+ <img style="height:18vh;margin-left:40px" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJYAAACrCAYAAACE5WWRAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH4QMKDzsK7uq5KAAAGndJREFUeNrtnXuU3MV15z+3qnokjYQkQA8wD4NsApg4WfzaEIN5GQgxWSfxiWObtZOsE++exDl2nGTxcZKTk3iPj53jRxwM8XHYYHsdP9aQALIBARbExDjGMbGDMCwvIQGWkITempGm+1d3/6j6df+6p3tmeqZ75tet+p6jwzDd0/37Vn3r3lu3qm5BQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkLCbGALPzszkI+dUEYsEgt2HcgqMr8bACOg5X5sST1XMhgHvhZ+HnGnU+OdwOWQLQVuA67H6wt1C1bzSVgJXcDZ/0EmVwMVBAPZ6vjKAeCTeP183Xr58pmv5ApnbEnEIHIKIhnKBAjYHrikim0Iw9ilGHsBYq9DuRTwcfgL6Gg0BIuAqxC5CJHNwE6UDAOIAy2HBUsWq+OQM5D5XFQvBX4f+AXgYeAGvH4rtKCAzlJd1kKW5S7wUuDXUF5NkKsv9JKBbFVLfzmgBtwHfA6v3y2TBUvC6tgsuRWR9wJ/CCyOvzwSO/QHwHvxuqvrzqxYqEZBOXccykfwvDKKRds8TjthFVED/hn4AF73l0FcyRU2XF2jS42MInIhIl8D3tZkPUInWuA04LcQGQe2oozNaMZWqUC1Bs6twcg78PIZlBOn0XnRFbZ9euBM4F2I7AOeQxlPFqsMoqrHOXIF8G7gDbHDssI7FThcaDeJluxR4GvRJfkgLu0sMOsuRvkQyqpoAafrpeksVlFgI8Am4K8x3EFtYYIue9QLSqKojKxA5MYYS51eEFI7tyMt/78SeD1wNSKP4nVrPU5rF39Z2YaIB141o8E9vcUqCr8GOIR9wHdQkrDm3e0pILIGkXcB/wicwfTzvFqHDhZgKfAORF6GyNPAi2gb9+g1w/sfMWK+AXIKypo429NZCkuiux5H2IkwATwN3J+ENZ8InW0R+V3gIzGOqrW4vW6FVXz9HOAq4DREHsfrvsmxloOJ2kG8vwtjfoSwFOGcKBDtSljCGMI+hIMFl/hUEta8MG0Kzi8EbgZ+ETg2imGmmE5YRIEuAk5CeA7l4Unv8D7MAT2gfjvIRoT7MfI6YHmTuDoLKwNeRDgUfy7GfgsqLDP0gjKSWymDkTMxcj2wHnhJHyYveZ5iHOEn0YLIlBKtP6d6fPYw2fI3AzcCO2P/SAfh7ovfMVHGZndDP9MLgfmpwPuBXwJWAYf6NMM+FK3HRNez7syDqYDfrXiux7rbUK4C3omyLFgtfBRrbqFKaxiG02LZKConYOSDwP3A24FlMV3Qa2TADoQ9BVF1D1+FUQEqkNWew1Y+i+hVGL0LOILwAmGt0Jc9VTScFksZxcjP4fkIIXF4uE/flEUrtb9nHT2mQDUKbQKybA+WP0HMCtB1MW8mlHzjzHAIqznBeRnw3wkJTumDqEy0GAcQxmYYzM9Stvkk1SwOlkqeCoG9rgGWAD4Jq29uLy4WG1kLfLogqKxP3zgWk49Z13HU3GM4D+wF2Qe6CjixrJZr8GOszMOxKwT4akFU/UIVYUcfRTtzZ4+8APIIsK+MMddwBO/79wvCiwjPxGl6P2dMUqK+y0C2gmwOlqxjeiIJa86uQtiPsAV4kaMHY0FgPDmnWWkS1gxcRbBgW0Ojl3963qOxdRDkUWBHFNiCxV9umFs5xkTbgCUoxwKjlP58y9ymMkAF5A7gTvBZElZ/MY4wjjIKrKHTTs3BH0g7gS+Q8cRCZyKOFmHlTX8I2IJyHCELX+mzwLRpBlncR987MRlgF/AdhNupeQ3fQxLWPI9qRdgF7AeOiS6y15lsRVGQlcDVGDbh9eGYb+vVfvQKIfm7AfhO4zBrz8WbhNV1/AW7CWtva2L81aseWYSakxFGQVcDt2PkZuDP8bqvB+JyhMMc/0Dm99etIZRCVEezsIoSqwLPohxD2Ju1aPZuT0ZQjkVlNSAoGueiVeCtwCUY+Thwe3Rf3Rwfs3FAbAa+QeYfaXKvWblWd4Zjo58RiR23bNbyEiYQDsX9TYs7BPgZUk9EFuMoQWUtyEtQWd7yyfnO1Iywvnc58IZ48PXh+PxTO2Iji4CzgJuAW8n89sa3a0nH61BMssUAXwfW9qxdQuy1sqWNJmJ23zZiKTkG5GS0wyAVPQx6uIO3eAx4H15/PK3sEUPFZkzUYs0GLfXENlmsqWaQYRbpYqBsmiyWMgrmJFROmHKABovVbuuzB1YDv4nIWkSeRaN7bG/BtO7ufPkzJUlYU1vzsGNTGG8ITMbAnBzjqMXTmo3OwiqmI84FrkDkdEQewuvhQe+S5Aq7Q4aaM9EuAvzOrrBTf+wGLsPrQK91plnhTKwWHAR2gFQRfxjMS6Y9C9g9aoTdqEcYgjXcJKwpHCywH2QXMB4FNBLco98S9p/LWlROidLSOQh3f9yNmg1LnyRhdXJ58BzIgQ5hgwBHQLciug3MWWjLWcCZYSwewNChCk2SsCZZjyqwD2QnM99qU0X8D8GsAU5CWcbUS0RK2NJyAGkqMDJUSMJqdOxOkL2x06XLDreI3xX+Xo9FzUuZfNhBCHmw/cHaDfcesSQsOATyfCElILMXqGYIuxC/C5XTUHlJ4fW9hdoKMOQbD49mYY0De6KV6n0niz6J6HZUVkVBHSW7WI9OYZng6mQHYVdD1sfOdsA4os8SMveL6P/+rySsBcKOGJjPlzvKPz/Pvldi7CVJWIMfmGcxjtoRg+aF7NRqFNgiQk4sBe8D6vZaE5xl6Mi8julEFNf0641JWKVBBvJsjKOkpLMwXxDYKENWBG+YhGVoTnAqg+FqfBwAI9FFumFwkUNyYPVcHyqxyI4YSzFgnSPRch0h7G6oDnqPDL75tQ6y50H1LozsJ2zhNX1sr2V9iokM4YDEH5HpnmGYNQ2BEzShWCyANccDvwz8LHAM3RWunQ4V0LX07jSPjZ/1CPB1Mn0wfItAdbDj+eEIGPMDBeefBVt2jmPk32NnLSFcBtBLIfTKYo0STtx8GvgymW5hsQ172f3gd8lgWqzWc3lWIOvQ19acAbyHcLRrzpfA9chi1YAvY7iJqmZtn7+VY0nvJRwei2WbykKuRDkc5CKTO8Y5qGW7Ub0bIxOEiskr5yCwuVgsR6h2/C/A/yLTB/FoW1E1czwOZXzQMl2DI6wRAV+vhlwB+SDw5/GEy/dRzZpOtyixSH+8aFL1SYw8RChQdgazW7ebjbBMdMn3AZ8i02+iHMLFnTm+g6WqiAW5JnI8MXCkNgj3QQ+GK5zsEq4EPg6cQKP21U7gj4H78DHgauc68lPD1qwAfhP4mS7d2mxc4QvAJ8l0U8fnmszxCuATkeNhwrLUTuCDwLem5JiE1bXA
zgfeC1wRcz6exuKuicHwBuC6pttGobnxKwaq9Rnk2YQ7b86Kr/oeCEui29sC3AXcTKZ+yjiwwfH1hNvHihyzmNfKLd89wGfw+kBHjklYUz1WPcZYB3wYOI9wx0y1JQiutbiqMeAB4MN43TytFbRmJArrrcCpTH1/4HTCqsS//wfgHjLd1XFyUfxd4PgXwM9HjkVOWQvnnOO/An+J16eTxZrO5RnCdHuxwGEWYXg/4aqSTnvIpzoMKsD1wHXAoY4juuEeBbgyWrDKLIQlwPeAT5Pp3uldu4BlBOV9wB9Ei9TuIVuF1fqdnwWunZLjURu857MgD1gZJeNtCH8PvJmplzf8NG7pQuBNwDgiz4Tb52m+CSzPgamC6hMgDyAsgbbnBluDdxsF8UPgM2T6VZTDbW+3t4U7Eo2MIvw6cAPwK9Nw1Gk4viEOhmaOCxzkSylE1XAJb45B+DoaWenp8kHTZdZzS/A48Am8bmifC8ur4NXjr5OB/xJdcB7vFC3WCPAM8DngYbJ42rldQO0kFvEAjFwF/E/g5TPkmDH92mHO8Qng43WOC3gzysILy4gBTokzvYvo/u7Abt6/BPg2cA3wTMfZVfMS0U/FGeSqYMF0TRTa/wW+QhbvgJ7KDYkIhlNQ/gq4tMtnnomwihglXEp1DYbN9Tuh53kGKQsgpGLyb13stN8mbHg70uWndSusPMA+DHwR+Apen5hBesJGQZwPugflS3jd2TEwb+Z4WuT4O1HY3XLsVljdcRw6i2XkzwhXva2i3XW1/RNWztsC2wk3rX4sWC8Jv810srjCz0tBx2dkpQLHDwFXE8oVzZbjbITVyvGfgI/i1YcFbvruImWeLdQIcDHwN8BxzH3nQa0HnzESk49/hHAvmWZdj+5mjpU4abg2Dpq5Pt9shdWO4x/j2QiR40xya6UUVt7gIiBcRLju7RcKwTAlEFYe/C4C7gb+N17vm/HEA4qTj4sIC95X9pBjL4SV93WeYL0Br/cOnsVqHsEnxcD8vBhY9rICea+EVUwnHAIeAv60nmCdLsFp5QSUT8QE59Iec+yVsFo5/hD4k3qCtcfWqz95LBEQWYbIB4CvxFmf7YNj9/R295ISlmNOBf4bIh4jm8h0cseqgJVliLwvzhBPpT83XmifOJ4C/FaYscp/tOVYOotlZRnK5+NM6kgfI8VeW6zWthkFHkS4mkz3tXAcRbkB+MU4A+sXx15brHbu8QcYeRu1WDO+R7FFP+AQLMIThGp4g3ZoQwj37zyNUIkCa8fRIDxOOGUzqBw3I1hUR3srgP4gN9/57Vv7UVYzGMfNPLAnFrTNU9c6xXurCNuBAwPGcW+sItiX9Lybp5FxCGEMZSVh9X6Ecm1Xy4uujSEcoPsziUcDx9IJqxHLhRrpB+L1IqsK1m2hm3wvjU2Ds409jwaOpRNWczAa6m7uR1nLwlZfmUDYTe8vYcsvGtiHcsKQciydsPLR4hF+AixFWRHzPzpP33247rr62+GK8BOUpYRDHEuYny0H88mxVMIqNsBYvP003wPVr9tPBahFS3KE+avtILFzxwlXCK+mfwXYFopj6YRVHNljwOZ4++nKHk/ffbwXZ/+CcgyD6Jkh5lg6YRVH927gYJxdLWP2Gfu8OP9YrAE6QTlyTUWOKwhlAHrFsUpJtpuXMeeS3z6/E9iLcjyzO8s3hrCPRmbelJDjrhjgHxdTFN0s3dgYQxU5luYMQ9mTeSHB2nz7/ExmZLti8DoIx9vyBOu+OEseCo6DkCXOg98t0XWsIGxx8ZMsAPU7Bget9HW+vDI9x8Cv9BzdADU80ewfBJahrCo07P7Y4FnZXEIPOe4rXORUeo6DWCoyi418EGVZXJ7IGK4KxAPP0Q3kqA4jeifCSJy2jzA89VQLHGUXopawjXugOLoBa/AJkO2ENa8a6GIatdMtIbPdjw2FC8iRYyPHKiGxungQOA6KsBTYNcWtEho74UBs+EEszu+B3dPcnJELrPQcXclHb4g1wiUAM01wjsfGH4n/ym6hwt4o5EWY8bW947E9FpWVY1mFZWJj76CxLdd0Kci88RdTzsuRTBw0L8ySo28jME3C6owqyLYYoM/V1Gfxc/LLkcyQcjwUBVYajmURVrxjmT0gu+ntqrzE+OtgwT0uRPBb5LiH3iY4pRBjLlpAjqURVt6wO6Kg+rm7UWPHFi9Hmi+OGjnuof8JzoXgWCphaczVbGN+V+Xz27eqhfhrWDnm7rEy7MLKG/ZAtFB5jCEL8Bxhu0log17ffiol4agx/pr3G17dPBOdALaBjFGe27lq0T3ZHgS/+WL4T0rEUXrMsVTCymhO/pUtsdeLBGvOcRflvNJuXpPI/RSWIT/8GVzCBIORDR8vBL+LZuhudrdwlCHiWDphHQI5QKOC3aAsseTx1+EZDIYxkKeHgGPPXWN/qs2YkSOgm4CTCNuK++XT+zXjyRt+C/BRMn2qA8fH5oFj7rb6FdxvjRyf7McsrT+wxgA/B7wReFkcHT2MGfS4KK5etscosIlQXvHe+q0S0L5+VIPjpYRKyL3muJJw5rLXHB8pcMyoSIgSe1SjVPooqmLtzgrwekI9TktvTuX2Wlj5ndLXA/fVy2tPfWVdg6MzIyjnAf+1xxx7KSwThf/ZKKjpOZbSYtUjOQM1D8YsQXgn4XKkJcytoFivhGViMPsDwiUA+7pu6BET5lvegzVLorh+JloGXwJh5Rwfihz39rP+6PwGm821088gFLl9XSGemW9hmWhZNgLryfSRMAgKxf7nZqVfHjn+5zlynIuwTJygbQRuq3Pss6jmfxbT3PAWeCmhGOwJdF+1bi7CGomB+ceAp+uVkntxF/NkjqdGjifOkuNshbWIcHPGx4DNZFqbL1Et7PTYWahleQdcAVxG2L8eG7Tnwsq5vgjcQqY3972hm0V2OXA54WiXdMGxG2HlHHdHjjfVOSpDfjNF5044lnDh0MWEwwNHeiiskdjY3wA2kOmOBbLSKyPHS7rgOFNhVYA9keNdZPrCQifKKJG4JI7oX4kdUJujsPLirXcAfwfsJVMNdzbECcXCcFweOV44A47TCStPH9xOuE1sD5nqlCmSo1BYxdF9JvCOGH+127Q2lbDy1MEW4DoyfWw+44upQwAHtVpxEnN15Og6cOwkrAZH5Tp8iThSxiWI5qm7iTPHywi3oI7TdJ34JGFJDFo3Abch8m1qvlqWxm48pYRLFrImjm+MHA+3cGwVVpHjeuDbZDpRtvuhy7u21Wy9RoDXxPxQXtOgVVj5ove1wD+T6XidoQ4Mx1cD72zhWBSWISReryckOMfLZKUGQ1jFxod8dI8Srmg7G1gchTVK2LD3b8Cn6tnkQYJEF1mt5QnW3wDOCYNGVxDWIscISdy/JtOxQaA0GGhOT5wBXAT6RuDfCcm/H5d19M7SgsUEq14C/AfznOA8umBN8WeLlVVYcUPF0dlWjquHjmOpYaT5v8M8kKxJ/Z2QkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJBwdMCZLt+7LPxsl1HfkjtTmOKib5u1XlPp/L3FNbyR+DnFtUt
rYMS151ZpfK50fOaRsK/UCZg2p+zdovBawgwwOlroGFmKtWdg7csxprGL0rbpbGeDuqTltREBawXboWziMctzUR3T5jOLojsLUzkfa89seo/Ez80FOhr3HR4zaUuSY6TwOxveP+oKvzNmGabyGsSdXWiQVmH+NMa+CiuxSswqqNikmymRD8qRyjqM/SLOPoBzd+LcBpy7H+P+FuOODyO+YC2WW4ux92Ds6rafa+2pWHcLzq6g0maEW/cWrLu2rWVx9iycW4+rbMC4r+Aqd2PcHVj3n8LrlZtw5oymv63YMzHu8/H10zHuizj3XZy9vEW4x+Mqd8dn+CDOfQfrbqVS2YB1d2LMKwuW9QKsuxdXuR3rbsO5B7GVdyfRzBS2cjzOfR3rfh1nljcaVo7Fud/D2jsYcaOTXZb7S5z7wya3lIvPufcg7hGcfU2H7/xHXOWyNm7ycoy7F+eubBHqpVh3O85ehavcgnOvaHn9HMT9H4x5NcZ9E2t/B2fWTraIbg2u8n2M+1Os/QNMYf++sZdiK7dgKyfj7M9j3XpM4fmNOxlX+RKu8m5Gka7Ch6NTWO5crPtY55jI/R3O/vZky+LOwbgNLG1Tmsm4m7DuL3Dur8J7i3FQ5Wysu5MTW1yXtWdj3T0YWdchHjsN476Ac9/DFV0X4OwrEHcrxq7H2Fc2f27Rsrk1GPdYfUCELyk+99tx7pNYdwvWva6NJV6NdZ/H2lVl68Yyynw/yqlYe0zbeMf4ryHyXP33+dnAWu0RRPZyxJ0X3VH8u8prEUYwtWvJOJcRljfVZhC5ANjINrQu0kUYvLwF9G/x+nQ9sPdZI8jPas8gbMQz2maDtwLHI+Y2fPZw0yuTa9CM4XVDY5AUzjq67Ftk/DSeF8hqD7aICrC7gW3A6UlYU87wHIjfAvwrmBsx9gNYdyXWnY2XNWAsNX8P1dqdVNrM4MTfipcLGEWoZnkX/wbwNarswshWMvemesdU3BK8fy3Kzc0itYtxnIwx97M6zhmywrnS/GflBwiH250GRKjgql+dhnGoC2r1iabvzzGhOwDF6cYwWIoCzSCbyBCeB3lpEtZUqNWg5mv42qdRrgGeBP0p0HcDH8fZv8HYc4FG5ZqmbpKNiL6CqlsaXckq0LPQ7NYgvOwGlLc3OoY14X26Hdck1AqeGqpVdh7q/LyeHbQ/yWxQnucIB6ZhLAgTVP00R+1NqCjYtqSIeKR8h2LKtUm/ePrEV5/ieJ5inxUUg+DwejEi12Hl/WS+2TVUDHjdg8pevK4jnGy5AOV7eD0YLcJ3seKx7hVktR8jegHIY9SyZvWI5NeR2GmG5WI6n3Sa/hiaKogx01u1eH5wgFAui5UpOPvy+uztRaCWKVmWkdWO4Gt3IvJRsB+a9LdVD1l2BOHfQN4U7cF5wDdjoJyr5m7g/PjzLyF8u60oVCYI9UXbJ05Dn5/N3KsOD2WGs4TBu/wsmEsndWjdVWXbQaYoBKL34rkE51aDrEVkE9aGw6ABG1DOwZnXAsupyUNtXHIV9RtR+UA9pio+S8XBUgTkKvpVIDgJq8dQ/RFeX4V1y5oC5lotzH3Uvg+Jta3aWr1sK5bnQf8M4f8h1YNkphgXbQMs3vw+sJ56lN8aP2XrQSeoVD41KXiv1uCwvQb0ECLPDqvVGR5hVQxYsxnhX4DrMO6tWHsh1r0BW3krz1a+AH4rtdrncO9qE6NF4+H1y2RyCV4foIZvinq1dghhE3AumK+3jzyjTlaveQ9ewbobMe4tOHcxxv0q1t2Il5NYlH2Yxt3NzWH91OWJGoH39FX+PFMWytVpXl+gcLlUT+MVMq+s9t9n3D4FnILwMpBTQBS4hdHal0LJ+4cnV1dRzfNMzyLyIqIbUZ3cccbsQHiUrPoQdiW0lnvwgKvA/n3g/F14eR5kHcKpgEN0PSuyv+cAE4h5EngK9bXC548h8jjqt0/d+qaK8jjqt3bWntkC/lFU2wvVyG6Qp/D+ULKTU85TTfPP1kr4V9g2MJPlC+ekY1hdcY2Yyc5w2I04wdnwr47CKk3+eS2L4KZiOufsoLE+ajpMEJa3WONOPmfp0qSdhISEhISEhISEhISEhISEhISEhISEhISEhISEhIThxP8HhRpz3L2ZmSwAAAAASUVORK5CYII="/>
+ </div>
+ <div class="page-title">
+ <h1>Welcome to Swarm</h1>
+ </div>
+ </header>
+
+ <script type="text/javascript">
+ function goToPage() {
+ var page = document.getElementById('page').value;
+ if (page == "") {
+ var page = "theswarm.eth"
+ }
+ var address = "/bzz:/" + page;
+ location.href = address;
+ console.log(address)
+ }
+ </script>
+ <content-body>
+
+ <h1>Enter the hash or ENS of a Swarm-hosted file below:</h1>
+ <input type="text" id="page" size="64"/>
+ <input type="submit" value="submit" onclick="goToPage();" />
+
+ </content-body>
+ <footer>
+ <p>
+ Swarm: Serverless Hosting Incentivised Peer-To-Peer Storage And Content Distribution<br/>
+ <a href="http://swarm-gateways.net/bzz:/theswarm.eth">Swarm</a>
+ </p>
+ </footer>
+
+ </body>
+</html>
+`[1:]))
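
As a rough, self-contained sketch of how a template like landingPageTemplate above gets served (the real routing lives in ServeHTTP in server.go, so the handler, template body and port here are stand-ins):

package main

import (
	"html/template"
	"log"
	"net/http"
	"strings"
)

// `[1:]` strips the leading newline of the raw string, as in templates.go above;
// the template body here is a trimmed stand-in, not the real landing page.
var landingPage = template.Must(template.New("landingPage").Parse(`
<html>
  <head><title>Swarm :: Welcome to Swarm</title></head>
  <body><h1>Welcome to Swarm</h1></body>
</html>
`[1:]))

func serveRoot(w http.ResponseWriter, r *http.Request) {
	// only the root path with an HTML-capable client gets the landing page;
	// everything else would fall through to the normal bzz:/ routing
	if r.URL.Path == "/" && strings.Contains(r.Header.Get("Accept"), "text/html") {
		if err := landingPage.Execute(w, nil); err != nil {
			log.Printf("error rendering landing page: %s", err)
		}
		return
	}
	http.NotFound(w, r)
}

func main() {
	http.HandleFunc("/", serveRoot)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
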
diff --git a/swarm/fuse/swarmfs_util.go b/swarm/fuse/swarmfs_util.go
index d39966c0e..169b67487 100644
--- a/swarm/fuse/swarmfs_util.go
+++ b/swarm/fuse/swarmfs_util.go
@@ -47,7 +47,6 @@ func externalUnmount(mountPoint string) error {
}
func addFileToSwarm(sf *SwarmFile, content []byte, size int) error {
-
fkey, mhash, err := sf.mountInfo.swarmApi.AddFile(sf.mountInfo.LatestManifest, sf.path, sf.name, content, true)
if err != nil {
return err
@@ -64,11 +63,9 @@ func addFileToSwarm(sf *SwarmFile, content []byte, size int) error {
log.Info("Added new file:", "fname", sf.name, "New Manifest hash", mhash)
return nil
-
}
func removeFileFromSwarm(sf *SwarmFile) error {
-
mkey, err := sf.mountInfo.swarmApi.RemoveFile(sf.mountInfo.LatestManifest, sf.path, sf.name, true)
if err != nil {
return err
@@ -83,7 +80,6 @@ func removeFileFromSwarm(sf *SwarmFile) error {
}
func removeDirectoryFromSwarm(sd *SwarmDir) error {
-
if len(sd.directories) == 0 && len(sd.files) == 0 {
return nil
}
@@ -103,11 +99,9 @@ func removeDirectoryFromSwarm(sd *SwarmDir) error {
}
return nil
-
}
func appendToExistingFileInSwarm(sf *SwarmFile, content []byte, offset int64, length int64) error {
-
fkey, mhash, err := sf.mountInfo.swarmApi.AppendFile(sf.mountInfo.LatestManifest, sf.path, sf.name, sf.fileSize, content, sf.key, offset, length, true)
if err != nil {
return err
@@ -124,5 +118,4 @@ func appendToExistingFileInSwarm(sf *SwarmFile, content []byte, offset int64, le
log.Info("Appended file:", "fname", sf.name, "New Manifest hash", mhash)
return nil
-
}
diff --git a/swarm/metrics/flags.go b/swarm/metrics/flags.go
new file mode 100644
index 000000000..48b231b21
--- /dev/null
+++ b/swarm/metrics/flags.go
@@ -0,0 +1,91 @@
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package metrics
+
+import (
+ "time"
+
+ "github.com/ethereum/go-ethereum/cmd/utils"
+ "github.com/ethereum/go-ethereum/log"
+ gethmetrics "github.com/ethereum/go-ethereum/metrics"
+ "github.com/ethereum/go-ethereum/metrics/influxdb"
+ "gopkg.in/urfave/cli.v1"
+)
+
+var (
+ metricsEnableInfluxDBExportFlag = cli.BoolFlag{
+ Name: "metrics.influxdb.export",
+ Usage: "Enable metrics export/push to an external InfluxDB database",
+ }
+ metricsInfluxDBEndpointFlag = cli.StringFlag{
+ Name: "metrics.influxdb.endpoint",
+ Usage: "Metrics InfluxDB endpoint",
+ Value: "http://127.0.0.1:8086",
+ }
+ metricsInfluxDBDatabaseFlag = cli.StringFlag{
+ Name: "metrics.influxdb.database",
+ Usage: "Metrics InfluxDB database",
+ Value: "metrics",
+ }
+ metricsInfluxDBUsernameFlag = cli.StringFlag{
+ Name: "metrics.influxdb.username",
+ Usage: "Metrics InfluxDB username",
+ Value: "",
+ }
+ metricsInfluxDBPasswordFlag = cli.StringFlag{
+ Name: "metrics.influxdb.password",
+ Usage: "Metrics InfluxDB password",
+ Value: "",
+ }
+ // The `host` tag is part of every measurement sent to InfluxDB. Queries on tags are faster in InfluxDB.
+ // It is used so that we can group all nodes and average a measurement across all of them, but also so
+ // that we can select a specific node and inspect its measurements.
+ // https://docs.influxdata.com/influxdb/v1.4/concepts/key_concepts/#tag-key
+ metricsInfluxDBHostTagFlag = cli.StringFlag{
+ Name: "metrics.influxdb.host.tag",
+ Usage: "Metrics InfluxDB `host` tag attached to all measurements",
+ Value: "localhost",
+ }
+)
+
+// Flags holds all command-line flags required for metrics collection.
+var Flags = []cli.Flag{
+ utils.MetricsEnabledFlag,
+ metricsEnableInfluxDBExportFlag,
+ metricsInfluxDBEndpointFlag, metricsInfluxDBDatabaseFlag, metricsInfluxDBUsernameFlag, metricsInfluxDBPasswordFlag, metricsInfluxDBHostTagFlag,
+}
+
+func Setup(ctx *cli.Context) {
+ if gethmetrics.Enabled {
+ log.Info("Enabling swarm metrics collection")
+ var (
+ enableExport = ctx.GlobalBool(metricsEnableInfluxDBExportFlag.Name)
+ endpoint = ctx.GlobalString(metricsInfluxDBEndpointFlag.Name)
+ database = ctx.GlobalString(metricsInfluxDBDatabaseFlag.Name)
+ username = ctx.GlobalString(metricsInfluxDBUsernameFlag.Name)
+ password = ctx.GlobalString(metricsInfluxDBPasswordFlag.Name)
+ hosttag = ctx.GlobalString(metricsInfluxDBHostTagFlag.Name)
+ )
+
+ if enableExport {
+ log.Info("Enabling swarm metrics export to InfluxDB")
+ go influxdb.InfluxDBWithTags(gethmetrics.DefaultRegistry, 10*time.Second, endpoint, database, username, password, "swarm.", map[string]string{
+ "host": hosttag,
+ })
+ }
+ }
+}
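
A minimal sketch of how a command could consume the new Flags slice and Setup hook above. The binary name and the empty action are invented for illustration; only metrics.Flags and metrics.Setup come from the file in this diff.

package main

import (
    "log"
    "os"

    "gopkg.in/urfave/cli.v1"

    swarmmetrics "github.com/ethereum/go-ethereum/swarm/metrics"
)

func main() {
    app := cli.NewApp()
    app.Name = "swarm-metrics-demo" // hypothetical binary name
    app.Flags = append(app.Flags, swarmmetrics.Flags...)
    app.Before = func(ctx *cli.Context) error {
        // Starts the InfluxDB exporter when --metrics and
        // --metrics.influxdb.export are both passed.
        swarmmetrics.Setup(ctx)
        return nil
    }
    app.Action = func(ctx *cli.Context) error { return nil }
    if err := app.Run(os.Args); err != nil {
        log.Fatal(err)
    }
}
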
diff --git a/swarm/network/depo.go b/swarm/network/depo.go
index 17540d2f9..5ffbf8be1 100644
--- a/swarm/network/depo.go
+++ b/swarm/network/depo.go
@@ -23,9 +23,19 @@ import (
"time"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/swarm/storage"
)
+//metrics variables
+var (
+ syncReceiveCount = metrics.NewRegisteredCounter("network.sync.recv.count", nil)
+ syncReceiveIgnore = metrics.NewRegisteredCounter("network.sync.recv.ignore", nil)
+ syncSendCount = metrics.NewRegisteredCounter("network.sync.send.count", nil)
+ syncSendRefused = metrics.NewRegisteredCounter("network.sync.send.refused", nil)
+ syncSendNotFound = metrics.NewRegisteredCounter("network.sync.send.notfound", nil)
+)
+
// Handler for storage/retrieval related protocol requests
// implements the StorageHandler interface used by the bzz protocol
type Depo struct {
@@ -107,6 +117,7 @@ func (self *Depo) HandleStoreRequestMsg(req *storeRequestMsgData, p *peer) {
log.Trace(fmt.Sprintf("Depo.handleStoreRequest: %v not found locally. create new chunk/request", req.Key))
// not found in memory cache, ie., a genuine store request
// create chunk
+ syncReceiveCount.Inc(1)
chunk = storage.NewChunk(req.Key, nil)
case chunk.SData == nil:
@@ -116,6 +127,7 @@ func (self *Depo) HandleStoreRequestMsg(req *storeRequestMsgData, p *peer) {
default:
// data is found, store request ignored
// this should update access count?
+ syncReceiveIgnore.Inc(1)
log.Trace(fmt.Sprintf("Depo.HandleStoreRequest: %v found locally. ignore.", req))
islocal = true
//return
@@ -172,11 +184,14 @@ func (self *Depo) HandleRetrieveRequestMsg(req *retrieveRequestMsgData, p *peer)
SData: chunk.SData,
requestTimeout: req.timeout, //
}
+ syncSendCount.Inc(1)
p.syncer.addRequest(sreq, DeliverReq)
} else {
+ syncSendRefused.Inc(1)
log.Trace(fmt.Sprintf("Depo.HandleRetrieveRequest: %v - content found, not wanted", req.Key.Log()))
}
} else {
+ syncSendNotFound.Inc(1)
log.Trace(fmt.Sprintf("Depo.HandleRetrieveRequest: %v - content not found locally. asked swarm for help. will get back", req.Key.Log()))
}
}
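
The counters above follow the stock go-metrics pattern used throughout go-ethereum: register once in a package-level var block, increment on the hot path, and read the value back from the default registry. A self-contained sketch with an invented metric name (note that go-ethereum substitutes a no-op counter unless metrics collection is enabled, e.g. via --metrics):

package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/metrics"
)

// exampleCounter mirrors the pattern of syncReceiveCount above; the name is made up.
var exampleCounter = metrics.NewRegisteredCounter("example.sync.recv.count", nil)

func main() {
    exampleCounter.Inc(1) // hot-path increment, as in HandleStoreRequestMsg

    // Registering with a nil registry targets metrics.DefaultRegistry,
    // so the counter can be looked up again by name.
    if c, ok := metrics.DefaultRegistry.Get("example.sync.recv.count").(metrics.Counter); ok {
        fmt.Println("count:", c.Count())
    }
}
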
diff --git a/swarm/network/hive.go b/swarm/network/hive.go
index 2504a4610..8404ffcc2 100644
--- a/swarm/network/hive.go
+++ b/swarm/network/hive.go
@@ -24,6 +24,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/p2p/discover"
"github.com/ethereum/go-ethereum/p2p/netutil"
"github.com/ethereum/go-ethereum/swarm/network/kademlia"
@@ -39,6 +40,12 @@ import (
// connections and disconnections are reported and relayed
// to keep the nodetable uptodate
+var (
+ peersNumGauge = metrics.NewRegisteredGauge("network.peers.num", nil)
+ addPeerCounter = metrics.NewRegisteredCounter("network.addpeer.count", nil)
+ removePeerCounter = metrics.NewRegisteredCounter("network.removepeer.count", nil)
+)
+
type Hive struct {
listenAddr func() string
callInterval uint64
@@ -192,6 +199,7 @@ func (self *Hive) Start(id discover.NodeID, listenAddr func() string, connectPee
func (self *Hive) keepAlive() {
alarm := time.NewTicker(time.Duration(self.callInterval)).C
for {
+ peersNumGauge.Update(int64(self.kad.Count()))
select {
case <-alarm:
if self.kad.DBCount() > 0 {
@@ -223,6 +231,7 @@ func (self *Hive) Stop() error {
// called at the end of a successful protocol handshake
func (self *Hive) addPeer(p *peer) error {
+ addPeerCounter.Inc(1)
defer func() {
select {
case self.more <- true:
@@ -247,6 +256,7 @@ func (self *Hive) addPeer(p *peer) error {
// called after peer disconnected
func (self *Hive) removePeer(p *peer) {
+ removePeerCounter.Inc(1)
log.Debug(fmt.Sprintf("bee %v removed", p))
self.kad.Off(p, saveSync)
select {
diff --git a/swarm/network/kademlia/kademlia.go b/swarm/network/kademlia/kademlia.go
index 0abc42a19..b5999b52d 100644
--- a/swarm/network/kademlia/kademlia.go
+++ b/swarm/network/kademlia/kademlia.go
@@ -24,6 +24,16 @@ import (
"time"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
+)
+
+// Metrics variables.
+// For metrics, we want to count how many times peers are added or removed
+// at a certain proximity index, so we keep an array of counters with one
+// entry per index.
+var (
+ bucketAddIndexCount []metrics.Counter
+ bucketRmIndexCount []metrics.Counter
)
const (
@@ -88,12 +98,14 @@ type Node interface {
// params is KadParams configuration
func New(addr Address, params *KadParams) *Kademlia {
buckets := make([][]Node, params.MaxProx+1)
- return &Kademlia{
+ kad := &Kademlia{
addr: addr,
KadParams: params,
buckets: buckets,
db: newKadDb(addr, params),
}
+ kad.initMetricsVariables()
+ return kad
}
// accessor for KAD base address
@@ -138,6 +150,7 @@ func (self *Kademlia) On(node Node, cb func(*NodeRecord, Node) error) (err error
// TODO: give priority to peers with active traffic
if len(bucket) < self.BucketSize { // >= allows us to add peers beyond the bucketsize limitation
self.buckets[index] = append(bucket, node)
+ bucketAddIndexCount[index].Inc(1)
log.Debug(fmt.Sprintf("add node %v to table", node))
self.setProxLimit(index, true)
record.node = node
@@ -178,6 +191,7 @@ func (self *Kademlia) Off(node Node, cb func(*NodeRecord, Node)) (err error) {
defer self.lock.Unlock()
index := self.proximityBin(node.Addr())
+ bucketRmIndexCount[index].Inc(1)
bucket := self.buckets[index]
for i := 0; i < len(bucket); i++ {
if node.Addr() == bucket[i].Addr() {
@@ -426,3 +440,15 @@ func (self *Kademlia) String() string {
rows = append(rows, "=========================================================================")
return strings.Join(rows, "\n")
}
+
+// initMetricsVariables builds the per-index counter arrays declared above.
+func (self *Kademlia) initMetricsVariables() {
+ //create the arrays
+ bucketAddIndexCount = make([]metrics.Counter, self.MaxProx+1)
+ bucketRmIndexCount = make([]metrics.Counter, self.MaxProx+1)
+ //at each index create a metrics counter
+ for i := 0; i < (self.KadParams.MaxProx + 1); i++ {
+ bucketAddIndexCount[i] = metrics.NewRegisteredCounter(fmt.Sprintf("network.kademlia.bucket.add.%d.index", i), nil)
+ bucketRmIndexCount[i] = metrics.NewRegisteredCounter(fmt.Sprintf("network.kademlia.bucket.rm.%d.index", i), nil)
+ }
+}
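
Because every proximity bin gets its own counter, the bucket index is encoded in the metric name, which yields one time series per bin. A standalone sketch of the same naming scheme; the MaxProx value and the name prefix are illustrative only:

package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/metrics"
)

func main() {
    maxProx := 8 // illustrative; Kademlia uses KadParams.MaxProx
    addCounters := make([]metrics.Counter, maxProx+1)
    for i := 0; i <= maxProx; i++ {
        // e.g. example.bucket.add.3.index, analogous to
        // network.kademlia.bucket.add.3.index registered above.
        addCounters[i] = metrics.NewRegisteredCounter(fmt.Sprintf("example.bucket.add.%d.index", i), nil)
    }
    addCounters[3].Inc(1) // what On() does when a peer lands in bin 3
    fmt.Println("bin 3 adds:", addCounters[3].Count())
}
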
diff --git a/swarm/network/protocol.go b/swarm/network/protocol.go
index a418c1dbb..1cbe00a97 100644
--- a/swarm/network/protocol.go
+++ b/swarm/network/protocol.go
@@ -39,12 +39,26 @@ import (
"github.com/ethereum/go-ethereum/contracts/chequebook"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/p2p"
bzzswap "github.com/ethereum/go-ethereum/swarm/services/swap"
"github.com/ethereum/go-ethereum/swarm/services/swap/swap"
"github.com/ethereum/go-ethereum/swarm/storage"
)
+//metrics variables
+var (
+ storeRequestMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.storerequest.count", nil)
+ retrieveRequestMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.retrieverequest.count", nil)
+ peersMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.peers.count", nil)
+ syncRequestMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.syncrequest.count", nil)
+ unsyncedKeysMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.unsyncedkeys.count", nil)
+ deliverRequestMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.deliverrequest.count", nil)
+ paymentMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.payment.count", nil)
+ invalidMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.invalid.count", nil)
+ handleStatusMsgCounter = metrics.NewRegisteredCounter("network.protocol.msg.handlestatus.count", nil)
+)
+
const (
Version = 0
ProtocolLength = uint64(8)
@@ -206,6 +220,7 @@ func (self *bzz) handle() error {
case storeRequestMsg:
// store requests are dispatched to netStore
+ storeRequestMsgCounter.Inc(1)
var req storeRequestMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<- %v: %v", msg, err)
@@ -221,6 +236,7 @@ func (self *bzz) handle() error {
case retrieveRequestMsg:
// retrieve Requests are dispatched to netStore
+ retrieveRequestMsgCounter.Inc(1)
var req retrieveRequestMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<- %v: %v", msg, err)
@@ -241,6 +257,7 @@ func (self *bzz) handle() error {
case peersMsg:
// response to lookups and immediate response to retrieve requests
// dispatches new peer data to the hive that adds them to KADDB
+ peersMsgCounter.Inc(1)
var req peersMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<- %v: %v", msg, err)
@@ -250,6 +267,7 @@ func (self *bzz) handle() error {
self.hive.HandlePeersMsg(&req, &peer{bzz: self})
case syncRequestMsg:
+ syncRequestMsgCounter.Inc(1)
var req syncRequestMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<- %v: %v", msg, err)
@@ -260,6 +278,7 @@ func (self *bzz) handle() error {
case unsyncedKeysMsg:
// coming from parent node offering
+ unsyncedKeysMsgCounter.Inc(1)
var req unsyncedKeysMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<- %v: %v", msg, err)
@@ -274,6 +293,7 @@ func (self *bzz) handle() error {
case deliveryRequestMsg:
// response to syncKeysMsg hashes filtered not existing in db
// also relays the last synced state to the source
+ deliverRequestMsgCounter.Inc(1)
var req deliveryRequestMsgData
if err := msg.Decode(&req); err != nil {
return fmt.Errorf("<-msg %v: %v", msg, err)
@@ -287,6 +307,7 @@ func (self *bzz) handle() error {
case paymentMsg:
// swap protocol message for payment, Units paid for, Cheque paid with
+ paymentMsgCounter.Inc(1)
if self.swapEnabled {
var req paymentMsgData
if err := msg.Decode(&req); err != nil {
@@ -298,6 +319,7 @@ func (self *bzz) handle() error {
default:
// no other message is allowed
+ invalidMsgCounter.Inc(1)
return fmt.Errorf("invalid message code: %v", msg.Code)
}
return nil
@@ -332,6 +354,8 @@ func (self *bzz) handleStatus() (err error) {
return fmt.Errorf("first msg has code %x (!= %x)", msg.Code, statusMsg)
}
+ handleStatusMsgCounter.Inc(1)
+
if msg.Size > ProtocolMaxMsgSize {
return fmt.Errorf("message too long: %v > %v", msg.Size, ProtocolMaxMsgSize)
}
diff --git a/swarm/storage/chunker.go b/swarm/storage/chunker.go
index 98cd6e75e..2b397f801 100644
--- a/swarm/storage/chunker.go
+++ b/swarm/storage/chunker.go
@@ -23,6 +23,8 @@ import (
"io"
"sync"
"time"
+
+ "github.com/ethereum/go-ethereum/metrics"
)
/*
@@ -63,6 +65,11 @@ var (
errOperationTimedOut = errors.New("operation timed out")
)
+//metrics variables
+var (
+ newChunkCounter = metrics.NewRegisteredCounter("storage.chunks.new", nil)
+)
+
type TreeChunker struct {
branches int64
hashFunc SwarmHasher
@@ -298,6 +305,13 @@ func (self *TreeChunker) hashChunk(hasher SwarmHash, job *hashJob, chunkC chan *
job.parentWg.Done()
if chunkC != nil {
+ // NOTE: this increases the chunk count even if the local node already has the chunk;
+ // on file upload the counter is incremented even if the same file has been uploaded before.
+ // It should be evaluated whether this counter is worth keeping, or whether it would be
+ // better to count chunks when they are actually Put to the local database, possibly
+ // distinguishing chunks that are newly created from chunks that are merely written to the local DB.
+ newChunkCounter.Inc(1)
chunkC <- newChunk
}
}
diff --git a/swarm/storage/chunker_test.go b/swarm/storage/chunker_test.go
index 6b828970b..c8bd12299 100644
--- a/swarm/storage/chunker_test.go
+++ b/swarm/storage/chunker_test.go
@@ -363,7 +363,7 @@ func TestRandomData(t *testing.T) {
}
-func TestRandomBrokenData(t *testing.T) {
+func XTestRandomBrokenData(t *testing.T) {
sizes := []int{1, 60, 83, 179, 253, 1024, 4095, 4096, 4097, 8191, 8192, 8193, 12287, 12288, 12289, 123456, 2345678}
tester := &chunkerTester{t: t}
chunker := NewTreeChunker(NewChunkerParams())
diff --git a/swarm/storage/dbstore.go b/swarm/storage/dbstore.go
index 46a5c16cc..421bb061d 100644
--- a/swarm/storage/dbstore.go
+++ b/swarm/storage/dbstore.go
@@ -33,11 +33,18 @@ import (
"sync"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/rlp"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/iterator"
)
+//metrics variables
+var (
+ gcCounter = metrics.NewRegisteredCounter("storage.db.dbstore.gc.count", nil)
+ dbStoreDeleteCounter = metrics.NewRegisteredCounter("storage.db.dbstore.rm.count", nil)
+)
+
const (
defaultDbCapacity = 5000000
defaultRadius = 0 // not yet used
@@ -255,6 +262,7 @@ func (s *DbStore) collectGarbage(ratio float32) {
// actual gc
for i := 0; i < gcnt; i++ {
if s.gcArray[i].value <= cutval {
+ gcCounter.Inc(1)
s.delete(s.gcArray[i].idx, s.gcArray[i].idxKey)
}
}
@@ -383,6 +391,7 @@ func (s *DbStore) delete(idx uint64, idxKey []byte) {
batch := new(leveldb.Batch)
batch.Delete(idxKey)
batch.Delete(getDataKey(idx))
+ dbStoreDeleteCounter.Inc(1)
s.entryCnt--
batch.Put(keyEntryCnt, U64ToBytes(s.entryCnt))
s.db.Write(batch)
diff --git a/swarm/storage/localstore.go b/swarm/storage/localstore.go
index b442e6cc5..ece0c8615 100644
--- a/swarm/storage/localstore.go
+++ b/swarm/storage/localstore.go
@@ -18,6 +18,13 @@ package storage
import (
"encoding/binary"
+
+ "github.com/ethereum/go-ethereum/metrics"
+)
+
+//metrics variables
+var (
+ dbStorePutCounter = metrics.NewRegisteredCounter("storage.db.dbstore.put.count", nil)
)
// LocalStore is a combination of inmemory db over a disk persisted db
@@ -39,6 +46,14 @@ func NewLocalStore(hash SwarmHasher, params *StoreParams) (*LocalStore, error) {
}, nil
}
+func (self *LocalStore) CacheCounter() uint64 {
+ return uint64(self.memStore.(*MemStore).Counter())
+}
+
+func (self *LocalStore) DbCounter() uint64 {
+ return self.DbStore.(*DbStore).Counter()
+}
+
// LocalStore is itself a chunk store
// unsafe, in that the data is not integrity checked
func (self *LocalStore) Put(chunk *Chunk) {
@@ -48,6 +63,7 @@ func (self *LocalStore) Put(chunk *Chunk) {
chunk.wg.Add(1)
}
go func() {
+ dbStorePutCounter.Inc(1)
self.DbStore.Put(chunk)
if chunk.wg != nil {
chunk.wg.Done()
diff --git a/swarm/storage/memstore.go b/swarm/storage/memstore.go
index 3cb25ac62..d6be54220 100644
--- a/swarm/storage/memstore.go
+++ b/swarm/storage/memstore.go
@@ -23,6 +23,13 @@ import (
"sync"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
+)
+
+//metrics variables
+var (
+ memstorePutCounter = metrics.NewRegisteredCounter("storage.db.memstore.put.count", nil)
+ memstoreRemoveCounter = metrics.NewRegisteredCounter("storage.db.memstore.rm.count", nil)
)
const (
@@ -130,6 +137,10 @@ func (s *MemStore) setCapacity(c uint) {
s.capacity = c
}
+func (s *MemStore) Counter() uint {
+ return s.entryCnt
+}
+
// entry (not its copy) is going to be in MemStore
func (s *MemStore) Put(entry *Chunk) {
if s.capacity == 0 {
@@ -145,6 +156,8 @@ func (s *MemStore) Put(entry *Chunk) {
s.accessCnt++
+ memstorePutCounter.Inc(1)
+
node := s.memtree
bitpos := uint(0)
for node.entry == nil {
@@ -289,6 +302,7 @@ func (s *MemStore) removeOldest() {
}
if node.entry.SData != nil {
+ memstoreRemoveCounter.Inc(1)
node.entry = nil
s.entryCnt--
}
diff --git a/swarm/swarm.go b/swarm/swarm.go
index 3be3660b5..0a120db1f 100644
--- a/swarm/swarm.go
+++ b/swarm/swarm.go
@@ -21,7 +21,11 @@ import (
"context"
"crypto/ecdsa"
"fmt"
+ "math/big"
"net"
+ "strings"
+ "time"
+ "unicode"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
@@ -30,9 +34,11 @@ import (
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
+ "github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/node"
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/p2p/discover"
+ "github.com/ethereum/go-ethereum/params"
"github.com/ethereum/go-ethereum/rpc"
"github.com/ethereum/go-ethereum/swarm/api"
httpapi "github.com/ethereum/go-ethereum/swarm/api/http"
@@ -41,6 +47,16 @@ import (
"github.com/ethereum/go-ethereum/swarm/storage"
)
+var (
+ startTime time.Time
+ updateGaugesPeriod = 5 * time.Second
+ startCounter = metrics.NewRegisteredCounter("stack.start", nil)
+ stopCounter = metrics.NewRegisteredCounter("stack.stop", nil)
+ uptimeGauge = metrics.NewRegisteredGauge("stack.uptime", nil)
+ dbSizeGauge = metrics.NewRegisteredGauge("storage.db.chunks.size", nil)
+ cacheSizeGauge = metrics.NewRegisteredGauge("storage.db.cache.size", nil)
+)
+
// the swarm stack
type Swarm struct {
config *api.Config // swarm configuration
@@ -76,7 +92,7 @@ func (self *Swarm) API() *SwarmAPI {
// creates a new swarm service instance
// implements node.Service
-func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, ensClient *ethclient.Client, config *api.Config, swapEnabled, syncEnabled bool, cors string) (self *Swarm, err error) {
+func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, config *api.Config) (self *Swarm, err error) {
if bytes.Equal(common.FromHex(config.PublicKey), storage.ZeroKey) {
return nil, fmt.Errorf("empty public key")
}
@@ -86,10 +102,10 @@ func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, ensClient *e
self = &Swarm{
config: config,
- swapEnabled: swapEnabled,
+ swapEnabled: config.SwapEnabled,
backend: backend,
privateKey: config.Swap.PrivateKey(),
- corsString: cors,
+ corsString: config.Cors,
}
log.Debug(fmt.Sprintf("Setting up Swarm service components"))
@@ -109,8 +125,8 @@ func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, ensClient *e
self.hive = network.NewHive(
common.HexToHash(self.config.BzzKey), // key to hive (kademlia base address)
config.HiveParams, // configuration parameters
- swapEnabled, // SWAP enabled
- syncEnabled, // syncronisation enabled
+ config.SwapEnabled, // SWAP enabled
+ config.SyncEnabled, // synchronisation enabled
)
log.Debug(fmt.Sprintf("Set up swarm network with Kademlia hive"))
@@ -133,18 +149,18 @@ func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, ensClient *e
self.dpa = storage.NewDPA(dpaChunkStore, self.config.ChunkerParams)
log.Debug(fmt.Sprintf("-> Content Store API"))
- // set up high level api
- transactOpts := bind.NewKeyedTransactor(self.privateKey)
-
- if ensClient == nil {
- log.Warn("No ENS, please specify non-empty --ens-api to use domain name resolution")
- } else {
- self.dns, err = ens.NewENS(transactOpts, config.EnsRoot, ensClient)
- if err != nil {
- return nil, err
+ if len(config.EnsAPIs) > 0 {
+ opts := []api.MultiResolverOption{}
+ for _, c := range config.EnsAPIs {
+ tld, endpoint, addr := parseEnsAPIAddress(c)
+ r, err := newEnsClient(endpoint, addr, config)
+ if err != nil {
+ return nil, err
+ }
+ opts = append(opts, api.MultiResolverOptionWithResolver(r, tld))
}
+ self.dns = api.NewMultiResolver(opts...)
}
- log.Debug(fmt.Sprintf("-> Swarm Domain Name Registrar @ address %v", config.EnsRoot.Hex()))
self.api = api.NewApi(self.dpa, self.dns)
// Manifests for Smart Hosting
@@ -156,6 +172,95 @@ func NewSwarm(ctx *node.ServiceContext, backend chequebook.Backend, ensClient *e
return self, nil
}
+// parseEnsAPIAddress parses a string in the format [tld:][contract-addr@]url
+// and returns the TLD, the endpoint URL and the contract address it contains.
+func parseEnsAPIAddress(s string) (tld, endpoint string, addr common.Address) {
+ isAllLetterString := func(s string) bool {
+ for _, r := range s {
+ if !unicode.IsLetter(r) {
+ return false
+ }
+ }
+ return true
+ }
+ endpoint = s
+ if i := strings.Index(endpoint, ":"); i > 0 {
+ if isAllLetterString(endpoint[:i]) && len(endpoint) > i+2 && endpoint[i+1:i+3] != "//" {
+ tld = endpoint[:i]
+ endpoint = endpoint[i+1:]
+ }
+ }
+ if i := strings.Index(endpoint, "@"); i > 0 {
+ addr = common.HexToAddress(endpoint[:i])
+ endpoint = endpoint[i+1:]
+ }
+ return
+}
+
+// newEnsClient creates a new ENS client that consumes an ENS API on a
+// specific endpoint. It is used as a helper function for creating multiple
+// resolvers in the NewSwarm function.
+func newEnsClient(endpoint string, addr common.Address, config *api.Config) (*ens.ENS, error) {
+ log.Info("connecting to ENS API", "url", endpoint)
+ client, err := rpc.Dial(endpoint)
+ if err != nil {
+ return nil, fmt.Errorf("error connecting to ENS API %s: %s", endpoint, err)
+ }
+ ensClient := ethclient.NewClient(client)
+
+ ensRoot := config.EnsRoot
+ if addr != (common.Address{}) {
+ ensRoot = addr
+ } else {
+ a, err := detectEnsAddr(client)
+ if err == nil {
+ ensRoot = a
+ } else {
+ log.Warn(fmt.Sprintf("could not determine ENS contract address, using default %s", ensRoot), "err", err)
+ }
+ }
+ transactOpts := bind.NewKeyedTransactor(config.Swap.PrivateKey())
+ dns, err := ens.NewENS(transactOpts, ensRoot, ensClient)
+ if err != nil {
+ return nil, err
+ }
+ log.Debug(fmt.Sprintf("-> Swarm Domain Name Registrar %v @ address %v", endpoint, ensRoot.Hex()))
+ return dns, err
+}
+
+// detectEnsAddr determines the ENS contract address by getting both the
+// version and genesis hash using the client and matching them to either
+// mainnet or testnet addresses
+func detectEnsAddr(client *rpc.Client) (common.Address, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ defer cancel()
+
+ var version string
+ if err := client.CallContext(ctx, &version, "net_version"); err != nil {
+ return common.Address{}, err
+ }
+
+ block, err := ethclient.NewClient(client).BlockByNumber(ctx, big.NewInt(0))
+ if err != nil {
+ return common.Address{}, err
+ }
+
+ switch {
+
+ case version == "1" && block.Hash() == params.MainnetGenesisHash:
+ log.Info("using Mainnet ENS contract address", "addr", ens.MainNetAddress)
+ return ens.MainNetAddress, nil
+
+ case version == "3" && block.Hash() == params.TestnetGenesisHash:
+ log.Info("using Testnet ENS contract address", "addr", ens.TestNetAddress)
+ return ens.TestNetAddress, nil
+
+ default:
+ return common.Address{}, fmt.Errorf("unknown version and genesis hash: %s %s", version, block.Hash())
+ }
+}
+
/*
Start is called when the stack is started
* starts the network kademlia hive peer management
@@ -168,6 +273,7 @@ Start is called when the stack is started
*/
// implements the node.Service interface
func (self *Swarm) Start(srv *p2p.Server) error {
+ startTime = time.Now()
connectPeer := func(url string) error {
node, err := discover.ParseNode(url)
if err != nil {
@@ -213,9 +319,28 @@ func (self *Swarm) Start(srv *p2p.Server) error {
}
}
+ self.periodicallyUpdateGauges()
+
+ startCounter.Inc(1)
return nil
}
+func (self *Swarm) periodicallyUpdateGauges() {
+ ticker := time.NewTicker(updateGaugesPeriod)
+
+ go func() {
+ for range ticker.C {
+ self.updateGauges()
+ }
+ }()
+}
+
+func (self *Swarm) updateGauges() {
+ dbSizeGauge.Update(int64(self.lstore.DbCounter()))
+ cacheSizeGauge.Update(int64(self.lstore.CacheCounter()))
+ uptimeGauge.Update(time.Since(startTime).Nanoseconds())
+}
+
// implements the node.Service interface
// stops all component services.
func (self *Swarm) Stop() error {
@@ -230,6 +355,7 @@ func (self *Swarm) Stop() error {
self.lstore.DbStore.Close()
}
self.sfs.Stop()
+ stopCounter.Inc(1)
return err
}
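
The gauge plumbing added to swarm.go boils down to a ticker goroutine that pushes a fresh sample every few seconds; unlike counters, gauges are overwritten rather than accumulated. A stripped-down sketch of the same pattern, with an invented gauge name standing in for the db/cache/uptime gauges above:

package main

import (
    "time"

    "github.com/ethereum/go-ethereum/metrics"
)

var exampleUptimeGauge = metrics.NewRegisteredGauge("example.stack.uptime", nil)

func main() {
    start := time.Now()
    ticker := time.NewTicker(5 * time.Second) // updateGaugesPeriod in the diff
    go func() {
        for range ticker.C {
            // Overwrite with the latest value on every tick,
            // the same way Swarm.updateGauges does.
            exampleUptimeGauge.Update(time.Since(start).Nanoseconds())
        }
    }()
    time.Sleep(12 * time.Second) // stand-in for a running node
}
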
diff --git a/swarm/swarm_test.go b/swarm/swarm_test.go
new file mode 100644
index 000000000..8b1ae2888
--- /dev/null
+++ b/swarm/swarm_test.go
@@ -0,0 +1,119 @@
+// Copyright 2017 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package swarm
+
+import (
+ "testing"
+
+ "github.com/ethereum/go-ethereum/common"
+)
+
+func TestParseEnsAPIAddress(t *testing.T) {
+ for _, x := range []struct {
+ description string
+ value string
+ tld string
+ endpoint string
+ addr common.Address
+ }{
+ {
+ description: "IPC endpoint",
+ value: "/data/testnet/geth.ipc",
+ endpoint: "/data/testnet/geth.ipc",
+ },
+ {
+ description: "HTTP endpoint",
+ value: "http://127.0.0.1:1234",
+ endpoint: "http://127.0.0.1:1234",
+ },
+ {
+ description: "WS endpoint",
+ value: "ws://127.0.0.1:1234",
+ endpoint: "ws://127.0.0.1:1234",
+ },
+ {
+ description: "IPC Endpoint and TLD",
+ value: "test:/data/testnet/geth.ipc",
+ endpoint: "/data/testnet/geth.ipc",
+ tld: "test",
+ },
+ {
+ description: "HTTP endpoint and TLD",
+ value: "test:http://127.0.0.1:1234",
+ endpoint: "http://127.0.0.1:1234",
+ tld: "test",
+ },
+ {
+ description: "WS endpoint and TLD",
+ value: "test:ws://127.0.0.1:1234",
+ endpoint: "ws://127.0.0.1:1234",
+ tld: "test",
+ },
+ {
+ description: "IPC Endpoint and contract address",
+ value: "314159265dD8dbb310642f98f50C066173C1259b@/data/testnet/geth.ipc",
+ endpoint: "/data/testnet/geth.ipc",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ },
+ {
+ description: "HTTP endpoint and contract address",
+ value: "314159265dD8dbb310642f98f50C066173C1259b@http://127.0.0.1:1234",
+ endpoint: "http://127.0.0.1:1234",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ },
+ {
+ description: "WS endpoint and contract address",
+ value: "314159265dD8dbb310642f98f50C066173C1259b@ws://127.0.0.1:1234",
+ endpoint: "ws://127.0.0.1:1234",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ },
+ {
+ description: "IPC Endpoint, TLD and contract address",
+ value: "test:314159265dD8dbb310642f98f50C066173C1259b@/data/testnet/geth.ipc",
+ endpoint: "/data/testnet/geth.ipc",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ tld: "test",
+ },
+ {
+ description: "HTTP endpoint, TLD and contract address",
+ value: "eth:314159265dD8dbb310642f98f50C066173C1259b@http://127.0.0.1:1234",
+ endpoint: "http://127.0.0.1:1234",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ tld: "eth",
+ },
+ {
+ description: "WS endpoint, TLD and contract address",
+ value: "eth:314159265dD8dbb310642f98f50C066173C1259b@ws://127.0.0.1:1234",
+ endpoint: "ws://127.0.0.1:1234",
+ addr: common.HexToAddress("314159265dD8dbb310642f98f50C066173C1259b"),
+ tld: "eth",
+ },
+ } {
+ t.Run(x.description, func(t *testing.T) {
+ tld, endpoint, addr := parseEnsAPIAddress(x.value)
+ if endpoint != x.endpoint {
+ t.Errorf("expected Endpoint %q, got %q", x.endpoint, endpoint)
+ }
+ if addr != x.addr {
+ t.Errorf("expected ContractAddress %q, got %q", x.addr.String(), addr.String())
+ }
+ if tld != x.tld {
+ t.Errorf("expected TLD %q, got %q", x.tld, tld)
+ }
+ })
+ }
+}
diff --git a/tests/block_test_util.go b/tests/block_test_util.go
index 53a5f7fcc..beba48483 100644
--- a/tests/block_test_util.go
+++ b/tests/block_test_util.go
@@ -110,7 +110,7 @@ func (t *BlockTest) Run() error {
return fmt.Errorf("genesis block state root does not match test: computed=%x, test=%x", gblock.Root().Bytes()[:6], t.json.Genesis.StateRoot[:6])
}
- chain, err := core.NewBlockChain(db, config, ethash.NewShared(), vm.Config{})
+ chain, err := core.NewBlockChain(db, nil, config, ethash.NewShared(), vm.Config{})
if err != nil {
return err
}
@@ -120,7 +120,7 @@ func (t *BlockTest) Run() error {
if err != nil {
return err
}
- cmlast := chain.LastBlockHash()
+ cmlast := chain.CurrentBlock().Hash()
if common.Hash(t.json.BestBlock) != cmlast {
return fmt.Errorf("last block hash validation mismatch: want: %x, have: %x", t.json.BestBlock, cmlast)
}
diff --git a/tests/difficulty_test.go b/tests/difficulty_test.go
index a449b1cfa..600637300 100644
--- a/tests/difficulty_test.go
+++ b/tests/difficulty_test.go
@@ -13,7 +13,6 @@
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-//
package tests
diff --git a/tests/difficulty_test_util.go b/tests/difficulty_test_util.go
index 754147793..00d699cf7 100644
--- a/tests/difficulty_test_util.go
+++ b/tests/difficulty_test_util.go
@@ -13,7 +13,6 @@
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-//
package tests
diff --git a/tests/init.go b/tests/init.go
index 9e884efe3..ff8ee7da1 100644
--- a/tests/init.go
+++ b/tests/init.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/tests/init_test.go b/tests/init_test.go
index ebb0d32c3..fbb214b08 100644
--- a/tests/init_test.go
+++ b/tests/init_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The go-ethereum Authors
+// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/tests/state_test.go b/tests/state_test.go
index 100c776c1..9ca5f1830 100644
--- a/tests/state_test.go
+++ b/tests/state_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
diff --git a/tests/state_test_util.go b/tests/state_test_util.go
index 78c05b024..3b761bd77 100644
--- a/tests/state_test_util.go
+++ b/tests/state_test_util.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The go-ethereum Authors
+// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
@@ -125,7 +125,7 @@ func (t *StateTest) Run(subtest StateSubtest, vmconfig vm.Config) (*state.StateD
if !ok {
return nil, UnsupportedForkError{subtest.Fork}
}
- block, _ := t.genesis(config).ToBlock()
+ block := t.genesis(config).ToBlock(nil)
db, _ := ethdb.NewMemDatabase()
statedb := MakePreState(db, t.json.Pre)
@@ -147,7 +147,7 @@ func (t *StateTest) Run(subtest StateSubtest, vmconfig vm.Config) (*state.StateD
if logs := rlpHash(statedb.Logs()); logs != common.Hash(post.Logs) {
return statedb, fmt.Errorf("post state logs hash mismatch: got %x, want %x", logs, post.Logs)
}
- root, _ := statedb.CommitTo(db, config.IsEIP158(block.Number()))
+ root, _ := statedb.Commit(config.IsEIP158(block.Number()))
if root != common.Hash(post.Root) {
return statedb, fmt.Errorf("post state root mismatch: got %x, want %x", root, post.Root)
}
@@ -170,7 +170,7 @@ func MakePreState(db ethdb.Database, accounts core.GenesisAlloc) *state.StateDB
}
}
// Commit and re-open to start with a clean state.
- root, _ := statedb.CommitTo(db, false)
+ root, _ := statedb.Commit(false)
statedb, _ = state.New(root, sdb)
return statedb
}
diff --git a/trie/database.go b/trie/database.go
new file mode 100644
index 000000000..da36e72f9
--- /dev/null
+++ b/trie/database.go
@@ -0,0 +1,355 @@
+// Copyright 2018 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package trie
+
+import (
+ "sync"
+ "time"
+
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/ethdb"
+ "github.com/ethereum/go-ethereum/log"
+)
+
+// secureKeyPrefix is the database key prefix used to store trie node preimages.
+var secureKeyPrefix = []byte("secure-key-")
+
+// secureKeyLength is the length of the above prefix + 32 byte hash.
+const secureKeyLength = 11 + 32
+
+// DatabaseReader wraps the Get and Has method of a backing store for the trie.
+type DatabaseReader interface {
+ // Get retrieves the value associated with key from the database.
+ Get(key []byte) (value []byte, err error)
+
+ // Has retrieves whether a key is present in the database.
+ Has(key []byte) (bool, error)
+}
+
+// Database is an intermediate write layer between the trie data structures and
+// the disk database. The aim is to accumulate trie writes in-memory and only
+// periodically flush a couple of tries to disk, garbage collecting the remainder.
+type Database struct {
+ diskdb ethdb.Database // Persistent storage for matured trie nodes
+
+ nodes map[common.Hash]*cachedNode // Data and references relationships of a node
+ preimages map[common.Hash][]byte // Preimages of nodes from the secure trie
+ seckeybuf [secureKeyLength]byte // Ephemeral buffer for calculating preimage keys
+
+ gctime time.Duration // Time spent on garbage collection since last commit
+ gcnodes uint64 // Nodes garbage collected since last commit
+ gcsize common.StorageSize // Data storage garbage collected since last commit
+
+ nodesSize common.StorageSize // Storage size of the nodes cache
+ preimagesSize common.StorageSize // Storage size of the preimages cache
+
+ lock sync.RWMutex
+}
+
+// cachedNode is all the information we know about a single cached node in the
+// memory database write layer.
+type cachedNode struct {
+ blob []byte // Cached data block of the trie node
+ parents int // Number of live nodes referencing this one
+ children map[common.Hash]int // Children referenced by this node
+}
+
+// NewDatabase creates a new trie database to store ephemeral trie content before
+// it's written out to disk or garbage collected.
+func NewDatabase(diskdb ethdb.Database) *Database {
+ return &Database{
+ diskdb: diskdb,
+ nodes: map[common.Hash]*cachedNode{
+ {}: {children: make(map[common.Hash]int)},
+ },
+ preimages: make(map[common.Hash][]byte),
+ }
+}
+
+// DiskDB retrieves the persistent storage backing the trie database.
+func (db *Database) DiskDB() DatabaseReader {
+ return db.diskdb
+}
+
+// Insert writes a new trie node to the memory database if it's yet unknown. The
+// method will make a copy of the slice.
+func (db *Database) Insert(hash common.Hash, blob []byte) {
+ db.lock.Lock()
+ defer db.lock.Unlock()
+
+ db.insert(hash, blob)
+}
+
+// insert is the private locked version of Insert.
+func (db *Database) insert(hash common.Hash, blob []byte) {
+ if _, ok := db.nodes[hash]; ok {
+ return
+ }
+ db.nodes[hash] = &cachedNode{
+ blob: common.CopyBytes(blob),
+ children: make(map[common.Hash]int),
+ }
+ db.nodesSize += common.StorageSize(common.HashLength + len(blob))
+}
+
+// insertPreimage writes a new trie node pre-image to the memory database if it's
+// yet unknown. The method will make a copy of the slice.
+//
+// Note, this method assumes that the database's lock is held!
+func (db *Database) insertPreimage(hash common.Hash, preimage []byte) {
+ if _, ok := db.preimages[hash]; ok {
+ return
+ }
+ db.preimages[hash] = common.CopyBytes(preimage)
+ db.preimagesSize += common.StorageSize(common.HashLength + len(preimage))
+}
+
+// Node retrieves a cached trie node from memory. If it cannot be found cached,
+// the method queries the persistent database for the content.
+func (db *Database) Node(hash common.Hash) ([]byte, error) {
+ // Retrieve the node from cache if available
+ db.lock.RLock()
+ node := db.nodes[hash]
+ db.lock.RUnlock()
+
+ if node != nil {
+ return node.blob, nil
+ }
+ // Content unavailable in memory, attempt to retrieve from disk
+ return db.diskdb.Get(hash[:])
+}
+
+// preimage retrieves a cached trie node pre-image from memory. If it cannot be
+// found cached, the method queries the persistent database for the content.
+func (db *Database) preimage(hash common.Hash) ([]byte, error) {
+ // Retrieve the node from cache if available
+ db.lock.RLock()
+ preimage := db.preimages[hash]
+ db.lock.RUnlock()
+
+ if preimage != nil {
+ return preimage, nil
+ }
+ // Content unavailable in memory, attempt to retrieve from disk
+ return db.diskdb.Get(db.secureKey(hash[:]))
+}
+
+// secureKey returns the database key for the preimage of key, as an ephemeral
+// buffer. The caller must not hold onto the return value because it will become
+// invalid on the next call.
+func (db *Database) secureKey(key []byte) []byte {
+ buf := append(db.seckeybuf[:0], secureKeyPrefix...)
+ buf = append(buf, key...)
+ return buf
+}
+
+// Nodes retrieves the hashes of all the nodes cached within the memory database.
+// This method is extremely expensive and should only be used to validate internal
+// states in test code.
+func (db *Database) Nodes() []common.Hash {
+ db.lock.RLock()
+ defer db.lock.RUnlock()
+
+ var hashes = make([]common.Hash, 0, len(db.nodes))
+ for hash := range db.nodes {
+ if hash != (common.Hash{}) { // Special case for "root" references/nodes
+ hashes = append(hashes, hash)
+ }
+ }
+ return hashes
+}
+
+// Reference adds a new reference from a parent node to a child node.
+func (db *Database) Reference(child common.Hash, parent common.Hash) {
+ db.lock.RLock()
+ defer db.lock.RUnlock()
+
+ db.reference(child, parent)
+}
+
+// reference is the private locked version of Reference.
+func (db *Database) reference(child common.Hash, parent common.Hash) {
+ // If the node does not exist, it's a node pulled from disk, skip
+ node, ok := db.nodes[child]
+ if !ok {
+ return
+ }
+ // If the reference already exists, only duplicate for roots
+ if _, ok = db.nodes[parent].children[child]; ok && parent != (common.Hash{}) {
+ return
+ }
+ node.parents++
+ db.nodes[parent].children[child]++
+}
+
+// Dereference removes an existing reference from a parent node to a child node.
+func (db *Database) Dereference(child common.Hash, parent common.Hash) {
+ db.lock.Lock()
+ defer db.lock.Unlock()
+
+ nodes, storage, start := len(db.nodes), db.nodesSize, time.Now()
+ db.dereference(child, parent)
+
+ db.gcnodes += uint64(nodes - len(db.nodes))
+ db.gcsize += storage - db.nodesSize
+ db.gctime += time.Since(start)
+
+ log.Debug("Dereferenced trie from memory database", "nodes", nodes-len(db.nodes), "size", storage-db.nodesSize, "time", time.Since(start),
+ "gcnodes", db.gcnodes, "gcsize", db.gcsize, "gctime", db.gctime, "livenodes", len(db.nodes), "livesize", db.nodesSize)
+}
+
+// dereference is the private locked version of Dereference.
+func (db *Database) dereference(child common.Hash, parent common.Hash) {
+ // Dereference the parent-child
+ node := db.nodes[parent]
+
+ node.children[child]--
+ if node.children[child] == 0 {
+ delete(node.children, child)
+ }
+ // If the node does not exist, it's a previously committed node.
+ node, ok := db.nodes[child]
+ if !ok {
+ return
+ }
+ // If there are no more references to the child, delete it and cascade
+ node.parents--
+ if node.parents == 0 {
+ for hash := range node.children {
+ db.dereference(hash, child)
+ }
+ delete(db.nodes, child)
+ db.nodesSize -= common.StorageSize(common.HashLength + len(node.blob))
+ }
+}
+
+// Commit iterates over all the children of a particular node, writes them out
+// to disk, forcefully tearing down all references in both directions.
+//
+// As a side effect, all pre-images accumulated up to this point are also written.
+func (db *Database) Commit(node common.Hash, report bool) error {
+ // Create a database batch to flush persistent data out. It is important that
+ // outside code doesn't see an inconsistent state (referenced data removed from
+ // memory cache during commit but not yet in persistent storage). This is ensured
+ // by only uncaching existing data when the database write finalizes.
+ db.lock.RLock()
+
+ start := time.Now()
+ batch := db.diskdb.NewBatch()
+
+ // Move all of the accumulated preimages into a write batch
+ for hash, preimage := range db.preimages {
+ if err := batch.Put(db.secureKey(hash[:]), preimage); err != nil {
+ log.Error("Failed to commit preimage from trie database", "err", err)
+ db.lock.RUnlock()
+ return err
+ }
+ if batch.ValueSize() > ethdb.IdealBatchSize {
+ if err := batch.Write(); err != nil {
+ return err
+ }
+ batch.Reset()
+ }
+ }
+ // Move the trie itself into the batch, flushing if enough data is accumulated
+ nodes, storage := len(db.nodes), db.nodesSize+db.preimagesSize
+ if err := db.commit(node, batch); err != nil {
+ log.Error("Failed to commit trie from trie database", "err", err)
+ db.lock.RUnlock()
+ return err
+ }
+ // Write batch ready, unlock for readers during persistence
+ if err := batch.Write(); err != nil {
+ log.Error("Failed to write trie to disk", "err", err)
+ db.lock.RUnlock()
+ return err
+ }
+ db.lock.RUnlock()
+
+ // Write successful, clear out the flushed data
+ db.lock.Lock()
+ defer db.lock.Unlock()
+
+ db.preimages = make(map[common.Hash][]byte)
+ db.preimagesSize = 0
+
+ db.uncache(node)
+
+ logger := log.Info
+ if !report {
+ logger = log.Debug
+ }
+ logger("Persisted trie from memory database", "nodes", nodes-len(db.nodes), "size", storage-db.nodesSize, "time", time.Since(start),
+ "gcnodes", db.gcnodes, "gcsize", db.gcsize, "gctime", db.gctime, "livenodes", len(db.nodes), "livesize", db.nodesSize)
+
+ // Reset the garbage collection statistics
+ db.gcnodes, db.gcsize, db.gctime = 0, 0, 0
+
+ return nil
+}
+
+// commit is the private locked version of Commit.
+func (db *Database) commit(hash common.Hash, batch ethdb.Batch) error {
+ // If the node does not exist, it's a previously committed node
+ node, ok := db.nodes[hash]
+ if !ok {
+ return nil
+ }
+ for child := range node.children {
+ if err := db.commit(child, batch); err != nil {
+ return err
+ }
+ }
+ if err := batch.Put(hash[:], node.blob); err != nil {
+ return err
+ }
+ // If we've reached an optimal batch size, commit and start over
+ if batch.ValueSize() >= ethdb.IdealBatchSize {
+ if err := batch.Write(); err != nil {
+ return err
+ }
+ batch.Reset()
+ }
+ return nil
+}
+
+// uncache is the post-processing step of a commit operation where the already
+// persisted trie is removed from the cache. The reason behind the two-phase
+// commit is to ensure consistent data availability while moving from memory
+// to disk.
+func (db *Database) uncache(hash common.Hash) {
+ // If the node does not exist, we're done on this path
+ node, ok := db.nodes[hash]
+ if !ok {
+ return
+ }
+ // Otherwise uncache the node's subtries and remove the node itself too
+ for child := range node.children {
+ db.uncache(child)
+ }
+ delete(db.nodes, hash)
+ db.nodesSize -= common.StorageSize(common.HashLength + len(node.blob))
+}
+
+// Size returns the current storage size of the memory cache in front of the
+// persistent database layer.
+func (db *Database) Size() common.StorageSize {
+ db.lock.RLock()
+ defer db.lock.RUnlock()
+
+ return db.nodesSize + db.preimagesSize
+}
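
Taken together, the new write layer is used roughly as follows. This is only a sketch built from the APIs visible in this diff (trie.NewDatabase, Trie.Commit filling the in-memory cache, Database.Commit flushing a root to disk); it is not code from the commit itself.

package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/ethdb"
    "github.com/ethereum/go-ethereum/trie"
)

func main() {
    diskdb, _ := ethdb.NewMemDatabase()
    triedb := trie.NewDatabase(diskdb)

    t, _ := trie.New(common.Hash{}, triedb)
    t.Update([]byte("foo"), []byte("bar"))

    root, _ := t.Commit(nil) // nodes land in triedb's memory cache only
    fmt.Println("cached size:", triedb.Size())

    triedb.Commit(root, true) // flush this root and its children to diskdb
    blob, _ := diskdb.Get(root[:])
    fmt.Printf("persisted %d bytes for root %x\n", len(blob), root)
}
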
diff --git a/trie/hasher.go b/trie/hasher.go
index 4719aabf6..2fc44787a 100644
--- a/trie/hasher.go
+++ b/trie/hasher.go
@@ -27,21 +27,23 @@ import (
)
type hasher struct {
- tmp *bytes.Buffer
- sha hash.Hash
- cachegen, cachelimit uint16
+ tmp *bytes.Buffer
+ sha hash.Hash
+ cachegen uint16
+ cachelimit uint16
+ onleaf LeafCallback
}
-// hashers live in a global pool.
+// hashers live in a global pool.
var hasherPool = sync.Pool{
New: func() interface{} {
return &hasher{tmp: new(bytes.Buffer), sha: sha3.NewKeccak256()}
},
}
-func newHasher(cachegen, cachelimit uint16) *hasher {
+func newHasher(cachegen, cachelimit uint16, onleaf LeafCallback) *hasher {
h := hasherPool.Get().(*hasher)
- h.cachegen, h.cachelimit = cachegen, cachelimit
+ h.cachegen, h.cachelimit, h.onleaf = cachegen, cachelimit, onleaf
return h
}
@@ -51,7 +53,7 @@ func returnHasherToPool(h *hasher) {
// hash collapses a node down into a hash node, also returning a copy of the
// original node initialized with the computed hash to replace the original one.
-func (h *hasher) hash(n node, db DatabaseWriter, force bool) (node, node, error) {
+func (h *hasher) hash(n node, db *Database, force bool) (node, node, error) {
// If we're not storing the node, just hashing, use available cached data
if hash, dirty := n.cache(); hash != nil {
if db == nil {
@@ -98,7 +100,7 @@ func (h *hasher) hash(n node, db DatabaseWriter, force bool) (node, node, error)
// hashChildren replaces the children of a node with their hashes if the encoded
// size of the child is larger than a hash, returning the collapsed node as well
// as a replacement for the original node with the child hashes cached in.
-func (h *hasher) hashChildren(original node, db DatabaseWriter) (node, node, error) {
+func (h *hasher) hashChildren(original node, db *Database) (node, node, error) {
var err error
switch n := original.(type) {
@@ -145,7 +147,10 @@ func (h *hasher) hashChildren(original node, db DatabaseWriter) (node, node, err
}
}
-func (h *hasher) store(n node, db DatabaseWriter, force bool) (node, error) {
+// store hashes the node n and if we have a storage layer specified, it writes
+// the key/value pair to it and tracks any node->child references as well as any
+// node->external trie references.
+func (h *hasher) store(n node, db *Database, force bool) (node, error) {
// Don't store hashes or empty nodes.
if _, isHash := n.(hashNode); n == nil || isHash {
return n, nil
@@ -155,7 +160,6 @@ func (h *hasher) store(n node, db DatabaseWriter, force bool) (node, error) {
if err := rlp.Encode(h.tmp, n); err != nil {
panic("encode error: " + err.Error())
}
-
if h.tmp.Len() < 32 && !force {
return n, nil // Nodes smaller than 32 bytes are stored inside their parent
}
@@ -167,7 +171,42 @@ func (h *hasher) store(n node, db DatabaseWriter, force bool) (node, error) {
hash = hashNode(h.sha.Sum(nil))
}
if db != nil {
- return hash, db.Put(hash, h.tmp.Bytes())
+ // We are pooling the trie nodes into an intermediate memory cache
+ db.lock.Lock()
+
+ hash := common.BytesToHash(hash)
+ db.insert(hash, h.tmp.Bytes())
+
+ // Track all direct parent->child node references
+ switch n := n.(type) {
+ case *shortNode:
+ if child, ok := n.Val.(hashNode); ok {
+ db.reference(common.BytesToHash(child), hash)
+ }
+ case *fullNode:
+ for i := 0; i < 16; i++ {
+ if child, ok := n.Children[i].(hashNode); ok {
+ db.reference(common.BytesToHash(child), hash)
+ }
+ }
+ }
+ db.lock.Unlock()
+
+ // Track external references from account->storage trie
+ if h.onleaf != nil {
+ switch n := n.(type) {
+ case *shortNode:
+ if child, ok := n.Val.(valueNode); ok {
+ h.onleaf(child, hash)
+ }
+ case *fullNode:
+ for i := 0; i < 16; i++ {
+ if child, ok := n.Children[i].(valueNode); ok {
+ h.onleaf(child, hash)
+ }
+ }
+ }
+ }
}
return hash, nil
}
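
The onleaf hook threaded through the hasher is what allows a caller to record references from trie leaves to other tries, e.g. from account leaves to their storage tries. Below is a hedged sketch of such a caller, modelled on how a state database could use it: the LeafCallback signature func(leaf []byte, parent common.Hash) error, the Account layout and the emptyRoot constant are assumptions, while Trie.Commit's callback argument and Database.Reference appear in this diff.

package main

import (
    "math/big"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/rlp"
    "github.com/ethereum/go-ethereum/trie"
)

// emptyRoot is the root hash of an empty trie (assumed constant).
var emptyRoot = common.HexToHash("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")

// commitAccounts commits an account trie and references every non-empty
// storage root from its owning account node, so a later Database.Commit or
// Dereference sees the whole subtree.
func commitAccounts(accountTrie *trie.Trie, triedb *trie.Database) (common.Hash, error) {
    return accountTrie.Commit(func(leaf []byte, parent common.Hash) error {
        var acc struct {
            Nonce    uint64
            Balance  *big.Int
            Root     common.Hash
            CodeHash []byte
        }
        if err := rlp.DecodeBytes(leaf, &acc); err != nil {
            return nil // not an account leaf; nothing to reference
        }
        if acc.Root != emptyRoot {
            triedb.Reference(acc.Root, parent)
        }
        return nil
    })
}

func main() {}
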
diff --git a/trie/iterator_test.go b/trie/iterator_test.go
index 4808d8b0c..dce1c78b5 100644
--- a/trie/iterator_test.go
+++ b/trie/iterator_test.go
@@ -42,7 +42,7 @@ func TestIterator(t *testing.T) {
all[val.k] = val.v
trie.Update([]byte(val.k), []byte(val.v))
}
- trie.Commit()
+ trie.Commit(nil)
found := make(map[string]string)
it := NewIterator(trie.NodeIterator(nil))
@@ -109,11 +109,18 @@ func TestNodeIteratorCoverage(t *testing.T) {
}
// Cross check the hashes and the database itself
for hash := range hashes {
- if _, err := db.Get(hash.Bytes()); err != nil {
+ if _, err := db.Node(hash); err != nil {
t.Errorf("failed to retrieve reported node %x: %v", hash, err)
}
}
- for _, key := range db.(*ethdb.MemDatabase).Keys() {
+ for hash, obj := range db.nodes {
+ if obj != nil && hash != (common.Hash{}) {
+ if _, ok := hashes[hash]; !ok {
+ t.Errorf("state entry not reported %x", hash)
+ }
+ }
+ }
+ for _, key := range db.diskdb.(*ethdb.MemDatabase).Keys() {
if _, ok := hashes[common.BytesToHash(key)]; !ok {
t.Errorf("state entry not reported %x", key)
}
@@ -191,13 +198,13 @@ func TestDifferenceIterator(t *testing.T) {
for _, val := range testdata1 {
triea.Update([]byte(val.k), []byte(val.v))
}
- triea.Commit()
+ triea.Commit(nil)
trieb := newEmpty()
for _, val := range testdata2 {
trieb.Update([]byte(val.k), []byte(val.v))
}
- trieb.Commit()
+ trieb.Commit(nil)
found := make(map[string]string)
di, _ := NewDifferenceIterator(triea.NodeIterator(nil), trieb.NodeIterator(nil))
@@ -227,13 +234,13 @@ func TestUnionIterator(t *testing.T) {
for _, val := range testdata1 {
triea.Update([]byte(val.k), []byte(val.v))
}
- triea.Commit()
+ triea.Commit(nil)
trieb := newEmpty()
for _, val := range testdata2 {
trieb.Update([]byte(val.k), []byte(val.v))
}
- trieb.Commit()
+ trieb.Commit(nil)
di, _ := NewUnionIterator([]NodeIterator{triea.NodeIterator(nil), trieb.NodeIterator(nil)})
it := NewIterator(di)
@@ -278,43 +285,75 @@ func TestIteratorNoDups(t *testing.T) {
}
// This test checks that nodeIterator.Next can be retried after inserting missing trie nodes.
-func TestIteratorContinueAfterError(t *testing.T) {
- db, _ := ethdb.NewMemDatabase()
- tr, _ := New(common.Hash{}, db)
+func TestIteratorContinueAfterErrorDisk(t *testing.T) { testIteratorContinueAfterError(t, false) }
+func TestIteratorContinueAfterErrorMemonly(t *testing.T) { testIteratorContinueAfterError(t, true) }
+
+func testIteratorContinueAfterError(t *testing.T, memonly bool) {
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ tr, _ := New(common.Hash{}, triedb)
for _, val := range testdata1 {
tr.Update([]byte(val.k), []byte(val.v))
}
- tr.Commit()
+ tr.Commit(nil)
+ if !memonly {
+ triedb.Commit(tr.Hash(), true)
+ }
wantNodeCount := checkIteratorNoDups(t, tr.NodeIterator(nil), nil)
- keys := db.Keys()
- t.Log("node count", wantNodeCount)
+ var (
+ diskKeys [][]byte
+ memKeys []common.Hash
+ )
+ if memonly {
+ memKeys = triedb.Nodes()
+ } else {
+ diskKeys = diskdb.Keys()
+ }
for i := 0; i < 20; i++ {
// Create trie that will load all nodes from DB.
- tr, _ := New(tr.Hash(), db)
+ tr, _ := New(tr.Hash(), triedb)
// Remove a random node from the database. It can't be the root node
// because that one is already loaded.
- var rkey []byte
+ var (
+ rkey common.Hash
+ rval []byte
+ robj *cachedNode
+ )
for {
- if rkey = keys[rand.Intn(len(keys))]; !bytes.Equal(rkey, tr.Hash().Bytes()) {
+ if memonly {
+ rkey = memKeys[rand.Intn(len(memKeys))]
+ } else {
+ copy(rkey[:], diskKeys[rand.Intn(len(diskKeys))])
+ }
+ if rkey != tr.Hash() {
break
}
}
- rval, _ := db.Get(rkey)
- db.Delete(rkey)
-
+ if memonly {
+ robj = triedb.nodes[rkey]
+ delete(triedb.nodes, rkey)
+ } else {
+ rval, _ = diskdb.Get(rkey[:])
+ diskdb.Delete(rkey[:])
+ }
// Iterate until the error is hit.
seen := make(map[string]bool)
it := tr.NodeIterator(nil)
checkIteratorNoDups(t, it, seen)
missing, ok := it.Error().(*MissingNodeError)
- if !ok || !bytes.Equal(missing.NodeHash[:], rkey) {
+ if !ok || missing.NodeHash != rkey {
t.Fatal("didn't hit missing node, got", it.Error())
}
// Add the node back and continue iteration.
- db.Put(rkey, rval)
+ if memonly {
+ triedb.nodes[rkey] = robj
+ } else {
+ diskdb.Put(rkey[:], rval)
+ }
checkIteratorNoDups(t, it, seen)
if it.Error() != nil {
t.Fatal("unexpected error", it.Error())
@@ -328,21 +367,41 @@ func TestIteratorContinueAfterError(t *testing.T) {
// Similar to the test above, this one checks that failure to create nodeIterator at a
// certain key prefix behaves correctly when Next is called. The expectation is that Next
// should retry seeking before returning true for the first time.
-func TestIteratorContinueAfterSeekError(t *testing.T) {
+func TestIteratorContinueAfterSeekErrorDisk(t *testing.T) {
+ testIteratorContinueAfterSeekError(t, false)
+}
+func TestIteratorContinueAfterSeekErrorMemonly(t *testing.T) {
+ testIteratorContinueAfterSeekError(t, true)
+}
+
+func testIteratorContinueAfterSeekError(t *testing.T, memonly bool) {
// Commit test trie to db, then remove the node containing "bars".
- db, _ := ethdb.NewMemDatabase()
- ctr, _ := New(common.Hash{}, db)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ ctr, _ := New(common.Hash{}, triedb)
for _, val := range testdata1 {
ctr.Update([]byte(val.k), []byte(val.v))
}
- root, _ := ctr.Commit()
+ root, _ := ctr.Commit(nil)
+ if !memonly {
+ triedb.Commit(root, true)
+ }
barNodeHash := common.HexToHash("05041990364eb72fcb1127652ce40d8bab765f2bfe53225b1170d276cc101c2e")
- barNode, _ := db.Get(barNodeHash[:])
- db.Delete(barNodeHash[:])
-
+ var (
+ barNodeBlob []byte
+ barNodeObj *cachedNode
+ )
+ if memonly {
+ barNodeObj = triedb.nodes[barNodeHash]
+ delete(triedb.nodes, barNodeHash)
+ } else {
+ barNodeBlob, _ = diskdb.Get(barNodeHash[:])
+ diskdb.Delete(barNodeHash[:])
+ }
// Create a new iterator that seeks to "bars". Seeking can't proceed because
// the node is missing.
- tr, _ := New(root, db)
+ tr, _ := New(root, triedb)
it := tr.NodeIterator([]byte("bars"))
missing, ok := it.Error().(*MissingNodeError)
if !ok {
@@ -350,10 +409,12 @@ func TestIteratorContinueAfterSeekError(t *testing.T) {
} else if missing.NodeHash != barNodeHash {
t.Fatal("wrong node missing")
}
-
// Reinsert the missing node.
- db.Put(barNodeHash[:], barNode[:])
-
+ if memonly {
+ triedb.nodes[barNodeHash] = barNodeObj
+ } else {
+ diskdb.Put(barNodeHash[:], barNodeBlob)
+ }
// Check that iteration produces the right set of values.
if err := checkIteratorOrder(testdata1[2:], NewIterator(it)); err != nil {
t.Fatal(err)
diff --git a/trie/proof.go b/trie/proof.go
index 5e886a259..508e4a6cf 100644
--- a/trie/proof.go
+++ b/trie/proof.go
@@ -22,20 +22,19 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/rlp"
)
-// Prove constructs a merkle proof for key. The result contains all
-// encoded nodes on the path to the value at key. The value itself is
-// also included in the last node and can be retrieved by verifying
-// the proof.
+// Prove constructs a merkle proof for key. The result contains all encoded nodes
+// on the path to the value at key. The value itself is also included in the last
+// node and can be retrieved by verifying the proof.
//
-// If the trie does not contain a value for key, the returned proof
-// contains all nodes of the longest existing prefix of the key
-// (at least the root node), ending with the node that proves the
-// absence of the key.
-func (t *Trie) Prove(key []byte, fromLevel uint, proofDb DatabaseWriter) error {
+// If the trie does not contain a value for key, the returned proof contains all
+// nodes of the longest existing prefix of the key (at least the root node), ending
+// with the node that proves the absence of the key.
+func (t *Trie) Prove(key []byte, fromLevel uint, proofDb ethdb.Putter) error {
// Collect all nodes on the path to key.
key = keybytesToHex(key)
nodes := []node{}
@@ -66,7 +65,7 @@ func (t *Trie) Prove(key []byte, fromLevel uint, proofDb DatabaseWriter) error {
panic(fmt.Sprintf("%T: invalid node: %v", tn, tn))
}
}
- hasher := newHasher(0, 0)
+ hasher := newHasher(0, 0, nil)
for i, n := range nodes {
// Don't bother checking for errors here since hasher panics
// if encoding doesn't work and we're not writing to any database.
@@ -89,19 +88,29 @@ func (t *Trie) Prove(key []byte, fromLevel uint, proofDb DatabaseWriter) error {
return nil
}
-// VerifyProof checks merkle proofs. The given proof must contain the
-// value for key in a trie with the given root hash. VerifyProof
-// returns an error if the proof contains invalid trie nodes or the
-// wrong value.
+// Prove constructs a merkle proof for key. The result contains all encoded nodes
+// on the path to the value at key. The value itself is also included in the last
+// node and can be retrieved by verifying the proof.
+//
+// If the trie does not contain a value for key, the returned proof contains all
+// nodes of the longest existing prefix of the key (at least the root node), ending
+// with the node that proves the absence of the key.
+func (t *SecureTrie) Prove(key []byte, fromLevel uint, proofDb ethdb.Putter) error {
+ return t.trie.Prove(key, fromLevel, proofDb)
+}
+
+// VerifyProof checks merkle proofs. The given proof must contain the value for
+// key in a trie with the given root hash. VerifyProof returns an error if the
+// proof contains invalid trie nodes or the wrong value.
func VerifyProof(rootHash common.Hash, key []byte, proofDb DatabaseReader) (value []byte, err error, nodes int) {
key = keybytesToHex(key)
- wantHash := rootHash[:]
+ wantHash := rootHash
for i := 0; ; i++ {
- buf, _ := proofDb.Get(wantHash)
+ buf, _ := proofDb.Get(wantHash[:])
if buf == nil {
- return nil, fmt.Errorf("proof node %d (hash %064x) missing", i, wantHash[:]), i
+ return nil, fmt.Errorf("proof node %d (hash %064x) missing", i, wantHash), i
}
- n, err := decodeNode(wantHash, buf, 0)
+ n, err := decodeNode(wantHash[:], buf, 0)
if err != nil {
return nil, fmt.Errorf("bad proof node %d: %v", i, err), i
}
@@ -112,7 +121,7 @@ func VerifyProof(rootHash common.Hash, key []byte, proofDb DatabaseReader) (valu
return nil, nil, i
case hashNode:
key = keyrest
- wantHash = cld
+ copy(wantHash[:], cld)
case valueNode:
return cld, nil, i + 1
}
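For orientation, here is a minimal, hypothetical sketch of how the reworked Prove/VerifyProof pair fits together after this change, assuming the trie API exactly as shown in the hunks above (any ethdb.Putter can collect the proof, and the same in-memory database then serves as the DatabaseReader for verification). The key/value pair is made up and the sketch is not part of the patch.

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/trie"
)

func main() {
	diskdb, _ := ethdb.NewMemDatabase()
	tr, _ := trie.New(common.Hash{}, trie.NewDatabase(diskdb))
	tr.Update([]byte("key"), []byte("value"))
	root, _ := tr.Commit(nil)

	// Any ethdb.Putter can collect the proof; a memory database is the simplest.
	proofDb, _ := ethdb.NewMemDatabase()
	if err := tr.Prove([]byte("key"), 0, proofDb); err != nil {
		panic(err)
	}
	// The same memory database satisfies DatabaseReader for verification.
	val, err, _ := trie.VerifyProof(root, []byte("key"), proofDb)
	fmt.Printf("proved value %q (err=%v)\n", val, err)
}
```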
diff --git a/trie/secure_trie.go b/trie/secure_trie.go
index 20c303f31..3881ee18a 100644
--- a/trie/secure_trie.go
+++ b/trie/secure_trie.go
@@ -23,10 +23,6 @@ import (
"github.com/ethereum/go-ethereum/log"
)
-var secureKeyPrefix = []byte("secure-key-")
-
-const secureKeyLength = 11 + 32 // Length of the above prefix + 32byte hash
-
// SecureTrie wraps a trie with key hashing. In a secure trie, all
// access operations hash the key using keccak256. This prevents
// calling code from creating long chains of nodes that
@@ -39,25 +35,25 @@ const secureKeyLength = 11 + 32 // Length of the above prefix + 32byte hash
// SecureTrie is not safe for concurrent use.
type SecureTrie struct {
trie Trie
- hashKeyBuf [secureKeyLength]byte
- secKeyBuf [200]byte
+ hashKeyBuf [common.HashLength]byte
secKeyCache map[string][]byte
secKeyCacheOwner *SecureTrie // Pointer to self, replace the key cache on mismatch
}
-// NewSecure creates a trie with an existing root node from db.
+// NewSecure creates a trie with an existing root node from a backing database
+// and optional intermediate in-memory node pool.
//
// If root is the zero hash or the sha3 hash of an empty string, the
// trie is initially empty. Otherwise, New will panic if db is nil
// and returns MissingNodeError if the root node cannot be found.
//
-// Accessing the trie loads nodes from db on demand.
+// Accessing the trie loads nodes from the database or node pool on demand.
// Loaded nodes are kept around until their 'cache generation' expires.
// A new cache generation is created by each call to Commit.
// cachelimit sets the number of past cache generations to keep.
-func NewSecure(root common.Hash, db Database, cachelimit uint16) (*SecureTrie, error) {
+func NewSecure(root common.Hash, db *Database, cachelimit uint16) (*SecureTrie, error) {
if db == nil {
- panic("NewSecure called with nil database")
+ panic("trie.NewSecure called without a database")
}
trie, err := New(root, db)
if err != nil {
@@ -135,7 +131,7 @@ func (t *SecureTrie) GetKey(shaKey []byte) []byte {
if key, ok := t.getSecKeyCache()[string(shaKey)]; ok {
return key
}
- key, _ := t.trie.db.Get(t.secKey(shaKey))
+ key, _ := t.trie.db.preimage(common.BytesToHash(shaKey))
return key
}
@@ -144,8 +140,19 @@ func (t *SecureTrie) GetKey(shaKey []byte) []byte {
//
// Committing flushes nodes from memory. Subsequent Get calls will load nodes
// from the database.
-func (t *SecureTrie) Commit() (root common.Hash, err error) {
- return t.CommitTo(t.trie.db)
+func (t *SecureTrie) Commit(onleaf LeafCallback) (root common.Hash, err error) {
+ // Write all the pre-images to the actual disk database
+ if len(t.getSecKeyCache()) > 0 {
+ t.trie.db.lock.Lock()
+ for hk, key := range t.secKeyCache {
+ t.trie.db.insertPreimage(common.BytesToHash([]byte(hk)), key)
+ }
+ t.trie.db.lock.Unlock()
+
+ t.secKeyCache = make(map[string][]byte)
+ }
+ // Commit the trie to its intermediate node database
+ return t.trie.Commit(onleaf)
}
func (t *SecureTrie) Hash() common.Hash {
@@ -167,38 +174,11 @@ func (t *SecureTrie) NodeIterator(start []byte) NodeIterator {
return t.trie.NodeIterator(start)
}
-// CommitTo writes all nodes and the secure hash pre-images to the given database.
-// Nodes are stored with their sha3 hash as the key.
-//
-// Committing flushes nodes from memory. Subsequent Get calls will load nodes from
-// the trie's database. Calling code must ensure that the changes made to db are
-// written back to the trie's attached database before using the trie.
-func (t *SecureTrie) CommitTo(db DatabaseWriter) (root common.Hash, err error) {
- if len(t.getSecKeyCache()) > 0 {
- for hk, key := range t.secKeyCache {
- if err := db.Put(t.secKey([]byte(hk)), key); err != nil {
- return common.Hash{}, err
- }
- }
- t.secKeyCache = make(map[string][]byte)
- }
- return t.trie.CommitTo(db)
-}
-
-// secKey returns the database key for the preimage of key, as an ephemeral buffer.
-// The caller must not hold onto the return value because it will become
-// invalid on the next call to hashKey or secKey.
-func (t *SecureTrie) secKey(key []byte) []byte {
- buf := append(t.secKeyBuf[:0], secureKeyPrefix...)
- buf = append(buf, key...)
- return buf
-}
-
// hashKey returns the hash of key as an ephemeral buffer.
// The caller must not hold onto the return value because it will become
// invalid on the next call to hashKey or secKey.
func (t *SecureTrie) hashKey(key []byte) []byte {
- h := newHasher(0, 0)
+ h := newHasher(0, 0, nil)
h.sha.Reset()
h.sha.Write(key)
buf := h.sha.Sum(t.hashKeyBuf[:0])
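As a rough illustration of the reworked SecureTrie flow, and under the assumption that the trie.Database in this patch stores pre-images via insertPreimage/preimage as shown above, a usage sketch might look like this; the key and value are made up:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/trie"
)

func main() {
	diskdb, _ := ethdb.NewMemDatabase()
	triedb := trie.NewDatabase(diskdb)

	st, _ := trie.NewSecure(common.Hash{}, triedb, 0)
	st.Update([]byte("alice"), []byte("100"))

	// Commit moves the key pre-images into the trie.Database and the nodes
	// into its in-memory pool; nothing is written to diskdb yet.
	root, _ := st.Commit(nil)

	// GetKey resolves the original key from its keccak256 hash via the
	// preimage store instead of the old "secure-key-" disk entries.
	hashed := crypto.Keccak256([]byte("alice"))
	fmt.Printf("root=%x preimage=%q\n", root, st.GetKey(hashed))
}
```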
diff --git a/trie/secure_trie_test.go b/trie/secure_trie_test.go
index d74102e2a..aedf5a1cd 100644
--- a/trie/secure_trie_test.go
+++ b/trie/secure_trie_test.go
@@ -28,16 +28,20 @@ import (
)
func newEmptySecure() *SecureTrie {
- db, _ := ethdb.NewMemDatabase()
- trie, _ := NewSecure(common.Hash{}, db, 0)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ trie, _ := NewSecure(common.Hash{}, triedb, 0)
return trie
}
// makeTestSecureTrie creates a large enough secure trie for testing.
-func makeTestSecureTrie() (ethdb.Database, *SecureTrie, map[string][]byte) {
+func makeTestSecureTrie() (*Database, *SecureTrie, map[string][]byte) {
// Create an empty trie
- db, _ := ethdb.NewMemDatabase()
- trie, _ := NewSecure(common.Hash{}, db, 0)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ trie, _ := NewSecure(common.Hash{}, triedb, 0)
// Fill it with some arbitrary data
content := make(map[string][]byte)
@@ -58,10 +62,10 @@ func makeTestSecureTrie() (ethdb.Database, *SecureTrie, map[string][]byte) {
trie.Update(key, val)
}
}
- trie.Commit()
+ trie.Commit(nil)
// Return the generated trie
- return db, trie, content
+ return triedb, trie, content
}
func TestSecureDelete(t *testing.T) {
@@ -137,7 +141,7 @@ func TestSecureTrieConcurrency(t *testing.T) {
tries[index].Update(key, val)
}
}
- tries[index].Commit()
+ tries[index].Commit(nil)
}(i)
}
// Wait for all threads to finish
diff --git a/trie/sync.go b/trie/sync.go
index fea10051f..b573a9f73 100644
--- a/trie/sync.go
+++ b/trie/sync.go
@@ -21,6 +21,7 @@ import (
"fmt"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/ethdb"
"gopkg.in/karalabe/cookiejar.v2/collections/prque"
)
@@ -42,7 +43,7 @@ type request struct {
depth int // Depth level within the trie the node is located to prioritise DFS
deps int // Number of dependencies before allowed to commit this node
- callback TrieSyncLeafCallback // Callback to invoke if a leaf node it reached on this branch
+ callback LeafCallback // Callback to invoke if a leaf node is reached on this branch
}
// SyncResult is a simple list to return missing nodes along with their request
@@ -67,11 +68,6 @@ func newSyncMemBatch() *syncMemBatch {
}
}
-// TrieSyncLeafCallback is a callback type invoked when a trie sync reaches a
-// leaf node. It's used by state syncing to check if the leaf node requires some
-// further data syncing.
-type TrieSyncLeafCallback func(leaf []byte, parent common.Hash) error
-
// TrieSync is the main state trie synchronisation scheduler, which provides yet
// unknown trie hashes to retrieve, accepts node data associated with said hashes
// and reconstructs the trie step by step until all is done.
@@ -83,7 +79,7 @@ type TrieSync struct {
}
// NewTrieSync creates a new trie data download scheduler.
-func NewTrieSync(root common.Hash, database DatabaseReader, callback TrieSyncLeafCallback) *TrieSync {
+func NewTrieSync(root common.Hash, database DatabaseReader, callback LeafCallback) *TrieSync {
ts := &TrieSync{
database: database,
membatch: newSyncMemBatch(),
@@ -95,7 +91,7 @@ func NewTrieSync(root common.Hash, database DatabaseReader, callback TrieSyncLea
}
// AddSubTrie registers a new trie to the sync code, rooted at the designated parent.
-func (s *TrieSync) AddSubTrie(root common.Hash, depth int, parent common.Hash, callback TrieSyncLeafCallback) {
+func (s *TrieSync) AddSubTrie(root common.Hash, depth int, parent common.Hash, callback LeafCallback) {
// Short circuit if the trie is empty or already known
if root == emptyRoot {
return
@@ -217,7 +213,7 @@ func (s *TrieSync) Process(results []SyncResult) (bool, int, error) {
// Commit flushes the data stored in the internal membatch out to persistent
// storage, returning the number of items written and any error that occurred.
-func (s *TrieSync) Commit(dbw DatabaseWriter) (int, error) {
+func (s *TrieSync) Commit(dbw ethdb.Putter) (int, error) {
// Dump the membatch into a database dbw
for i, key := range s.membatch.order {
if err := dbw.Put(key[:], s.membatch.batch[key]); err != nil {
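The scheduler usage that the tests below exercise can be summarised in a small, hypothetical helper. It assumes a source *trie.Database (here called srcTrieDB) serving nodes via Node, and a destination ethdb.Database (dstDiskDB) acting as both the DatabaseReader passed to NewTrieSync and the ethdb.Putter passed to Commit; both names are illustrative.

```go
package example

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/trie"
)

// syncTrie mirrors the test loops: it repeatedly asks the scheduler for
// missing hashes, fetches them from the source node pool and commits the
// reassembled nodes to the destination database.
func syncTrie(root common.Hash, srcTrieDB *trie.Database, dstDiskDB ethdb.Database) error {
	sched := trie.NewTrieSync(root, dstDiskDB, func(leaf []byte, parent common.Hash) error {
		return nil // a state sync would schedule storage/code retrievals here
	})
	for queue := sched.Missing(128); len(queue) > 0; queue = sched.Missing(128) {
		results := make([]trie.SyncResult, len(queue))
		for i, hash := range queue {
			data, err := srcTrieDB.Node(hash)
			if err != nil {
				return fmt.Errorf("missing node %x: %v", hash, err)
			}
			results[i] = trie.SyncResult{Hash: hash, Data: data}
		}
		if _, index, err := sched.Process(results); err != nil {
			return fmt.Errorf("failed to process result #%d: %v", index, err)
		}
		if _, err := sched.Commit(dstDiskDB); err != nil {
			return err
		}
	}
	return nil
}
```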
diff --git a/trie/sync_test.go b/trie/sync_test.go
index ec16a25bd..4a720612b 100644
--- a/trie/sync_test.go
+++ b/trie/sync_test.go
@@ -25,10 +25,11 @@ import (
)
// makeTestTrie create a sample test trie to test node-wise reconstruction.
-func makeTestTrie() (ethdb.Database, *Trie, map[string][]byte) {
+func makeTestTrie() (*Database, *Trie, map[string][]byte) {
// Create an empty trie
- db, _ := ethdb.NewMemDatabase()
- trie, _ := New(common.Hash{}, db)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ trie, _ := New(common.Hash{}, triedb)
// Fill it with some arbitrary data
content := make(map[string][]byte)
@@ -49,15 +50,15 @@ func makeTestTrie() (ethdb.Database, *Trie, map[string][]byte) {
trie.Update(key, val)
}
}
- trie.Commit()
+ trie.Commit(nil)
// Return the generated trie
- return db, trie, content
+ return triedb, trie, content
}
// checkTrieContents cross references a reconstructed trie with an expected data
// content map.
-func checkTrieContents(t *testing.T, db Database, root []byte, content map[string][]byte) {
+func checkTrieContents(t *testing.T, db *Database, root []byte, content map[string][]byte) {
// Check root availability and trie contents
trie, err := New(common.BytesToHash(root), db)
if err != nil {
@@ -74,7 +75,7 @@ func checkTrieContents(t *testing.T, db Database, root []byte, content map[strin
}
// checkTrieConsistency checks that all nodes in a trie are indeed present.
-func checkTrieConsistency(db Database, root common.Hash) error {
+func checkTrieConsistency(db *Database, root common.Hash) error {
// Create and iterate a trie rooted in a subnode
trie, err := New(root, db)
if err != nil {
@@ -88,12 +89,18 @@ func checkTrieConsistency(db Database, root common.Hash) error {
// Tests that an empty trie is not scheduled for syncing.
func TestEmptyTrieSync(t *testing.T) {
- emptyA, _ := New(common.Hash{}, nil)
- emptyB, _ := New(emptyRoot, nil)
+ diskdbA, _ := ethdb.NewMemDatabase()
+ triedbA := NewDatabase(diskdbA)
+
+ diskdbB, _ := ethdb.NewMemDatabase()
+ triedbB := NewDatabase(diskdbB)
+
+ emptyA, _ := New(common.Hash{}, triedbA)
+ emptyB, _ := New(emptyRoot, triedbB)
for i, trie := range []*Trie{emptyA, emptyB} {
- db, _ := ethdb.NewMemDatabase()
- if req := NewTrieSync(common.BytesToHash(trie.Root()), db, nil).Missing(1); len(req) != 0 {
+ diskdb, _ := ethdb.NewMemDatabase()
+ if req := NewTrieSync(trie.Hash(), diskdb, nil).Missing(1); len(req) != 0 {
t.Errorf("test %d: content requested for empty trie: %v", i, req)
}
}
@@ -109,14 +116,15 @@ func testIterativeTrieSync(t *testing.T, batch int) {
srcDb, srcTrie, srcData := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
queue := append([]common.Hash{}, sched.Missing(batch)...)
for len(queue) > 0 {
results := make([]SyncResult, len(queue))
for i, hash := range queue {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -125,13 +133,13 @@ func testIterativeTrieSync(t *testing.T, batch int) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
queue = append(queue[:0], sched.Missing(batch)...)
}
// Cross check that the two tries are in sync
- checkTrieContents(t, dstDb, srcTrie.Root(), srcData)
+ checkTrieContents(t, triedb, srcTrie.Root(), srcData)
}
// Tests that the trie scheduler can correctly reconstruct the state even if only
@@ -141,15 +149,16 @@ func TestIterativeDelayedTrieSync(t *testing.T) {
srcDb, srcTrie, srcData := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
queue := append([]common.Hash{}, sched.Missing(10000)...)
for len(queue) > 0 {
// Sync only half of the scheduled nodes
results := make([]SyncResult, len(queue)/2+1)
for i, hash := range queue[:len(results)] {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -158,13 +167,13 @@ func TestIterativeDelayedTrieSync(t *testing.T) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
queue = append(queue[len(results):], sched.Missing(10000)...)
}
// Cross check that the two tries are in sync
- checkTrieContents(t, dstDb, srcTrie.Root(), srcData)
+ checkTrieContents(t, triedb, srcTrie.Root(), srcData)
}
// Tests that given a root hash, a trie can sync iteratively on a single thread,
@@ -178,8 +187,9 @@ func testIterativeRandomTrieSync(t *testing.T, batch int) {
srcDb, srcTrie, srcData := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
queue := make(map[common.Hash]struct{})
for _, hash := range sched.Missing(batch) {
@@ -189,7 +199,7 @@ func testIterativeRandomTrieSync(t *testing.T, batch int) {
// Fetch all the queued nodes in a random order
results := make([]SyncResult, 0, len(queue))
for hash := range queue {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -199,7 +209,7 @@ func testIterativeRandomTrieSync(t *testing.T, batch int) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
queue = make(map[common.Hash]struct{})
@@ -208,7 +218,7 @@ func testIterativeRandomTrieSync(t *testing.T, batch int) {
}
}
// Cross check that the two tries are in sync
- checkTrieContents(t, dstDb, srcTrie.Root(), srcData)
+ checkTrieContents(t, triedb, srcTrie.Root(), srcData)
}
// Tests that the trie scheduler can correctly reconstruct the state even if only
@@ -218,8 +228,9 @@ func TestIterativeRandomDelayedTrieSync(t *testing.T) {
srcDb, srcTrie, srcData := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
queue := make(map[common.Hash]struct{})
for _, hash := range sched.Missing(10000) {
@@ -229,7 +240,7 @@ func TestIterativeRandomDelayedTrieSync(t *testing.T) {
// Sync only half of the scheduled nodes, even those in random order
results := make([]SyncResult, 0, len(queue)/2+1)
for hash := range queue {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -243,7 +254,7 @@ func TestIterativeRandomDelayedTrieSync(t *testing.T) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
for _, result := range results {
@@ -254,7 +265,7 @@ func TestIterativeRandomDelayedTrieSync(t *testing.T) {
}
}
// Cross check that the two tries are in sync
- checkTrieContents(t, dstDb, srcTrie.Root(), srcData)
+ checkTrieContents(t, triedb, srcTrie.Root(), srcData)
}
// Tests that a trie sync will not request nodes multiple times, even if they
@@ -264,8 +275,9 @@ func TestDuplicateAvoidanceTrieSync(t *testing.T) {
srcDb, srcTrie, srcData := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
queue := append([]common.Hash{}, sched.Missing(0)...)
requested := make(map[common.Hash]struct{})
@@ -273,7 +285,7 @@ func TestDuplicateAvoidanceTrieSync(t *testing.T) {
for len(queue) > 0 {
results := make([]SyncResult, len(queue))
for i, hash := range queue {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -287,13 +299,13 @@ func TestDuplicateAvoidanceTrieSync(t *testing.T) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
queue = append(queue[:0], sched.Missing(0)...)
}
// Cross check that the two tries are in sync
- checkTrieContents(t, dstDb, srcTrie.Root(), srcData)
+ checkTrieContents(t, triedb, srcTrie.Root(), srcData)
}
// Tests that at any point in time during a sync, only complete sub-tries are in
@@ -303,8 +315,9 @@ func TestIncompleteTrieSync(t *testing.T) {
srcDb, srcTrie, _ := makeTestTrie()
// Create a destination trie and sync with the scheduler
- dstDb, _ := ethdb.NewMemDatabase()
- sched := NewTrieSync(common.BytesToHash(srcTrie.Root()), dstDb, nil)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+ sched := NewTrieSync(srcTrie.Hash(), diskdb, nil)
added := []common.Hash{}
queue := append([]common.Hash{}, sched.Missing(1)...)
@@ -312,7 +325,7 @@ func TestIncompleteTrieSync(t *testing.T) {
// Fetch a batch of trie nodes
results := make([]SyncResult, len(queue))
for i, hash := range queue {
- data, err := srcDb.Get(hash.Bytes())
+ data, err := srcDb.Node(hash)
if err != nil {
t.Fatalf("failed to retrieve node data for %x: %v", hash, err)
}
@@ -322,7 +335,7 @@ func TestIncompleteTrieSync(t *testing.T) {
if _, index, err := sched.Process(results); err != nil {
t.Fatalf("failed to process result #%d: %v", index, err)
}
- if index, err := sched.Commit(dstDb); err != nil {
+ if index, err := sched.Commit(diskdb); err != nil {
t.Fatalf("failed to commit data #%d: %v", index, err)
}
for _, result := range results {
@@ -330,7 +343,7 @@ func TestIncompleteTrieSync(t *testing.T) {
}
// Check that all known sub-tries in the synced trie are complete
for _, root := range added {
- if err := checkTrieConsistency(dstDb, root); err != nil {
+ if err := checkTrieConsistency(triedb, root); err != nil {
t.Fatalf("trie inconsistent: %v", err)
}
}
@@ -340,12 +353,12 @@ func TestIncompleteTrieSync(t *testing.T) {
// Sanity check that removing any node from the database is detected
for _, node := range added[1:] {
key := node.Bytes()
- value, _ := dstDb.Get(key)
+ value, _ := diskdb.Get(key)
- dstDb.Delete(key)
- if err := checkTrieConsistency(dstDb, added[0]); err == nil {
+ diskdb.Delete(key)
+ if err := checkTrieConsistency(triedb, added[0]); err == nil {
t.Fatalf("trie inconsistency not caught, missing: %x", key)
}
- dstDb.Put(key, value)
+ diskdb.Put(key, value)
}
}
diff --git a/trie/trie.go b/trie/trie.go
index 8fe98d835..31a404e3a 100644
--- a/trie/trie.go
+++ b/trie/trie.go
@@ -22,16 +22,17 @@ import (
"fmt"
"github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/crypto/sha3"
+ "github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/log"
- "github.com/rcrowley/go-metrics"
+ "github.com/ethereum/go-ethereum/metrics"
)
var (
- // This is the known root hash of an empty trie.
+ // emptyRoot is the known root hash of an empty trie.
emptyRoot = common.HexToHash("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
- // This is the known hash of an empty state trie entry.
- emptyState common.Hash
+
+ // emptyState is the known hash of an empty state trie entry.
+ emptyState = crypto.Keccak256Hash(nil)
)
var (
@@ -53,29 +54,10 @@ func CacheUnloads() int64 {
return cacheUnloadCounter.Count()
}
-func init() {
- sha3.NewKeccak256().Sum(emptyState[:0])
-}
-
-// Database must be implemented by backing stores for the trie.
-type Database interface {
- DatabaseReader
- DatabaseWriter
-}
-
-// DatabaseReader wraps the Get method of a backing store for the trie.
-type DatabaseReader interface {
- Get(key []byte) (value []byte, err error)
- Has(key []byte) (bool, error)
-}
-
-// DatabaseWriter wraps the Put method of a backing store for the trie.
-type DatabaseWriter interface {
- // Put stores the mapping key->value in the database.
- // Implementations must not hold onto the value bytes, the trie
- // will reuse the slice across calls to Put.
- Put(key, value []byte) error
-}
+// LeafCallback is a callback type invoked when a trie operation reaches a leaf
+// node. It's used by state sync and commit to allow handling external references
+// between account and storage tries.
+type LeafCallback func(leaf []byte, parent common.Hash) error
// Trie is a Merkle Patricia Trie.
// The zero value is an empty trie with no database.
@@ -83,8 +65,8 @@ type DatabaseWriter interface {
//
// Trie is not safe for concurrent use.
type Trie struct {
+ db *Database
root node
- db Database
originalRoot common.Hash
// Cache generation values.
@@ -111,12 +93,15 @@ func (t *Trie) newFlag() nodeFlag {
// trie is initially empty and does not require a database. Otherwise,
// New will panic if db is nil and returns a MissingNodeError if root does
// not exist in the database. Accessing the trie loads nodes from db on demand.
-func New(root common.Hash, db Database) (*Trie, error) {
- trie := &Trie{db: db, originalRoot: root}
+func New(root common.Hash, db *Database) (*Trie, error) {
+ if db == nil {
+ panic("trie.New called without a database")
+ }
+ trie := &Trie{
+ db: db,
+ originalRoot: root,
+ }
if (root != common.Hash{}) && root != emptyRoot {
- if db == nil {
- panic("trie.New: cannot use existing root without a database")
- }
rootnode, err := trie.resolveHash(root[:], nil)
if err != nil {
return nil, err
@@ -447,12 +432,13 @@ func (t *Trie) resolve(n node, prefix []byte) (node, error) {
func (t *Trie) resolveHash(n hashNode, prefix []byte) (node, error) {
cacheMissCounter.Inc(1)
- enc, err := t.db.Get(n)
+ hash := common.BytesToHash(n)
+
+ enc, err := t.db.Node(hash)
if err != nil || enc == nil {
- return nil, &MissingNodeError{NodeHash: common.BytesToHash(n), Path: prefix}
+ return nil, &MissingNodeError{NodeHash: hash, Path: prefix}
}
- dec := mustDecodeNode(n, enc, t.cachegen)
- return dec, nil
+ return mustDecodeNode(n, enc, t.cachegen), nil
}
// Root returns the root hash of the trie.
@@ -462,32 +448,18 @@ func (t *Trie) Root() []byte { return t.Hash().Bytes() }
// Hash returns the root hash of the trie. It does not write to the
// database and can be used even if the trie doesn't have one.
func (t *Trie) Hash() common.Hash {
- hash, cached, _ := t.hashRoot(nil)
+ hash, cached, _ := t.hashRoot(nil, nil)
t.root = cached
return common.BytesToHash(hash.(hashNode))
}
-// Commit writes all nodes to the trie's database.
-// Nodes are stored with their sha3 hash as the key.
-//
-// Committing flushes nodes from memory.
-// Subsequent Get calls will load nodes from the database.
-func (t *Trie) Commit() (root common.Hash, err error) {
+// Commit writes all nodes to the trie's memory database, tracking the internal
+// and external (for account tries) references.
+func (t *Trie) Commit(onleaf LeafCallback) (root common.Hash, err error) {
if t.db == nil {
- panic("Commit called on trie with nil database")
+ panic("commit called on trie with nil database")
}
- return t.CommitTo(t.db)
-}
-
-// CommitTo writes all nodes to the given database.
-// Nodes are stored with their sha3 hash as the key.
-//
-// Committing flushes nodes from memory. Subsequent Get calls will
-// load nodes from the trie's database. Calling code must ensure that
-// the changes made to db are written back to the trie's attached
-// database before using the trie.
-func (t *Trie) CommitTo(db DatabaseWriter) (root common.Hash, err error) {
- hash, cached, err := t.hashRoot(db)
+ hash, cached, err := t.hashRoot(t.db, onleaf)
if err != nil {
return common.Hash{}, err
}
@@ -496,11 +468,11 @@ func (t *Trie) CommitTo(db DatabaseWriter) (root common.Hash, err error) {
return common.BytesToHash(hash.(hashNode)), nil
}
-func (t *Trie) hashRoot(db DatabaseWriter) (node, node, error) {
+func (t *Trie) hashRoot(db *Database, onleaf LeafCallback) (node, node, error) {
if t.root == nil {
return hashNode(emptyRoot.Bytes()), nil, nil
}
- h := newHasher(t.cachegen, t.cachelimit)
+ h := newHasher(t.cachegen, t.cachelimit, onleaf)
defer returnHasherToPool(h)
return h.hash(t.root, db, true)
}
diff --git a/trie/trie_test.go b/trie/trie_test.go
index 1e28c3bc4..997222628 100644
--- a/trie/trie_test.go
+++ b/trie/trie_test.go
@@ -43,8 +43,8 @@ func init() {
// Used for testing
func newEmpty() *Trie {
- db, _ := ethdb.NewMemDatabase()
- trie, _ := New(common.Hash{}, db)
+ diskdb, _ := ethdb.NewMemDatabase()
+ trie, _ := New(common.Hash{}, NewDatabase(diskdb))
return trie
}
@@ -68,8 +68,8 @@ func TestNull(t *testing.T) {
}
func TestMissingRoot(t *testing.T) {
- db, _ := ethdb.NewMemDatabase()
- trie, err := New(common.HexToHash("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"), db)
+ diskdb, _ := ethdb.NewMemDatabase()
+ trie, err := New(common.HexToHash("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"), NewDatabase(diskdb))
if trie != nil {
t.Error("New returned non-nil trie for invalid root")
}
@@ -78,70 +78,75 @@ func TestMissingRoot(t *testing.T) {
}
}
-func TestMissingNode(t *testing.T) {
- db, _ := ethdb.NewMemDatabase()
- trie, _ := New(common.Hash{}, db)
+func TestMissingNodeDisk(t *testing.T) { testMissingNode(t, false) }
+func TestMissingNodeMemonly(t *testing.T) { testMissingNode(t, true) }
+
+func testMissingNode(t *testing.T, memonly bool) {
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ trie, _ := New(common.Hash{}, triedb)
updateString(trie, "120000", "qwerqwerqwerqwerqwerqwerqwerqwer")
updateString(trie, "123456", "asdfasdfasdfasdfasdfasdfasdfasdf")
- root, _ := trie.Commit()
+ root, _ := trie.Commit(nil)
+ if !memonly {
+ triedb.Commit(root, true)
+ }
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err := trie.TryGet([]byte("120000"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err = trie.TryGet([]byte("120099"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err = trie.TryGet([]byte("123456"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
err = trie.TryUpdate([]byte("120099"), []byte("zxcvzxcvzxcvzxcvzxcvzxcvzxcvzxcv"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
err = trie.TryDelete([]byte("123456"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
- db.Delete(common.FromHex("e1d943cc8f061a0c0b98162830b970395ac9315654824bf21b73b891365262f9"))
+ hash := common.HexToHash("0xe1d943cc8f061a0c0b98162830b970395ac9315654824bf21b73b891365262f9")
+ if memonly {
+ delete(triedb.nodes, hash)
+ } else {
+ diskdb.Delete(hash[:])
+ }
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err = trie.TryGet([]byte("120000"))
if _, ok := err.(*MissingNodeError); !ok {
t.Errorf("Wrong error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err = trie.TryGet([]byte("120099"))
if _, ok := err.(*MissingNodeError); !ok {
t.Errorf("Wrong error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
_, err = trie.TryGet([]byte("123456"))
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
err = trie.TryUpdate([]byte("120099"), []byte("zxcv"))
if _, ok := err.(*MissingNodeError); !ok {
t.Errorf("Wrong error: %v", err)
}
-
- trie, _ = New(root, db)
+ trie, _ = New(root, triedb)
err = trie.TryDelete([]byte("123456"))
if _, ok := err.(*MissingNodeError); !ok {
t.Errorf("Wrong error: %v", err)
@@ -165,7 +170,7 @@ func TestInsert(t *testing.T) {
updateString(trie, "A", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
exp = common.HexToHash("d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab")
- root, err := trie.Commit()
+ root, err := trie.Commit(nil)
if err != nil {
t.Fatalf("commit error: %v", err)
}
@@ -194,7 +199,7 @@ func TestGet(t *testing.T) {
if i == 1 {
return
}
- trie.Commit()
+ trie.Commit(nil)
}
}
@@ -263,7 +268,7 @@ func TestReplication(t *testing.T) {
for _, val := range vals {
updateString(trie, val.k, val.v)
}
- exp, err := trie.Commit()
+ exp, err := trie.Commit(nil)
if err != nil {
t.Fatalf("commit error: %v", err)
}
@@ -278,7 +283,7 @@ func TestReplication(t *testing.T) {
t.Errorf("trie2 doesn't have %q => %q", kv.k, kv.v)
}
}
- hash, err := trie2.Commit()
+ hash, err := trie2.Commit(nil)
if err != nil {
t.Fatalf("commit error: %v", err)
}
@@ -314,7 +319,7 @@ func TestLargeValue(t *testing.T) {
}
type countingDB struct {
- Database
+ ethdb.Database
gets map[string]int
}
@@ -332,19 +337,20 @@ func TestCacheUnload(t *testing.T) {
key2 := "---some other branch"
updateString(trie, key1, "this is the branch of key1.")
updateString(trie, key2, "this is the branch of key2.")
- root, _ := trie.Commit()
+
+ root, _ := trie.Commit(nil)
+ trie.db.Commit(root, true)
// Commit the trie repeatedly and access key1.
// The branch containing it is loaded from DB exactly two times:
// in the 0th and 6th iteration.
- db := &countingDB{Database: trie.db, gets: make(map[string]int)}
- trie, _ = New(root, db)
+ db := &countingDB{Database: trie.db.diskdb, gets: make(map[string]int)}
+ trie, _ = New(root, NewDatabase(db))
trie.SetCacheLimit(5)
for i := 0; i < 12; i++ {
getString(trie, key1)
- trie.Commit()
+ trie.Commit(nil)
}
-
// Check that it got loaded two times.
for dbkey, count := range db.gets {
if count != 2 {
@@ -407,8 +413,10 @@ func (randTest) Generate(r *rand.Rand, size int) reflect.Value {
}
func runRandTest(rt randTest) bool {
- db, _ := ethdb.NewMemDatabase()
- tr, _ := New(common.Hash{}, db)
+ diskdb, _ := ethdb.NewMemDatabase()
+ triedb := NewDatabase(diskdb)
+
+ tr, _ := New(common.Hash{}, triedb)
values := make(map[string]string) // tracks content of the trie
for i, step := range rt {
@@ -426,23 +434,23 @@ func runRandTest(rt randTest) bool {
rt[i].err = fmt.Errorf("mismatch for key 0x%x, got 0x%x want 0x%x", step.key, v, want)
}
case opCommit:
- _, rt[i].err = tr.Commit()
+ _, rt[i].err = tr.Commit(nil)
case opHash:
tr.Hash()
case opReset:
- hash, err := tr.Commit()
+ hash, err := tr.Commit(nil)
if err != nil {
rt[i].err = err
return false
}
- newtr, err := New(hash, db)
+ newtr, err := New(hash, triedb)
if err != nil {
rt[i].err = err
return false
}
tr = newtr
case opItercheckhash:
- checktr, _ := New(common.Hash{}, nil)
+ checktr, _ := New(common.Hash{}, triedb)
it := NewIterator(tr.NodeIterator(nil))
for it.Next() {
checktr.Update(it.Key, it.Value)
@@ -524,7 +532,7 @@ func benchGet(b *testing.B, commit bool) {
}
binary.LittleEndian.PutUint64(k, benchElemCount/2)
if commit {
- trie.Commit()
+ trie.Commit(nil)
}
b.ResetTimer()
@@ -534,7 +542,7 @@ func benchGet(b *testing.B, commit bool) {
b.StopTimer()
if commit {
- ldb := trie.db.(*ethdb.LDBDatabase)
+ ldb := trie.db.diskdb.(*ethdb.LDBDatabase)
ldb.Close()
os.RemoveAll(ldb.Path())
}
@@ -585,16 +593,16 @@ func BenchmarkHash(b *testing.B) {
trie.Hash()
}
-func tempDB() (string, Database) {
+func tempDB() (string, *Database) {
dir, err := ioutil.TempDir("", "trie-bench")
if err != nil {
panic(fmt.Sprintf("can't create temporary directory: %v", err))
}
- db, err := ethdb.NewLDBDatabase(dir, 256, 0)
+ diskdb, err := ethdb.NewLDBDatabase(dir, 256, 0)
if err != nil {
panic(fmt.Sprintf("can't create temporary database: %v", err))
}
- return dir, db
+ return dir, NewDatabase(diskdb)
}
func getString(trie *Trie, k string) []byte {
diff --git a/vendor/github.com/influxdata/influxdb/LICENSE b/vendor/github.com/influxdata/influxdb/LICENSE
new file mode 100644
index 000000000..63cef79ba
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2013-2016 Errplane Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md b/vendor/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md
new file mode 100644
index 000000000..ea6fc69f3
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/LICENSE_OF_DEPENDENCIES.md
@@ -0,0 +1,62 @@
+- # List
+- bootstrap 3.3.5 [MIT LICENSE](https://github.com/twbs/bootstrap/blob/master/LICENSE)
+- collectd.org [ISC LICENSE](https://github.com/collectd/go-collectd/blob/master/LICENSE)
+- github.com/BurntSushi/toml [MIT LICENSE](https://github.com/BurntSushi/toml/blob/master/COPYING)
+- github.com/RoaringBitmap/roaring [APACHE LICENSE](https://github.com/RoaringBitmap/roaring/blob/master/LICENSE)
+- github.com/beorn7/perks [MIT LICENSE](https://github.com/beorn7/perks/blob/master/LICENSE)
+- github.com/bmizerany/pat [MIT LICENSE](https://github.com/bmizerany/pat#license)
+- github.com/boltdb/bolt [MIT LICENSE](https://github.com/boltdb/bolt/blob/master/LICENSE)
+- github.com/cespare/xxhash [MIT LICENSE](https://github.com/cespare/xxhash/blob/master/LICENSE.txt)
+- github.com/clarkduvall/hyperloglog [MIT LICENSE](https://github.com/clarkduvall/hyperloglog/blob/master/LICENSE)
+- github.com/davecgh/go-spew/spew [ISC LICENSE](https://github.com/davecgh/go-spew/blob/master/LICENSE)
+- github.com/dgrijalva/jwt-go [MIT LICENSE](https://github.com/dgrijalva/jwt-go/blob/master/LICENSE)
+- github.com/dgryski/go-bits [MIT LICENSE](https://github.com/dgryski/go-bits/blob/master/LICENSE)
+- github.com/dgryski/go-bitstream [MIT LICENSE](https://github.com/dgryski/go-bitstream/blob/master/LICENSE)
+- github.com/glycerine/go-unsnap-stream [MIT LICENSE](https://github.com/glycerine/go-unsnap-stream/blob/master/LICENSE)
+- github.com/gogo/protobuf/proto [BSD LICENSE](https://github.com/gogo/protobuf/blob/master/LICENSE)
+- github.com/golang/protobuf [BSD LICENSE](https://github.com/golang/protobuf/blob/master/LICENSE)
+- github.com/golang/snappy [BSD LICENSE](https://github.com/golang/snappy/blob/master/LICENSE)
+- github.com/google/go-cmp [BSD LICENSE](https://github.com/google/go-cmp/blob/master/LICENSE)
+- github.com/influxdata/influxql [MIT LICENSE](https://github.com/influxdata/influxql/blob/master/LICENSE)
+- github.com/influxdata/usage-client [MIT LICENSE](https://github.com/influxdata/usage-client/blob/master/LICENSE.txt)
+- github.com/influxdata/yamux [MOZILLA PUBLIC LICENSE](https://github.com/influxdata/yamux/blob/master/LICENSE)
+- github.com/influxdata/yarpc [MIT LICENSE](https://github.com/influxdata/yarpc/blob/master/LICENSE)
+- github.com/jsternberg/zap-logfmt [MIT LICENSE](https://github.com/jsternberg/zap-logfmt/blob/master/LICENSE)
+- github.com/jwilder/encoding [MIT LICENSE](https://github.com/jwilder/encoding/blob/master/LICENSE)
+- github.com/mattn/go-isatty [MIT LICENSE](https://github.com/mattn/go-isatty/blob/master/LICENSE)
+- github.com/matttproud/golang_protobuf_extensions [APACHE LICENSE](https://github.com/matttproud/golang_protobuf_extensions/blob/master/LICENSE)
+- github.com/opentracing/opentracing-go [MIT LICENSE](https://github.com/opentracing/opentracing-go/blob/master/LICENSE)
+- github.com/paulbellamy/ratecounter [MIT LICENSE](https://github.com/paulbellamy/ratecounter/blob/master/LICENSE)
+- github.com/peterh/liner [MIT LICENSE](https://github.com/peterh/liner/blob/master/COPYING)
+- github.com/philhofer/fwd [MIT LICENSE](https://github.com/philhofer/fwd/blob/master/LICENSE.md)
+- github.com/prometheus/client_golang [MIT LICENSE](https://github.com/prometheus/client_golang/blob/master/LICENSE)
+- github.com/prometheus/client_model [MIT LICENSE](https://github.com/prometheus/client_model/blob/master/LICENSE)
+- github.com/prometheus/common [APACHE LICENSE](https://github.com/prometheus/common/blob/master/LICENSE)
+- github.com/prometheus/procfs [APACHE LICENSE](https://github.com/prometheus/procfs/blob/master/LICENSE)
+- github.com/rakyll/statik [APACHE LICENSE](https://github.com/rakyll/statik/blob/master/LICENSE)
+- github.com/retailnext/hllpp [BSD LICENSE](https://github.com/retailnext/hllpp/blob/master/LICENSE)
+- github.com/tinylib/msgp [MIT LICENSE](https://github.com/tinylib/msgp/blob/master/LICENSE)
+- go.uber.org/atomic [MIT LICENSE](https://github.com/uber-go/atomic/blob/master/LICENSE.txt)
+- go.uber.org/multierr [MIT LICENSE](https://github.com/uber-go/multierr/blob/master/LICENSE.txt)
+- go.uber.org/zap [MIT LICENSE](https://github.com/uber-go/zap/blob/master/LICENSE.txt)
+- golang.org/x/crypto [BSD LICENSE](https://github.com/golang/crypto/blob/master/LICENSE)
+- golang.org/x/net [BSD LICENSE](https://github.com/golang/net/blob/master/LICENSE)
+- golang.org/x/sys [BSD LICENSE](https://github.com/golang/sys/blob/master/LICENSE)
+- golang.org/x/text [BSD LICENSE](https://github.com/golang/text/blob/master/LICENSE)
+- golang.org/x/time [BSD LICENSE](https://github.com/golang/time/blob/master/LICENSE)
+- jquery 2.1.4 [MIT LICENSE](https://github.com/jquery/jquery/blob/master/LICENSE.txt)
+- github.com/xlab/treeprint [MIT LICENSE](https://github.com/xlab/treeprint/blob/master/LICENSE)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/vendor/github.com/influxdata/influxdb/client/README.md b/vendor/github.com/influxdata/influxdb/client/README.md
new file mode 100644
index 000000000..773a11122
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/client/README.md
@@ -0,0 +1,306 @@
+# InfluxDB Client
+
+[![GoDoc](https://godoc.org/github.com/influxdata/influxdb?status.svg)](http://godoc.org/github.com/influxdata/influxdb/client/v2)
+
+## Description
+
+**NOTE:** The Go client library now has a "v2" version, with the old version
+being deprecated. The new version can be imported at
+`import "github.com/influxdata/influxdb/client/v2"`. It is not backwards-compatible.
+
+A Go client library written and maintained by the **InfluxDB** team.
+This package provides convenience functions to read and write time series data.
+It uses the HTTP protocol to communicate with your **InfluxDB** cluster.
+
+
+## Getting Started
+
+### Connecting To Your Database
+
+Connecting to an **InfluxDB** database is straightforward. You will need a host
+name, a port and the cluster user credentials if applicable. The default port is
+8086. You can customize these settings to your specific installation via the
+**InfluxDB** configuration file.
+
+Though not necessary for experimentation, you may want to create a new user
+and authenticate the connection to your database.
+
+For more information please check out the
+[Admin Docs](https://docs.influxdata.com/influxdb/latest/administration/).
+
+For the impatient, you can create a new admin user _bubba_ by firing off the
+[InfluxDB CLI](https://github.com/influxdata/influxdb/blob/master/cmd/influx/main.go).
+
+```shell
+influx
+> create user bubba with password 'bumblebeetuna'
+> grant all privileges to bubba
+```
+
+And now for good measure, set the credentials in your shell environment.
+In the example below we will use $INFLUX_USER and $INFLUX_PWD.
+
+Now with the administrivia out of the way, let's connect to our database.
+
+NOTE: If you've opted out of creating a user, you can omit Username and Password in
+the configuration below.
+
+```go
+package main
+
+import (
+ "log"
+ "time"
+
+ "github.com/influxdata/influxdb/client/v2"
+)
+
+const (
+ MyDB = "square_holes"
+ username = "bubba"
+ password = "bumblebeetuna"
+)
+
+
+func main() {
+ // Create a new HTTPClient
+ c, err := client.NewHTTPClient(client.HTTPConfig{
+ Addr: "http://localhost:8086",
+ Username: username,
+ Password: password,
+ })
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Create a new point batch
+ bp, err := client.NewBatchPoints(client.BatchPointsConfig{
+ Database: MyDB,
+ Precision: "s",
+ })
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Create a point and add to batch
+ tags := map[string]string{"cpu": "cpu-total"}
+ fields := map[string]interface{}{
+ "idle": 10.1,
+ "system": 53.3,
+ "user": 46.6,
+ }
+
+ pt, err := client.NewPoint("cpu_usage", tags, fields, time.Now())
+ if err != nil {
+ log.Fatal(err)
+ }
+ bp.AddPoint(pt)
+
+ // Write the batch
+ if err := c.Write(bp); err != nil {
+ log.Fatal(err)
+ }
+}
+
+```
+
+### Inserting Data
+
+Time series data aka *points* are written to the database using batch inserts.
+The mechanism is to create one or more points and then create a batch aka
+*batch points* and write these to a given database and series. A series is a
+combination of a measurement (time/values) and a set of tags.
+
+In this sample we will create a batch of 1,000 points. Each point has a time and
+a single value as well as 2 tags indicating a shape and color. We write these points
+to a database called _square_holes_ using a measurement named _shapes_.
+
+NOTE: You can specify a RetentionPolicy as part of the batch points. If not
+provided InfluxDB will use the database _default_ retention policy.
+
+```go
+
+func writePoints(clnt client.Client) {
+ sampleSize := 1000
+
+ bp, err := client.NewBatchPoints(client.BatchPointsConfig{
+ Database: "systemstats",
+ Precision: "us",
+ })
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ rand.Seed(time.Now().UnixNano())
+ for i := 0; i < sampleSize; i++ {
+ regions := []string{"us-west1", "us-west2", "us-west3", "us-east1"}
+ tags := map[string]string{
+ "cpu": "cpu-total",
+ "host": fmt.Sprintf("host%d", rand.Intn(1000)),
+ "region": regions[rand.Intn(len(regions))],
+ }
+
+ idle := rand.Float64() * 100.0
+ fields := map[string]interface{}{
+ "idle": idle,
+ "busy": 100.0 - idle,
+ }
+
+ pt, err := client.NewPoint(
+ "cpu_usage",
+ tags,
+ fields,
+ time.Now(),
+ )
+ if err != nil {
+ log.Fatal(err)
+ }
+ bp.AddPoint(pt)
+ }
+
+ if err := clnt.Write(bp); err != nil {
+ log.Fatal(err)
+ }
+}
+```
+
+#### Uint64 Support
+
+The `uint64` data type is supported if your server is version `1.4.0` or
+greater. To write a data point as an unsigned integer, you must insert
+the point as `uint64`. You cannot use `uint` or any of the other
+derivatives because previous versions of the client have supported
+writing those types as an integer.
+
+### Querying Data
+
+One nice advantage of using **InfluxDB** is the ability to query your data using familiar
+SQL constructs. In this example we can create a convenience function to query the database
+as follows:
+
+```go
+// queryDB convenience function to query the database
+func queryDB(clnt client.Client, cmd string) (res []client.Result, err error) {
+ q := client.Query{
+ Command: cmd,
+ Database: MyDB,
+ }
+ if response, err := clnt.Query(q); err == nil {
+ if response.Error() != nil {
+ return res, response.Error()
+ }
+ res = response.Results
+ } else {
+ return res, err
+ }
+ return res, nil
+}
+```
+
+#### Creating a Database
+
+```go
+_, err := queryDB(clnt, fmt.Sprintf("CREATE DATABASE %s", MyDB))
+if err != nil {
+ log.Fatal(err)
+}
+```
+
+#### Count Records
+
+```go
+q := fmt.Sprintf("SELECT count(%s) FROM %s", "value", MyMeasurement)
+res, err := queryDB(clnt, q)
+if err != nil {
+ log.Fatal(err)
+}
+count := res[0].Series[0].Values[0][1]
+log.Printf("Found a total of %v records\n", count)
+```
+
+#### Find the last 10 _shapes_ records
+
+```go
+q := fmt.Sprintf("SELECT * FROM %s LIMIT %d", MyMeasurement, 10)
+res, err = queryDB(clnt, q)
+if err != nil {
+ log.Fatal(err)
+}
+
+for i, row := range res[0].Series[0].Values {
+ t, err := time.Parse(time.RFC3339, row[0].(string))
+ if err != nil {
+ log.Fatal(err)
+ }
+ val := row[1].(string)
+ log.Printf("[%2d] %s: %s\n", i, t.Format(time.Stamp), val)
+}
+```
+
+### Using the UDP Client
+
+The **InfluxDB** client also supports writing over UDP.
+
+```go
+func WriteUDP() {
+ // Make client
+ c, err := client.NewUDPClient("localhost:8089")
+ if err != nil {
+ panic(err.Error())
+ }
+
+ // Create a new point batch
+ bp, _ := client.NewBatchPoints(client.BatchPointsConfig{
+ Precision: "s",
+ })
+
+ // Create a point and add to batch
+ tags := map[string]string{"cpu": "cpu-total"}
+ fields := map[string]interface{}{
+ "idle": 10.1,
+ "system": 53.3,
+ "user": 46.6,
+ }
+ pt, err := client.NewPoint("cpu_usage", tags, fields, time.Now())
+ if err != nil {
+ panic(err.Error())
+ }
+ bp.AddPoint(pt)
+
+ // Write the batch
+ c.Write(bp)
+}
+```
+
+### Point Splitting
+
+The UDP client now supports splitting single points that exceed the configured
+payload size. The logic for processing each point is listed here, starting with
+an empty payload.
+
+1. If adding the point to the current (non-empty) payload would exceed the
+ configured size, send the current payload. Otherwise, add it to the current
+ payload.
+1. If the point is smaller than the configured size, add it to the payload.
+1. If the point has no timestamp, just try to send the entire point as a single
+ UDP payload, and process the next point.
+1. Since the point has a timestamp, re-use the existing measurement name,
+ tagset, and timestamp and create multiple new points by splitting up the
+ fields. The per-point length will be kept close to the configured size,
+ staying under it if possible. This does mean that one large field, maybe a
+ long string, could be sent as a larger-than-configured payload.
+
+The above logic attempts to respect configured payload sizes, but not sacrifice
+any data integrity. Points without a timestamp can't be split, as that may
+cause fields to have differing timestamps when processed by the server.
+
+## Go Docs
+
+Please refer to
+[http://godoc.org/github.com/influxdata/influxdb/client/v2](http://godoc.org/github.com/influxdata/influxdb/client/v2)
+for documentation.
+
+## See Also
+
+You can also examine how the client library is used by the
+[InfluxDB CLI](https://github.com/influxdata/influxdb/blob/master/cmd/influx/main.go).
diff --git a/vendor/github.com/influxdata/influxdb/client/influxdb.go b/vendor/github.com/influxdata/influxdb/client/influxdb.go
new file mode 100644
index 000000000..98d362d50
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/client/influxdb.go
@@ -0,0 +1,840 @@
+// Package client implements a now-deprecated client for InfluxDB;
+// use github.com/influxdata/influxdb/client/v2 instead.
+package client // import "github.com/influxdata/influxdb/client"
+
+import (
+ "bytes"
+ "context"
+ "crypto/tls"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/influxdata/influxdb/models"
+)
+
+const (
+ // DefaultHost is the default host used to connect to an InfluxDB instance
+ DefaultHost = "localhost"
+
+ // DefaultPort is the default port used to connect to an InfluxDB instance
+ DefaultPort = 8086
+
+ // DefaultTimeout is the default connection timeout used to connect to an InfluxDB instance
+ DefaultTimeout = 0
+)
+
+// Query is used to send a command to the server. Both Command and Database are required.
+type Query struct {
+ Command string
+ Database string
+
+ // Chunked tells the server to send back chunked responses. This places
+ // less load on the server by sending back chunks of the response rather
+ // than waiting for the entire response all at once.
+ Chunked bool
+
+ // ChunkSize sets the maximum number of rows that will be returned per
+ // chunk. Chunks are either divided based on their series or if they hit
+ // the chunk size limit.
+ //
+ // Chunked must be set to true for this option to be used.
+ ChunkSize int
+}
+
+// ParseConnectionString will parse a string to create a valid connection URL
+func ParseConnectionString(path string, ssl bool) (url.URL, error) {
+ var host string
+ var port int
+
+ h, p, err := net.SplitHostPort(path)
+ if err != nil {
+ if path == "" {
+ host = DefaultHost
+ } else {
+ host = path
+ }
+ // If they didn't specify a port, always use the default port
+ port = DefaultPort
+ } else {
+ host = h
+ port, err = strconv.Atoi(p)
+ if err != nil {
+ return url.URL{}, fmt.Errorf("invalid port number %q: %s\n", path, err)
+ }
+ }
+
+ u := url.URL{
+ Scheme: "http",
+ }
+ if ssl {
+ u.Scheme = "https"
+ }
+
+ u.Host = net.JoinHostPort(host, strconv.Itoa(port))
+
+ return u, nil
+}
+
+// Config is used to specify what server to connect to.
+// URL: The URL of the server connecting to.
+// Username/Password are optional. They will be passed via basic auth if provided.
+// UserAgent: If not provided, will default to "InfluxDBClient",
+// Timeout: If not provided, will default to 0 (no timeout)
+type Config struct {
+ URL url.URL
+ UnixSocket string
+ Username string
+ Password string
+ UserAgent string
+ Timeout time.Duration
+ Precision string
+ WriteConsistency string
+ UnsafeSsl bool
+}
+
+// NewConfig will create a config to be used in connecting to the client
+func NewConfig() Config {
+ return Config{
+ Timeout: DefaultTimeout,
+ }
+}
+
+// Client is used to make calls to the server.
+type Client struct {
+ url url.URL
+ unixSocket string
+ username string
+ password string
+ httpClient *http.Client
+ userAgent string
+ precision string
+}
+
+const (
+ // ConsistencyOne requires at least one data node acknowledged a write.
+ ConsistencyOne = "one"
+
+ // ConsistencyAll requires all data nodes to acknowledge a write.
+ ConsistencyAll = "all"
+
+ // ConsistencyQuorum requires a quorum of data nodes to acknowledge a write.
+ ConsistencyQuorum = "quorum"
+
+ // ConsistencyAny allows for hinted hand off, potentially no write happened yet.
+ ConsistencyAny = "any"
+)
+
+// NewClient will instantiate and return a connected client to issue commands to the server.
+func NewClient(c Config) (*Client, error) {
+ tlsConfig := &tls.Config{
+ InsecureSkipVerify: c.UnsafeSsl,
+ }
+
+ tr := &http.Transport{
+ TLSClientConfig: tlsConfig,
+ }
+
+ if c.UnixSocket != "" {
+ // No need for compression in local communications.
+ tr.DisableCompression = true
+
+ tr.DialContext = func(_ context.Context, _, _ string) (net.Conn, error) {
+ return net.Dial("unix", c.UnixSocket)
+ }
+ }
+
+ client := Client{
+ url: c.URL,
+ unixSocket: c.UnixSocket,
+ username: c.Username,
+ password: c.Password,
+ httpClient: &http.Client{Timeout: c.Timeout, Transport: tr},
+ userAgent: c.UserAgent,
+ precision: c.Precision,
+ }
+ if client.userAgent == "" {
+ client.userAgent = "InfluxDBClient"
+ }
+ return &client, nil
+}
+
+// SetAuth will update the username and passwords
+func (c *Client) SetAuth(u, p string) {
+ c.username = u
+ c.password = p
+}
+
+// SetPrecision will update the precision
+func (c *Client) SetPrecision(precision string) {
+ c.precision = precision
+}
+
+// Query sends a command to the server and returns the Response
+func (c *Client) Query(q Query) (*Response, error) {
+ return c.QueryContext(context.Background(), q)
+}
+
+// QueryContext sends a command to the server and returns the Response.
+// It uses a context that can be cancelled by the command-line client.
+func (c *Client) QueryContext(ctx context.Context, q Query) (*Response, error) {
+ u := c.url
+
+ u.Path = "query"
+ values := u.Query()
+ values.Set("q", q.Command)
+ values.Set("db", q.Database)
+ if q.Chunked {
+ values.Set("chunked", "true")
+ if q.ChunkSize > 0 {
+ values.Set("chunk_size", strconv.Itoa(q.ChunkSize))
+ }
+ }
+ if c.precision != "" {
+ values.Set("epoch", c.precision)
+ }
+ u.RawQuery = values.Encode()
+
+ req, err := http.NewRequest("POST", u.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("User-Agent", c.userAgent)
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ req = req.WithContext(ctx)
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ var response Response
+ if q.Chunked {
+ cr := NewChunkedResponse(resp.Body)
+ for {
+ r, err := cr.NextResponse()
+ if err != nil {
+ // If we got an error while decoding the response, send that back.
+ return nil, err
+ }
+
+ if r == nil {
+ break
+ }
+
+ response.Results = append(response.Results, r.Results...)
+ if r.Err != nil {
+ response.Err = r.Err
+ break
+ }
+ }
+ } else {
+ dec := json.NewDecoder(resp.Body)
+ dec.UseNumber()
+ if err := dec.Decode(&response); err != nil {
+ // Ignore EOF errors if we got an invalid status code.
+ if !(err == io.EOF && resp.StatusCode != http.StatusOK) {
+ return nil, err
+ }
+ }
+ }
+
+ // If we don't have an error in our json response, and didn't get StatusOK,
+ // then send back an error.
+ if resp.StatusCode != http.StatusOK && response.Error() == nil {
+ return &response, fmt.Errorf("received status code %d from server", resp.StatusCode)
+ }
+ return &response, nil
+}
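+
+// exampleQuery is an illustrative sketch added for this document and not part
+// of the vendored upstream file: it issues a chunked query against a
+// hypothetical "mydb" database and walks the returned results.
+func exampleQuery(c *Client) {
+ q := Query{
+ Command: "SELECT * FROM cpu LIMIT 10",
+ Database: "mydb",
+ Chunked: true,
+ ChunkSize: 100,
+ }
+ resp, err := c.Query(q)
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ if err := resp.Error(); err != nil {
+ fmt.Println(err)
+ return
+ }
+ for _, result := range resp.Results {
+ fmt.Println(len(result.Series), "series returned")
+ }
+}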
+
+// Write takes BatchPoints and writes the batched points, applying the batch defaults.
+// If successful, both the error and the Response are nil.
+// If an error occurs, the Response may contain additional information if populated.
+func (c *Client) Write(bp BatchPoints) (*Response, error) {
+ u := c.url
+ u.Path = "write"
+
+ var b bytes.Buffer
+ for _, p := range bp.Points {
+ err := checkPointTypes(p)
+ if err != nil {
+ return nil, err
+ }
+ if p.Raw != "" {
+ if _, err := b.WriteString(p.Raw); err != nil {
+ return nil, err
+ }
+ } else {
+ for k, v := range bp.Tags {
+ if p.Tags == nil {
+ p.Tags = make(map[string]string, len(bp.Tags))
+ }
+ p.Tags[k] = v
+ }
+
+ if _, err := b.WriteString(p.MarshalString()); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := b.WriteByte('\n'); err != nil {
+ return nil, err
+ }
+ }
+
+ req, err := http.NewRequest("POST", u.String(), &b)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", "")
+ req.Header.Set("User-Agent", c.userAgent)
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ precision := bp.Precision
+ if precision == "" {
+ precision = "ns"
+ }
+
+ params := req.URL.Query()
+ params.Set("db", bp.Database)
+ params.Set("rp", bp.RetentionPolicy)
+ params.Set("precision", precision)
+ params.Set("consistency", bp.WriteConsistency)
+ req.URL.RawQuery = params.Encode()
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ var response Response
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK {
+ var err = fmt.Errorf(string(body))
+ response.Err = err
+ return &response, err
+ }
+
+ return nil, nil
+}
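+
+// exampleWrite is an illustrative sketch added for this document and not part
+// of the vendored upstream file: it builds a BatchPoints value with a single
+// point and writes it. The database, measurement, tag and field values are
+// hypothetical.
+func exampleWrite(c *Client) {
+ bp := BatchPoints{
+ Database: "mydb",
+ Precision: "s",
+ Points: []Point{
+ {
+ Measurement: "cpu",
+ Tags: map[string]string{"host": "server01"},
+ Fields: map[string]interface{}{"value": 0.64},
+ Time: time.Now(),
+ },
+ },
+ }
+ if _, err := c.Write(bp); err != nil {
+ fmt.Println(err)
+ }
+}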
+
+// WriteLineProtocol takes a newline-delimited string of points and writes them.
+// If successful, both the error and the Response are nil.
+// If an error occurs, the Response may contain additional information if populated.
+func (c *Client) WriteLineProtocol(data, database, retentionPolicy, precision, writeConsistency string) (*Response, error) {
+ u := c.url
+ u.Path = "write"
+
+ r := strings.NewReader(data)
+
+ req, err := http.NewRequest("POST", u.String(), r)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", "")
+ req.Header.Set("User-Agent", c.userAgent)
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+ params := req.URL.Query()
+ params.Set("db", database)
+ params.Set("rp", retentionPolicy)
+ params.Set("precision", precision)
+ params.Set("consistency", writeConsistency)
+ req.URL.RawQuery = params.Encode()
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ var response Response
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK {
+ err := fmt.Errorf(string(body))
+ response.Err = err
+ return &response, err
+ }
+
+ return nil, nil
+}
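+
+// exampleWriteLineProtocol is an illustrative sketch added for this document
+// and not part of the vendored upstream file: it writes two raw line-protocol
+// points to a hypothetical "mydb" database with second precision and "one"
+// write consistency.
+func exampleWriteLineProtocol(c *Client) {
+ data := "cpu,host=server01 value=0.64 1502132086\n" +
+ "mem,host=server01 used=2048i 1502132086"
+ if _, err := c.WriteLineProtocol(data, "mydb", "", "s", "one"); err != nil {
+ fmt.Println(err)
+ }
+}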
+
+// Ping will check to see if the server is up
+// Ping returns how long the request took, the version of the server it connected to, and an error if one occurred.
+func (c *Client) Ping() (time.Duration, string, error) {
+ now := time.Now()
+ u := c.url
+ u.Path = "ping"
+
+ req, err := http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return 0, "", err
+ }
+ req.Header.Set("User-Agent", c.userAgent)
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return 0, "", err
+ }
+ defer resp.Body.Close()
+
+ version := resp.Header.Get("X-Influxdb-Version")
+ return time.Since(now), version, nil
+}
+
+// Structs
+
+// Message represents a user message.
+type Message struct {
+ Level string `json:"level,omitempty"`
+ Text string `json:"text,omitempty"`
+}
+
+// Result represents a resultset returned from a single statement.
+type Result struct {
+ Series []models.Row
+ Messages []*Message
+ Err error
+}
+
+// MarshalJSON encodes the result into JSON.
+func (r *Result) MarshalJSON() ([]byte, error) {
+ // Define a struct that outputs "error" as a string.
+ var o struct {
+ Series []models.Row `json:"series,omitempty"`
+ Messages []*Message `json:"messages,omitempty"`
+ Err string `json:"error,omitempty"`
+ }
+
+ // Copy fields to output struct.
+ o.Series = r.Series
+ o.Messages = r.Messages
+ if r.Err != nil {
+ o.Err = r.Err.Error()
+ }
+
+ return json.Marshal(&o)
+}
+
+// UnmarshalJSON decodes the data into the Result struct
+func (r *Result) UnmarshalJSON(b []byte) error {
+ var o struct {
+ Series []models.Row `json:"series,omitempty"`
+ Messages []*Message `json:"messages,omitempty"`
+ Err string `json:"error,omitempty"`
+ }
+
+ dec := json.NewDecoder(bytes.NewBuffer(b))
+ dec.UseNumber()
+ err := dec.Decode(&o)
+ if err != nil {
+ return err
+ }
+ r.Series = o.Series
+ r.Messages = o.Messages
+ if o.Err != "" {
+ r.Err = errors.New(o.Err)
+ }
+ return nil
+}
+
+// Response represents a list of statement results.
+type Response struct {
+ Results []Result
+ Err error
+}
+
+// MarshalJSON encodes the response into JSON.
+func (r *Response) MarshalJSON() ([]byte, error) {
+ // Define a struct that outputs "error" as a string.
+ var o struct {
+ Results []Result `json:"results,omitempty"`
+ Err string `json:"error,omitempty"`
+ }
+
+ // Copy fields to output struct.
+ o.Results = r.Results
+ if r.Err != nil {
+ o.Err = r.Err.Error()
+ }
+
+ return json.Marshal(&o)
+}
+
+// UnmarshalJSON decodes the data into the Response struct
+func (r *Response) UnmarshalJSON(b []byte) error {
+ var o struct {
+ Results []Result `json:"results,omitempty"`
+ Err string `json:"error,omitempty"`
+ }
+
+ dec := json.NewDecoder(bytes.NewBuffer(b))
+ dec.UseNumber()
+ err := dec.Decode(&o)
+ if err != nil {
+ return err
+ }
+ r.Results = o.Results
+ if o.Err != "" {
+ r.Err = errors.New(o.Err)
+ }
+ return nil
+}
+
+// Error returns the first error from any statement.
+// Returns nil if no errors occurred on any statements.
+func (r *Response) Error() error {
+ if r.Err != nil {
+ return r.Err
+ }
+ for _, result := range r.Results {
+ if result.Err != nil {
+ return result.Err
+ }
+ }
+ return nil
+}
+
+// duplexReader reads responses and writes them to another writer while
+// satisfying the io.Reader interface.
+type duplexReader struct {
+ r io.Reader
+ w io.Writer
+}
+
+func (r *duplexReader) Read(p []byte) (n int, err error) {
+ n, err = r.r.Read(p)
+ if err == nil {
+ r.w.Write(p[:n])
+ }
+ return n, err
+}
+
+// ChunkedResponse represents a response from the server that
+// uses chunking to stream the output.
+type ChunkedResponse struct {
+ dec *json.Decoder
+ duplex *duplexReader
+ buf bytes.Buffer
+}
+
+// NewChunkedResponse reads a stream and produces responses from the stream.
+func NewChunkedResponse(r io.Reader) *ChunkedResponse {
+ resp := &ChunkedResponse{}
+ resp.duplex = &duplexReader{r: r, w: &resp.buf}
+ resp.dec = json.NewDecoder(resp.duplex)
+ resp.dec.UseNumber()
+ return resp
+}
+
+// NextResponse reads the next line of the stream and returns a response.
+func (r *ChunkedResponse) NextResponse() (*Response, error) {
+ var response Response
+ if err := r.dec.Decode(&response); err != nil {
+ if err == io.EOF {
+ return nil, nil
+ }
+ // A decoding error happened. This probably means the server crashed
+ // and sent a last-ditch error message to us. Ensure we have read the
+ // entirety of the connection to get any remaining error text.
+ io.Copy(ioutil.Discard, r.duplex)
+ return nil, errors.New(strings.TrimSpace(r.buf.String()))
+ }
+ r.buf.Reset()
+ return &response, nil
+}
+
+// Point defines the fields that will be written to the database
+// Measurement, Time, and Fields are required
+// Precision can be specified if the time is in epoch format (integer).
+// Valid values for Precision are n, u, ms, s, m, and h
+type Point struct {
+ Measurement string
+ Tags map[string]string
+ Time time.Time
+ Fields map[string]interface{}
+ Precision string
+ Raw string
+}
+
+// MarshalJSON will format the time in RFC3339Nano.
+// Precision is ignored here because it is only used for writing, not reading;
+// in other words, times are always sent back in nanosecond precision.
+func (p *Point) MarshalJSON() ([]byte, error) {
+ point := struct {
+ Measurement string `json:"measurement,omitempty"`
+ Tags map[string]string `json:"tags,omitempty"`
+ Time string `json:"time,omitempty"`
+ Fields map[string]interface{} `json:"fields,omitempty"`
+ Precision string `json:"precision,omitempty"`
+ }{
+ Measurement: p.Measurement,
+ Tags: p.Tags,
+ Fields: p.Fields,
+ Precision: p.Precision,
+ }
+ // Let it omit empty if it's really zero
+ if !p.Time.IsZero() {
+ point.Time = p.Time.UTC().Format(time.RFC3339Nano)
+ }
+ return json.Marshal(&point)
+}
+
+// MarshalString renders string representation of a Point with specified
+// precision. The default precision is nanoseconds.
+func (p *Point) MarshalString() string {
+ pt, err := models.NewPoint(p.Measurement, models.NewTags(p.Tags), p.Fields, p.Time)
+ if err != nil {
+ return "# ERROR: " + err.Error() + " " + p.Measurement
+ }
+ if p.Precision == "" || p.Precision == "ns" || p.Precision == "n" {
+ return pt.String()
+ }
+ return pt.PrecisionString(p.Precision)
+}
+
+// UnmarshalJSON decodes the data into the Point struct
+func (p *Point) UnmarshalJSON(b []byte) error {
+ var normal struct {
+ Measurement string `json:"measurement"`
+ Tags map[string]string `json:"tags"`
+ Time time.Time `json:"time"`
+ Precision string `json:"precision"`
+ Fields map[string]interface{} `json:"fields"`
+ }
+ var epoch struct {
+ Measurement string `json:"measurement"`
+ Tags map[string]string `json:"tags"`
+ Time *int64 `json:"time"`
+ Precision string `json:"precision"`
+ Fields map[string]interface{} `json:"fields"`
+ }
+
+ if err := func() error {
+ var err error
+ dec := json.NewDecoder(bytes.NewBuffer(b))
+ dec.UseNumber()
+ if err = dec.Decode(&epoch); err != nil {
+ return err
+ }
+ // Convert from epoch to time.Time, but only if Time
+ // was actually set.
+ var ts time.Time
+ if epoch.Time != nil {
+ ts, err = EpochToTime(*epoch.Time, epoch.Precision)
+ if err != nil {
+ return err
+ }
+ }
+ p.Measurement = epoch.Measurement
+ p.Tags = epoch.Tags
+ p.Time = ts
+ p.Precision = epoch.Precision
+ p.Fields = normalizeFields(epoch.Fields)
+ return nil
+ }(); err == nil {
+ return nil
+ }
+
+ dec := json.NewDecoder(bytes.NewBuffer(b))
+ dec.UseNumber()
+ if err := dec.Decode(&normal); err != nil {
+ return err
+ }
+ normal.Time = SetPrecision(normal.Time, normal.Precision)
+ p.Measurement = normal.Measurement
+ p.Tags = normal.Tags
+ p.Time = normal.Time
+ p.Precision = normal.Precision
+ p.Fields = normalizeFields(normal.Fields)
+
+ return nil
+}
+
+// Remove any notion of json.Number
+func normalizeFields(fields map[string]interface{}) map[string]interface{} {
+ newFields := map[string]interface{}{}
+
+ for k, v := range fields {
+ switch v := v.(type) {
+ case json.Number:
+ jv, e := v.Float64()
+ if e != nil {
+ panic(fmt.Sprintf("unable to convert json.Number to float64: %s", e))
+ }
+ newFields[k] = jv
+ default:
+ newFields[k] = v
+ }
+ }
+ return newFields
+}
+
+// BatchPoints is used to send batched data in a single write.
+// Database and Points are required.
+// If no retention policy is specified, the database's default retention policy will be used.
+// If tags are specified, they will be "merged" with all points. If a point already has that tag, it will be ignored.
+// If time is specified, it will be applied to any point with an empty time.
+// Precision can be specified if the time is in epoch format (integer).
+// Valid values for Precision are n, u, ms, s, m, and h
+type BatchPoints struct {
+ Points []Point `json:"points,omitempty"`
+ Database string `json:"database,omitempty"`
+ RetentionPolicy string `json:"retentionPolicy,omitempty"`
+ Tags map[string]string `json:"tags,omitempty"`
+ Time time.Time `json:"time,omitempty"`
+ Precision string `json:"precision,omitempty"`
+ WriteConsistency string `json:"-"`
+}
+
+// UnmarshalJSON decodes the data into the BatchPoints struct
+func (bp *BatchPoints) UnmarshalJSON(b []byte) error {
+ var normal struct {
+ Points []Point `json:"points"`
+ Database string `json:"database"`
+ RetentionPolicy string `json:"retentionPolicy"`
+ Tags map[string]string `json:"tags"`
+ Time time.Time `json:"time"`
+ Precision string `json:"precision"`
+ }
+ var epoch struct {
+ Points []Point `json:"points"`
+ Database string `json:"database"`
+ RetentionPolicy string `json:"retentionPolicy"`
+ Tags map[string]string `json:"tags"`
+ Time *int64 `json:"time"`
+ Precision string `json:"precision"`
+ }
+
+ if err := func() error {
+ var err error
+ if err = json.Unmarshal(b, &epoch); err != nil {
+ return err
+ }
+ // Convert from epoch to time.Time
+ var ts time.Time
+ if epoch.Time != nil {
+ ts, err = EpochToTime(*epoch.Time, epoch.Precision)
+ if err != nil {
+ return err
+ }
+ }
+ bp.Points = epoch.Points
+ bp.Database = epoch.Database
+ bp.RetentionPolicy = epoch.RetentionPolicy
+ bp.Tags = epoch.Tags
+ bp.Time = ts
+ bp.Precision = epoch.Precision
+ return nil
+ }(); err == nil {
+ return nil
+ }
+
+ if err := json.Unmarshal(b, &normal); err != nil {
+ return err
+ }
+ normal.Time = SetPrecision(normal.Time, normal.Precision)
+ bp.Points = normal.Points
+ bp.Database = normal.Database
+ bp.RetentionPolicy = normal.RetentionPolicy
+ bp.Tags = normal.Tags
+ bp.Time = normal.Time
+ bp.Precision = normal.Precision
+
+ return nil
+}
+
+// utility functions
+
+// Addr provides, as a string, the address of the server the client is connected to: the unix socket path if one is configured, otherwise the URL.
+func (c *Client) Addr() string {
+ if c.unixSocket != "" {
+ return c.unixSocket
+ }
+ return c.url.String()
+}
+
+// checkPointTypes ensures no unsupported types are submitted to influxdb, returning error if they are found.
+func checkPointTypes(p Point) error {
+ for _, v := range p.Fields {
+ switch v.(type) {
+ case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64, bool, string, nil:
+ return nil
+ default:
+ return fmt.Errorf("unsupported point type: %T", v)
+ }
+ }
+ return nil
+}
+
+// helper functions
+
+// EpochToTime takes a unix epoch time and uses precision to return back a time.Time
+func EpochToTime(epoch int64, precision string) (time.Time, error) {
+ if precision == "" {
+ precision = "s"
+ }
+ var t time.Time
+ switch precision {
+ case "h":
+ t = time.Unix(0, epoch*int64(time.Hour))
+ case "m":
+ t = time.Unix(0, epoch*int64(time.Minute))
+ case "s":
+ t = time.Unix(0, epoch*int64(time.Second))
+ case "ms":
+ t = time.Unix(0, epoch*int64(time.Millisecond))
+ case "u":
+ t = time.Unix(0, epoch*int64(time.Microsecond))
+ case "n":
+ t = time.Unix(0, epoch)
+ default:
+ return time.Time{}, fmt.Errorf("Unknown precision %q", precision)
+ }
+ return t, nil
+}
+
+// SetPrecision will round a time to the specified precision
+func SetPrecision(t time.Time, precision string) time.Time {
+ switch precision {
+ case "n":
+ case "u":
+ return t.Round(time.Microsecond)
+ case "ms":
+ return t.Round(time.Millisecond)
+ case "s":
+ return t.Round(time.Second)
+ case "m":
+ return t.Round(time.Minute)
+ case "h":
+ return t.Round(time.Hour)
+ }
+ return t
+}
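+
+// exampleEpochHelpers is an illustrative sketch added for this document and
+// not part of the vendored upstream file: it converts a hypothetical epoch
+// value in millisecond precision into a time.Time and rounds another
+// timestamp to second precision.
+func exampleEpochHelpers() {
+ t, err := EpochToTime(1502132086000, "ms")
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ fmt.Println(t.UTC())
+ fmt.Println(SetPrecision(time.Now(), "s"))
+}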
diff --git a/vendor/github.com/influxdata/influxdb/client/v2/client.go b/vendor/github.com/influxdata/influxdb/client/v2/client.go
new file mode 100644
index 000000000..77d44f2b3
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/client/v2/client.go
@@ -0,0 +1,635 @@
+// Package client (v2) is the current official Go client for InfluxDB.
+package client // import "github.com/influxdata/influxdb/client/v2"
+
+import (
+ "bytes"
+ "crypto/tls"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/influxdata/influxdb/models"
+)
+
+// HTTPConfig is the config data needed to create an HTTP Client.
+type HTTPConfig struct {
+ // Addr should be of the form "http://host:port"
+ // or "http://[ipv6-host%zone]:port".
+ Addr string
+
+ // Username is the influxdb username, optional.
+ Username string
+
+ // Password is the influxdb password, optional.
+ Password string
+
+ // UserAgent is the http User Agent, defaults to "InfluxDBClient".
+ UserAgent string
+
+ // Timeout for influxdb writes, defaults to no timeout.
+ Timeout time.Duration
+
+ // InsecureSkipVerify gets passed to the http client, if true, it will
+ // skip https certificate verification. Defaults to false.
+ InsecureSkipVerify bool
+
+ // TLSConfig allows the user to set their own TLS config for the HTTP
+ // Client. If set, this option overrides InsecureSkipVerify.
+ TLSConfig *tls.Config
+}
+
+// BatchPointsConfig is the config data needed to create an instance of the BatchPoints struct.
+type BatchPointsConfig struct {
+ // Precision is the write precision of the points, defaults to "ns".
+ Precision string
+
+ // Database is the database to write points to.
+ Database string
+
+ // RetentionPolicy is the retention policy of the points.
+ RetentionPolicy string
+
+ // WriteConsistency is the number of servers required to confirm the write.
+ WriteConsistency string
+}
+
+// Client is a client interface for writing & querying the database.
+type Client interface {
+ // Ping checks the status of the cluster, and will always return 0 time and no
+ // error for UDP clients.
+ Ping(timeout time.Duration) (time.Duration, string, error)
+
+ // Write takes a BatchPoints object and writes all Points to InfluxDB.
+ Write(bp BatchPoints) error
+
+ // Query makes an InfluxDB Query on the database. This will fail if using
+ // the UDP client.
+ Query(q Query) (*Response, error)
+
+ // Close releases any resources a Client may be using.
+ Close() error
+}
+
+// NewHTTPClient returns a new Client from the provided config.
+// Client is safe for concurrent use by multiple goroutines.
+func NewHTTPClient(conf HTTPConfig) (Client, error) {
+ if conf.UserAgent == "" {
+ conf.UserAgent = "InfluxDBClient"
+ }
+
+ u, err := url.Parse(conf.Addr)
+ if err != nil {
+ return nil, err
+ } else if u.Scheme != "http" && u.Scheme != "https" {
+ m := fmt.Sprintf("Unsupported protocol scheme: %s, your address"+
+ " must start with http:// or https://", u.Scheme)
+ return nil, errors.New(m)
+ }
+
+ tr := &http.Transport{
+ TLSClientConfig: &tls.Config{
+ InsecureSkipVerify: conf.InsecureSkipVerify,
+ },
+ }
+ if conf.TLSConfig != nil {
+ tr.TLSClientConfig = conf.TLSConfig
+ }
+ return &client{
+ url: *u,
+ username: conf.Username,
+ password: conf.Password,
+ useragent: conf.UserAgent,
+ httpClient: &http.Client{
+ Timeout: conf.Timeout,
+ Transport: tr,
+ },
+ transport: tr,
+ }, nil
+}
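+
+// exampleNewHTTPClient is an illustrative sketch added for this document and
+// not part of the vendored upstream file: it creates an HTTP client from an
+// HTTPConfig, pings the server, and closes the client when done. The address
+// and credentials are hypothetical.
+func exampleNewHTTPClient() {
+ c, err := NewHTTPClient(HTTPConfig{
+ Addr: "http://localhost:8086",
+ Username: "admin",
+ Password: "secret",
+ Timeout: 10 * time.Second,
+ })
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ defer c.Close()
+ if _, _, err := c.Ping(5 * time.Second); err != nil {
+ fmt.Println(err)
+ }
+}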
+
+// Ping will check to see if the server is up with an optional timeout on waiting for leader.
+// Ping returns how long the request took, the version of the server it connected to, and an error if one occurred.
+func (c *client) Ping(timeout time.Duration) (time.Duration, string, error) {
+ now := time.Now()
+ u := c.url
+ u.Path = "ping"
+
+ req, err := http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return 0, "", err
+ }
+
+ req.Header.Set("User-Agent", c.useragent)
+
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ if timeout > 0 {
+ params := req.URL.Query()
+ params.Set("wait_for_leader", fmt.Sprintf("%.0fs", timeout.Seconds()))
+ req.URL.RawQuery = params.Encode()
+ }
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return 0, "", err
+ }
+ defer resp.Body.Close()
+
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return 0, "", err
+ }
+
+ if resp.StatusCode != http.StatusNoContent {
+ var err = fmt.Errorf(string(body))
+ return 0, "", err
+ }
+
+ version := resp.Header.Get("X-Influxdb-Version")
+ return time.Since(now), version, nil
+}
+
+// Close releases the client's resources.
+func (c *client) Close() error {
+ c.transport.CloseIdleConnections()
+ return nil
+}
+
+// client is safe for concurrent use as the fields are all read-only
+// once the client is instantiated.
+type client struct {
+ // N.B. - if url.UserInfo is accessed in future modifications to the
+ // methods on client, you will need to synchronise access to url.
+ url url.URL
+ username string
+ password string
+ useragent string
+ httpClient *http.Client
+ transport *http.Transport
+}
+
+// BatchPoints is an interface into a batched grouping of points to write into
+// InfluxDB together. BatchPoints is NOT thread-safe; you must create a separate
+// batch for each goroutine.
+type BatchPoints interface {
+ // AddPoint adds the given point to the Batch of points.
+ AddPoint(p *Point)
+ // AddPoints adds the given points to the Batch of points.
+ AddPoints(ps []*Point)
+ // Points lists the points in the Batch.
+ Points() []*Point
+
+ // Precision returns the currently set precision of this Batch.
+ Precision() string
+ // SetPrecision sets the precision of this batch.
+ SetPrecision(s string) error
+
+ // Database returns the currently set database of this Batch.
+ Database() string
+ // SetDatabase sets the database of this Batch.
+ SetDatabase(s string)
+
+ // WriteConsistency returns the currently set write consistency of this Batch.
+ WriteConsistency() string
+ // SetWriteConsistency sets the write consistency of this Batch.
+ SetWriteConsistency(s string)
+
+ // RetentionPolicy returns the currently set retention policy of this Batch.
+ RetentionPolicy() string
+ // SetRetentionPolicy sets the retention policy of this Batch.
+ SetRetentionPolicy(s string)
+}
+
+// NewBatchPoints returns a BatchPoints interface based on the given config.
+func NewBatchPoints(conf BatchPointsConfig) (BatchPoints, error) {
+ if conf.Precision == "" {
+ conf.Precision = "ns"
+ }
+ if _, err := time.ParseDuration("1" + conf.Precision); err != nil {
+ return nil, err
+ }
+ bp := &batchpoints{
+ database: conf.Database,
+ precision: conf.Precision,
+ retentionPolicy: conf.RetentionPolicy,
+ writeConsistency: conf.WriteConsistency,
+ }
+ return bp, nil
+}
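+
+// exampleBatchWrite is an illustrative sketch added for this document and not
+// part of the vendored upstream file: it assembles a batch with a single
+// point (via NewPoint, defined below) and writes it through the given client.
+// The database, measurement, tag and field values are hypothetical.
+func exampleBatchWrite(c Client) {
+ bp, err := NewBatchPoints(BatchPointsConfig{
+ Database: "mydb",
+ Precision: "s",
+ })
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ pt, err := NewPoint("cpu",
+ map[string]string{"host": "server01"},
+ map[string]interface{}{"value": 0.64},
+ time.Now())
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ bp.AddPoint(pt)
+ if err := c.Write(bp); err != nil {
+ fmt.Println(err)
+ }
+}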
+
+type batchpoints struct {
+ points []*Point
+ database string
+ precision string
+ retentionPolicy string
+ writeConsistency string
+}
+
+func (bp *batchpoints) AddPoint(p *Point) {
+ bp.points = append(bp.points, p)
+}
+
+func (bp *batchpoints) AddPoints(ps []*Point) {
+ bp.points = append(bp.points, ps...)
+}
+
+func (bp *batchpoints) Points() []*Point {
+ return bp.points
+}
+
+func (bp *batchpoints) Precision() string {
+ return bp.precision
+}
+
+func (bp *batchpoints) Database() string {
+ return bp.database
+}
+
+func (bp *batchpoints) WriteConsistency() string {
+ return bp.writeConsistency
+}
+
+func (bp *batchpoints) RetentionPolicy() string {
+ return bp.retentionPolicy
+}
+
+func (bp *batchpoints) SetPrecision(p string) error {
+ if _, err := time.ParseDuration("1" + p); err != nil {
+ return err
+ }
+ bp.precision = p
+ return nil
+}
+
+func (bp *batchpoints) SetDatabase(db string) {
+ bp.database = db
+}
+
+func (bp *batchpoints) SetWriteConsistency(wc string) {
+ bp.writeConsistency = wc
+}
+
+func (bp *batchpoints) SetRetentionPolicy(rp string) {
+ bp.retentionPolicy = rp
+}
+
+// Point represents a single data point.
+type Point struct {
+ pt models.Point
+}
+
+// NewPoint returns a point with the given timestamp. If a timestamp is not
+// given, then data is sent to the database without a timestamp, in which case
+// the server will assign local time upon reception. NOTE: it is recommended to
+// send data with a timestamp.
+func NewPoint(
+ name string,
+ tags map[string]string,
+ fields map[string]interface{},
+ t ...time.Time,
+) (*Point, error) {
+ var T time.Time
+ if len(t) > 0 {
+ T = t[0]
+ }
+
+ pt, err := models.NewPoint(name, models.NewTags(tags), fields, T)
+ if err != nil {
+ return nil, err
+ }
+ return &Point{
+ pt: pt,
+ }, nil
+}
+
+// String returns a line-protocol string of the Point.
+func (p *Point) String() string {
+ return p.pt.String()
+}
+
+// PrecisionString returns a line-protocol string of the Point,
+// with the timestamp formatted for the given precision.
+func (p *Point) PrecisionString(precision string) string {
+ return p.pt.PrecisionString(precision)
+}
+
+// Name returns the measurement name of the point.
+func (p *Point) Name() string {
+ return string(p.pt.Name())
+}
+
+// Tags returns the tags associated with the point.
+func (p *Point) Tags() map[string]string {
+ return p.pt.Tags().Map()
+}
+
+// Time returns the timestamp for the point.
+func (p *Point) Time() time.Time {
+ return p.pt.Time()
+}
+
+// UnixNano returns timestamp of the point in nanoseconds since Unix epoch.
+func (p *Point) UnixNano() int64 {
+ return p.pt.UnixNano()
+}
+
+// Fields returns the fields for the point.
+func (p *Point) Fields() (map[string]interface{}, error) {
+ return p.pt.Fields()
+}
+
+// NewPointFrom returns a point from the provided models.Point.
+func NewPointFrom(pt models.Point) *Point {
+ return &Point{pt: pt}
+}
+
+func (c *client) Write(bp BatchPoints) error {
+ var b bytes.Buffer
+
+ for _, p := range bp.Points() {
+ if _, err := b.WriteString(p.pt.PrecisionString(bp.Precision())); err != nil {
+ return err
+ }
+
+ if err := b.WriteByte('\n'); err != nil {
+ return err
+ }
+ }
+
+ u := c.url
+ u.Path = "write"
+ req, err := http.NewRequest("POST", u.String(), &b)
+ if err != nil {
+ return err
+ }
+ req.Header.Set("Content-Type", "")
+ req.Header.Set("User-Agent", c.useragent)
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ params := req.URL.Query()
+ params.Set("db", bp.Database())
+ params.Set("rp", bp.RetentionPolicy())
+ params.Set("precision", bp.Precision())
+ params.Set("consistency", bp.WriteConsistency())
+ req.URL.RawQuery = params.Encode()
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return err
+ }
+
+ if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK {
+ var err = fmt.Errorf(string(body))
+ return err
+ }
+
+ return nil
+}
+
+// Query defines a query to send to the server.
+type Query struct {
+ Command string
+ Database string
+ Precision string
+ Chunked bool
+ ChunkSize int
+ Parameters map[string]interface{}
+}
+
+// NewQuery returns a query object.
+// The database and precision arguments can be empty strings if they are not needed for the query.
+func NewQuery(command, database, precision string) Query {
+ return Query{
+ Command: command,
+ Database: database,
+ Precision: precision,
+ Parameters: make(map[string]interface{}),
+ }
+}
+
+// NewQueryWithParameters returns a query object.
+// The database and precision arguments can be empty strings if they are not needed for the query.
+// parameters is a map of the parameter names used in the command to their values.
+func NewQueryWithParameters(command, database, precision string, parameters map[string]interface{}) Query {
+ return Query{
+ Command: command,
+ Database: database,
+ Precision: precision,
+ Parameters: parameters,
+ }
+}
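+
+// exampleQueryWithParameters is an illustrative sketch added for this
+// document and not part of the vendored upstream file: it runs a
+// bound-parameter query through the Client interface. The database name and
+// parameter value are hypothetical.
+func exampleQueryWithParameters(c Client) {
+ q := NewQueryWithParameters(
+ "SELECT * FROM cpu WHERE host = $host",
+ "mydb", "s",
+ map[string]interface{}{"host": "server01"})
+ resp, err := c.Query(q)
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ if err := resp.Error(); err != nil {
+ fmt.Println(err)
+ return
+ }
+ for _, r := range resp.Results {
+ fmt.Println(len(r.Series), "series returned")
+ }
+}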
+
+// Response represents a list of statement results.
+type Response struct {
+ Results []Result
+ Err string `json:"error,omitempty"`
+}
+
+// Error returns the first error from any statement.
+// It returns nil if no errors occurred on any statements.
+func (r *Response) Error() error {
+ if r.Err != "" {
+ return fmt.Errorf(r.Err)
+ }
+ for _, result := range r.Results {
+ if result.Err != "" {
+ return fmt.Errorf(result.Err)
+ }
+ }
+ return nil
+}
+
+// Message represents a user message.
+type Message struct {
+ Level string
+ Text string
+}
+
+// Result represents a resultset returned from a single statement.
+type Result struct {
+ Series []models.Row
+ Messages []*Message
+ Err string `json:"error,omitempty"`
+}
+
+// Query sends a command to the server and returns the Response.
+func (c *client) Query(q Query) (*Response, error) {
+ u := c.url
+ u.Path = "query"
+
+ jsonParameters, err := json.Marshal(q.Parameters)
+
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("POST", u.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ req.Header.Set("Content-Type", "")
+ req.Header.Set("User-Agent", c.useragent)
+
+ if c.username != "" {
+ req.SetBasicAuth(c.username, c.password)
+ }
+
+ params := req.URL.Query()
+ params.Set("q", q.Command)
+ params.Set("db", q.Database)
+ params.Set("params", string(jsonParameters))
+ if q.Chunked {
+ params.Set("chunked", "true")
+ if q.ChunkSize > 0 {
+ params.Set("chunk_size", strconv.Itoa(q.ChunkSize))
+ }
+ }
+
+ if q.Precision != "" {
+ params.Set("epoch", q.Precision)
+ }
+ req.URL.RawQuery = params.Encode()
+
+ resp, err := c.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ // If we lack an X-Influxdb-Version header, then we didn't get a response from influxdb
+ // but instead from some other service. If the status code is also a 500+ code, then some
+ // downstream loadbalancer/proxy/etc had an issue and we should report that.
+ if resp.Header.Get("X-Influxdb-Version") == "" && resp.StatusCode >= http.StatusInternalServerError {
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil || len(body) == 0 {
+ return nil, fmt.Errorf("received status code %d from downstream server", resp.StatusCode)
+ }
+
+ return nil, fmt.Errorf("received status code %d from downstream server, with response body: %q", resp.StatusCode, body)
+ }
+
+ // If we get an unexpected content type, then it is also not from influx direct and therefore
+ // we want to know what we received and what status code was returned for debugging purposes.
+ if cType, _, _ := mime.ParseMediaType(resp.Header.Get("Content-Type")); cType != "application/json" {
+ // Read up to 1kb of the body to help identify downstream errors and limit the impact of things
+ // like a downstream server returning a large file.
+ body, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1024))
+ if err != nil || len(body) == 0 {
+ return nil, fmt.Errorf("expected json response, got empty body, with status: %v", resp.StatusCode)
+ }
+
+ return nil, fmt.Errorf("expected json response, got %q, with status: %v and response body: %q", cType, resp.StatusCode, body)
+ }
+
+ var response Response
+ if q.Chunked {
+ cr := NewChunkedResponse(resp.Body)
+ for {
+ r, err := cr.NextResponse()
+ if err != nil {
+ // If we got an error while decoding the response, send that back.
+ return nil, err
+ }
+
+ if r == nil {
+ break
+ }
+
+ response.Results = append(response.Results, r.Results...)
+ if r.Err != "" {
+ response.Err = r.Err
+ break
+ }
+ }
+ } else {
+ dec := json.NewDecoder(resp.Body)
+ dec.UseNumber()
+ decErr := dec.Decode(&response)
+
+ // ignore this error if we got an invalid status code
+ if decErr != nil && decErr.Error() == "EOF" && resp.StatusCode != http.StatusOK {
+ decErr = nil
+ }
+ // If we got a valid decode error, send that back
+ if decErr != nil {
+ return nil, fmt.Errorf("unable to decode json: received status code %d err: %s", resp.StatusCode, decErr)
+ }
+ }
+
+ // If we don't have an error in our JSON response, and didn't get StatusOK,
+ // then send back an error.
+ if resp.StatusCode != http.StatusOK && response.Error() == nil {
+ return &response, fmt.Errorf("received status code %d from server", resp.StatusCode)
+ }
+ return &response, nil
+}
+
+// duplexReader reads responses and writes them to another writer while
+// satisfying the io.Reader interface.
+type duplexReader struct {
+ r io.Reader
+ w io.Writer
+}
+
+func (r *duplexReader) Read(p []byte) (n int, err error) {
+ n, err = r.r.Read(p)
+ if err == nil {
+ r.w.Write(p[:n])
+ }
+ return n, err
+}
+
+// ChunkedResponse represents a response from the server that
+// uses chunking to stream the output.
+type ChunkedResponse struct {
+ dec *json.Decoder
+ duplex *duplexReader
+ buf bytes.Buffer
+}
+
+// NewChunkedResponse reads a stream and produces responses from the stream.
+func NewChunkedResponse(r io.Reader) *ChunkedResponse {
+ resp := &ChunkedResponse{}
+ resp.duplex = &duplexReader{r: r, w: &resp.buf}
+ resp.dec = json.NewDecoder(resp.duplex)
+ resp.dec.UseNumber()
+ return resp
+}
+
+// NextResponse reads the next line of the stream and returns a response.
+func (r *ChunkedResponse) NextResponse() (*Response, error) {
+ var response Response
+
+ if err := r.dec.Decode(&response); err != nil {
+ if err == io.EOF {
+ return nil, nil
+ }
+ // A decoding error happened. This probably means the server crashed
+ // and sent a last-ditch error message to us. Ensure we have read the
+ // entirety of the connection to get any remaining error text.
+ io.Copy(ioutil.Discard, r.duplex)
+ return nil, errors.New(strings.TrimSpace(r.buf.String()))
+ }
+
+ r.buf.Reset()
+ return &response, nil
+}
diff --git a/vendor/github.com/influxdata/influxdb/client/v2/udp.go b/vendor/github.com/influxdata/influxdb/client/v2/udp.go
new file mode 100644
index 000000000..779a28b33
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/client/v2/udp.go
@@ -0,0 +1,112 @@
+package client
+
+import (
+ "fmt"
+ "io"
+ "net"
+ "time"
+)
+
+const (
+ // UDPPayloadSize is a reasonable default payload size for UDP packets that
+ // could be travelling over the internet.
+ UDPPayloadSize = 512
+)
+
+// UDPConfig is the config data needed to create a UDP Client.
+type UDPConfig struct {
+ // Addr should be of the form "host:port"
+ // or "[ipv6-host%zone]:port".
+ Addr string
+
+ // PayloadSize is the maximum size of a UDP client message, optional.
+ // Tune this based on your network. Defaults to UDPPayloadSize.
+ PayloadSize int
+}
+
+// NewUDPClient returns a client interface for writing to an InfluxDB UDP
+// service from the given config.
+func NewUDPClient(conf UDPConfig) (Client, error) {
+ var udpAddr *net.UDPAddr
+ udpAddr, err := net.ResolveUDPAddr("udp", conf.Addr)
+ if err != nil {
+ return nil, err
+ }
+
+ conn, err := net.DialUDP("udp", nil, udpAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ payloadSize := conf.PayloadSize
+ if payloadSize == 0 {
+ payloadSize = UDPPayloadSize
+ }
+
+ return &udpclient{
+ conn: conn,
+ payloadSize: payloadSize,
+ }, nil
+}
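+
+// exampleUDPWrite is an illustrative sketch added for this document and not
+// part of the vendored upstream file: it creates a UDP client and writes a
+// single batch through it. The address, payload size and point values are
+// hypothetical.
+func exampleUDPWrite() {
+ c, err := NewUDPClient(UDPConfig{Addr: "localhost:8089", PayloadSize: 512})
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ defer c.Close()
+ bp, err := NewBatchPoints(BatchPointsConfig{Precision: "s"})
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ pt, err := NewPoint("cpu",
+ map[string]string{"host": "server01"},
+ map[string]interface{}{"value": 0.64},
+ time.Now())
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ bp.AddPoint(pt)
+ if err := c.Write(bp); err != nil {
+ fmt.Println(err)
+ }
+}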
+
+// Close releases the udpclient's resources.
+func (uc *udpclient) Close() error {
+ return uc.conn.Close()
+}
+
+type udpclient struct {
+ conn io.WriteCloser
+ payloadSize int
+}
+
+func (uc *udpclient) Write(bp BatchPoints) error {
+ var b = make([]byte, 0, uc.payloadSize) // initial buffer size, it will grow as needed
+ var d, _ = time.ParseDuration("1" + bp.Precision())
+
+ var delayedError error
+
+ var checkBuffer = func(n int) {
+ if len(b) > 0 && len(b)+n > uc.payloadSize {
+ if _, err := uc.conn.Write(b); err != nil {
+ delayedError = err
+ }
+ b = b[:0]
+ }
+ }
+
+ for _, p := range bp.Points() {
+ p.pt.Round(d)
+ pointSize := p.pt.StringSize() + 1 // include newline in size
+ //point := p.pt.RoundedString(d) + "\n"
+
+ checkBuffer(pointSize)
+
+ if p.Time().IsZero() || pointSize <= uc.payloadSize {
+ b = p.pt.AppendString(b)
+ b = append(b, '\n')
+ continue
+ }
+
+ points := p.pt.Split(uc.payloadSize - 1) // account for newline character
+ for _, sp := range points {
+ checkBuffer(sp.StringSize() + 1)
+ b = sp.AppendString(b)
+ b = append(b, '\n')
+ }
+ }
+
+ if len(b) > 0 {
+ if _, err := uc.conn.Write(b); err != nil {
+ return err
+ }
+ }
+ return delayedError
+}
+
+func (uc *udpclient) Query(q Query) (*Response, error) {
+ return nil, fmt.Errorf("Querying via UDP is not supported")
+}
+
+func (uc *udpclient) Ping(timeout time.Duration) (time.Duration, string, error) {
+ return 0, "", nil
+}
diff --git a/vendor/github.com/influxdata/influxdb/models/consistency.go b/vendor/github.com/influxdata/influxdb/models/consistency.go
new file mode 100644
index 000000000..2a3269bca
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/consistency.go
@@ -0,0 +1,48 @@
+package models
+
+import (
+ "errors"
+ "strings"
+)
+
+// ConsistencyLevel represent a required replication criteria before a write can
+// be returned as successful.
+//
+// The consistency level is handled in open-source InfluxDB but only applicable to clusters.
+type ConsistencyLevel int
+
+const (
+ // ConsistencyLevelAny allows for hinted handoff, potentially no write happened yet.
+ ConsistencyLevelAny ConsistencyLevel = iota
+
+ // ConsistencyLevelOne requires at least one data node acknowledged a write.
+ ConsistencyLevelOne
+
+ // ConsistencyLevelQuorum requires a quorum of data nodes to acknowledge a write.
+ ConsistencyLevelQuorum
+
+ // ConsistencyLevelAll requires all data nodes to acknowledge a write.
+ ConsistencyLevelAll
+)
+
+var (
+ // ErrInvalidConsistencyLevel is returned when parsing the string version
+ // of a consistency level.
+ ErrInvalidConsistencyLevel = errors.New("invalid consistency level")
+)
+
+// ParseConsistencyLevel converts a consistency level string to the corresponding ConsistencyLevel const.
+func ParseConsistencyLevel(level string) (ConsistencyLevel, error) {
+ switch strings.ToLower(level) {
+ case "any":
+ return ConsistencyLevelAny, nil
+ case "one":
+ return ConsistencyLevelOne, nil
+ case "quorum":
+ return ConsistencyLevelQuorum, nil
+ case "all":
+ return ConsistencyLevelAll, nil
+ default:
+ return 0, ErrInvalidConsistencyLevel
+ }
+}
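+
+// exampleRequireQuorum is an illustrative sketch added for this document and
+// not part of the vendored upstream file: it parses a consistency level
+// string and reports whether it resolves to ConsistencyLevelQuorum. It avoids
+// extra imports so it fits this file's import set.
+func exampleRequireQuorum(level string) (bool, error) {
+ l, err := ParseConsistencyLevel(level)
+ if err != nil {
+ return false, err
+ }
+ return l == ConsistencyLevelQuorum, nil
+}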
diff --git a/vendor/github.com/influxdata/influxdb/models/inline_fnv.go b/vendor/github.com/influxdata/influxdb/models/inline_fnv.go
new file mode 100644
index 000000000..eec1ae8b0
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/inline_fnv.go
@@ -0,0 +1,32 @@
+package models // import "github.com/influxdata/influxdb/models"
+
+// from stdlib hash/fnv/fnv.go
+const (
+ prime64 = 1099511628211
+ offset64 = 14695981039346656037
+)
+
+// InlineFNV64a is an alloc-free port of the standard library's fnv64a.
+// See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function.
+type InlineFNV64a uint64
+
+// NewInlineFNV64a returns a new instance of InlineFNV64a.
+func NewInlineFNV64a() InlineFNV64a {
+ return offset64
+}
+
+// Write adds data to the running hash.
+func (s *InlineFNV64a) Write(data []byte) (int, error) {
+ hash := uint64(*s)
+ for _, c := range data {
+ hash ^= uint64(c)
+ hash *= prime64
+ }
+ *s = InlineFNV64a(hash)
+ return len(data), nil
+}
+
+// Sum64 returns the uint64 of the current resulting hash.
+func (s *InlineFNV64a) Sum64() uint64 {
+ return uint64(*s)
+}
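+
+// exampleHashSeriesKey is an illustrative sketch added for this document and
+// not part of the vendored upstream file: it hashes a series key with
+// InlineFNV64a, the allocation-free FNV-1a variant defined above. It uses no
+// imports so it fits this file's import set.
+func exampleHashSeriesKey(key []byte) uint64 {
+ h := NewInlineFNV64a()
+ h.Write(key) // Write always succeeds; the error return is always nil.
+ return h.Sum64()
+}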
diff --git a/vendor/github.com/influxdata/influxdb/models/inline_strconv_parse.go b/vendor/github.com/influxdata/influxdb/models/inline_strconv_parse.go
new file mode 100644
index 000000000..8db483738
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/inline_strconv_parse.go
@@ -0,0 +1,44 @@
+package models // import "github.com/influxdata/influxdb/models"
+
+import (
+ "reflect"
+ "strconv"
+ "unsafe"
+)
+
+// parseIntBytes is a zero-alloc wrapper around strconv.ParseInt.
+func parseIntBytes(b []byte, base int, bitSize int) (i int64, err error) {
+ s := unsafeBytesToString(b)
+ return strconv.ParseInt(s, base, bitSize)
+}
+
+// parseUintBytes is a zero-alloc wrapper around strconv.ParseUint.
+func parseUintBytes(b []byte, base int, bitSize int) (i uint64, err error) {
+ s := unsafeBytesToString(b)
+ return strconv.ParseUint(s, base, bitSize)
+}
+
+// parseFloatBytes is a zero-alloc wrapper around strconv.ParseFloat.
+func parseFloatBytes(b []byte, bitSize int) (float64, error) {
+ s := unsafeBytesToString(b)
+ return strconv.ParseFloat(s, bitSize)
+}
+
+// parseBoolBytes is a zero-alloc wrapper around strconv.ParseBool.
+func parseBoolBytes(b []byte) (bool, error) {
+ return strconv.ParseBool(unsafeBytesToString(b))
+}
+
+// unsafeBytesToString converts a []byte to a string without a heap allocation.
+//
+// It is unsafe, and is intended to prepare input to short-lived functions
+// that require strings.
+func unsafeBytesToString(in []byte) string {
+ src := *(*reflect.SliceHeader)(unsafe.Pointer(&in))
+ dst := reflect.StringHeader{
+ Data: src.Data,
+ Len: src.Len,
+ }
+ s := *(*string)(unsafe.Pointer(&dst))
+ return s
+}
diff --git a/vendor/github.com/influxdata/influxdb/models/points.go b/vendor/github.com/influxdata/influxdb/models/points.go
new file mode 100644
index 000000000..ad80a816b
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/points.go
@@ -0,0 +1,2337 @@
+// Package models implements basic objects used throughout the TICK stack.
+package models // import "github.com/influxdata/influxdb/models"
+
+import (
+ "bytes"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/influxdata/influxdb/pkg/escape"
+)
+
+var (
+ measurementEscapeCodes = map[byte][]byte{
+ ',': []byte(`\,`),
+ ' ': []byte(`\ `),
+ }
+
+ tagEscapeCodes = map[byte][]byte{
+ ',': []byte(`\,`),
+ ' ': []byte(`\ `),
+ '=': []byte(`\=`),
+ }
+
+ // ErrPointMustHaveAField is returned when operating on a point that does not have any fields.
+ ErrPointMustHaveAField = errors.New("point without fields is unsupported")
+
+ // ErrInvalidNumber is returned when a number is expected but not provided.
+ ErrInvalidNumber = errors.New("invalid number")
+
+ // ErrInvalidPoint is returned when a point cannot be parsed correctly.
+ ErrInvalidPoint = errors.New("point is invalid")
+)
+
+const (
+ // MaxKeyLength is the largest allowed size of the combined measurement and tag keys.
+ MaxKeyLength = 65535
+)
+
+// enableUint64Support will enable uint64 support if set to true.
+var enableUint64Support = false
+
+// EnableUintSupport manually enables uint support for the point parser.
+// This function will be removed in the future and only exists for unit tests during the
+// transition.
+func EnableUintSupport() {
+ enableUint64Support = true
+}
+
+// Point defines the values that will be written to the database.
+type Point interface {
+ // Name returns the measurement name for the point.
+ Name() []byte
+
+ // SetName updates the measurement name for the point.
+ SetName(string)
+
+ // Tags returns the tag set for the point.
+ Tags() Tags
+
+ // AddTag adds or replaces a tag value for a point.
+ AddTag(key, value string)
+
+ // SetTags replaces the tags for the point.
+ SetTags(tags Tags)
+
+ // HasTag returns true if the tag exists for the point.
+ HasTag(tag []byte) bool
+
+ // Fields returns the fields for the point.
+ Fields() (Fields, error)
+
+ // Time return the timestamp for the point.
+ Time() time.Time
+
+ // SetTime updates the timestamp for the point.
+ SetTime(t time.Time)
+
+ // UnixNano returns the timestamp of the point as nanoseconds since Unix epoch.
+ UnixNano() int64
+
+ // HashID returns a non-cryptographic checksum of the point's key.
+ HashID() uint64
+
+ // Key returns the key (measurement joined with tags) of the point.
+ Key() []byte
+
+ // String returns a string representation of the point. If there is a
+ // timestamp associated with the point then it will be specified with the default
+ // precision of nanoseconds.
+ String() string
+
+ // MarshalBinary returns a binary representation of the point.
+ MarshalBinary() ([]byte, error)
+
+ // PrecisionString returns a string representation of the point. If there
+ // is a timestamp associated with the point then it will be specified in the
+ // given unit.
+ PrecisionString(precision string) string
+
+ // RoundedString returns a string representation of the point. If there
+ // is a timestamp associated with the point, then it will be rounded to the
+ // given duration.
+ RoundedString(d time.Duration) string
+
+ // Split will attempt to return multiple points with the same timestamp whose
+ // string representations are no longer than size. Points with a single field or
+ // a point without a timestamp may exceed the requested size.
+ Split(size int) []Point
+
+ // Round will round the timestamp of the point to the given duration.
+ Round(d time.Duration)
+
+ // StringSize returns the length of the string that would be returned by String().
+ StringSize() int
+
+ // AppendString appends the result of String() to the provided buffer and returns
+ // the result, potentially reducing string allocations.
+ AppendString(buf []byte) []byte
+
+ // FieldIterator returns a FieldIterator that can be used to traverse the
+ // fields of a point without constructing the in-memory map.
+ FieldIterator() FieldIterator
+}
+
+// FieldType represents the type of a field.
+type FieldType int
+
+const (
+ // Integer indicates the field's type is integer.
+ Integer FieldType = iota
+
+ // Float indicates the field's type is float.
+ Float
+
+ // Boolean indicates the field's type is boolean.
+ Boolean
+
+ // String indicates the field's type is string.
+ String
+
+ // Empty is used to indicate that there is no field.
+ Empty
+
+ // Unsigned indicates the field's type is an unsigned integer.
+ Unsigned
+)
+
+// FieldIterator provides a low-allocation interface to iterate through a point's fields.
+type FieldIterator interface {
+ // Next indicates whether there are any fields remaining.
+ Next() bool
+
+ // FieldKey returns the key of the current field.
+ FieldKey() []byte
+
+ // Type returns the FieldType of the current field.
+ Type() FieldType
+
+ // StringValue returns the string value of the current field.
+ StringValue() string
+
+ // IntegerValue returns the integer value of the current field.
+ IntegerValue() (int64, error)
+
+ // UnsignedValue returns the unsigned value of the current field.
+ UnsignedValue() (uint64, error)
+
+ // BooleanValue returns the boolean value of the current field.
+ BooleanValue() (bool, error)
+
+ // FloatValue returns the float value of the current field.
+ FloatValue() (float64, error)
+
+ // Reset resets the iterator to its initial state.
+ Reset()
+}
+
+// Points represents a sortable list of points by timestamp.
+type Points []Point
+
+// Len implements sort.Interface.
+func (a Points) Len() int { return len(a) }
+
+// Less implements sort.Interface.
+func (a Points) Less(i, j int) bool { return a[i].Time().Before(a[j].Time()) }
+
+// Swap implements sort.Interface.
+func (a Points) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// point is the default implementation of Point.
+type point struct {
+ time time.Time
+
+ // text encoding of measurement and tags.
+ // key must always be stored sorted by tags; if the original line was not sorted,
+ // we need to re-sort it
+ key []byte
+
+ // text encoding of field data
+ fields []byte
+
+ // text encoding of timestamp
+ ts []byte
+
+ // cached version of parsed fields from data
+ cachedFields map[string]interface{}
+
+ // cached version of parsed name from key
+ cachedName string
+
+ // cached version of parsed tags
+ cachedTags Tags
+
+ it fieldIterator
+}
+
+// type assertions
+var (
+ _ Point = (*point)(nil)
+ _ FieldIterator = (*point)(nil)
+)
+
+const (
+ // the number of characters for the largest possible int64 (9223372036854775807)
+ maxInt64Digits = 19
+
+ // the number of characters for the smallest possible int64 (-9223372036854775808)
+ minInt64Digits = 20
+
+ // the number of characters for the largest possible uint64 (18446744073709551615)
+ maxUint64Digits = 20
+
+ // the number of characters required for the largest float64 before a range check
+ // would occur during parsing
+ maxFloat64Digits = 25
+
+ // the number of characters required for the smallest float64 before a range check
+ // would occur during parsing
+ minFloat64Digits = 27
+)
+
+// ParsePoints returns a slice of Points from a text representation of a point
+// with each point separated by newlines. If any points fail to parse, a non-nil error
+// will be returned in addition to the points that parsed successfully.
+func ParsePoints(buf []byte) ([]Point, error) {
+ return ParsePointsWithPrecision(buf, time.Now().UTC(), "n")
+}
+
+// ParsePointsString is identical to ParsePoints but accepts a string.
+func ParsePointsString(buf string) ([]Point, error) {
+ return ParsePoints([]byte(buf))
+}
+
+// ParseKey returns the measurement name and tags from a point.
+//
+// NOTE: to minimize heap allocations, the returned Tags will refer to subslices of buf.
+// This can have the unintended effect of preventing buf from being garbage collected.
+func ParseKey(buf []byte) (string, Tags) {
+ meas, tags := ParseKeyBytes(buf)
+ return string(meas), tags
+}
+
+func ParseKeyBytes(buf []byte) ([]byte, Tags) {
+ // Ignore the error because scanMeasurement returns "missing fields" which we ignore
+ // when just parsing a key
+ state, i, _ := scanMeasurement(buf, 0)
+
+ var tags Tags
+ if state == tagKeyState {
+ tags = parseTags(buf)
+ // scanMeasurement returns the location of the comma if there are tags, strip that off
+ return buf[:i-1], tags
+ }
+ return buf[:i], tags
+}
+
+func ParseTags(buf []byte) Tags {
+ return parseTags(buf)
+}
+
+func ParseName(buf []byte) ([]byte, error) {
+ // Ignore the error because scanMeasurement returns "missing fields" which we ignore
+ // when just parsing a key
+ state, i, _ := scanMeasurement(buf, 0)
+ if state == tagKeyState {
+ return buf[:i-1], nil
+ }
+ return buf[:i], nil
+}
+
+// ParsePointsWithPrecision is similar to ParsePoints, but allows the
+// caller to provide a precision for time.
+//
+// NOTE: to minimize heap allocations, the returned Points will refer to subslices of buf.
+// This can have the unintended effect of preventing buf from being garbage collected.
+func ParsePointsWithPrecision(buf []byte, defaultTime time.Time, precision string) ([]Point, error) {
+ points := make([]Point, 0, bytes.Count(buf, []byte{'\n'})+1)
+ var (
+ pos int
+ block []byte
+ failed []string
+ )
+ for pos < len(buf) {
+ pos, block = scanLine(buf, pos)
+ pos++
+
+ if len(block) == 0 {
+ continue
+ }
+
+ // lines which start with '#' are comments
+ start := skipWhitespace(block, 0)
+
+ // If line is all whitespace, just skip it
+ if start >= len(block) {
+ continue
+ }
+
+ if block[start] == '#' {
+ continue
+ }
+
+ // strip the newline if one is present
+ if block[len(block)-1] == '\n' {
+ block = block[:len(block)-1]
+ }
+
+ pt, err := parsePoint(block[start:], defaultTime, precision)
+ if err != nil {
+ failed = append(failed, fmt.Sprintf("unable to parse '%s': %v", string(block[start:]), err))
+ } else {
+ points = append(points, pt)
+ }
+
+ }
+ if len(failed) > 0 {
+ return points, fmt.Errorf("%s", strings.Join(failed, "\n"))
+ }
+ return points, nil
+
+}
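+
+// exampleParseLineProtocol is an illustrative sketch added for this document
+// and not part of the vendored upstream file: it parses two hypothetical
+// line-protocol points with second-precision timestamps, falling back to the
+// current time for points without a timestamp.
+func exampleParseLineProtocol() ([]Point, error) {
+ buf := []byte("cpu,host=server01 value=0.64 1502132086\n" +
+ "mem,host=server01 used=2048i 1502132086\n")
+ return ParsePointsWithPrecision(buf, time.Now().UTC(), "s")
+}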
+
+func parsePoint(buf []byte, defaultTime time.Time, precision string) (Point, error) {
+ // scan the first block which is measurement[,tag1=value1,tag2=value2...]
+ pos, key, err := scanKey(buf, 0)
+ if err != nil {
+ return nil, err
+ }
+
+ // measurement name is required
+ if len(key) == 0 {
+ return nil, fmt.Errorf("missing measurement")
+ }
+
+ if len(key) > MaxKeyLength {
+ return nil, fmt.Errorf("max key length exceeded: %v > %v", len(key), MaxKeyLength)
+ }
+
+ // scan the second block, which is field1=value1[,field2=value2,...]
+ pos, fields, err := scanFields(buf, pos)
+ if err != nil {
+ return nil, err
+ }
+
+ // at least one field is required
+ if len(fields) == 0 {
+ return nil, fmt.Errorf("missing fields")
+ }
+
+ var maxKeyErr error
+ walkFields(fields, func(k, v []byte) bool {
+ if sz := seriesKeySize(key, k); sz > MaxKeyLength {
+ maxKeyErr = fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength)
+ return false
+ }
+ return true
+ })
+
+ if maxKeyErr != nil {
+ return nil, maxKeyErr
+ }
+
+ // scan the last block which is an optional integer timestamp
+ pos, ts, err := scanTime(buf, pos)
+ if err != nil {
+ return nil, err
+ }
+
+ pt := &point{
+ key: key,
+ fields: fields,
+ ts: ts,
+ }
+
+ if len(ts) == 0 {
+ pt.time = defaultTime
+ pt.SetPrecision(precision)
+ } else {
+ ts, err := parseIntBytes(ts, 10, 64)
+ if err != nil {
+ return nil, err
+ }
+ pt.time, err = SafeCalcTime(ts, precision)
+ if err != nil {
+ return nil, err
+ }
+
+ // Determine if there are illegal non-whitespace characters after the
+ // timestamp block.
+ for pos < len(buf) {
+ if buf[pos] != ' ' {
+ return nil, ErrInvalidPoint
+ }
+ pos++
+ }
+ }
+ return pt, nil
+}
+
+// GetPrecisionMultiplier will return a multiplier for the precision specified.
+func GetPrecisionMultiplier(precision string) int64 {
+ d := time.Nanosecond
+ switch precision {
+ case "u":
+ d = time.Microsecond
+ case "ms":
+ d = time.Millisecond
+ case "s":
+ d = time.Second
+ case "m":
+ d = time.Minute
+ case "h":
+ d = time.Hour
+ }
+ return int64(d)
+}
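+
+// exampleScaleTimestamp is an illustrative sketch added for this document and
+// not part of the vendored upstream file: it converts an epoch timestamp in
+// the given precision to nanoseconds using GetPrecisionMultiplier.
+func exampleScaleTimestamp(epoch int64, precision string) int64 {
+ return epoch * GetPrecisionMultiplier(precision)
+}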
+
+// scanKey scans buf starting at i for the measurement and tag portion of the point.
+// It returns the ending position and the byte slice of key within buf. If there
+// are tags, they will be sorted if they are not already.
+func scanKey(buf []byte, i int) (int, []byte, error) {
+ start := skipWhitespace(buf, i)
+
+ i = start
+
+ // Determines whether the tags are sorted; assume they are
+ sorted := true
+
+ // indices holds the indexes within buf of the start of each tag. For example,
+ // a buf of 'cpu,host=a,region=b,zone=c' would have an indices slice of [4,11,20],
+ // which indicates that the first tag starts at buf[4], the second at buf[11], and
+ // the last at buf[20].
+ indices := make([]int, 100)
+
+ // commas tracks how many commas we've seen, so we know how many values in
+ // indices are in use. Since indices is an arbitrarily large slice, we need
+ // to know how many of its entries are actually populated.
+ commas := 0
+
+ // First scan the Point's measurement.
+ state, i, err := scanMeasurement(buf, i)
+ if err != nil {
+ return i, buf[start:i], err
+ }
+
+ // Optionally scan tags if needed.
+ if state == tagKeyState {
+ i, commas, indices, err = scanTags(buf, i, indices)
+ if err != nil {
+ return i, buf[start:i], err
+ }
+ }
+
+ // Now that we know where the key region is within buf, and the location of the tags,
+ // we need to determine whether duplicate tags exist and whether the tags are sorted.
+ // This iterates over the list, comparing each tag in the sequence with the next.
+ for j := 0; j < commas-1; j++ {
+ // get the left and right tags
+ _, left := scanTo(buf[indices[j]:indices[j+1]-1], 0, '=')
+ _, right := scanTo(buf[indices[j+1]:indices[j+2]-1], 0, '=')
+
+ // If left is greater than right, the tags are not sorted. We do not have to
+ // continue because the short path no longer works.
+ // If the tags are equal, then there are duplicate tags, and we should abort.
+ // If the tags are not sorted, this pass may not find duplicate tags and we
+ // need to do a more exhaustive search later.
+ if cmp := bytes.Compare(left, right); cmp > 0 {
+ sorted = false
+ break
+ } else if cmp == 0 {
+ return i, buf[start:i], fmt.Errorf("duplicate tags")
+ }
+ }
+
+ // If the tags are not sorted, then sort them. This sort is inline and
+ // uses the tag indices we created earlier. The actual buffer is not sorted;
+ // the indices use the buffer for value comparison. After the indices are sorted,
+ // the buffer is reconstructed from the sorted indices.
+ if !sorted && commas > 0 {
+ // Get the measurement name for later
+ measurement := buf[start : indices[0]-1]
+
+ // Sort the indices
+ indices := indices[:commas]
+ insertionSort(0, commas, buf, indices)
+
+ // Create a new key using the measurement and sorted indices
+ b := make([]byte, len(buf[start:i]))
+ pos := copy(b, measurement)
+ for _, i := range indices {
+ b[pos] = ','
+ pos++
+ _, v := scanToSpaceOr(buf, i, ',')
+ pos += copy(b[pos:], v)
+ }
+
+ // Check again for duplicate tags now that the tags are sorted.
+ for j := 0; j < commas-1; j++ {
+ // get the left and right tags
+ _, left := scanTo(buf[indices[j]:], 0, '=')
+ _, right := scanTo(buf[indices[j+1]:], 0, '=')
+
+ // If the tags are equal, then there are duplicate tags, and we should abort.
+ // If the tags are not sorted, this pass may not find duplicate tags and we
+ // need to do a more exhaustive search later.
+ if bytes.Equal(left, right) {
+ return i, b, fmt.Errorf("duplicate tags")
+ }
+ }
+
+ return i, b, nil
+ }
+
+ return i, buf[start:i], nil
+}
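To illustrate the normalization above, a small hypothetical helper; it would have to live inside the models package (for example in a test file there) since scanKey is unexported, and the measurement and tag names are made up:

package models

import "fmt"

// demoScanKey shows scanKey re-sorting an unsorted tag set in the key.
func demoScanKey() {
	buf := []byte("cpu,zone=us,host=a value=1")
	_, key, err := scanKey(buf, 0)
	fmt.Println(string(key), err) // cpu,host=a,zone=us <nil>
}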
+
+// The following constants allow us to specify which state to move to
+// next, when scanning sections of a Point.
+const (
+ tagKeyState = iota
+ tagValueState
+ fieldsState
+)
+
+// scanMeasurement examines the measurement part of a Point, returning
+// the next state to move to, and the current location in the buffer.
+func scanMeasurement(buf []byte, i int) (int, int, error) {
+ // Check first byte of measurement, anything except a comma is fine.
+ // It can't be a space, since whitespace is stripped prior to this
+ // function call.
+ if i >= len(buf) || buf[i] == ',' {
+ return -1, i, fmt.Errorf("missing measurement")
+ }
+
+ for {
+ i++
+ if i >= len(buf) {
+ // cpu
+ return -1, i, fmt.Errorf("missing fields")
+ }
+
+ if buf[i-1] == '\\' {
+ // Skip character (it's escaped).
+ continue
+ }
+
+ // Unescaped comma; move onto scanning the tags.
+ if buf[i] == ',' {
+ return tagKeyState, i + 1, nil
+ }
+
+ // Unescaped space; move onto scanning the fields.
+ if buf[i] == ' ' {
+ // cpu value=1.0
+ return fieldsState, i, nil
+ }
+ }
+}
+
+// scanTags examines all the tags in a Point, keeping track of and
+// returning the updated indices slice, the number of commas and the location
+// in buf where the Point fields begin.
+func scanTags(buf []byte, i int, indices []int) (int, int, []int, error) {
+ var (
+ err error
+ commas int
+ state = tagKeyState
+ )
+
+ for {
+ switch state {
+ case tagKeyState:
+ // Grow our indices slice if we have too many tags.
+ if commas >= len(indices) {
+				newIndices := make([]int, cap(indices)*2)
+				copy(newIndices, indices)
+				indices = newIndices
+ }
+ indices[commas] = i
+ commas++
+
+ i, err = scanTagsKey(buf, i)
+ state = tagValueState // tag value always follows a tag key
+ case tagValueState:
+ state, i, err = scanTagsValue(buf, i)
+ case fieldsState:
+ indices[commas] = i + 1
+ return i, commas, indices, nil
+ }
+
+ if err != nil {
+ return i, commas, indices, err
+ }
+ }
+}
+
+// scanTagsKey scans each character in a tag key.
+func scanTagsKey(buf []byte, i int) (int, error) {
+ // First character of the key.
+ if i >= len(buf) || buf[i] == ' ' || buf[i] == ',' || buf[i] == '=' {
+ // cpu,{'', ' ', ',', '='}
+ return i, fmt.Errorf("missing tag key")
+ }
+
+ // Examine each character in the tag key until we hit an unescaped
+ // equals (the tag value), or we hit an error (i.e., unescaped
+ // space or comma).
+ for {
+ i++
+
+ // Either we reached the end of the buffer or we hit an
+ // unescaped comma or space.
+ if i >= len(buf) ||
+ ((buf[i] == ' ' || buf[i] == ',') && buf[i-1] != '\\') {
+ // cpu,tag{'', ' ', ','}
+ return i, fmt.Errorf("missing tag value")
+ }
+
+ if buf[i] == '=' && buf[i-1] != '\\' {
+ // cpu,tag=
+ return i + 1, nil
+ }
+ }
+}
+
+// scanTagsValue scans each character in a tag value.
+func scanTagsValue(buf []byte, i int) (int, int, error) {
+ // Tag value cannot be empty.
+ if i >= len(buf) || buf[i] == ',' || buf[i] == ' ' {
+ // cpu,tag={',', ' '}
+ return -1, i, fmt.Errorf("missing tag value")
+ }
+
+ // Examine each character in the tag value until we hit an unescaped
+ // comma (move onto next tag key), an unescaped space (move onto
+ // fields), or we error out.
+ for {
+ i++
+ if i >= len(buf) {
+ // cpu,tag=value
+ return -1, i, fmt.Errorf("missing fields")
+ }
+
+ // An unescaped equals sign is an invalid tag value.
+ if buf[i] == '=' && buf[i-1] != '\\' {
+ // cpu,tag={'=', 'fo=o'}
+ return -1, i, fmt.Errorf("invalid tag format")
+ }
+
+ if buf[i] == ',' && buf[i-1] != '\\' {
+ // cpu,tag=foo,
+ return tagKeyState, i + 1, nil
+ }
+
+ // cpu,tag=foo value=1.0
+ // cpu, tag=foo\= value=1.0
+ if buf[i] == ' ' && buf[i-1] != '\\' {
+ return fieldsState, i, nil
+ }
+ }
+}
+
+func insertionSort(l, r int, buf []byte, indices []int) {
+ for i := l + 1; i < r; i++ {
+ for j := i; j > l && less(buf, indices, j, j-1); j-- {
+ indices[j], indices[j-1] = indices[j-1], indices[j]
+ }
+ }
+}
+
+func less(buf []byte, indices []int, i, j int) bool {
+ // This grabs the tag names for i & j, it ignores the values
+ _, a := scanTo(buf, indices[i], '=')
+ _, b := scanTo(buf, indices[j], '=')
+ return bytes.Compare(a, b) < 0
+}
+
+// scanFields scans buf, starting at i for the fields section of a point. It returns
+// the ending position and the byte slice of the fields within buf.
+func scanFields(buf []byte, i int) (int, []byte, error) {
+ start := skipWhitespace(buf, i)
+ i = start
+ quoted := false
+
+ // tracks how many '=' we've seen
+ equals := 0
+
+ // tracks how many commas we've seen
+ commas := 0
+
+ for {
+ // reached the end of buf?
+ if i >= len(buf) {
+ break
+ }
+
+ // escaped characters?
+ if buf[i] == '\\' && i+1 < len(buf) {
+ i += 2
+ continue
+ }
+
+		// If the value is quoted, scan until we get to the end quote.
+		// Only quotes in the field value are treated this way, since quotes are
+		// not significant in the field key.
+ if buf[i] == '"' && equals > commas {
+ quoted = !quoted
+ i++
+ continue
+ }
+
+		// If we see an =, ensure that there is at least one char before and after it
+ if buf[i] == '=' && !quoted {
+ equals++
+
+ // check for "... =123" but allow "a\ =123"
+ if buf[i-1] == ' ' && buf[i-2] != '\\' {
+ return i, buf[start:i], fmt.Errorf("missing field key")
+ }
+
+ // check for "...a=123,=456" but allow "a=123,a\,=456"
+ if buf[i-1] == ',' && buf[i-2] != '\\' {
+ return i, buf[start:i], fmt.Errorf("missing field key")
+ }
+
+ // check for "... value="
+ if i+1 >= len(buf) {
+ return i, buf[start:i], fmt.Errorf("missing field value")
+ }
+
+ // check for "... value=,value2=..."
+ if buf[i+1] == ',' || buf[i+1] == ' ' {
+ return i, buf[start:i], fmt.Errorf("missing field value")
+ }
+
+ if isNumeric(buf[i+1]) || buf[i+1] == '-' || buf[i+1] == 'N' || buf[i+1] == 'n' {
+ var err error
+ i, err = scanNumber(buf, i+1)
+ if err != nil {
+ return i, buf[start:i], err
+ }
+ continue
+ }
+ // If next byte is not a double-quote, the value must be a boolean
+ if buf[i+1] != '"' {
+ var err error
+ i, _, err = scanBoolean(buf, i+1)
+ if err != nil {
+ return i, buf[start:i], err
+ }
+ continue
+ }
+ }
+
+ if buf[i] == ',' && !quoted {
+ commas++
+ }
+
+ // reached end of block?
+ if buf[i] == ' ' && !quoted {
+ break
+ }
+ i++
+ }
+
+ if quoted {
+ return i, buf[start:i], fmt.Errorf("unbalanced quotes")
+ }
+
+	// check that all field sections had keys and values (e.g. prevent "a=1,b")
+ if equals == 0 || commas != equals-1 {
+ return i, buf[start:i], fmt.Errorf("invalid field format")
+ }
+
+ return i, buf[start:i], nil
+}
+
+// scanTime scans buf, starting at i, for the time section of a point. It
+// returns the ending position and the byte slice of the timestamp within buf,
+// and an error if the timestamp is not in the correct numeric format.
+func scanTime(buf []byte, i int) (int, []byte, error) {
+ start := skipWhitespace(buf, i)
+ i = start
+
+ for {
+ // reached the end of buf?
+ if i >= len(buf) {
+ break
+ }
+
+ // Reached end of block or trailing whitespace?
+ if buf[i] == '\n' || buf[i] == ' ' {
+ break
+ }
+
+ // Handle negative timestamps
+ if i == start && buf[i] == '-' {
+ i++
+ continue
+ }
+
+		// Timestamps should be integers; verify that here so we don't need
+		// to actually parse the timestamp until it is needed.
+ if buf[i] < '0' || buf[i] > '9' {
+ return i, buf[start:i], fmt.Errorf("bad timestamp")
+ }
+ i++
+ }
+ return i, buf[start:i], nil
+}
+
+func isNumeric(b byte) bool {
+ return (b >= '0' && b <= '9') || b == '.'
+}
+
+// scanNumber returns the end position within buf, starting at i, after
+// scanning over buf for an integer or float. It returns an
+// error if an invalid number is scanned.
+func scanNumber(buf []byte, i int) (int, error) {
+ start := i
+ var isInt, isUnsigned bool
+
+ // Is negative number?
+ if i < len(buf) && buf[i] == '-' {
+ i++
+ // There must be more characters now, as just '-' is illegal.
+ if i == len(buf) {
+ return i, ErrInvalidNumber
+ }
+ }
+
+	// tracks whether we've seen a decimal point
+ decimal := false
+
+	// indicates the number is a float in scientific notation
+ scientific := false
+
+ for {
+ if i >= len(buf) {
+ break
+ }
+
+ if buf[i] == ',' || buf[i] == ' ' {
+ break
+ }
+
+ if buf[i] == 'i' && i > start && !(isInt || isUnsigned) {
+ isInt = true
+ i++
+ continue
+ } else if buf[i] == 'u' && i > start && !(isInt || isUnsigned) {
+ isUnsigned = true
+ i++
+ continue
+ }
+
+ if buf[i] == '.' {
+ // Can't have more than 1 decimal (e.g. 1.1.1 should fail)
+ if decimal {
+ return i, ErrInvalidNumber
+ }
+ decimal = true
+ }
+
+ // `e` is valid for floats but not as the first char
+ if i > start && (buf[i] == 'e' || buf[i] == 'E') {
+ scientific = true
+ i++
+ continue
+ }
+
+ // + and - are only valid at this point if they follow an e (scientific notation)
+ if (buf[i] == '+' || buf[i] == '-') && (buf[i-1] == 'e' || buf[i-1] == 'E') {
+ i++
+ continue
+ }
+
+ // NaN is an unsupported value
+ if i+2 < len(buf) && (buf[i] == 'N' || buf[i] == 'n') {
+ return i, ErrInvalidNumber
+ }
+
+ if !isNumeric(buf[i]) {
+ return i, ErrInvalidNumber
+ }
+ i++
+ }
+
+ if (isInt || isUnsigned) && (decimal || scientific) {
+ return i, ErrInvalidNumber
+ }
+
+ numericDigits := i - start
+ if isInt {
+ numericDigits--
+ }
+ if decimal {
+ numericDigits--
+ }
+ if buf[start] == '-' {
+ numericDigits--
+ }
+
+ if numericDigits == 0 {
+ return i, ErrInvalidNumber
+ }
+
+	// It's more common that numbers will be within the min/max range for their type, but we need to prevent
+	// out of range numbers from being parsed successfully. This uses some simple heuristics to decide
+	// whether we should parse the number to the actual type. It does not do it all the time because it incurs
+	// extra allocations and we end up converting the type again when writing points to disk.
+ if isInt {
+ // Make sure the last char is an 'i' for integers (e.g. 9i10 is not valid)
+ if buf[i-1] != 'i' {
+ return i, ErrInvalidNumber
+ }
+		// Parse the int to check bounds, since the number of digits could put it outside the int64 range.
+		// We subtract 1 from the index to remove the `i` from our tests.
+ if len(buf[start:i-1]) >= maxInt64Digits || len(buf[start:i-1]) >= minInt64Digits {
+ if _, err := parseIntBytes(buf[start:i-1], 10, 64); err != nil {
+ return i, fmt.Errorf("unable to parse integer %s: %s", buf[start:i-1], err)
+ }
+ }
+ } else if isUnsigned {
+ // Return an error if uint64 support has not been enabled.
+ if !enableUint64Support {
+ return i, ErrInvalidNumber
+ }
+ // Make sure the last char is a 'u' for unsigned
+ if buf[i-1] != 'u' {
+ return i, ErrInvalidNumber
+ }
+ // Make sure the first char is not a '-' for unsigned
+ if buf[start] == '-' {
+ return i, ErrInvalidNumber
+ }
+		// Parse the uint to check bounds, since the number of digits could put it outside the uint64 range.
+		// We subtract 1 from the index to remove the `u` from our tests.
+ if len(buf[start:i-1]) >= maxUint64Digits {
+ if _, err := parseUintBytes(buf[start:i-1], 10, 64); err != nil {
+ return i, fmt.Errorf("unable to parse unsigned %s: %s", buf[start:i-1], err)
+ }
+ }
+ } else {
+ // Parse the float to check bounds if it's scientific or the number of digits could be larger than the max range
+ if scientific || len(buf[start:i]) >= maxFloat64Digits || len(buf[start:i]) >= minFloat64Digits {
+ if _, err := parseFloatBytes(buf[start:i], 10); err != nil {
+ return i, fmt.Errorf("invalid float")
+ }
+ }
+ }
+
+ return i, nil
+}
+
+// scanBoolean returns the end position within buf, starting at i, after
+// scanning over buf for a boolean. Valid values for a boolean are
+// t, T, true, TRUE, f, F, false, FALSE. It returns an error if an invalid
+// boolean is scanned.
+func scanBoolean(buf []byte, i int) (int, []byte, error) {
+ start := i
+
+ if i < len(buf) && (buf[i] != 't' && buf[i] != 'f' && buf[i] != 'T' && buf[i] != 'F') {
+ return i, buf[start:i], fmt.Errorf("invalid boolean")
+ }
+
+ i++
+ for {
+ if i >= len(buf) {
+ break
+ }
+
+ if buf[i] == ',' || buf[i] == ' ' {
+ break
+ }
+ i++
+ }
+
+ // Single char bool (t, T, f, F) is ok
+ if i-start == 1 {
+ return i, buf[start:i], nil
+ }
+
+ // length must be 4 for true or TRUE
+ if (buf[start] == 't' || buf[start] == 'T') && i-start != 4 {
+ return i, buf[start:i], fmt.Errorf("invalid boolean")
+ }
+
+ // length must be 5 for false or FALSE
+ if (buf[start] == 'f' || buf[start] == 'F') && i-start != 5 {
+ return i, buf[start:i], fmt.Errorf("invalid boolean")
+ }
+
+ // Otherwise
+ valid := false
+ switch buf[start] {
+ case 't':
+ valid = bytes.Equal(buf[start:i], []byte("true"))
+ case 'f':
+ valid = bytes.Equal(buf[start:i], []byte("false"))
+ case 'T':
+ valid = bytes.Equal(buf[start:i], []byte("TRUE")) || bytes.Equal(buf[start:i], []byte("True"))
+ case 'F':
+ valid = bytes.Equal(buf[start:i], []byte("FALSE")) || bytes.Equal(buf[start:i], []byte("False"))
+ }
+
+ if !valid {
+ return i, buf[start:i], fmt.Errorf("invalid boolean")
+ }
+
+ return i, buf[start:i], nil
+
+}
+
+// skipWhitespace returns the end position within buf, starting at i, after
+// scanning over spaces, tabs and null bytes.
+func skipWhitespace(buf []byte, i int) int {
+ for i < len(buf) {
+ if buf[i] != ' ' && buf[i] != '\t' && buf[i] != 0 {
+ break
+ }
+ i++
+ }
+ return i
+}
+
+// scanLine returns the end position in buf and the next line found within
+// buf.
+func scanLine(buf []byte, i int) (int, []byte) {
+ start := i
+ quoted := false
+ fields := false
+
+ // tracks how many '=' and commas we've seen
+ // this duplicates some of the functionality in scanFields
+ equals := 0
+ commas := 0
+ for {
+ // reached the end of buf?
+ if i >= len(buf) {
+ break
+ }
+
+ // skip past escaped characters
+ if buf[i] == '\\' && i+2 < len(buf) {
+ i += 2
+ continue
+ }
+
+ if buf[i] == ' ' {
+ fields = true
+ }
+
+		// Once we're in the fields section, count unquoted '=' and ',' characters
+		// and toggle the quoted state on double quotes (escaped characters were
+		// already skipped above).
+ if fields {
+ if !quoted && buf[i] == '=' {
+ i++
+ equals++
+ continue
+ } else if !quoted && buf[i] == ',' {
+ i++
+ commas++
+ continue
+ } else if buf[i] == '"' && equals > commas {
+ i++
+ quoted = !quoted
+ continue
+ }
+ }
+
+ if buf[i] == '\n' && !quoted {
+ break
+ }
+
+ i++
+ }
+
+ return i, buf[start:i]
+}
+
+// scanTo returns the end position in buf and the next consecutive block
+// of bytes, starting from i and ending at (but not including) the first
+// unescaped stop byte.
+func scanTo(buf []byte, i int, stop byte) (int, []byte) {
+ start := i
+ for {
+ // reached the end of buf?
+ if i >= len(buf) {
+ break
+ }
+
+ // Reached unescaped stop value?
+ if buf[i] == stop && (i == 0 || buf[i-1] != '\\') {
+ break
+ }
+ i++
+ }
+
+ return i, buf[start:i]
+}
+
+// scanToSpaceOr returns the end position in buf and the next consecutive block
+// of bytes, starting from i and ending at (but not including) the first
+// unescaped stop byte or space.
+func scanToSpaceOr(buf []byte, i int, stop byte) (int, []byte) {
+ start := i
+ if buf[i] == stop || buf[i] == ' ' {
+ return i, buf[start:i]
+ }
+
+ for {
+ i++
+ if buf[i-1] == '\\' {
+ continue
+ }
+
+ // reached the end of buf?
+ if i >= len(buf) {
+ return i, buf[start:i]
+ }
+
+ // reached end of block?
+ if buf[i] == stop || buf[i] == ' ' {
+ return i, buf[start:i]
+ }
+ }
+}
+
+func scanTagValue(buf []byte, i int) (int, []byte) {
+ start := i
+ for {
+ if i >= len(buf) {
+ break
+ }
+
+ if buf[i] == ',' && buf[i-1] != '\\' {
+ break
+ }
+ i++
+ }
+ if i > len(buf) {
+ return i, nil
+ }
+ return i, buf[start:i]
+}
+
+func scanFieldValue(buf []byte, i int) (int, []byte) {
+ start := i
+ quoted := false
+ for i < len(buf) {
+		// The only escape sequences in a field value are an escaped double quote and an escaped backslash
+ if buf[i] == '\\' && i+1 < len(buf) && (buf[i+1] == '"' || buf[i+1] == '\\') {
+ i += 2
+ continue
+ }
+
+ // Quoted value? (e.g. string)
+ if buf[i] == '"' {
+ i++
+ quoted = !quoted
+ continue
+ }
+
+ if buf[i] == ',' && !quoted {
+ break
+ }
+ i++
+ }
+ return i, buf[start:i]
+}
+
+func EscapeMeasurement(in []byte) []byte {
+ for b, esc := range measurementEscapeCodes {
+ in = bytes.Replace(in, []byte{b}, esc, -1)
+ }
+ return in
+}
+
+func unescapeMeasurement(in []byte) []byte {
+ for b, esc := range measurementEscapeCodes {
+ in = bytes.Replace(in, esc, []byte{b}, -1)
+ }
+ return in
+}
+
+func escapeTag(in []byte) []byte {
+ for b, esc := range tagEscapeCodes {
+ if bytes.IndexByte(in, b) != -1 {
+ in = bytes.Replace(in, []byte{b}, esc, -1)
+ }
+ }
+ return in
+}
+
+func unescapeTag(in []byte) []byte {
+ if bytes.IndexByte(in, '\\') == -1 {
+ return in
+ }
+
+ for b, esc := range tagEscapeCodes {
+ if bytes.IndexByte(in, b) != -1 {
+ in = bytes.Replace(in, esc, []byte{b}, -1)
+ }
+ }
+ return in
+}
+
+// escapeStringFieldReplacer replaces double quotes and backslashes
+// with the same character preceded by a backslash.
+// As of Go 1.7 this benchmarked better in allocations and CPU time
+// than iterating through a string byte-by-byte and appending to a new byte slice,
+// than calling strings.Replace twice, and than (*regexp.Regexp).ReplaceAllString.
+var escapeStringFieldReplacer = strings.NewReplacer(`"`, `\"`, `\`, `\\`)
+
+// EscapeStringField returns a copy of in with any double quotes or
+// backslashes replaced with escaped values.
+func EscapeStringField(in string) string {
+ return escapeStringFieldReplacer.Replace(in)
+}
+
+// unescapeStringField returns a copy of in with any escaped double-quotes
+// or backslashes unescaped.
+func unescapeStringField(in string) string {
+ if strings.IndexByte(in, '\\') == -1 {
+ return in
+ }
+
+ var out []byte
+ i := 0
+ for {
+ if i >= len(in) {
+ break
+ }
+ // unescape backslashes
+ if in[i] == '\\' && i+1 < len(in) && in[i+1] == '\\' {
+ out = append(out, '\\')
+ i += 2
+ continue
+ }
+ // unescape double-quotes
+ if in[i] == '\\' && i+1 < len(in) && in[i+1] == '"' {
+ out = append(out, '"')
+ i += 2
+ continue
+ }
+ out = append(out, in[i])
+ i++
+
+ }
+ return string(out)
+}
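A quick sketch of the escape round trip for string field values (import path as vendored above; the sample string is arbitrary):

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/models"
)

func main() {
	s := `say "hi" \ bye`
	fmt.Println(models.EscapeStringField(s)) // say \"hi\" \\ bye
	// unescapeStringField is unexported; parsing a point with a quoted string
	// field applies the inverse transformation internally.
}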
+
+// NewPoint returns a new point with the given measurement name, tags, fields and timestamp. If
+// an unsupported field value (NaN) or out of range time is passed, this function returns an error.
+func NewPoint(name string, tags Tags, fields Fields, t time.Time) (Point, error) {
+ key, err := pointKey(name, tags, fields, t)
+ if err != nil {
+ return nil, err
+ }
+
+ return &point{
+ key: key,
+ time: t,
+ fields: fields.MarshalBinary(),
+ }, nil
+}
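A minimal usage sketch for NewPoint, together with the Tags and Fields helpers defined later in this file (measurement, tag and field names are illustrative):

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb/models"
)

func main() {
	tags := models.NewTags(map[string]string{"host": "server01", "region": "us-west"})
	fields := models.Fields{"value": 42.0, "status": "ok"}

	pt, err := models.NewPoint("cpu", tags, fields, time.Unix(0, 1500000000000000000))
	if err != nil {
		panic(err)
	}
	fmt.Println(pt.String())
	// cpu,host=server01,region=us-west status="ok",value=42 1500000000000000000
}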
+
+// pointKey checks some basic requirements for valid points, and returns the
+// key, along with a possible error.
+func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte, error) {
+ if len(fields) == 0 {
+ return nil, ErrPointMustHaveAField
+ }
+
+ if !t.IsZero() {
+ if err := CheckTime(t); err != nil {
+ return nil, err
+ }
+ }
+
+ for key, value := range fields {
+ switch value := value.(type) {
+ case float64:
+ // Ensure the caller validates and handles invalid field values
+ if math.IsNaN(value) {
+ return nil, fmt.Errorf("NaN is an unsupported value for field %s", key)
+ }
+ case float32:
+ // Ensure the caller validates and handles invalid field values
+ if math.IsNaN(float64(value)) {
+ return nil, fmt.Errorf("NaN is an unsupported value for field %s", key)
+ }
+ }
+ if len(key) == 0 {
+ return nil, fmt.Errorf("all fields must have non-empty names")
+ }
+ }
+
+ key := MakeKey([]byte(measurement), tags)
+ for field := range fields {
+ sz := seriesKeySize(key, []byte(field))
+ if sz > MaxKeyLength {
+ return nil, fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength)
+ }
+ }
+
+ return key, nil
+}
+
+func seriesKeySize(key, field []byte) int {
+ // 4 is the length of the tsm1.fieldKeySeparator constant. It's inlined here to avoid a circular
+ // dependency.
+ return len(key) + 4 + len(field)
+}
+
+// NewPointFromBytes returns a new Point from a marshalled Point.
+func NewPointFromBytes(b []byte) (Point, error) {
+ p := &point{}
+ if err := p.UnmarshalBinary(b); err != nil {
+ return nil, err
+ }
+
+ // This does some basic validation to ensure there are fields and they
+ // can be unmarshalled as well.
+ iter := p.FieldIterator()
+ var hasField bool
+ for iter.Next() {
+ if len(iter.FieldKey()) == 0 {
+ continue
+ }
+ hasField = true
+ switch iter.Type() {
+ case Float:
+ _, err := iter.FloatValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ case Integer:
+ _, err := iter.IntegerValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ case Unsigned:
+ _, err := iter.UnsignedValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ case String:
+ // Skip since this won't return an error
+ case Boolean:
+ _, err := iter.BooleanValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ }
+ }
+
+ if !hasField {
+ return nil, ErrPointMustHaveAField
+ }
+
+ return p, nil
+}
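A round-trip sketch pairing the point's MarshalBinary with NewPointFromBytes (the values are illustrative):

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb/models"
)

func main() {
	pt := models.MustNewPoint("cpu",
		models.NewTags(map[string]string{"host": "a"}),
		models.Fields{"value": int64(7)},
		time.Unix(0, 42).UTC())

	b, err := pt.MarshalBinary()
	if err != nil {
		panic(err)
	}

	back, err := models.NewPointFromBytes(b)
	if err != nil {
		panic(err)
	}
	fmt.Println(back.String()) // cpu,host=a value=7i 42
}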
+
+// MustNewPoint returns a new point with the given measurement name, tags, fields and timestamp. If
+// an unsupported field value (NaN) is passed, this function panics.
+func MustNewPoint(name string, tags Tags, fields Fields, time time.Time) Point {
+ pt, err := NewPoint(name, tags, fields, time)
+ if err != nil {
+ panic(err.Error())
+ }
+ return pt
+}
+
+// Key returns the key (measurement joined with tags) of the point.
+func (p *point) Key() []byte {
+ return p.key
+}
+
+func (p *point) name() []byte {
+ _, name := scanTo(p.key, 0, ',')
+ return name
+}
+
+func (p *point) Name() []byte {
+ return escape.Unescape(p.name())
+}
+
+// SetName updates the measurement name for the point.
+func (p *point) SetName(name string) {
+ p.cachedName = ""
+ p.key = MakeKey([]byte(name), p.Tags())
+}
+
+// Time returns the timestamp for the point.
+func (p *point) Time() time.Time {
+ return p.time
+}
+
+// SetTime updates the timestamp for the point.
+func (p *point) SetTime(t time.Time) {
+ p.time = t
+}
+
+// Round will round the timestamp of the point to the given duration.
+func (p *point) Round(d time.Duration) {
+ p.time = p.time.Round(d)
+}
+
+// Tags returns the tag set for the point.
+func (p *point) Tags() Tags {
+ if p.cachedTags != nil {
+ return p.cachedTags
+ }
+ p.cachedTags = parseTags(p.key)
+ return p.cachedTags
+}
+
+func (p *point) HasTag(tag []byte) bool {
+ if len(p.key) == 0 {
+ return false
+ }
+
+ var exists bool
+ walkTags(p.key, func(key, value []byte) bool {
+ if bytes.Equal(tag, key) {
+ exists = true
+ return false
+ }
+ return true
+ })
+
+ return exists
+}
+
+func walkTags(buf []byte, fn func(key, value []byte) bool) {
+ if len(buf) == 0 {
+ return
+ }
+
+ pos, name := scanTo(buf, 0, ',')
+
+ // it's an empty key, so there are no tags
+ if len(name) == 0 {
+ return
+ }
+
+ hasEscape := bytes.IndexByte(buf, '\\') != -1
+ i := pos + 1
+ var key, value []byte
+ for {
+ if i >= len(buf) {
+ break
+ }
+ i, key = scanTo(buf, i, '=')
+ i, value = scanTagValue(buf, i+1)
+
+ if len(value) == 0 {
+ continue
+ }
+
+ if hasEscape {
+ if !fn(unescapeTag(key), unescapeTag(value)) {
+ return
+ }
+ } else {
+ if !fn(key, value) {
+ return
+ }
+ }
+
+ i++
+ }
+}
+
+// walkFields walks each field key and value via fn. If fn returns false, the iteration
+// is stopped. The values are the raw byte slices and not the converted types.
+func walkFields(buf []byte, fn func(key, value []byte) bool) {
+ var i int
+ var key, val []byte
+ for len(buf) > 0 {
+ i, key = scanTo(buf, 0, '=')
+ buf = buf[i+1:]
+ i, val = scanFieldValue(buf, 0)
+ buf = buf[i:]
+ if !fn(key, val) {
+ break
+ }
+
+ // slice off comma
+ if len(buf) > 0 {
+ buf = buf[1:]
+ }
+ }
+}
+
+func parseTags(buf []byte) Tags {
+ if len(buf) == 0 {
+ return nil
+ }
+
+ tags := make(Tags, bytes.Count(buf, []byte(",")))
+ p := 0
+ walkTags(buf, func(key, value []byte) bool {
+ tags[p].Key = key
+ tags[p].Value = value
+ p++
+ return true
+ })
+ return tags
+}
+
+// MakeKey creates a key for a set of tags.
+func MakeKey(name []byte, tags Tags) []byte {
+ // unescape the name and then re-escape it to avoid double escaping.
+ // The key should always be stored in escaped form.
+ return append(EscapeMeasurement(unescapeMeasurement(name)), tags.HashKey()...)
+}
+
+// SetTags replaces the tags for the point.
+func (p *point) SetTags(tags Tags) {
+ p.key = MakeKey(p.Name(), tags)
+ p.cachedTags = tags
+}
+
+// AddTag adds or replaces a tag value for a point.
+func (p *point) AddTag(key, value string) {
+ tags := p.Tags()
+ tags = append(tags, Tag{Key: []byte(key), Value: []byte(value)})
+ sort.Sort(tags)
+ p.cachedTags = tags
+ p.key = MakeKey(p.Name(), tags)
+}
+
+// Fields returns the fields for the point.
+func (p *point) Fields() (Fields, error) {
+ if p.cachedFields != nil {
+ return p.cachedFields, nil
+ }
+ cf, err := p.unmarshalBinary()
+ if err != nil {
+ return nil, err
+ }
+ p.cachedFields = cf
+ return p.cachedFields, nil
+}
+
+// SetPrecision will round a time to the specified precision.
+func (p *point) SetPrecision(precision string) {
+ switch precision {
+ case "n":
+ case "u":
+ p.SetTime(p.Time().Truncate(time.Microsecond))
+ case "ms":
+ p.SetTime(p.Time().Truncate(time.Millisecond))
+ case "s":
+ p.SetTime(p.Time().Truncate(time.Second))
+ case "m":
+ p.SetTime(p.Time().Truncate(time.Minute))
+ case "h":
+ p.SetTime(p.Time().Truncate(time.Hour))
+ }
+}
+
+// String returns the string representation of the point.
+func (p *point) String() string {
+ if p.Time().IsZero() {
+ return string(p.Key()) + " " + string(p.fields)
+ }
+ return string(p.Key()) + " " + string(p.fields) + " " + strconv.FormatInt(p.UnixNano(), 10)
+}
+
+// AppendString appends the string representation of the point to buf.
+func (p *point) AppendString(buf []byte) []byte {
+ buf = append(buf, p.key...)
+ buf = append(buf, ' ')
+ buf = append(buf, p.fields...)
+
+ if !p.time.IsZero() {
+ buf = append(buf, ' ')
+ buf = strconv.AppendInt(buf, p.UnixNano(), 10)
+ }
+
+ return buf
+}
+
+// StringSize returns the length of the string that would be returned by String().
+func (p *point) StringSize() int {
+ size := len(p.key) + len(p.fields) + 1
+
+ if !p.time.IsZero() {
+ digits := 1 // even "0" has one digit
+ t := p.UnixNano()
+ if t < 0 {
+ // account for negative sign, then negate
+ digits++
+ t = -t
+ }
+ for t > 9 { // already accounted for one digit
+ digits++
+ t /= 10
+ }
+ size += digits + 1 // digits and a space
+ }
+
+ return size
+}
+
+// MarshalBinary returns a binary representation of the point.
+func (p *point) MarshalBinary() ([]byte, error) {
+ if len(p.fields) == 0 {
+ return nil, ErrPointMustHaveAField
+ }
+
+ tb, err := p.time.MarshalBinary()
+ if err != nil {
+ return nil, err
+ }
+
+ b := make([]byte, 8+len(p.key)+len(p.fields)+len(tb))
+ i := 0
+
+ binary.BigEndian.PutUint32(b[i:], uint32(len(p.key)))
+ i += 4
+
+ i += copy(b[i:], p.key)
+
+ binary.BigEndian.PutUint32(b[i:i+4], uint32(len(p.fields)))
+ i += 4
+
+ i += copy(b[i:], p.fields)
+
+ copy(b[i:], tb)
+ return b, nil
+}
+
+// UnmarshalBinary decodes a binary representation of the point into a point struct.
+func (p *point) UnmarshalBinary(b []byte) error {
+ var n int
+
+ // Read key length.
+ if len(b) < 4 {
+ return io.ErrShortBuffer
+ }
+ n, b = int(binary.BigEndian.Uint32(b[:4])), b[4:]
+
+ // Read key.
+ if len(b) < n {
+ return io.ErrShortBuffer
+ }
+ p.key, b = b[:n], b[n:]
+
+ // Read fields length.
+ if len(b) < 4 {
+ return io.ErrShortBuffer
+ }
+ n, b = int(binary.BigEndian.Uint32(b[:4])), b[4:]
+
+ // Read fields.
+ if len(b) < n {
+ return io.ErrShortBuffer
+ }
+ p.fields, b = b[:n], b[n:]
+
+ // Read timestamp.
+ if err := p.time.UnmarshalBinary(b); err != nil {
+ return err
+ }
+ return nil
+}
+
+// PrecisionString returns a string representation of the point. If there
+// is a timestamp associated with the point then it will be specified in the
+// given unit.
+func (p *point) PrecisionString(precision string) string {
+ if p.Time().IsZero() {
+ return fmt.Sprintf("%s %s", p.Key(), string(p.fields))
+ }
+ return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields),
+ p.UnixNano()/GetPrecisionMultiplier(precision))
+}
+
+// RoundedString returns a string representation of the point. If there
+// is a timestamp associated with the point, then it will be rounded to the
+// given duration.
+func (p *point) RoundedString(d time.Duration) string {
+ if p.Time().IsZero() {
+ return fmt.Sprintf("%s %s", p.Key(), string(p.fields))
+ }
+ return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields),
+ p.time.Round(d).UnixNano())
+}
+
+func (p *point) unmarshalBinary() (Fields, error) {
+ iter := p.FieldIterator()
+ fields := make(Fields, 8)
+ for iter.Next() {
+ if len(iter.FieldKey()) == 0 {
+ continue
+ }
+ switch iter.Type() {
+ case Float:
+ v, err := iter.FloatValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ fields[string(iter.FieldKey())] = v
+ case Integer:
+ v, err := iter.IntegerValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ fields[string(iter.FieldKey())] = v
+ case Unsigned:
+ v, err := iter.UnsignedValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ fields[string(iter.FieldKey())] = v
+ case String:
+ fields[string(iter.FieldKey())] = iter.StringValue()
+ case Boolean:
+ v, err := iter.BooleanValue()
+ if err != nil {
+ return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err)
+ }
+ fields[string(iter.FieldKey())] = v
+ }
+ }
+ return fields, nil
+}
+
+// HashID returns a non-cryptographic checksum of the point's key.
+func (p *point) HashID() uint64 {
+ h := NewInlineFNV64a()
+ h.Write(p.key)
+ sum := h.Sum64()
+ return sum
+}
+
+// UnixNano returns the timestamp of the point as nanoseconds since Unix epoch.
+func (p *point) UnixNano() int64 {
+ return p.Time().UnixNano()
+}
+
+// Split will attempt to return multiple points with the same timestamp whose
+// string representations are no longer than size. Points with a single field or
+// a point without a timestamp may exceed the requested size.
+func (p *point) Split(size int) []Point {
+ if p.time.IsZero() || p.StringSize() <= size {
+ return []Point{p}
+ }
+
+ // key string, timestamp string, spaces
+ size -= len(p.key) + len(strconv.FormatInt(p.time.UnixNano(), 10)) + 2
+
+ var points []Point
+ var start, cur int
+
+ for cur < len(p.fields) {
+ end, _ := scanTo(p.fields, cur, '=')
+ end, _ = scanFieldValue(p.fields, end+1)
+
+ if cur > start && end-start > size {
+ points = append(points, &point{
+ key: p.key,
+ time: p.time,
+ fields: p.fields[start : cur-1],
+ })
+ start = cur
+ }
+
+ cur = end + 1
+ }
+
+ points = append(points, &point{
+ key: p.key,
+ time: p.time,
+ fields: p.fields[start:],
+ })
+
+ return points
+}
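A sketch of Split in action; with a deliberately small size budget the four fields below end up in two sub-points (field names and the size argument are arbitrary):

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb/models"
)

func main() {
	fields := models.Fields{"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0}
	pt := models.MustNewPoint("m", nil, fields, time.Unix(0, 1))

	for _, sub := range pt.Split(12) {
		fmt.Println(sub.String())
	}
	// m a=1,b=2 1
	// m c=3,d=4 1
}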
+
+// Tag represents a single key/value tag pair.
+type Tag struct {
+ Key []byte
+ Value []byte
+}
+
+// NewTag returns a new Tag.
+func NewTag(key, value []byte) Tag {
+ return Tag{
+ Key: key,
+ Value: value,
+ }
+}
+
+// Size returns the size of the key and value.
+func (t Tag) Size() int { return len(t.Key) + len(t.Value) }
+
+// Clone returns a shallow copy of Tag.
+//
+// Tags associated with a Point created by ParsePointsWithPrecision will hold references to the byte slice that was parsed.
+// Use Clone to create a Tag with new byte slices that do not refer to the argument to ParsePointsWithPrecision.
+func (t Tag) Clone() Tag {
+ other := Tag{
+ Key: make([]byte, len(t.Key)),
+ Value: make([]byte, len(t.Value)),
+ }
+
+ copy(other.Key, t.Key)
+ copy(other.Value, t.Value)
+
+ return other
+}
+
+// String returns the string representation of the tag.
+func (t *Tag) String() string {
+ var buf bytes.Buffer
+ buf.WriteByte('{')
+ buf.WriteString(string(t.Key))
+ buf.WriteByte(' ')
+ buf.WriteString(string(t.Value))
+ buf.WriteByte('}')
+ return buf.String()
+}
+
+// Tags represents a sorted list of tags.
+type Tags []Tag
+
+// NewTags returns a new Tags from a map.
+func NewTags(m map[string]string) Tags {
+ if len(m) == 0 {
+ return nil
+ }
+ a := make(Tags, 0, len(m))
+ for k, v := range m {
+ a = append(a, NewTag([]byte(k), []byte(v)))
+ }
+ sort.Sort(a)
+ return a
+}
+
+// Keys returns the list of keys for a tag set.
+func (a Tags) Keys() []string {
+ if len(a) == 0 {
+ return nil
+ }
+ keys := make([]string, len(a))
+ for i, tag := range a {
+ keys[i] = string(tag.Key)
+ }
+ return keys
+}
+
+// Values returns the list of values for a tag set.
+func (a Tags) Values() []string {
+ if len(a) == 0 {
+ return nil
+ }
+ values := make([]string, len(a))
+ for i, tag := range a {
+ values[i] = string(tag.Value)
+ }
+ return values
+}
+
+// String returns the string representation of the tags.
+func (a Tags) String() string {
+ var buf bytes.Buffer
+ buf.WriteByte('[')
+ for i := range a {
+ buf.WriteString(a[i].String())
+ if i < len(a)-1 {
+ buf.WriteByte(' ')
+ }
+ }
+ buf.WriteByte(']')
+ return buf.String()
+}
+
+// Size returns the number of bytes needed to store all tags. Note that this is
+// only the number of bytes needed to store all keys and values; it does not
+// account for data structures or delimiters.
+func (a Tags) Size() int {
+ var total int
+ for _, t := range a {
+ total += t.Size()
+ }
+ return total
+}
+
+// Clone returns a copy of the slice where the elements are a result of calling `Clone` on the original elements
+//
+// Tags associated with a Point created by ParsePointsWithPrecision will hold references to the byte slice that was parsed.
+// Use Clone to create Tags with new byte slices that do not refer to the argument to ParsePointsWithPrecision.
+func (a Tags) Clone() Tags {
+ if len(a) == 0 {
+ return nil
+ }
+
+ others := make(Tags, len(a))
+ for i := range a {
+ others[i] = a[i].Clone()
+ }
+
+ return others
+}
+
+func (a Tags) Len() int { return len(a) }
+func (a Tags) Less(i, j int) bool { return bytes.Compare(a[i].Key, a[j].Key) == -1 }
+func (a Tags) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// Equal returns true if a equals other.
+func (a Tags) Equal(other Tags) bool {
+ if len(a) != len(other) {
+ return false
+ }
+ for i := range a {
+ if !bytes.Equal(a[i].Key, other[i].Key) || !bytes.Equal(a[i].Value, other[i].Value) {
+ return false
+ }
+ }
+ return true
+}
+
+// CompareTags returns -1 if a < b, 1 if a > b, and 0 if a == b.
+func CompareTags(a, b Tags) int {
+ // Compare each key & value until a mismatch.
+ for i := 0; i < len(a) && i < len(b); i++ {
+ if cmp := bytes.Compare(a[i].Key, b[i].Key); cmp != 0 {
+ return cmp
+ }
+ if cmp := bytes.Compare(a[i].Value, b[i].Value); cmp != 0 {
+ return cmp
+ }
+ }
+
+ // If all tags are equal up to this point then return shorter tagset.
+ if len(a) < len(b) {
+ return -1
+ } else if len(a) > len(b) {
+ return 1
+ }
+
+ // All tags are equal.
+ return 0
+}
+
+// Get returns the value for a key.
+func (a Tags) Get(key []byte) []byte {
+ // OPTIMIZE: Use sort.Search if tagset is large.
+
+ for _, t := range a {
+ if bytes.Equal(t.Key, key) {
+ return t.Value
+ }
+ }
+ return nil
+}
+
+// GetString returns the string value for a string key.
+func (a Tags) GetString(key string) string {
+ return string(a.Get([]byte(key)))
+}
+
+// Set sets the value for a key.
+func (a *Tags) Set(key, value []byte) {
+ for i, t := range *a {
+ if bytes.Equal(t.Key, key) {
+ (*a)[i].Value = value
+ return
+ }
+ }
+ *a = append(*a, Tag{Key: key, Value: value})
+ sort.Sort(*a)
+}
+
+// SetString sets the string value for a string key.
+func (a *Tags) SetString(key, value string) {
+ a.Set([]byte(key), []byte(value))
+}
+
+// Delete removes a tag by key.
+func (a *Tags) Delete(key []byte) {
+ for i, t := range *a {
+ if bytes.Equal(t.Key, key) {
+ copy((*a)[i:], (*a)[i+1:])
+ (*a)[len(*a)-1] = Tag{}
+ *a = (*a)[:len(*a)-1]
+ return
+ }
+ }
+}
+
+// Map returns a map representation of the tags.
+func (a Tags) Map() map[string]string {
+ m := make(map[string]string, len(a))
+ for _, t := range a {
+ m[string(t.Key)] = string(t.Value)
+ }
+ return m
+}
+
+// Merge merges the tags, combining the two sets. If both define a tag with the
+// same key, the value from other overwrites the old value.
+// A new Tags slice is returned.
+func (a Tags) Merge(other map[string]string) Tags {
+ merged := make(map[string]string, len(a)+len(other))
+ for _, t := range a {
+ merged[string(t.Key)] = string(t.Value)
+ }
+ for k, v := range other {
+ merged[k] = v
+ }
+ return NewTags(merged)
+}
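A short sketch of the Tags accessors and Merge (keys and values are illustrative):

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/models"
)

func main() {
	tags := models.NewTags(map[string]string{"host": "server01"})
	tags.Set([]byte("region"), []byte("us-west"))

	fmt.Println(tags.GetString("host")) // server01
	fmt.Println(tags.Map())             // map[host:server01 region:us-west]

	merged := tags.Merge(map[string]string{"host": "server02"})
	fmt.Println(merged.Map()) // map[host:server02 region:us-west]
}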
+
+// HashKey hashes all of a tag's keys.
+func (a Tags) HashKey() []byte {
+ // Empty maps marshal to empty bytes.
+ if len(a) == 0 {
+ return nil
+ }
+
+ // Type invariant: Tags are sorted
+
+ escaped := make(Tags, 0, len(a))
+ sz := 0
+ for _, t := range a {
+ ek := escapeTag(t.Key)
+ ev := escapeTag(t.Value)
+
+ if len(ev) > 0 {
+ escaped = append(escaped, Tag{Key: ek, Value: ev})
+ sz += len(ek) + len(ev)
+ }
+ }
+
+ sz += len(escaped) + (len(escaped) * 2) // separators
+
+ // Generate marshaled bytes.
+ b := make([]byte, sz)
+ buf := b
+ idx := 0
+ for _, k := range escaped {
+ buf[idx] = ','
+ idx++
+ copy(buf[idx:idx+len(k.Key)], k.Key)
+ idx += len(k.Key)
+ buf[idx] = '='
+ idx++
+ copy(buf[idx:idx+len(k.Value)], k.Value)
+ idx += len(k.Value)
+ }
+ return b[:idx]
+}
+
+// CopyTags returns a shallow copy of tags.
+func CopyTags(a Tags) Tags {
+ other := make(Tags, len(a))
+ copy(other, a)
+ return other
+}
+
+// DeepCopyTags returns a deep copy of tags.
+func DeepCopyTags(a Tags) Tags {
+ // Calculate size of keys/values in bytes.
+ var n int
+ for _, t := range a {
+ n += len(t.Key) + len(t.Value)
+ }
+
+ // Build single allocation for all key/values.
+ buf := make([]byte, n)
+
+ // Copy tags to new set.
+ other := make(Tags, len(a))
+ for i, t := range a {
+ copy(buf, t.Key)
+ other[i].Key, buf = buf[:len(t.Key)], buf[len(t.Key):]
+
+ copy(buf, t.Value)
+ other[i].Value, buf = buf[:len(t.Value)], buf[len(t.Value):]
+ }
+
+ return other
+}
+
+// Fields represents a mapping between a Point's field names and their
+// values.
+type Fields map[string]interface{}
+
+// FieldIterator returns a FieldIterator that can be used to traverse the
+// fields of a point without constructing the in-memory map.
+func (p *point) FieldIterator() FieldIterator {
+ p.Reset()
+ return p
+}
+
+type fieldIterator struct {
+ start, end int
+ key, keybuf []byte
+ valueBuf []byte
+ fieldType FieldType
+}
+
+// Next indicates whether there are any fields remaining.
+func (p *point) Next() bool {
+ p.it.start = p.it.end
+ if p.it.start >= len(p.fields) {
+ return false
+ }
+
+ p.it.end, p.it.key = scanTo(p.fields, p.it.start, '=')
+ if escape.IsEscaped(p.it.key) {
+ p.it.keybuf = escape.AppendUnescaped(p.it.keybuf[:0], p.it.key)
+ p.it.key = p.it.keybuf
+ }
+
+ p.it.end, p.it.valueBuf = scanFieldValue(p.fields, p.it.end+1)
+ p.it.end++
+
+ if len(p.it.valueBuf) == 0 {
+ p.it.fieldType = Empty
+ return true
+ }
+
+ c := p.it.valueBuf[0]
+
+ if c == '"' {
+ p.it.fieldType = String
+ return true
+ }
+
+ if strings.IndexByte(`0123456789-.nNiIu`, c) >= 0 {
+ if p.it.valueBuf[len(p.it.valueBuf)-1] == 'i' {
+ p.it.fieldType = Integer
+ p.it.valueBuf = p.it.valueBuf[:len(p.it.valueBuf)-1]
+ } else if p.it.valueBuf[len(p.it.valueBuf)-1] == 'u' {
+ p.it.fieldType = Unsigned
+ p.it.valueBuf = p.it.valueBuf[:len(p.it.valueBuf)-1]
+ } else {
+ p.it.fieldType = Float
+ }
+ return true
+ }
+
+ // to keep the same behavior that currently exists, default to boolean
+ p.it.fieldType = Boolean
+ return true
+}
+
+// FieldKey returns the key of the current field.
+func (p *point) FieldKey() []byte {
+ return p.it.key
+}
+
+// Type returns the FieldType of the current field.
+func (p *point) Type() FieldType {
+ return p.it.fieldType
+}
+
+// StringValue returns the string value of the current field.
+func (p *point) StringValue() string {
+ return unescapeStringField(string(p.it.valueBuf[1 : len(p.it.valueBuf)-1]))
+}
+
+// IntegerValue returns the integer value of the current field.
+func (p *point) IntegerValue() (int64, error) {
+ n, err := parseIntBytes(p.it.valueBuf, 10, 64)
+ if err != nil {
+ return 0, fmt.Errorf("unable to parse integer value %q: %v", p.it.valueBuf, err)
+ }
+ return n, nil
+}
+
+// UnsignedValue returns the unsigned value of the current field.
+func (p *point) UnsignedValue() (uint64, error) {
+ n, err := parseUintBytes(p.it.valueBuf, 10, 64)
+ if err != nil {
+ return 0, fmt.Errorf("unable to parse unsigned value %q: %v", p.it.valueBuf, err)
+ }
+ return n, nil
+}
+
+// BooleanValue returns the boolean value of the current field.
+func (p *point) BooleanValue() (bool, error) {
+ b, err := parseBoolBytes(p.it.valueBuf)
+ if err != nil {
+ return false, fmt.Errorf("unable to parse bool value %q: %v", p.it.valueBuf, err)
+ }
+ return b, nil
+}
+
+// FloatValue returns the float value of the current field.
+func (p *point) FloatValue() (float64, error) {
+ f, err := parseFloatBytes(p.it.valueBuf, 64)
+ if err != nil {
+ return 0, fmt.Errorf("unable to parse floating point value %q: %v", p.it.valueBuf, err)
+ }
+ return f, nil
+}
+
+// Reset resets the iterator to its initial state.
+func (p *point) Reset() {
+ p.it.fieldType = Empty
+ p.it.key = nil
+ p.it.valueBuf = nil
+ p.it.start = 0
+ p.it.end = 0
+}
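A sketch of iterating a point's fields without building the Fields map, using the iterator accessors above and the FieldType constants declared earlier in this file (field names and values are illustrative):

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb/models"
)

func main() {
	pt := models.MustNewPoint("cpu", nil,
		models.Fields{"value": 0.5, "count": int64(3), "up": true},
		time.Unix(0, 1))

	it := pt.FieldIterator()
	for it.Next() {
		switch it.Type() {
		case models.Float:
			v, _ := it.FloatValue()
			fmt.Println(string(it.FieldKey()), v)
		case models.Integer:
			v, _ := it.IntegerValue()
			fmt.Println(string(it.FieldKey()), v)
		case models.Boolean:
			v, _ := it.BooleanValue()
			fmt.Println(string(it.FieldKey()), v)
		}
	}
	// count 3
	// up true
	// value 0.5
}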
+
+// MarshalBinary encodes all the fields to their proper type and returns the binary
+// representation.
+// NOTE: uint64 is specifically not supported due to potential overflow when we decode
+// again later to an int64.
+// NOTE2: uint is accepted even though it may be 64 bits wide, for backwards compatibility.
+func (p Fields) MarshalBinary() []byte {
+ var b []byte
+ keys := make([]string, 0, len(p))
+
+ for k := range p {
+ keys = append(keys, k)
+ }
+
+ // Not really necessary, can probably be removed.
+ sort.Strings(keys)
+
+ for i, k := range keys {
+ if i > 0 {
+ b = append(b, ',')
+ }
+ b = appendField(b, k, p[k])
+ }
+
+ return b
+}
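A sketch of the resulting field blob for a mixed-type Fields map (keys and values are arbitrary; keys come out sorted):

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/models"
)

func main() {
	f := models.Fields{
		"value":  3.14,
		"count":  int64(10),
		"active": true,
		"note":   `quoted "text"`,
	}
	fmt.Println(string(f.MarshalBinary()))
	// active=true,count=10i,note="quoted \"text\"",value=3.14
}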
+
+func appendField(b []byte, k string, v interface{}) []byte {
+ b = append(b, []byte(escape.String(k))...)
+ b = append(b, '=')
+
+ // check popular types first
+ switch v := v.(type) {
+ case float64:
+ b = strconv.AppendFloat(b, v, 'f', -1, 64)
+ case int64:
+ b = strconv.AppendInt(b, v, 10)
+ b = append(b, 'i')
+ case string:
+ b = append(b, '"')
+ b = append(b, []byte(EscapeStringField(v))...)
+ b = append(b, '"')
+ case bool:
+ b = strconv.AppendBool(b, v)
+ case int32:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case int16:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case int8:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case int:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case uint64:
+ b = strconv.AppendUint(b, v, 10)
+ b = append(b, 'u')
+ case uint32:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case uint16:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case uint8:
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case uint:
+ // TODO: 'uint' should be converted to writing as an unsigned integer,
+ // but we cannot since that would break backwards compatibility.
+ b = strconv.AppendInt(b, int64(v), 10)
+ b = append(b, 'i')
+ case float32:
+ b = strconv.AppendFloat(b, float64(v), 'f', -1, 32)
+ case []byte:
+ b = append(b, v...)
+ case nil:
+ // skip
+ default:
+ // Can't determine the type, so convert to string
+ b = append(b, '"')
+ b = append(b, []byte(EscapeStringField(fmt.Sprintf("%v", v)))...)
+ b = append(b, '"')
+
+ }
+
+ return b
+}
+
+type byteSlices [][]byte
+
+func (a byteSlices) Len() int { return len(a) }
+func (a byteSlices) Less(i, j int) bool { return bytes.Compare(a[i], a[j]) == -1 }
+func (a byteSlices) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
diff --git a/vendor/github.com/influxdata/influxdb/models/rows.go b/vendor/github.com/influxdata/influxdb/models/rows.go
new file mode 100644
index 000000000..c087a4882
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/rows.go
@@ -0,0 +1,62 @@
+package models
+
+import (
+ "sort"
+)
+
+// Row represents a single row returned from the execution of a statement.
+type Row struct {
+ Name string `json:"name,omitempty"`
+ Tags map[string]string `json:"tags,omitempty"`
+ Columns []string `json:"columns,omitempty"`
+ Values [][]interface{} `json:"values,omitempty"`
+ Partial bool `json:"partial,omitempty"`
+}
+
+// SameSeries returns true if r contains values for the same series as o.
+func (r *Row) SameSeries(o *Row) bool {
+ return r.tagsHash() == o.tagsHash() && r.Name == o.Name
+}
+
+// tagsHash returns a hash of tag key/value pairs.
+func (r *Row) tagsHash() uint64 {
+ h := NewInlineFNV64a()
+ keys := r.tagsKeys()
+ for _, k := range keys {
+ h.Write([]byte(k))
+ h.Write([]byte(r.Tags[k]))
+ }
+ return h.Sum64()
+}
+
+// tagsKeys returns a sorted list of tag keys.
+func (r *Row) tagsKeys() []string {
+ a := make([]string, 0, len(r.Tags))
+ for k := range r.Tags {
+ a = append(a, k)
+ }
+ sort.Strings(a)
+ return a
+}
+
+// Rows represents a collection of rows. Rows implements sort.Interface.
+type Rows []*Row
+
+// Len implements sort.Interface.
+func (p Rows) Len() int { return len(p) }
+
+// Less implements sort.Interface.
+func (p Rows) Less(i, j int) bool {
+ // Sort by name first.
+ if p[i].Name != p[j].Name {
+ return p[i].Name < p[j].Name
+ }
+
+ // Sort by tag set hash. Tags don't have a meaningful sort order so we
+ // just compute a hash and sort by that instead. This allows the tests
+ // to receive rows in a predictable order every time.
+ return p[i].tagsHash() < p[j].tagsHash()
+}
+
+// Swap implements sort.Interface.
+func (p Rows) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
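A sketch of sorting a result set with the sort.Interface implementation above (names and tags are illustrative):

package main

import (
	"fmt"
	"sort"

	"github.com/influxdata/influxdb/models"
)

func main() {
	rows := models.Rows{
		{Name: "mem", Tags: map[string]string{"host": "b"}},
		{Name: "cpu", Tags: map[string]string{"host": "a"}},
	}
	sort.Sort(rows)
	for _, r := range rows {
		fmt.Println(r.Name, r.Tags)
	}
	// cpu map[host:a]
	// mem map[host:b]
}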
diff --git a/vendor/github.com/influxdata/influxdb/models/statistic.go b/vendor/github.com/influxdata/influxdb/models/statistic.go
new file mode 100644
index 000000000..553e9d09f
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/statistic.go
@@ -0,0 +1,42 @@
+package models
+
+// Statistic is the representation of a statistic used by the monitoring service.
+type Statistic struct {
+ Name string `json:"name"`
+ Tags map[string]string `json:"tags"`
+ Values map[string]interface{} `json:"values"`
+}
+
+// NewStatistic returns an initialized Statistic.
+func NewStatistic(name string) Statistic {
+ return Statistic{
+ Name: name,
+ Tags: make(map[string]string),
+ Values: make(map[string]interface{}),
+ }
+}
+
+// StatisticTags is a map that can be merged with others without causing
+// mutations to either map.
+type StatisticTags map[string]string
+
+// Merge creates a new map containing the merged contents of tags and t.
+// If both tags and the receiver map contain the same key, the value in tags
+// is used in the resulting map.
+//
+// Merge always returns a usable map.
+func (t StatisticTags) Merge(tags map[string]string) map[string]string {
+ // Add everything in tags to the result.
+ out := make(map[string]string, len(tags))
+ for k, v := range tags {
+ out[k] = v
+ }
+
+ // Only add values from t that don't appear in tags.
+ for k, v := range t {
+ if _, ok := tags[k]; !ok {
+ out[k] = v
+ }
+ }
+ return out
+}
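A sketch of the merge precedence, with made-up tag names; the explicit tags win over the receiver's defaults:

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/models"
)

func main() {
	defaults := models.StatisticTags{"bind": ":8086", "proto": "http"}
	out := defaults.Merge(map[string]string{"proto": "https"})
	fmt.Println(out) // map[bind::8086 proto:https]
}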
diff --git a/vendor/github.com/influxdata/influxdb/models/time.go b/vendor/github.com/influxdata/influxdb/models/time.go
new file mode 100644
index 000000000..e98f2cb33
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/time.go
@@ -0,0 +1,74 @@
+package models
+
+// Helper time methods since parsing time can easily overflow and we only support a
+// specific time range.
+
+import (
+ "fmt"
+ "math"
+ "time"
+)
+
+const (
+	// MinNanoTime is the minimum time that can be represented.
+ //
+ // 1677-09-21 00:12:43.145224194 +0000 UTC
+ //
+ // The two lowest minimum integers are used as sentinel values. The
+ // minimum value needs to be used as a value lower than any other value for
+ // comparisons and another separate value is needed to act as a sentinel
+ // default value that is unusable by the user, but usable internally.
+ // Because these two values need to be used for a special purpose, we do
+ // not allow users to write points at these two times.
+ MinNanoTime = int64(math.MinInt64) + 2
+
+ // MaxNanoTime is the maximum time that can be represented.
+ //
+ // 2262-04-11 23:47:16.854775806 +0000 UTC
+ //
+ // The highest time represented by a nanosecond needs to be used for an
+ // exclusive range in the shard group, so the maximum time needs to be one
+ // less than the possible maximum number of nanoseconds representable by an
+ // int64 so that we don't lose a point at that one time.
+ MaxNanoTime = int64(math.MaxInt64) - 1
+)
+
+var (
+ minNanoTime = time.Unix(0, MinNanoTime).UTC()
+ maxNanoTime = time.Unix(0, MaxNanoTime).UTC()
+
+ // ErrTimeOutOfRange gets returned when time is out of the representable range using int64 nanoseconds since the epoch.
+ ErrTimeOutOfRange = fmt.Errorf("time outside range %d - %d", MinNanoTime, MaxNanoTime)
+)
+
+// SafeCalcTime safely calculates the time given. Will return error if the time is outside the
+// supported range.
+func SafeCalcTime(timestamp int64, precision string) (time.Time, error) {
+ mult := GetPrecisionMultiplier(precision)
+ if t, ok := safeSignedMult(timestamp, mult); ok {
+ tme := time.Unix(0, t).UTC()
+ return tme, CheckTime(tme)
+ }
+
+ return time.Time{}, ErrTimeOutOfRange
+}
+
+// CheckTime checks that a time is within the safe range.
+func CheckTime(t time.Time) error {
+ if t.Before(minNanoTime) || t.After(maxNanoTime) {
+ return ErrTimeOutOfRange
+ }
+ return nil
+}
+
+// safeSignedMult performs the multiplication a * b and checks that it did not overflow.
+func safeSignedMult(a, b int64) (int64, bool) {
+ if a == 0 || b == 0 || a == 1 || b == 1 {
+ return a * b, true
+ }
+ if a == MinNanoTime || b == MaxNanoTime {
+ return 0, false
+ }
+ c := a * b
+ return c, c/b == a
+}
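A sketch of the range checks; the oversized timestamp below is arbitrary and only meant to trip the overflow detection:

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/influxdb/models"
)

func main() {
	fmt.Println(models.CheckTime(time.Now())) // <nil>

	over := time.Unix(0, models.MaxNanoTime).Add(time.Nanosecond)
	fmt.Println(models.CheckTime(over)) // time outside range ...

	_, err := models.SafeCalcTime(1<<62, "h")
	fmt.Println(err) // time outside range ...
}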
diff --git a/vendor/github.com/influxdata/influxdb/models/uint_support.go b/vendor/github.com/influxdata/influxdb/models/uint_support.go
new file mode 100644
index 000000000..18d1ca06e
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/models/uint_support.go
@@ -0,0 +1,7 @@
+// +build uint uint64
+
+package models
+
+func init() {
+ EnableUintSupport()
+}
diff --git a/vendor/github.com/influxdata/influxdb/pkg/escape/bytes.go b/vendor/github.com/influxdata/influxdb/pkg/escape/bytes.go
new file mode 100644
index 000000000..f3b31f42d
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/pkg/escape/bytes.go
@@ -0,0 +1,115 @@
+// Package escape contains utilities for escaping parts of InfluxQL
+// and InfluxDB line protocol.
+package escape // import "github.com/influxdata/influxdb/pkg/escape"
+
+import (
+ "bytes"
+ "strings"
+)
+
+// Codes is a map of bytes to be escaped.
+var Codes = map[byte][]byte{
+ ',': []byte(`\,`),
+ '"': []byte(`\"`),
+ ' ': []byte(`\ `),
+ '=': []byte(`\=`),
+}
+
+// Bytes escapes characters on the input slice, as defined by Codes.
+func Bytes(in []byte) []byte {
+ for b, esc := range Codes {
+ in = bytes.Replace(in, []byte{b}, esc, -1)
+ }
+ return in
+}
+
+const escapeChars = `," =`
+
+// IsEscaped returns whether b has any escaped characters,
+// i.e. whether b seems to have been processed by Bytes.
+func IsEscaped(b []byte) bool {
+ for len(b) > 0 {
+ i := bytes.IndexByte(b, '\\')
+ if i < 0 {
+ return false
+ }
+
+ if i+1 < len(b) && strings.IndexByte(escapeChars, b[i+1]) >= 0 {
+ return true
+ }
+ b = b[i+1:]
+ }
+ return false
+}
+
+// AppendUnescaped appends the unescaped version of src to dst
+// and returns the resulting slice.
+func AppendUnescaped(dst, src []byte) []byte {
+ var pos int
+ for len(src) > 0 {
+ next := bytes.IndexByte(src[pos:], '\\')
+ if next < 0 || pos+next+1 >= len(src) {
+ return append(dst, src...)
+ }
+
+ if pos+next+1 < len(src) && strings.IndexByte(escapeChars, src[pos+next+1]) >= 0 {
+ if pos+next > 0 {
+ dst = append(dst, src[:pos+next]...)
+ }
+ src = src[pos+next+1:]
+ pos = 0
+ } else {
+ pos += next + 1
+ }
+ }
+
+ return dst
+}
+
+// Unescape returns a new slice containing the unescaped version of in.
+func Unescape(in []byte) []byte {
+ if len(in) == 0 {
+ return nil
+ }
+
+ if bytes.IndexByte(in, '\\') == -1 {
+ return in
+ }
+
+ i := 0
+ inLen := len(in)
+
+ // The output size will be no more than inLen. Preallocating the
+ // capacity of the output is faster and uses less memory than
+ // letting append() do its own (over)allocation.
+ out := make([]byte, 0, inLen)
+
+ for {
+ if i >= inLen {
+ break
+ }
+ if in[i] == '\\' && i+1 < inLen {
+ switch in[i+1] {
+ case ',':
+ out = append(out, ',')
+ i += 2
+ continue
+ case '"':
+ out = append(out, '"')
+ i += 2
+ continue
+ case ' ':
+ out = append(out, ' ')
+ i += 2
+ continue
+ case '=':
+ out = append(out, '=')
+ i += 2
+ continue
+ }
+ }
+ out = append(out, in[i])
+		i++
+ }
+ return out
+}
diff --git a/vendor/github.com/influxdata/influxdb/pkg/escape/strings.go b/vendor/github.com/influxdata/influxdb/pkg/escape/strings.go
new file mode 100644
index 000000000..db98033b0
--- /dev/null
+++ b/vendor/github.com/influxdata/influxdb/pkg/escape/strings.go
@@ -0,0 +1,21 @@
+package escape
+
+import "strings"
+
+var (
+ escaper = strings.NewReplacer(`,`, `\,`, `"`, `\"`, ` `, `\ `, `=`, `\=`)
+ unescaper = strings.NewReplacer(`\,`, `,`, `\"`, `"`, `\ `, ` `, `\=`, `=`)
+)
+
+// UnescapeString returns unescaped version of in.
+func UnescapeString(in string) string {
+ if strings.IndexByte(in, '\\') == -1 {
+ return in
+ }
+ return unescaper.Replace(in)
+}
+
+// String returns the escaped version of in.
+func String(in string) string {
+ return escaper.Replace(in)
+}
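A combined sketch of the byte-level and string-level helpers in this package (sample inputs are arbitrary):

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/pkg/escape"
)

func main() {
	fmt.Println(escape.String("us west"))                   // us\ west
	fmt.Println(escape.UnescapeString(`us\ west`))          // us west
	fmt.Println(string(escape.Bytes([]byte("a=b,c"))))      // a\=b\,c
	fmt.Println(string(escape.Unescape([]byte(`a\=b\,c`)))) // a=b,c
}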
diff --git a/vendor/github.com/rcrowley/go-metrics/json.go b/vendor/github.com/rcrowley/go-metrics/json.go
deleted file mode 100644
index 2fdcbcfbf..000000000
--- a/vendor/github.com/rcrowley/go-metrics/json.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package metrics
-
-import (
- "encoding/json"
- "io"
- "time"
-)
-
-// MarshalJSON returns a byte slice containing a JSON representation of all
-// the metrics in the Registry.
-func (r *StandardRegistry) MarshalJSON() ([]byte, error) {
- data := make(map[string]map[string]interface{})
- r.Each(func(name string, i interface{}) {
- values := make(map[string]interface{})
- switch metric := i.(type) {
- case Counter:
- values["count"] = metric.Count()
- case Gauge:
- values["value"] = metric.Value()
- case GaugeFloat64:
- values["value"] = metric.Value()
- case Healthcheck:
- values["error"] = nil
- metric.Check()
- if err := metric.Error(); nil != err {
- values["error"] = metric.Error().Error()
- }
- case Histogram:
- h := metric.Snapshot()
- ps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
- values["count"] = h.Count()
- values["min"] = h.Min()
- values["max"] = h.Max()
- values["mean"] = h.Mean()
- values["stddev"] = h.StdDev()
- values["median"] = ps[0]
- values["75%"] = ps[1]
- values["95%"] = ps[2]
- values["99%"] = ps[3]
- values["99.9%"] = ps[4]
- case Meter:
- m := metric.Snapshot()
- values["count"] = m.Count()
- values["1m.rate"] = m.Rate1()
- values["5m.rate"] = m.Rate5()
- values["15m.rate"] = m.Rate15()
- values["mean.rate"] = m.RateMean()
- case Timer:
- t := metric.Snapshot()
- ps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
- values["count"] = t.Count()
- values["min"] = t.Min()
- values["max"] = t.Max()
- values["mean"] = t.Mean()
- values["stddev"] = t.StdDev()
- values["median"] = ps[0]
- values["75%"] = ps[1]
- values["95%"] = ps[2]
- values["99%"] = ps[3]
- values["99.9%"] = ps[4]
- values["1m.rate"] = t.Rate1()
- values["5m.rate"] = t.Rate5()
- values["15m.rate"] = t.Rate15()
- values["mean.rate"] = t.RateMean()
- }
- data[name] = values
- })
- return json.Marshal(data)
-}
-
-// WriteJSON writes metrics from the given registry periodically to the
-// specified io.Writer as JSON.
-func WriteJSON(r Registry, d time.Duration, w io.Writer) {
- for _ = range time.Tick(d) {
- WriteJSONOnce(r, w)
- }
-}
-
-// WriteJSONOnce writes metrics from the given registry to the specified
-// io.Writer as JSON.
-func WriteJSONOnce(r Registry, w io.Writer) {
- json.NewEncoder(w).Encode(r)
-}
-
-func (p *PrefixedRegistry) MarshalJSON() ([]byte, error) {
- return json.Marshal(p.underlying)
-}
diff --git a/vendor/github.com/rcrowley/go-metrics/metrics.go b/vendor/github.com/rcrowley/go-metrics/metrics.go
deleted file mode 100644
index b97a49ed1..000000000
--- a/vendor/github.com/rcrowley/go-metrics/metrics.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Go port of Coda Hale's Metrics library
-//
-// <https://github.com/rcrowley/go-metrics>
-//
-// Coda Hale's original work: <https://github.com/codahale/metrics>
-package metrics
-
-// UseNilMetrics is checked by the constructor functions for all of the
-// standard metrics. If it is true, the metric returned is a stub.
-//
-// This global kill-switch helps quantify the observer effect and makes
-// for less cluttered pprof profiles.
-var UseNilMetrics bool = false
diff --git a/vendor/github.com/rjeczalik/notify/watcher_fsevents_cgo.go b/vendor/github.com/rjeczalik/notify/watcher_fsevents_cgo.go
index 2248a1b12..a2b332a2e 100644
--- a/vendor/github.com/rjeczalik/notify/watcher_fsevents_cgo.go
+++ b/vendor/github.com/rjeczalik/notify/watcher_fsevents_cgo.go
@@ -48,7 +48,7 @@ var wg sync.WaitGroup // used to wait until the runloop starts
// started and is ready via the wg. It also serves purpose of a dummy source,
// thanks to it the runloop does not return as it also has at least one source
// registered.
-var source = C.CFRunLoopSourceCreate(refZero, 0, &C.CFRunLoopSourceContext{
+var source = C.CFRunLoopSourceCreate(nil, 0, &C.CFRunLoopSourceContext{
perform: (C.CFRunLoopPerformCallBack)(C.gosource),
})
@@ -162,8 +162,8 @@ func (s *stream) Start() error {
return nil
}
wg.Wait()
- p := C.CFStringCreateWithCStringNoCopy(refZero, C.CString(s.path), C.kCFStringEncodingUTF8, refZero)
- path := C.CFArrayCreate(refZero, (*unsafe.Pointer)(unsafe.Pointer(&p)), 1, nil)
+ p := C.CFStringCreateWithCStringNoCopy(nil, C.CString(s.path), C.kCFStringEncodingUTF8, nil)
+ path := C.CFArrayCreate(nil, (*unsafe.Pointer)(unsafe.Pointer(&p)), 1, nil)
ctx := C.FSEventStreamContext{}
ref := C.EventStreamCreate(&ctx, C.uintptr_t(s.info), path, C.FSEventStreamEventId(atomic.LoadUint64(&since)), latency, flags)
if ref == nilstream {
diff --git a/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.10.go b/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.10.go
deleted file mode 100644
index 0edd3782f..000000000
--- a/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.10.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2017 The Notify Authors. All rights reserved.
-// Use of this source code is governed by the MIT license that can be
-// found in the LICENSE file.
-
-// +build darwin,!kqueue,go1.10
-
-package notify
-
-const refZero = 0
diff --git a/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.9.go b/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.9.go
deleted file mode 100644
index b81c3c185..000000000
--- a/vendor/github.com/rjeczalik/notify/watcher_fsevents_go1.9.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2017 The Notify Authors. All rights reserved.
-// Use of this source code is governed by the MIT license that can be
-// found in the LICENSE file.
-
-// +build darwin,!kqueue,cgo,!go1.10
-
-package notify
-
-/*
-#include <CoreServices/CoreServices.h>
-*/
-import "C"
-
-var refZero = (*C.struct___CFAllocator)(nil)
diff --git a/vendor/github.com/syndtr/goleveldb/.travis.yml b/vendor/github.com/syndtr/goleveldb/.travis.yml
deleted file mode 100644
index 82de37735..000000000
--- a/vendor/github.com/syndtr/goleveldb/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-language: go
-
-go:
- - 1.4
- - 1.5
- - 1.6
- - 1.7
- - tip
-
-script:
- - go test -timeout 1h ./...
- - go test -timeout 30m -race -run "TestDB_(Concurrent|GoleveldbIssue74)" ./leveldb
diff --git a/vendor/github.com/syndtr/goleveldb/README.md b/vendor/github.com/syndtr/goleveldb/README.md
index 259286f55..41a47614c 100644
--- a/vendor/github.com/syndtr/goleveldb/README.md
+++ b/vendor/github.com/syndtr/goleveldb/README.md
@@ -10,13 +10,15 @@ Installation
Requirements
-----------
-* Need at least `go1.4` or newer.
+* Need at least `go1.5` or newer.
Usage
-----------
Create or open a database:
```go
+// The returned DB instance is safe for concurrent use, which means that all
+// of the DB's methods may be called concurrently from multiple goroutines.
db, err := leveldb.OpenFile("path/to/db", nil)
...
defer db.Close()
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/db.go b/vendor/github.com/syndtr/goleveldb/leveldb/db.go
index b0cdcb3d0..3655418ad 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/db.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/db.go
@@ -32,6 +32,11 @@ type DB struct {
// Need 64-bit alignment.
seq uint64
+ // Stats. Need 64-bit alignment.
+ cWriteDelay int64 // The cumulative duration of write delays
+ cWriteDelayN int32 // The cumulative number of write delays
+ aliveSnaps, aliveIters int32
+
// Session.
s *session
@@ -49,9 +54,6 @@ type DB struct {
snapsMu sync.Mutex
snapsList *list.List
- // Stats.
- aliveSnaps, aliveIters int32
-
// Write.
batchPool sync.Pool
writeMergeC chan writeMerge
@@ -321,7 +323,7 @@ func recoverTable(s *session, o *opt.Options) error {
}
}
err = iter.Error()
- if err != nil {
+ if err != nil && !errors.IsCorrupted(err) {
return
}
err = tw.Close()
@@ -392,7 +394,7 @@ func recoverTable(s *session, o *opt.Options) error {
}
imax = append(imax[:0], key...)
}
- if err := iter.Error(); err != nil {
+ if err := iter.Error(); err != nil && !errors.IsCorrupted(err) {
iter.Release()
return err
}
@@ -904,6 +906,10 @@ func (db *DB) GetSnapshot() (*Snapshot, error) {
// Returns the number of files at level 'n'.
// leveldb.stats
// Returns statistics of the underlying DB.
+// leveldb.iostats
+// Returns statistics of effective disk read and write.
+// leveldb.writedelay
+// Returns cumulative write delay caused by compaction.
// leveldb.sstables
// Returns sstables list for each level.
// leveldb.blockpool
@@ -955,6 +961,13 @@ func (db *DB) GetProperty(name string) (value string, err error) {
level, len(tables), float64(tables.size())/1048576.0, duration.Seconds(),
float64(read)/1048576.0, float64(write)/1048576.0)
}
+ case p == "iostats":
+ value = fmt.Sprintf("Read(MB):%.5f Write(MB):%.5f",
+ float64(db.s.stor.reads())/1048576.0,
+ float64(db.s.stor.writes())/1048576.0)
+ case p == "writedelay":
+ writeDelayN, writeDelay := atomic.LoadInt32(&db.cWriteDelayN), time.Duration(atomic.LoadInt64(&db.cWriteDelay))
+ value = fmt.Sprintf("DelayN:%d Delay:%s", writeDelayN, writeDelay)
case p == "sstables":
for level, tables := range v.levels {
value += fmt.Sprintf("--- level %d ---\n", level)
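
The two new counters surface through the existing DB.GetProperty API under the names listed in the doc comment above. A minimal sketch of reading them; the database path is an illustrative choice of my own:

```go
package main

import (
	"fmt"
	"log"

	"github.com/syndtr/goleveldb/leveldb"
)

func main() {
	// Open (or create) a database; the path is illustrative only.
	db, err := leveldb.OpenFile("/tmp/example-db", nil)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Read the properties added by this change.
	for _, name := range []string{"leveldb.iostats", "leveldb.writedelay"} {
		value, err := db.GetProperty(name)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s: %s\n", name, value)
	}
}
```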
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/db_write.go b/vendor/github.com/syndtr/goleveldb/leveldb/db_write.go
index 5b6cb487d..31f4bc5ef 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/db_write.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/db_write.go
@@ -7,6 +7,7 @@
package leveldb
import (
+ "sync/atomic"
"time"
"github.com/syndtr/goleveldb/leveldb/memdb"
@@ -117,6 +118,8 @@ func (db *DB) flush(n int) (mdb *memDB, mdbFree int, err error) {
db.writeDelayN++
} else if db.writeDelayN > 0 {
db.logf("db@write was delayed N·%d T·%v", db.writeDelayN, db.writeDelay)
+ atomic.AddInt32(&db.cWriteDelayN, int32(db.writeDelayN))
+ atomic.AddInt64(&db.cWriteDelay, int64(db.writeDelay))
db.writeDelay = 0
db.writeDelayN = 0
}
@@ -143,7 +146,7 @@ func (db *DB) unlockWrite(overflow bool, merged int, err error) {
}
}
-// ourBatch if defined should equal with batch.
+// ourBatch is a batch that we can modify.
func (db *DB) writeLocked(batch, ourBatch *Batch, merge, sync bool) error {
// Try to flush memdb. This method would also trying to throttle writes
// if it is too fast and compaction cannot catch-up.
@@ -212,6 +215,11 @@ func (db *DB) writeLocked(batch, ourBatch *Batch, merge, sync bool) error {
}
}
+ // Release ourBatch if any.
+ if ourBatch != nil {
+ defer db.batchPool.Put(ourBatch)
+ }
+
// Seq number.
seq := db.seq + 1
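
The hunk above folds the per-burst writeDelay/writeDelayN counters into the new cumulative atomics before resetting them. A standalone sketch of that accumulate-and-reset pattern, with names of my own invention rather than goleveldb's:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// delayStats mirrors the cWriteDelay/cWriteDelayN idea: cumulative counters
// that other goroutines read atomically, fed from per-burst tallies.
type delayStats struct {
	// Cumulative counters; kept first so the int64 stays 64-bit aligned.
	cumulativeDelay int64 // nanoseconds
	cumulativeN     int32

	// Per-burst tallies, touched only by the writing goroutine.
	burstN     int32
	burstDelay time.Duration
}

func (s *delayStats) record(d time.Duration) {
	s.burstN++
	s.burstDelay += d
}

// flush folds the burst counters into the cumulative ones and resets them,
// as the flush() hunk above does when a delayed write burst ends.
func (s *delayStats) flush() {
	atomic.AddInt32(&s.cumulativeN, s.burstN)
	atomic.AddInt64(&s.cumulativeDelay, int64(s.burstDelay))
	s.burstN, s.burstDelay = 0, 0
}

func main() {
	var s delayStats
	s.record(3 * time.Millisecond)
	s.record(7 * time.Millisecond)
	s.flush()
	fmt.Printf("DelayN:%d Delay:%s\n",
		atomic.LoadInt32(&s.cumulativeN),
		time.Duration(atomic.LoadInt64(&s.cumulativeDelay)))
}
```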
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/iterator/iter.go b/vendor/github.com/syndtr/goleveldb/leveldb/iterator/iter.go
index 3b5553274..b16e3a704 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/iterator/iter.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/iterator/iter.go
@@ -88,7 +88,7 @@ type Iterator interface {
// its contents may change on the next call to any 'seeks method'.
Key() []byte
- // Value returns the key of the current key/value pair, or nil if done.
+ // Value returns the value of the current key/value pair, or nil if done.
// The caller should not modify the contents of the returned slice, and
// its contents may change on the next call to any 'seeks method'.
Value() []byte
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/memdb/memdb.go b/vendor/github.com/syndtr/goleveldb/leveldb/memdb/memdb.go
index 18a19ed42..b661c08a9 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/memdb/memdb.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/memdb/memdb.go
@@ -329,7 +329,7 @@ func (p *DB) Delete(key []byte) error {
h := p.nodeData[node+nHeight]
for i, n := range p.prevNode[:h] {
- m := n + 4 + i
+ m := n + nNext + i
p.nodeData[m] = p.nodeData[p.nodeData[m]+nNext+i]
}
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/session.go b/vendor/github.com/syndtr/goleveldb/leveldb/session.go
index ad68a8703..3f391f934 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/session.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/session.go
@@ -42,7 +42,7 @@ type session struct {
stTempFileNum int64
stSeqNum uint64 // last mem compacted seq; need external synchronization
- stor storage.Storage
+ stor *iStorage
storLock storage.Locker
o *cachedOptions
icmp *iComparer
@@ -68,7 +68,7 @@ func newSession(stor storage.Storage, o *opt.Options) (s *session, err error) {
return
}
s = &session{
- stor: stor,
+ stor: newIStorage(stor),
storLock: storLock,
fileRef: make(map[int64]int),
}
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/storage.go b/vendor/github.com/syndtr/goleveldb/leveldb/storage.go
new file mode 100644
index 000000000..d45fb5dfe
--- /dev/null
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/storage.go
@@ -0,0 +1,63 @@
+package leveldb
+
+import (
+ "github.com/syndtr/goleveldb/leveldb/storage"
+ "sync/atomic"
+)
+
+type iStorage struct {
+ storage.Storage
+ read uint64
+ write uint64
+}
+
+func (c *iStorage) Open(fd storage.FileDesc) (storage.Reader, error) {
+ r, err := c.Storage.Open(fd)
+ return &iStorageReader{r, c}, err
+}
+
+func (c *iStorage) Create(fd storage.FileDesc) (storage.Writer, error) {
+ w, err := c.Storage.Create(fd)
+ return &iStorageWriter{w, c}, err
+}
+
+func (c *iStorage) reads() uint64 {
+ return atomic.LoadUint64(&c.read)
+}
+
+func (c *iStorage) writes() uint64 {
+ return atomic.LoadUint64(&c.write)
+}
+
+// newIStorage returns the given storage wrapped by iStorage.
+func newIStorage(s storage.Storage) *iStorage {
+ return &iStorage{s, 0, 0}
+}
+
+type iStorageReader struct {
+ storage.Reader
+ c *iStorage
+}
+
+func (r *iStorageReader) Read(p []byte) (n int, err error) {
+ n, err = r.Reader.Read(p)
+ atomic.AddUint64(&r.c.read, uint64(n))
+ return n, err
+}
+
+func (r *iStorageReader) ReadAt(p []byte, off int64) (n int, err error) {
+ n, err = r.Reader.ReadAt(p, off)
+ atomic.AddUint64(&r.c.read, uint64(n))
+ return n, err
+}
+
+type iStorageWriter struct {
+ storage.Writer
+ c *iStorage
+}
+
+func (w *iStorageWriter) Write(p []byte) (n int, err error) {
+ n, err = w.Writer.Write(p)
+ atomic.AddUint64(&w.c.write, uint64(n))
+ return n, err
+}
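
The new iStorage type is a thin decorator over storage.Storage that counts effective read and write bytes with sync/atomic; those counters feed the leveldb.iostats property above. Since iStorage is unexported, here is a self-contained sketch of the same decorator pattern over io.Writer, with names that are mine rather than goleveldb's:

```go
package main

import (
	"fmt"
	"io"
	"os"
	"sync/atomic"
)

// countingWriter wraps an io.Writer and tallies bytes written, mirroring
// how iStorageWriter wraps storage.Writer above.
type countingWriter struct {
	w io.Writer
	n uint64
}

func (c *countingWriter) Write(p []byte) (int, error) {
	n, err := c.w.Write(p)
	atomic.AddUint64(&c.n, uint64(n))
	return n, err
}

func (c *countingWriter) written() uint64 { return atomic.LoadUint64(&c.n) }

func main() {
	cw := &countingWriter{w: os.Stdout}
	fmt.Fprintln(cw, "hello, counting writer")
	fmt.Fprintf(os.Stderr, "wrote %d bytes\n", cw.written())
}
```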
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/storage/file_storage_plan9.go b/vendor/github.com/syndtr/goleveldb/leveldb/storage/file_storage_plan9.go
index bab62bfce..b82979801 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/storage/file_storage_plan9.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/storage/file_storage_plan9.go
@@ -8,7 +8,6 @@ package storage
import (
"os"
- "path/filepath"
)
type plan9FileLock struct {
@@ -48,8 +47,7 @@ func rename(oldpath, newpath string) error {
}
}
- _, fname := filepath.Split(newpath)
- return os.Rename(oldpath, fname)
+ return os.Rename(oldpath, newpath)
}
func syncDir(name string) error {
diff --git a/vendor/github.com/syndtr/goleveldb/leveldb/util/util.go b/vendor/github.com/syndtr/goleveldb/leveldb/util/util.go
index f35976865..80614afc5 100644
--- a/vendor/github.com/syndtr/goleveldb/leveldb/util/util.go
+++ b/vendor/github.com/syndtr/goleveldb/leveldb/util/util.go
@@ -19,7 +19,7 @@ var (
// Releaser is the interface that wraps the basic Release method.
type Releaser interface {
// Release releases associated resources. Release should always success
- // and can be called multipe times without causing error.
+ // and can be called multiple times without causing error.
Release()
}
diff --git a/vendor/gopkg.in/olebedev/go-duktape.v3/api.go b/vendor/gopkg.in/olebedev/go-duktape.v3/api.go
index 6617ec23d..924b5c32f 100644
--- a/vendor/gopkg.in/olebedev/go-duktape.v3/api.go
+++ b/vendor/gopkg.in/olebedev/go-duktape.v3/api.go
@@ -1,8 +1,8 @@
package duktape
/*
-#cgo !windows CFLAGS: -std=c99 -O3 -Wall -fomit-frame-pointer -fstrict-aliasing
-#cgo windows CFLAGS: -O3 -Wall -fomit-frame-pointer -fstrict-aliasing
+#cgo !windows CFLAGS: -std=c99 -O3 -Wall -Wno-unused-value -fomit-frame-pointer -fstrict-aliasing
+#cgo windows CFLAGS: -O3 -Wall -Wno-unused-value -fomit-frame-pointer -fstrict-aliasing
#include "duktape.h"
#include "duk_logging.h"
diff --git a/vendor/gopkg.in/olebedev/go-duktape.v3/duktape.go b/vendor/gopkg.in/olebedev/go-duktape.v3/duktape.go
index f0806fcae..83a7a8087 100644
--- a/vendor/gopkg.in/olebedev/go-duktape.v3/duktape.go
+++ b/vendor/gopkg.in/olebedev/go-duktape.v3/duktape.go
@@ -5,6 +5,7 @@ package duktape
#cgo windows CFLAGS: -O3 -Wall -fomit-frame-pointer -fstrict-aliasing
#cgo linux LDFLAGS: -lm
#cgo freebsd LDFLAGS: -lm
+#cgo openbsd LDFLAGS: -lm
#include "duktape.h"
#include "duk_logging.h"
diff --git a/vendor/vendor.json b/vendor/vendor.json
index 6962485d1..bf55bc437 100644
--- a/vendor/vendor.json
+++ b/vendor/vendor.json
@@ -207,6 +207,30 @@
"revisionTime": "2016-12-24T10:41:01Z"
},
{
+ "checksumSHA1": "6tNwbL5tUS0dxYzADKVZtI2d/lE=",
+ "path": "github.com/influxdata/influxdb/client",
+ "revision": "a55dd0f50edd14c9c798d3564189eb4f53914309",
+ "revisionTime": "2017-10-09T17:24:46Z"
+ },
+ {
+ "checksumSHA1": "O4XpbSNeUhSIMD2FWtQximJiFIs=",
+ "path": "github.com/influxdata/influxdb/client/v2",
+ "revision": "b36b9f109f2da91c8941679caf5356e08eee0b2b",
+ "revisionTime": "2018-01-17T01:42:09Z"
+ },
+ {
+ "checksumSHA1": "cfumoC9gHEUROd+fA8qK3WLFAZQ=",
+ "path": "github.com/influxdata/influxdb/models",
+ "revision": "b36b9f109f2da91c8941679caf5356e08eee0b2b",
+ "revisionTime": "2018-01-17T01:42:09Z"
+ },
+ {
+ "checksumSHA1": "Z0Bb5PWa5WL/j5Dm2KJCLGn1l7U=",
+ "path": "github.com/influxdata/influxdb/pkg/escape",
+ "revision": "01288bdb0883a01cac999326bd34421b29acaec8",
+ "revisionTime": "2018-02-21T22:33:40Z"
+ },
+ {
"checksumSHA1": "vTGKMIfiMwz43y5bsgx9PrL+AVw=",
"path": "github.com/jackpal/go-nat-pmp",
"revision": "1fa385a6f45828c83361136b45b1a21a12139493",
@@ -310,22 +334,10 @@
"revisionTime": "2017-08-14T17:01:13Z"
},
{
- "checksumSHA1": "KAzbLjI9MzW2tjfcAsK75lVRp6I=",
- "path": "github.com/rcrowley/go-metrics",
- "revision": "1f30fe9094a513ce4c700b9a54458bbb0c96996c",
- "revisionTime": "2016-11-28T21:05:44Z"
- },
- {
- "checksumSHA1": "q/d9nXRQYKEJ/EWn+5y6jL8rPGs=",
- "path": "github.com/rcrowley/go-metrics/exp",
- "revision": "1f30fe9094a513ce4c700b9a54458bbb0c96996c",
- "revisionTime": "2016-11-28T21:05:44Z"
- },
- {
- "checksumSHA1": "1ESHllhZOIBg7MnlGHUdhz047bI=",
+ "checksumSHA1": "28UVHMmHx0iqO0XiJsjx+fwILyI=",
"path": "github.com/rjeczalik/notify",
- "revision": "27b537f07230b3f917421af6dcf044038dbe57e2",
- "revisionTime": "2018-01-03T13:19:05Z"
+ "revision": "c31e5f2cb22b3e4ef3f882f413847669bf2652b9",
+ "revisionTime": "2018-02-03T14:01:15Z"
},
{
"checksumSHA1": "5uqO4ITTDMklKi3uNaE/D9LQ5nM=",
@@ -394,76 +406,76 @@
"revisionTime": "2017-07-05T02:17:15Z"
},
{
- "checksumSHA1": "yHbyLpI/Meh0DGrmi8x6FrDxxUY=",
+ "checksumSHA1": "3QsnhPTXGytTbW3uDvQLgSo9s9M=",
"path": "github.com/syndtr/goleveldb/leveldb",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "EKIow7XkgNdWvR/982ffIZxKG8Y=",
"path": "github.com/syndtr/goleveldb/leveldb/cache",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "5KPgnvCPlR0ysDAqo6jApzRQ3tw=",
"path": "github.com/syndtr/goleveldb/leveldb/comparer",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "1DRAxdlWzS4U0xKN/yQ/fdNN7f0=",
"path": "github.com/syndtr/goleveldb/leveldb/errors",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "eqKeD6DS7eNCtxVYZEHHRKkyZrw=",
"path": "github.com/syndtr/goleveldb/leveldb/filter",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
- "checksumSHA1": "8dXuAVIsbtaMiGGuHjzGR6Ny/5c=",
+ "checksumSHA1": "weSsccMav4BCerDpSLzh3mMxAYo=",
"path": "github.com/syndtr/goleveldb/leveldb/iterator",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "gJY7bRpELtO0PJpZXgPQ2BYFJ88=",
"path": "github.com/syndtr/goleveldb/leveldb/journal",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
- "checksumSHA1": "j+uaQ6DwJ50dkIdfMQu1TXdlQcY=",
+ "checksumSHA1": "MtYY1b2234y/MlS+djL8tXVAcQs=",
"path": "github.com/syndtr/goleveldb/leveldb/memdb",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "UmQeotV+m8/FduKEfLOhjdp18rs=",
"path": "github.com/syndtr/goleveldb/leveldb/opt",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
- "checksumSHA1": "tQ2AqXXAEy9icbZI9dLVdZGvWMw=",
+ "checksumSHA1": "QCSae2ub87f8awH+PKMpd8ZYOtg=",
"path": "github.com/syndtr/goleveldb/leveldb/storage",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "gWFPMz8OQeul0t54RM66yMTX49g=",
"path": "github.com/syndtr/goleveldb/leveldb/table",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
- "checksumSHA1": "4zil8Gwg8VPkDn1YzlgCvtukJFU=",
+ "checksumSHA1": "V/Dh7NV0/fy/5jX1KaAjmGcNbzI=",
"path": "github.com/syndtr/goleveldb/leveldb/util",
- "revision": "b89cc31ef7977104127d34c1bd31ebd1a9db2199",
- "revisionTime": "2017-07-25T06:48:36Z"
+ "revision": "169b1b37be738edb2813dab48c97a549bcf99bb5",
+ "revisionTime": "2018-03-07T11:33:52Z"
},
{
"checksumSHA1": "TT1rac6kpQp2vz24m5yDGUNQ/QQ=",
@@ -730,10 +742,10 @@
"revisionTime": "2016-06-21T03:49:01Z"
},
{
- "checksumSHA1": "YvuEbc0a032zr+BTp/YbBQojiuY=",
+ "checksumSHA1": "vndxs5Tv09/8S+Dr2PBAiM557lI=",
"path": "gopkg.in/olebedev/go-duktape.v3",
- "revision": "9af39127cb024b355a6a0221769f6ddfe3f542e7",
- "revisionTime": "2017-12-20T12:19:14Z"
+ "revision": "abf0ba0be5d5d36b1f9266463cc320b9a5ab224e",
+ "revisionTime": "2018-03-02T12:15:09Z"
},
{
"checksumSHA1": "4BwmmgQUhWtizsR2soXND0nqZ1I=",
diff --git a/whisper/mailserver/mailserver.go b/whisper/mailserver/mailserver.go
index 0ec6ec570..57e6505ad 100644
--- a/whisper/mailserver/mailserver.go
+++ b/whisper/mailserver/mailserver.go
@@ -17,7 +17,6 @@
package mailserver
import (
- "bytes"
"encoding/binary"
"fmt"
@@ -26,7 +25,7 @@ import (
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/rlp"
- whisper "github.com/ethereum/go-ethereum/whisper/whisperv5"
+ whisper "github.com/ethereum/go-ethereum/whisper/whisperv6"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/util"
)
@@ -108,17 +107,16 @@ func (s *WMailServer) DeliverMail(peer *whisper.Peer, request *whisper.Envelope)
return
}
- ok, lower, upper, topic := s.validateRequest(peer.ID(), request)
+ ok, lower, upper, bloom := s.validateRequest(peer.ID(), request)
if ok {
- s.processRequest(peer, lower, upper, topic)
+ s.processRequest(peer, lower, upper, bloom)
}
}
-func (s *WMailServer) processRequest(peer *whisper.Peer, lower, upper uint32, topic whisper.TopicType) []*whisper.Envelope {
+func (s *WMailServer) processRequest(peer *whisper.Peer, lower, upper uint32, bloom []byte) []*whisper.Envelope {
ret := make([]*whisper.Envelope, 0)
var err error
var zero common.Hash
- var empty whisper.TopicType
kl := NewDbKey(lower, zero)
ku := NewDbKey(upper, zero)
i := s.db.NewIterator(&util.Range{Start: kl.raw, Limit: ku.raw}, nil)
@@ -131,7 +129,7 @@ func (s *WMailServer) processRequest(peer *whisper.Peer, lower, upper uint32, to
log.Error(fmt.Sprintf("RLP decoding failed: %s", err))
}
- if topic == empty || envelope.Topic == topic {
+ if whisper.BloomFilterMatch(bloom, envelope.Bloom()) {
if peer == nil {
// used for test purposes
ret = append(ret, &envelope)
@@ -153,39 +151,45 @@ func (s *WMailServer) processRequest(peer *whisper.Peer, lower, upper uint32, to
return ret
}
-func (s *WMailServer) validateRequest(peerID []byte, request *whisper.Envelope) (bool, uint32, uint32, whisper.TopicType) {
- var topic whisper.TopicType
+func (s *WMailServer) validateRequest(peerID []byte, request *whisper.Envelope) (bool, uint32, uint32, []byte) {
if s.pow > 0.0 && request.PoW() < s.pow {
- return false, 0, 0, topic
+ return false, 0, 0, nil
}
f := whisper.Filter{KeySym: s.key}
decrypted := request.Open(&f)
if decrypted == nil {
log.Warn(fmt.Sprintf("Failed to decrypt p2p request"))
- return false, 0, 0, topic
- }
-
- if len(decrypted.Payload) < 8 {
- log.Warn(fmt.Sprintf("Undersized p2p request"))
- return false, 0, 0, topic
+ return false, 0, 0, nil
}
src := crypto.FromECDSAPub(decrypted.Src)
if len(src)-len(peerID) == 1 {
src = src[1:]
}
- if !bytes.Equal(peerID, src) {
+
+ // If you want to check the signature, you can do it here, e.g.:
+ // if !bytes.Equal(peerID, src) {
+ if src == nil {
log.Warn(fmt.Sprintf("Wrong signature of p2p request"))
- return false, 0, 0, topic
+ return false, 0, 0, nil
}
- lower := binary.BigEndian.Uint32(decrypted.Payload[:4])
- upper := binary.BigEndian.Uint32(decrypted.Payload[4:8])
-
- if len(decrypted.Payload) >= 8+whisper.TopicLength {
- topic = whisper.BytesToTopic(decrypted.Payload[8:])
+ var bloom []byte
+ payloadSize := len(decrypted.Payload)
+ if payloadSize < 8 {
+ log.Warn(fmt.Sprintf("Undersized p2p request"))
+ return false, 0, 0, nil
+ } else if payloadSize == 8 {
+ bloom = whisper.MakeFullNodeBloom()
+ } else if payloadSize < 8+whisper.BloomFilterSize {
+ log.Warn(fmt.Sprintf("Undersized bloom filter in p2p request"))
+ return false, 0, 0, nil
+ } else {
+ bloom = decrypted.Payload[8 : 8+whisper.BloomFilterSize]
}
- return true, lower, upper, topic
+ lower := binary.BigEndian.Uint32(decrypted.Payload[:4])
+ upper := binary.BigEndian.Uint32(decrypted.Payload[4:8])
+ return true, lower, upper, bloom
}
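
With this change a mail request payload is a 4-byte lower bound, a 4-byte upper bound and, optionally, a bloom filter; an 8-byte payload is treated as a full-node bloom. A hedged sketch of assembling such a payload with the whisperv6 helpers used in this diff; the helper name and the sample topic are illustrative only:

```go
package main

import (
	"encoding/binary"
	"fmt"

	whisper "github.com/ethereum/go-ethereum/whisper/whisperv6"
)

// buildMailRequestPayload assembles the plaintext layout that
// validateRequest above parses: lower, upper, then a topic-derived bloom.
func buildMailRequestPayload(lower, upper uint32, topic whisper.TopicType) []byte {
	data := make([]byte, 8)
	binary.BigEndian.PutUint32(data, lower)
	binary.BigEndian.PutUint32(data[4:], upper)
	// Omitting the bloom (an 8-byte payload) would make the server fall
	// back to whisper.MakeFullNodeBloom().
	return append(data, whisper.TopicToBloom(topic)...)
}

func main() {
	payload := buildMailRequestPayload(0, 0xffffffff, whisper.TopicType{0x1F, 0x7E, 0xA1, 0x7F})
	fmt.Printf("payload length: %d bytes\n", len(payload))
}
```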
diff --git a/whisper/mailserver/server_test.go b/whisper/mailserver/server_test.go
index 9155ee85a..d5b993afb 100644
--- a/whisper/mailserver/server_test.go
+++ b/whisper/mailserver/server_test.go
@@ -17,6 +17,7 @@
package mailserver
import (
+ "bytes"
"crypto/ecdsa"
"encoding/binary"
"io/ioutil"
@@ -26,7 +27,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
- whisper "github.com/ethereum/go-ethereum/whisper/whisperv5"
+ whisper "github.com/ethereum/go-ethereum/whisper/whisperv6"
)
const powRequirement = 0.00001
@@ -61,7 +62,7 @@ func generateEnvelope(t *testing.T) *whisper.Envelope {
h := crypto.Keccak256Hash([]byte("test sample data"))
params := &whisper.MessageParams{
KeySym: h[:],
- Topic: whisper.TopicType{},
+ Topic: whisper.TopicType{0x1F, 0x7E, 0xA1, 0x7F},
Payload: []byte("test payload"),
PoW: powRequirement,
WorkTime: 2,
@@ -121,6 +122,7 @@ func deliverTest(t *testing.T, server *WMailServer, env *whisper.Envelope) {
upp: birth + 1,
key: testPeerID,
}
+
singleRequest(t, server, env, p, true)
p.low, p.upp = birth+1, 0xffffffff
@@ -131,14 +133,14 @@ func deliverTest(t *testing.T, server *WMailServer, env *whisper.Envelope) {
p.low = birth - 1
p.upp = birth + 1
- p.topic[0]++
+ p.topic[0] = 0xFF
singleRequest(t, server, env, p, false)
}
func singleRequest(t *testing.T, server *WMailServer, env *whisper.Envelope, p *ServerTestParams, expect bool) {
request := createRequest(t, p)
src := crypto.FromECDSAPub(&p.key.PublicKey)
- ok, lower, upper, topic := server.validateRequest(src, request)
+ ok, lower, upper, bloom := server.validateRequest(src, request)
if !ok {
t.Fatalf("request validation failed, seed: %d.", seed)
}
@@ -148,12 +150,13 @@ func singleRequest(t *testing.T, server *WMailServer, env *whisper.Envelope, p *
if upper != p.upp {
t.Fatalf("request validation failed (upper bound), seed: %d.", seed)
}
- if topic != p.topic {
+ expectedBloom := whisper.TopicToBloom(p.topic)
+ if !bytes.Equal(bloom, expectedBloom) {
t.Fatalf("request validation failed (topic), seed: %d.", seed)
}
var exist bool
- mail := server.processRequest(nil, p.low, p.upp, p.topic)
+ mail := server.processRequest(nil, p.low, p.upp, bloom)
for _, msg := range mail {
if msg.Hash() == env.Hash() {
exist = true
@@ -166,17 +169,19 @@ func singleRequest(t *testing.T, server *WMailServer, env *whisper.Envelope, p *
}
src[0]++
- ok, lower, upper, topic = server.validateRequest(src, request)
- if ok {
- t.Fatalf("request validation false positive, seed: %d (lower: %d, upper: %d).", seed, lower, upper)
+ ok, lower, upper, bloom = server.validateRequest(src, request)
+ if !ok {
+ // request should be valid regardless of signature
+ t.Fatalf("request validation false negative, seed: %d (lower: %d, upper: %d).", seed, lower, upper)
}
}
func createRequest(t *testing.T, p *ServerTestParams) *whisper.Envelope {
- data := make([]byte, 8+whisper.TopicLength)
+ bloom := whisper.TopicToBloom(p.topic)
+ data := make([]byte, 8)
binary.BigEndian.PutUint32(data, p.low)
binary.BigEndian.PutUint32(data[4:], p.upp)
- copy(data[8:], p.topic[:])
+ data = append(data, bloom...)
key, err := shh.GetSymKey(keyID)
if err != nil {
diff --git a/whisper/whisperv2/api.go b/whisper/whisperv2/api.go
deleted file mode 100644
index 5c6d17095..000000000
--- a/whisper/whisperv2/api.go
+++ /dev/null
@@ -1,402 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "encoding/json"
- "fmt"
- "sync"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/common/hexutil"
- "github.com/ethereum/go-ethereum/crypto"
-)
-
-// PublicWhisperAPI provides the whisper RPC service.
-type PublicWhisperAPI struct {
- w *Whisper
-
- messagesMu sync.RWMutex
- messages map[hexutil.Uint]*whisperFilter
-}
-
-type whisperOfflineError struct{}
-
-func (e *whisperOfflineError) Error() string {
- return "whisper is offline"
-}
-
-// whisperOffLineErr is returned when the node doesn't offer the shh service.
-var whisperOffLineErr = new(whisperOfflineError)
-
-// NewPublicWhisperAPI create a new RPC whisper service.
-func NewPublicWhisperAPI(w *Whisper) *PublicWhisperAPI {
- return &PublicWhisperAPI{w: w, messages: make(map[hexutil.Uint]*whisperFilter)}
-}
-
-// Version returns the Whisper version this node offers.
-func (s *PublicWhisperAPI) Version() (hexutil.Uint, error) {
- if s.w == nil {
- return 0, whisperOffLineErr
- }
- return hexutil.Uint(s.w.Version()), nil
-}
-
-// HasIdentity checks if the the whisper node is configured with the private key
-// of the specified public pair.
-func (s *PublicWhisperAPI) HasIdentity(identity string) (bool, error) {
- if s.w == nil {
- return false, whisperOffLineErr
- }
- return s.w.HasIdentity(crypto.ToECDSAPub(common.FromHex(identity))), nil
-}
-
-// NewIdentity generates a new cryptographic identity for the client, and injects
-// it into the known identities for message decryption.
-func (s *PublicWhisperAPI) NewIdentity() (string, error) {
- if s.w == nil {
- return "", whisperOffLineErr
- }
-
- identity := s.w.NewIdentity()
- return common.ToHex(crypto.FromECDSAPub(&identity.PublicKey)), nil
-}
-
-type NewFilterArgs struct {
- To string
- From string
- Topics [][][]byte
-}
-
-// NewWhisperFilter creates and registers a new message filter to watch for inbound whisper messages.
-func (s *PublicWhisperAPI) NewFilter(args NewFilterArgs) (hexutil.Uint, error) {
- if s.w == nil {
- return 0, whisperOffLineErr
- }
-
- var id hexutil.Uint
- filter := Filter{
- To: crypto.ToECDSAPub(common.FromHex(args.To)),
- From: crypto.ToECDSAPub(common.FromHex(args.From)),
- Topics: NewFilterTopics(args.Topics...),
- Fn: func(message *Message) {
- wmsg := NewWhisperMessage(message)
- s.messagesMu.RLock() // Only read lock to the filter pool
- defer s.messagesMu.RUnlock()
- if s.messages[id] != nil {
- s.messages[id].insert(wmsg)
- }
- },
- }
- id = hexutil.Uint(s.w.Watch(filter))
-
- s.messagesMu.Lock()
- s.messages[id] = newWhisperFilter(id, s.w)
- s.messagesMu.Unlock()
-
- return id, nil
-}
-
-// GetFilterChanges retrieves all the new messages matched by a filter since the last retrieval.
-func (s *PublicWhisperAPI) GetFilterChanges(filterId hexutil.Uint) []WhisperMessage {
- s.messagesMu.RLock()
- defer s.messagesMu.RUnlock()
-
- if s.messages[filterId] != nil {
- if changes := s.messages[filterId].retrieve(); changes != nil {
- return changes
- }
- }
- return returnWhisperMessages(nil)
-}
-
-// UninstallFilter disables and removes an existing filter.
-func (s *PublicWhisperAPI) UninstallFilter(filterId hexutil.Uint) bool {
- s.messagesMu.Lock()
- defer s.messagesMu.Unlock()
-
- if _, ok := s.messages[filterId]; ok {
- delete(s.messages, filterId)
- return true
- }
- return false
-}
-
-// GetMessages retrieves all the known messages that match a specific filter.
-func (s *PublicWhisperAPI) GetMessages(filterId hexutil.Uint) []WhisperMessage {
- // Retrieve all the cached messages matching a specific, existing filter
- s.messagesMu.RLock()
- defer s.messagesMu.RUnlock()
-
- var messages []*Message
- if s.messages[filterId] != nil {
- messages = s.messages[filterId].messages()
- }
-
- return returnWhisperMessages(messages)
-}
-
-// returnWhisperMessages converts aNhisper message to a RPC whisper message.
-func returnWhisperMessages(messages []*Message) []WhisperMessage {
- msgs := make([]WhisperMessage, len(messages))
- for i, msg := range messages {
- msgs[i] = NewWhisperMessage(msg)
- }
- return msgs
-}
-
-type PostArgs struct {
- From string `json:"from"`
- To string `json:"to"`
- Topics [][]byte `json:"topics"`
- Payload string `json:"payload"`
- Priority int64 `json:"priority"`
- TTL int64 `json:"ttl"`
-}
-
-// Post injects a message into the whisper network for distribution.
-func (s *PublicWhisperAPI) Post(args PostArgs) (bool, error) {
- if s.w == nil {
- return false, whisperOffLineErr
- }
-
- // construct whisper message with transmission options
- message := NewMessage(common.FromHex(args.Payload))
- options := Options{
- To: crypto.ToECDSAPub(common.FromHex(args.To)),
- TTL: time.Duration(args.TTL) * time.Second,
- Topics: NewTopics(args.Topics...),
- }
-
- // set sender identity
- if len(args.From) > 0 {
- if key := s.w.GetIdentity(crypto.ToECDSAPub(common.FromHex(args.From))); key != nil {
- options.From = key
- } else {
- return false, fmt.Errorf("unknown identity to send from: %s", args.From)
- }
- }
-
- // Wrap and send the message
- pow := time.Duration(args.Priority) * time.Millisecond
- envelope, err := message.Wrap(pow, options)
- if err != nil {
- return false, err
- }
-
- return true, s.w.Send(envelope)
-}
-
-// WhisperMessage is the RPC representation of a whisper message.
-type WhisperMessage struct {
- ref *Message
-
- Payload string `json:"payload"`
- To string `json:"to"`
- From string `json:"from"`
- Sent int64 `json:"sent"`
- TTL int64 `json:"ttl"`
- Hash string `json:"hash"`
-}
-
-func (args *PostArgs) UnmarshalJSON(data []byte) (err error) {
- var obj struct {
- From string `json:"from"`
- To string `json:"to"`
- Topics []string `json:"topics"`
- Payload string `json:"payload"`
- Priority hexutil.Uint64 `json:"priority"`
- TTL hexutil.Uint64 `json:"ttl"`
- }
-
- if err := json.Unmarshal(data, &obj); err != nil {
- return err
- }
-
- args.From = obj.From
- args.To = obj.To
- args.Payload = obj.Payload
- args.Priority = int64(obj.Priority) // TODO(gluk256): handle overflow
- args.TTL = int64(obj.TTL) // ... here too ...
-
- // decode topic strings
- args.Topics = make([][]byte, len(obj.Topics))
- for i, topic := range obj.Topics {
- args.Topics[i] = common.FromHex(topic)
- }
-
- return nil
-}
-
-// UnmarshalJSON implements the json.Unmarshaler interface, invoked to convert a
-// JSON message blob into a WhisperFilterArgs structure.
-func (args *NewFilterArgs) UnmarshalJSON(b []byte) (err error) {
- // Unmarshal the JSON message and sanity check
- var obj struct {
- To interface{} `json:"to"`
- From interface{} `json:"from"`
- Topics interface{} `json:"topics"`
- }
- if err := json.Unmarshal(b, &obj); err != nil {
- return err
- }
-
- // Retrieve the simple data contents of the filter arguments
- if obj.To == nil {
- args.To = ""
- } else {
- argstr, ok := obj.To.(string)
- if !ok {
- return fmt.Errorf("to is not a string")
- }
- args.To = argstr
- }
- if obj.From == nil {
- args.From = ""
- } else {
- argstr, ok := obj.From.(string)
- if !ok {
- return fmt.Errorf("from is not a string")
- }
- args.From = argstr
- }
- // Construct the nested topic array
- if obj.Topics != nil {
- // Make sure we have an actual topic array
- list, ok := obj.Topics.([]interface{})
- if !ok {
- return fmt.Errorf("topics is not an array")
- }
- // Iterate over each topic and handle nil, string or array
- topics := make([][]string, len(list))
- for idx, field := range list {
- switch value := field.(type) {
- case nil:
- topics[idx] = []string{}
-
- case string:
- topics[idx] = []string{value}
-
- case []interface{}:
- topics[idx] = make([]string, len(value))
- for i, nested := range value {
- switch value := nested.(type) {
- case nil:
- topics[idx][i] = ""
-
- case string:
- topics[idx][i] = value
-
- default:
- return fmt.Errorf("topic[%d][%d] is not a string", idx, i)
- }
- }
- default:
- return fmt.Errorf("topic[%d] not a string or array", idx)
- }
- }
-
- topicsDecoded := make([][][]byte, len(topics))
- for i, condition := range topics {
- topicsDecoded[i] = make([][]byte, len(condition))
- for j, topic := range condition {
- topicsDecoded[i][j] = common.FromHex(topic)
- }
- }
-
- args.Topics = topicsDecoded
- }
- return nil
-}
-
-// whisperFilter is the message cache matching a specific filter, accumulating
-// inbound messages until the are requested by the client.
-type whisperFilter struct {
- id hexutil.Uint // Filter identifier for old message retrieval
- ref *Whisper // Whisper reference for old message retrieval
-
- cache []WhisperMessage // Cache of messages not yet polled
- skip map[common.Hash]struct{} // List of retrieved messages to avoid duplication
- update time.Time // Time of the last message query
-
- lock sync.RWMutex // Lock protecting the filter internals
-}
-
-// messages retrieves all the cached messages from the entire pool matching the
-// filter, resetting the filter's change buffer.
-func (w *whisperFilter) messages() []*Message {
- w.lock.Lock()
- defer w.lock.Unlock()
-
- w.cache = nil
- w.update = time.Now()
-
- w.skip = make(map[common.Hash]struct{})
- messages := w.ref.Messages(int(w.id))
- for _, message := range messages {
- w.skip[message.Hash] = struct{}{}
- }
- return messages
-}
-
-// insert injects a new batch of messages into the filter cache.
-func (w *whisperFilter) insert(messages ...WhisperMessage) {
- w.lock.Lock()
- defer w.lock.Unlock()
-
- for _, message := range messages {
- if _, ok := w.skip[message.ref.Hash]; !ok {
- w.cache = append(w.cache, messages...)
- }
- }
-}
-
-// retrieve fetches all the cached messages from the filter.
-func (w *whisperFilter) retrieve() (messages []WhisperMessage) {
- w.lock.Lock()
- defer w.lock.Unlock()
-
- messages, w.cache = w.cache, nil
- w.update = time.Now()
-
- return
-}
-
-// newWhisperFilter creates a new serialized, poll based whisper topic filter.
-func newWhisperFilter(id hexutil.Uint, ref *Whisper) *whisperFilter {
- return &whisperFilter{
- id: id,
- ref: ref,
- update: time.Now(),
- skip: make(map[common.Hash]struct{}),
- }
-}
-
-// NewWhisperMessage converts an internal message into an API version.
-func NewWhisperMessage(message *Message) WhisperMessage {
- return WhisperMessage{
- ref: message,
-
- Payload: common.ToHex(message.Payload),
- From: common.ToHex(crypto.FromECDSAPub(message.Recover())),
- To: common.ToHex(crypto.FromECDSAPub(message.To)),
- Sent: message.Sent.Unix(),
- TTL: int64(message.TTL / time.Second),
- Hash: common.ToHex(message.Hash.Bytes()),
- }
-}
diff --git a/whisper/whisperv2/doc.go b/whisper/whisperv2/doc.go
deleted file mode 100644
index 7252f44b1..000000000
--- a/whisper/whisperv2/doc.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-/*
-Package whisper implements the Whisper PoC-1.
-
-(https://github.com/ethereum/wiki/wiki/Whisper-PoC-1-Protocol-Spec)
-
-Whisper combines aspects of both DHTs and datagram messaging systems (e.g. UDP).
-As such it may be likened and compared to both, not dissimilar to the
-matter/energy duality (apologies to physicists for the blatant abuse of a
-fundamental and beautiful natural principle).
-
-Whisper is a pure identity-based messaging system. Whisper provides a low-level
-(non-application-specific) but easily-accessible API without being based upon
-or prejudiced by the low-level hardware attributes and characteristics,
-particularly the notion of singular endpoints.
-*/
-package whisperv2
diff --git a/whisper/whisperv2/envelope.go b/whisper/whisperv2/envelope.go
deleted file mode 100644
index 9f1c68204..000000000
--- a/whisper/whisperv2/envelope.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Contains the Whisper protocol Envelope element. For formal details please see
-// the specs at https://github.com/ethereum/wiki/wiki/Whisper-PoC-1-Protocol-Spec#envelopes.
-
-package whisperv2
-
-import (
- "crypto/ecdsa"
- "encoding/binary"
- "fmt"
- "math/big"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/common/math"
- "github.com/ethereum/go-ethereum/crypto"
- "github.com/ethereum/go-ethereum/crypto/ecies"
- "github.com/ethereum/go-ethereum/rlp"
-)
-
-// Envelope represents a clear-text data packet to transmit through the Whisper
-// network. Its contents may or may not be encrypted and signed.
-type Envelope struct {
- Expiry uint32 // Whisper protocol specifies int32, really should be int64
- TTL uint32 // ^^^^^^
- Topics []Topic
- Data []byte
- Nonce uint32
-
- hash common.Hash // Cached hash of the envelope to avoid rehashing every time
-}
-
-// NewEnvelope wraps a Whisper message with expiration and destination data
-// included into an envelope for network forwarding.
-func NewEnvelope(ttl time.Duration, topics []Topic, msg *Message) *Envelope {
- return &Envelope{
- Expiry: uint32(time.Now().Add(ttl).Unix()),
- TTL: uint32(ttl.Seconds()),
- Topics: topics,
- Data: msg.bytes(),
- Nonce: 0,
- }
-}
-
-// Seal closes the envelope by spending the requested amount of time as a proof
-// of work on hashing the data.
-func (self *Envelope) Seal(pow time.Duration) {
- d := make([]byte, 64)
- copy(d[:32], self.rlpWithoutNonce())
-
- finish, bestBit := time.Now().Add(pow).UnixNano(), 0
- for nonce := uint32(0); time.Now().UnixNano() < finish; {
- for i := 0; i < 1024; i++ {
- binary.BigEndian.PutUint32(d[60:], nonce)
-
- d := new(big.Int).SetBytes(crypto.Keccak256(d))
- firstBit := math.FirstBitSet(d)
- if firstBit > bestBit {
- self.Nonce, bestBit = nonce, firstBit
- }
- nonce++
- }
- }
-}
-
-// rlpWithoutNonce returns the RLP encoded envelope contents, except the nonce.
-func (self *Envelope) rlpWithoutNonce() []byte {
- enc, _ := rlp.EncodeToBytes([]interface{}{self.Expiry, self.TTL, self.Topics, self.Data})
- return enc
-}
-
-// Open extracts the message contained within a potentially encrypted envelope.
-func (self *Envelope) Open(key *ecdsa.PrivateKey) (msg *Message, err error) {
- // Split open the payload into a message construct
- data := self.Data
-
- message := &Message{
- Flags: data[0],
- Sent: time.Unix(int64(self.Expiry-self.TTL), 0),
- TTL: time.Duration(self.TTL) * time.Second,
- Hash: self.Hash(),
- }
- data = data[1:]
-
- if message.Flags&signatureFlag == signatureFlag {
- if len(data) < signatureLength {
- return nil, fmt.Errorf("unable to open envelope. First bit set but len(data) < len(signature)")
- }
- message.Signature, data = data[:signatureLength], data[signatureLength:]
- }
- message.Payload = data
-
- // Decrypt the message, if requested
- if key == nil {
- return message, nil
- }
- err = message.decrypt(key)
- switch err {
- case nil:
- return message, nil
-
- case ecies.ErrInvalidPublicKey: // Payload isn't encrypted
- return message, err
-
- default:
- return nil, fmt.Errorf("unable to open envelope, decrypt failed: %v", err)
- }
-}
-
-// Hash returns the SHA3 hash of the envelope, calculating it if not yet done.
-func (self *Envelope) Hash() common.Hash {
- if (self.hash == common.Hash{}) {
- enc, _ := rlp.EncodeToBytes(self)
- self.hash = crypto.Keccak256Hash(enc)
- }
- return self.hash
-}
-
-// DecodeRLP decodes an Envelope from an RLP data stream.
-func (self *Envelope) DecodeRLP(s *rlp.Stream) error {
- raw, err := s.Raw()
- if err != nil {
- return err
- }
- // The decoding of Envelope uses the struct fields but also needs
- // to compute the hash of the whole RLP-encoded envelope. This
- // type has the same structure as Envelope but is not an
- // rlp.Decoder so we can reuse the Envelope struct definition.
- type rlpenv Envelope
- if err := rlp.DecodeBytes(raw, (*rlpenv)(self)); err != nil {
- return err
- }
- self.hash = crypto.Keccak256Hash(raw)
- return nil
-}
diff --git a/whisper/whisperv2/envelope_test.go b/whisper/whisperv2/envelope_test.go
deleted file mode 100644
index 490ed9f6f..000000000
--- a/whisper/whisperv2/envelope_test.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "bytes"
- "testing"
- "time"
-
- "github.com/ethereum/go-ethereum/crypto"
- "github.com/ethereum/go-ethereum/crypto/ecies"
-)
-
-func TestEnvelopeOpen(t *testing.T) {
- payload := []byte("hello world")
- message := NewMessage(payload)
-
- envelope, err := message.Wrap(DefaultPoW, Options{})
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- opened, err := envelope.Open(nil)
- if err != nil {
- t.Fatalf("failed to open envelope: %v", err)
- }
- if opened.Flags != message.Flags {
- t.Fatalf("flags mismatch: have %d, want %d", opened.Flags, message.Flags)
- }
- if !bytes.Equal(opened.Signature, message.Signature) {
- t.Fatalf("signature mismatch: have 0x%x, want 0x%x", opened.Signature, message.Signature)
- }
- if !bytes.Equal(opened.Payload, message.Payload) {
- t.Fatalf("payload mismatch: have 0x%x, want 0x%x", opened.Payload, message.Payload)
- }
- if opened.Sent.Unix() != message.Sent.Unix() {
- t.Fatalf("send time mismatch: have %v, want %v", opened.Sent, message.Sent)
- }
- if opened.TTL/time.Second != DefaultTTL/time.Second {
- t.Fatalf("message TTL mismatch: have %v, want %v", opened.TTL, DefaultTTL)
- }
-
- if opened.Hash != envelope.Hash() {
- t.Fatalf("message hash mismatch: have 0x%x, want 0x%x", opened.Hash, envelope.Hash())
- }
-}
-
-func TestEnvelopeAnonymousOpenUntargeted(t *testing.T) {
- payload := []byte("hello envelope")
- envelope, err := NewMessage(payload).Wrap(DefaultPoW, Options{})
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- opened, err := envelope.Open(nil)
- if err != nil {
- t.Fatalf("failed to open envelope: %v", err)
- }
- if opened.To != nil {
- t.Fatalf("recipient mismatch: have 0x%x, want nil", opened.To)
- }
- if !bytes.Equal(opened.Payload, payload) {
- t.Fatalf("payload mismatch: have 0x%x, want 0x%x", opened.Payload, payload)
- }
-}
-
-func TestEnvelopeAnonymousOpenTargeted(t *testing.T) {
- key, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to generate test identity: %v", err)
- }
-
- payload := []byte("hello envelope")
- envelope, err := NewMessage(payload).Wrap(DefaultPoW, Options{
- To: &key.PublicKey,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- opened, err := envelope.Open(nil)
- if err != nil {
- t.Fatalf("failed to open envelope: %v", err)
- }
- if opened.To != nil {
- t.Fatalf("recipient mismatch: have 0x%x, want nil", opened.To)
- }
- if bytes.Equal(opened.Payload, payload) {
- t.Fatalf("payload match, should have been encrypted: 0x%x", opened.Payload)
- }
-}
-
-func TestEnvelopeIdentifiedOpenUntargeted(t *testing.T) {
- key, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to generate test identity: %v", err)
- }
-
- payload := []byte("hello envelope")
- envelope, err := NewMessage(payload).Wrap(DefaultPoW, Options{})
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- opened, err := envelope.Open(key)
- switch err {
- case nil:
- t.Fatalf("envelope opened with bad key: %v", opened)
-
- case ecies.ErrInvalidPublicKey:
- // Ok, key mismatch but opened
-
- default:
- t.Fatalf("failed to open envelope: %v", err)
- }
-
- if opened.To != nil {
- t.Fatalf("recipient mismatch: have 0x%x, want nil", opened.To)
- }
- if !bytes.Equal(opened.Payload, payload) {
- t.Fatalf("payload mismatch: have 0x%x, want 0x%x", opened.Payload, payload)
- }
-}
-
-func TestEnvelopeIdentifiedOpenTargeted(t *testing.T) {
- key, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to generate test identity: %v", err)
- }
-
- payload := []byte("hello envelope")
- envelope, err := NewMessage(payload).Wrap(DefaultPoW, Options{
- To: &key.PublicKey,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- opened, err := envelope.Open(key)
- if err != nil {
- t.Fatalf("failed to open envelope: %v", err)
- }
- if opened.To != nil {
- t.Fatalf("recipient mismatch: have 0x%x, want nil", opened.To)
- }
- if !bytes.Equal(opened.Payload, payload) {
- t.Fatalf("payload mismatch: have 0x%x, want 0x%x", opened.Payload, payload)
- }
-}
diff --git a/whisper/whisperv2/filter.go b/whisper/whisperv2/filter.go
deleted file mode 100644
index 7404859b7..000000000
--- a/whisper/whisperv2/filter.go
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Contains the message filter for fine grained subscriptions.
-
-package whisperv2
-
-import (
- "crypto/ecdsa"
-
- "github.com/ethereum/go-ethereum/event/filter"
-)
-
-// Filter is used to subscribe to specific types of whisper messages.
-type Filter struct {
- To *ecdsa.PublicKey // Recipient of the message
- From *ecdsa.PublicKey // Sender of the message
- Topics [][]Topic // Topics to filter messages with
- Fn func(msg *Message) // Handler in case of a match
-}
-
-// NewFilterTopics creates a 2D topic array used by whisper.Filter from binary
-// data elements.
-func NewFilterTopics(data ...[][]byte) [][]Topic {
- filter := make([][]Topic, len(data))
- for i, condition := range data {
- // Handle the special case of condition == [[]byte{}]
- if len(condition) == 1 && len(condition[0]) == 0 {
- filter[i] = []Topic{}
- continue
- }
- // Otherwise flatten normally
- filter[i] = NewTopics(condition...)
- }
- return filter
-}
-
-// NewFilterTopicsFlat creates a 2D topic array used by whisper.Filter from flat
-// binary data elements.
-func NewFilterTopicsFlat(data ...[]byte) [][]Topic {
- filter := make([][]Topic, len(data))
- for i, element := range data {
- // Only add non-wildcard topics
- filter[i] = make([]Topic, 0, 1)
- if len(element) > 0 {
- filter[i] = append(filter[i], NewTopic(element))
- }
- }
- return filter
-}
-
-// NewFilterTopicsFromStrings creates a 2D topic array used by whisper.Filter
-// from textual data elements.
-func NewFilterTopicsFromStrings(data ...[]string) [][]Topic {
- filter := make([][]Topic, len(data))
- for i, condition := range data {
- // Handle the special case of condition == [""]
- if len(condition) == 1 && condition[0] == "" {
- filter[i] = []Topic{}
- continue
- }
- // Otherwise flatten normally
- filter[i] = NewTopicsFromStrings(condition...)
- }
- return filter
-}
-
-// NewFilterTopicsFromStringsFlat creates a 2D topic array used by whisper.Filter from flat
-// binary data elements.
-func NewFilterTopicsFromStringsFlat(data ...string) [][]Topic {
- filter := make([][]Topic, len(data))
- for i, element := range data {
- // Only add non-wildcard topics
- filter[i] = make([]Topic, 0, 1)
- if element != "" {
- filter[i] = append(filter[i], NewTopicFromString(element))
- }
- }
- return filter
-}
-
-// filterer is the internal, fully initialized filter ready to match inbound
-// messages to a variety of criteria.
-type filterer struct {
- to string // Recipient of the message
- from string // Sender of the message
- matcher *topicMatcher // Topics to filter messages with
- fn func(data interface{}) // Handler in case of a match
-}
-
-// Compare checks if the specified filter matches the current one.
-func (self filterer) Compare(f filter.Filter) bool {
- filter := f.(filterer)
-
- // Check the message sender and recipient
- if len(self.to) > 0 && self.to != filter.to {
- return false
- }
- if len(self.from) > 0 && self.from != filter.from {
- return false
- }
- // Check the topic filtering
- topics := make([]Topic, len(filter.matcher.conditions))
- for i, group := range filter.matcher.conditions {
- // Message should contain a single topic entry, extract
- for topics[i] = range group {
- break
- }
- }
- return self.matcher.Matches(topics)
-}
-
-// Trigger is called when a filter successfully matches an inbound message.
-func (self filterer) Trigger(data interface{}) {
- self.fn(data)
-}
diff --git a/whisper/whisperv2/filter_test.go b/whisper/whisperv2/filter_test.go
deleted file mode 100644
index ffdfd7b34..000000000
--- a/whisper/whisperv2/filter_test.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "bytes"
-
- "testing"
-)
-
-var filterTopicsCreationTests = []struct {
- topics [][]string
- filter [][][4]byte
-}{
- { // Simple topic filter
- topics: [][]string{
- {"abc", "def", "ghi"},
- {"def"},
- {"ghi", "abc"},
- },
- filter: [][][4]byte{
- {{0x4e, 0x03, 0x65, 0x7a}, {0x34, 0x60, 0x7c, 0x9b}, {0x21, 0x41, 0x7d, 0xf9}},
- {{0x34, 0x60, 0x7c, 0x9b}},
- {{0x21, 0x41, 0x7d, 0xf9}, {0x4e, 0x03, 0x65, 0x7a}},
- },
- },
- { // Wild-carded topic filter
- topics: [][]string{
- {"abc", "def", "ghi"},
- {},
- {""},
- {"def"},
- },
- filter: [][][4]byte{
- {{0x4e, 0x03, 0x65, 0x7a}, {0x34, 0x60, 0x7c, 0x9b}, {0x21, 0x41, 0x7d, 0xf9}},
- {},
- {},
- {{0x34, 0x60, 0x7c, 0x9b}},
- },
- },
-}
-
-var filterTopicsCreationFlatTests = []struct {
- topics []string
- filter [][][4]byte
-}{
- { // Simple topic list
- topics: []string{"abc", "def", "ghi"},
- filter: [][][4]byte{
- {{0x4e, 0x03, 0x65, 0x7a}},
- {{0x34, 0x60, 0x7c, 0x9b}},
- {{0x21, 0x41, 0x7d, 0xf9}},
- },
- },
- { // Wild-carded topic list
- topics: []string{"abc", "", "ghi"},
- filter: [][][4]byte{
- {{0x4e, 0x03, 0x65, 0x7a}},
- {},
- {{0x21, 0x41, 0x7d, 0xf9}},
- },
- },
-}
-
-func TestFilterTopicsCreation(t *testing.T) {
- // Check full filter creation
- for i, tt := range filterTopicsCreationTests {
- // Check the textual creation
- filter := NewFilterTopicsFromStrings(tt.topics...)
- if len(filter) != len(tt.topics) {
- t.Errorf("test %d: condition count mismatch: have %v, want %v", i, len(filter), len(tt.topics))
- continue
- }
- for j, condition := range filter {
- if len(condition) != len(tt.filter[j]) {
- t.Errorf("test %d, condition %d: size mismatch: have %v, want %v", i, j, len(condition), len(tt.filter[j]))
- continue
- }
- for k := 0; k < len(condition); k++ {
- if !bytes.Equal(condition[k][:], tt.filter[j][k][:]) {
- t.Errorf("test %d, condition %d, segment %d: filter mismatch: have 0x%x, want 0x%x", i, j, k, condition[k], tt.filter[j][k])
- }
- }
- }
- // Check the binary creation
- binary := make([][][]byte, len(tt.topics))
- for j, condition := range tt.topics {
- binary[j] = make([][]byte, len(condition))
- for k, segment := range condition {
- binary[j][k] = []byte(segment)
- }
- }
- filter = NewFilterTopics(binary...)
- if len(filter) != len(tt.topics) {
- t.Errorf("test %d: condition count mismatch: have %v, want %v", i, len(filter), len(tt.topics))
- continue
- }
- for j, condition := range filter {
- if len(condition) != len(tt.filter[j]) {
- t.Errorf("test %d, condition %d: size mismatch: have %v, want %v", i, j, len(condition), len(tt.filter[j]))
- continue
- }
- for k := 0; k < len(condition); k++ {
- if !bytes.Equal(condition[k][:], tt.filter[j][k][:]) {
- t.Errorf("test %d, condition %d, segment %d: filter mismatch: have 0x%x, want 0x%x", i, j, k, condition[k], tt.filter[j][k])
- }
- }
- }
- }
- // Check flat filter creation
- for i, tt := range filterTopicsCreationFlatTests {
- // Check the textual creation
- filter := NewFilterTopicsFromStringsFlat(tt.topics...)
- if len(filter) != len(tt.topics) {
- t.Errorf("test %d: condition count mismatch: have %v, want %v", i, len(filter), len(tt.topics))
- continue
- }
- for j, condition := range filter {
- if len(condition) != len(tt.filter[j]) {
- t.Errorf("test %d, condition %d: size mismatch: have %v, want %v", i, j, len(condition), len(tt.filter[j]))
- continue
- }
- for k := 0; k < len(condition); k++ {
- if !bytes.Equal(condition[k][:], tt.filter[j][k][:]) {
- t.Errorf("test %d, condition %d, segment %d: filter mismatch: have 0x%x, want 0x%x", i, j, k, condition[k], tt.filter[j][k])
- }
- }
- }
- // Check the binary creation
- binary := make([][]byte, len(tt.topics))
- for j, topic := range tt.topics {
- binary[j] = []byte(topic)
- }
- filter = NewFilterTopicsFlat(binary...)
- if len(filter) != len(tt.topics) {
- t.Errorf("test %d: condition count mismatch: have %v, want %v", i, len(filter), len(tt.topics))
- continue
- }
- for j, condition := range filter {
- if len(condition) != len(tt.filter[j]) {
- t.Errorf("test %d, condition %d: size mismatch: have %v, want %v", i, j, len(condition), len(tt.filter[j]))
- continue
- }
- for k := 0; k < len(condition); k++ {
- if !bytes.Equal(condition[k][:], tt.filter[j][k][:]) {
- t.Errorf("test %d, condition %d, segment %d: filter mismatch: have 0x%x, want 0x%x", i, j, k, condition[k], tt.filter[j][k])
- }
- }
- }
- }
-}
-
-var filterCompareTests = []struct {
- matcher filterer
- message filterer
- match bool
-}{
- { // Wild-card filter matching anything
- matcher: filterer{to: "", from: "", matcher: newTopicMatcher()},
- message: filterer{to: "to", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: true,
- },
- { // Filter matching the to field
- matcher: filterer{to: "to", from: "", matcher: newTopicMatcher()},
- message: filterer{to: "to", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: true,
- },
- { // Filter rejecting the to field
- matcher: filterer{to: "to", from: "", matcher: newTopicMatcher()},
- message: filterer{to: "", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: false,
- },
- { // Filter matching the from field
- matcher: filterer{to: "", from: "from", matcher: newTopicMatcher()},
- message: filterer{to: "to", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: true,
- },
- { // Filter rejecting the from field
- matcher: filterer{to: "", from: "from", matcher: newTopicMatcher()},
- message: filterer{to: "to", from: "", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: false,
- },
- { // Filter matching the topic field
- matcher: filterer{to: "", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- message: filterer{to: "to", from: "from", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- match: true,
- },
- { // Filter rejecting the topic field
- matcher: filterer{to: "", from: "", matcher: newTopicMatcher(NewFilterTopicsFromStringsFlat("topic")...)},
- message: filterer{to: "to", from: "from", matcher: newTopicMatcher()},
- match: false,
- },
-}
-
-func TestFilterCompare(t *testing.T) {
- for i, tt := range filterCompareTests {
- if match := tt.matcher.Compare(tt.message); match != tt.match {
- t.Errorf("test %d: match mismatch: have %v, want %v", i, match, tt.match)
- }
- }
-}
diff --git a/whisper/whisperv2/main.go b/whisper/whisperv2/main.go
deleted file mode 100644
index be4160489..000000000
--- a/whisper/whisperv2/main.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// +build none
-
-// Contains a simple whisper peer setup and self messaging to allow playing
-// around with the protocol and API without a fancy client implementation.
-
-package main
-
-import (
- "fmt"
- "log"
- "os"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/crypto"
- "github.com/ethereum/go-ethereum/logger"
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/p2p/nat"
- "github.com/ethereum/go-ethereum/whisper"
-)
-
-func main() {
- logger.AddLogSystem(logger.NewStdLogSystem(os.Stdout, log.LstdFlags, logger.InfoLevel))
-
- // Generate the peer identity
- key, err := crypto.GenerateKey()
- if err != nil {
- fmt.Printf("Failed to generate peer key: %v.\n", err)
- os.Exit(-1)
- }
- name := common.MakeName("whisper-go", "1.0")
- shh := whisper.New()
-
- // Create an Ethereum peer to communicate through
- server := p2p.Server{
- PrivateKey: key,
- MaxPeers: 10,
- Name: name,
- Protocols: []p2p.Protocol{shh.Protocol()},
- ListenAddr: ":30300",
- NAT: nat.Any(),
- }
- fmt.Println("Starting Ethereum peer...")
- if err := server.Start(); err != nil {
- fmt.Printf("Failed to start Ethereum peer: %v.\n", err)
- os.Exit(1)
- }
-
- // Send a message to self to check that something works
- payload := fmt.Sprintf("Hello world, this is %v. In case you're wondering, the time is %v", name, time.Now())
- if err := selfSend(shh, []byte(payload)); err != nil {
- fmt.Printf("Failed to self message: %v.\n", err)
- os.Exit(-1)
- }
-}
-
-// selfSend wraps a payload into a Whisper envelope and forwards it to itself.
-func selfSend(shh *whisper.Whisper, payload []byte) error {
- ok := make(chan struct{})
-
- // Start watching for self messages, output any arrivals
- id := shh.NewIdentity()
- shh.Watch(whisper.Filter{
- To: &id.PublicKey,
- Fn: func(msg *whisper.Message) {
- fmt.Printf("Message received: %s, signed with 0x%x.\n", string(msg.Payload), msg.Signature)
- close(ok)
- },
- })
- // Wrap the payload and encrypt it
- msg := whisper.NewMessage(payload)
- envelope, err := msg.Wrap(whisper.DefaultPoW, whisper.Options{
- From: id,
- To: &id.PublicKey,
- TTL: whisper.DefaultTTL,
- })
- if err != nil {
- return fmt.Errorf("failed to seal message: %v", err)
- }
- // Dump the message into the system and wait for it to pop back out
- if err := shh.Send(envelope); err != nil {
- return fmt.Errorf("failed to send self-message: %v", err)
- }
- select {
- case <-ok:
- case <-time.After(time.Second):
- return fmt.Errorf("failed to receive message in time")
- }
- return nil
-}
diff --git a/whisper/whisperv2/message.go b/whisper/whisperv2/message.go
deleted file mode 100644
index 66648c3be..000000000
--- a/whisper/whisperv2/message.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Contains the Whisper protocol Message element. For formal details please see
-// the specs at https://github.com/ethereum/wiki/wiki/Whisper-PoC-1-Protocol-Spec#messages.
-
-package whisperv2
-
-import (
- "crypto/ecdsa"
- crand "crypto/rand"
- "fmt"
- "math/rand"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/crypto"
- "github.com/ethereum/go-ethereum/crypto/ecies"
- "github.com/ethereum/go-ethereum/log"
-)
-
-// Message represents an end-user data packet to transmit through the Whisper
-// protocol. These are wrapped into Envelopes that need not be understood by
-// intermediate nodes, just forwarded.
-type Message struct {
- Flags byte // First bit is signature presence, rest reserved and should be random
- Signature []byte
- Payload []byte
-
- Sent time.Time // Time when the message was posted into the network
- TTL time.Duration // Maximum time to live allowed for the message
-
- To *ecdsa.PublicKey // Message recipient (identity used to decode the message)
- Hash common.Hash // Message envelope hash to act as a unique id
-}
-
-// Options specifies the exact way a message should be wrapped into an Envelope.
-type Options struct {
- From *ecdsa.PrivateKey
- To *ecdsa.PublicKey
- TTL time.Duration
- Topics []Topic
-}
-
-// NewMessage creates and initializes a non-signed, non-encrypted Whisper message.
-func NewMessage(payload []byte) *Message {
- // Construct an initial flag set: no signature, rest random
- flags := byte(rand.Intn(256))
- flags &= ^signatureFlag
-
- // Assemble and return the message
- return &Message{
- Flags: flags,
- Payload: payload,
- Sent: time.Now(),
- }
-}
-
-// Wrap bundles the message into an Envelope to transmit over the network.
-//
-// pow (Proof Of Work) controls how much time to spend on hashing the message,
-// inherently controlling its priority through the network (smaller hash, bigger
-// priority).
-//
-// The user can control the amount of identity, privacy and encryption through
-// the options parameter as follows:
-// - options.From == nil && options.To == nil: anonymous broadcast
-// - options.From != nil && options.To == nil: signed broadcast (known sender)
-// - options.From == nil && options.To != nil: encrypted anonymous message
-// - options.From != nil && options.To != nil: encrypted signed message
-func (self *Message) Wrap(pow time.Duration, options Options) (*Envelope, error) {
- // Use the default TTL if none was specified
- if options.TTL == 0 {
- options.TTL = DefaultTTL
- }
- self.TTL = options.TTL
-
- // Sign and encrypt the message if requested
- if options.From != nil {
- if err := self.sign(options.From); err != nil {
- return nil, err
- }
- }
- if options.To != nil {
- if err := self.encrypt(options.To); err != nil {
- return nil, err
- }
- }
- // Wrap the processed message, seal it and return
- envelope := NewEnvelope(options.TTL, options.Topics, self)
- envelope.Seal(pow)
-
- return envelope, nil
-}
-
-// sign calculates and sets the cryptographic signature for the message, also
-// setting the signature flag.
-func (self *Message) sign(key *ecdsa.PrivateKey) (err error) {
- self.Flags |= signatureFlag
- self.Signature, err = crypto.Sign(self.hash(), key)
- return
-}
-
-// Recover retrieves the public key of the message signer.
-func (self *Message) Recover() *ecdsa.PublicKey {
- defer func() { recover() }() // in case of invalid signature
-
- // Short circuit if no signature is present
- if self.Signature == nil {
- return nil
- }
- // Otherwise try and recover the signature
- pub, err := crypto.SigToPub(self.hash(), self.Signature)
- if err != nil {
- log.Error(fmt.Sprintf("Could not get public key from signature: %v", err))
- return nil
- }
- return pub
-}
-
-// encrypt encrypts a message payload with a public key.
-func (self *Message) encrypt(key *ecdsa.PublicKey) (err error) {
- self.Payload, err = ecies.Encrypt(crand.Reader, ecies.ImportECDSAPublic(key), self.Payload, nil, nil)
- return
-}
-
-// decrypt decrypts an encrypted payload with a private key.
-func (self *Message) decrypt(key *ecdsa.PrivateKey) error {
- cleartext, err := ecies.ImportECDSA(key).Decrypt(crand.Reader, self.Payload, nil, nil)
- if err == nil {
- self.Payload = cleartext
- }
- return err
-}
-
-// hash calculates the SHA3 checksum of the message flags and payload.
-func (self *Message) hash() []byte {
- return crypto.Keccak256(append([]byte{self.Flags}, self.Payload...))
-}
-
-// bytes flattens the message contents (flags, signature and payload) into a
-// single binary blob.
-func (self *Message) bytes() []byte {
- return append([]byte{self.Flags}, append(self.Signature, self.Payload...)...)
-}
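Note: a usage sketch of the Wrap semantics documented above, exercising the signed-and-encrypted case (both From and To set). Again this assumes a pre-removal checkout of the whisperv2 package.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/whisper/whisperv2"
)

func main() {
	from, _ := crypto.GenerateKey()
	to, _ := crypto.GenerateKey()

	msg := whisperv2.NewMessage([]byte("hello whisper"))
	env, err := msg.Wrap(whisperv2.DefaultPoW, whisperv2.Options{
		From: from,          // sign with the sender key
		To:   &to.PublicKey, // encrypt to the recipient key
		TTL:  whisperv2.DefaultTTL,
	})
	if err != nil {
		panic(err)
	}
	// The recipient opens the envelope with its private key and can recover
	// the signer's public key from the embedded signature.
	opened, err := env.Open(to)
	if err != nil {
		panic(err)
	}
	fmt.Printf("payload=%q signed=%v\n", opened.Payload, opened.Recover() != nil)
}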
diff --git a/whisper/whisperv2/message_test.go b/whisper/whisperv2/message_test.go
deleted file mode 100644
index c760ac54c..000000000
--- a/whisper/whisperv2/message_test.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "bytes"
- "crypto/elliptic"
- "testing"
- "time"
-
- "github.com/ethereum/go-ethereum/crypto"
-)
-
-// Tests whether a message can be wrapped without any identity or encryption.
-func TestMessageSimpleWrap(t *testing.T) {
- payload := []byte("hello world")
-
- msg := NewMessage(payload)
- if _, err := msg.Wrap(DefaultPoW, Options{}); err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if msg.Flags&signatureFlag != 0 {
- t.Fatalf("signature flag mismatch: have %d, want %d", msg.Flags&signatureFlag, 0)
- }
- if len(msg.Signature) != 0 {
- t.Fatalf("signature found for simple wrapping: 0x%x", msg.Signature)
- }
- if !bytes.Equal(msg.Payload, payload) {
- t.Fatalf("payload mismatch after wrapping: have 0x%x, want 0x%x", msg.Payload, payload)
- }
- if msg.TTL/time.Second != DefaultTTL/time.Second {
- t.Fatalf("message TTL mismatch: have %v, want %v", msg.TTL, DefaultTTL)
- }
-}
-
-// Tests whether a message can be signed, and wrapped in plain-text.
-func TestMessageCleartextSignRecover(t *testing.T) {
- key, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to create crypto key: %v", err)
- }
- payload := []byte("hello world")
-
- msg := NewMessage(payload)
- if _, err := msg.Wrap(DefaultPoW, Options{
- From: key,
- }); err != nil {
- t.Fatalf("failed to sign message: %v", err)
- }
- if msg.Flags&signatureFlag != signatureFlag {
- t.Fatalf("signature flag mismatch: have %d, want %d", msg.Flags&signatureFlag, signatureFlag)
- }
- if !bytes.Equal(msg.Payload, payload) {
- t.Fatalf("payload mismatch after signing: have 0x%x, want 0x%x", msg.Payload, payload)
- }
-
- pubKey := msg.Recover()
- if pubKey == nil {
- t.Fatalf("failed to recover public key")
- }
- p1 := elliptic.Marshal(crypto.S256(), key.PublicKey.X, key.PublicKey.Y)
- p2 := elliptic.Marshal(crypto.S256(), pubKey.X, pubKey.Y)
- if !bytes.Equal(p1, p2) {
- t.Fatalf("public key mismatch: have 0x%x, want 0x%x", p2, p1)
- }
-}
-
-// Tests whether a message can be encrypted and decrypted using an anonymous
-// sender (i.e. no signature).
-func TestMessageAnonymousEncryptDecrypt(t *testing.T) {
- key, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to create recipient crypto key: %v", err)
- }
- payload := []byte("hello world")
-
- msg := NewMessage(payload)
- envelope, err := msg.Wrap(DefaultPoW, Options{
- To: &key.PublicKey,
- })
- if err != nil {
- t.Fatalf("failed to encrypt message: %v", err)
- }
- if msg.Flags&signatureFlag != 0 {
- t.Fatalf("signature flag mismatch: have %d, want %d", msg.Flags&signatureFlag, 0)
- }
- if len(msg.Signature) != 0 {
- t.Fatalf("signature found for anonymous message: 0x%x", msg.Signature)
- }
-
- out, err := envelope.Open(key)
- if err != nil {
- t.Fatalf("failed to open encrypted message: %v", err)
- }
- if !bytes.Equal(out.Payload, payload) {
- t.Errorf("payload mismatch: have 0x%x, want 0x%x", out.Payload, payload)
- }
-}
-
-// Tests whether a message can be properly signed and encrypted.
-func TestMessageFullCrypto(t *testing.T) {
- fromKey, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to create sender crypto key: %v", err)
- }
- toKey, err := crypto.GenerateKey()
- if err != nil {
- t.Fatalf("failed to create recipient crypto key: %v", err)
- }
-
- payload := []byte("hello world")
- msg := NewMessage(payload)
- envelope, err := msg.Wrap(DefaultPoW, Options{
- From: fromKey,
- To: &toKey.PublicKey,
- })
- if err != nil {
- t.Fatalf("failed to encrypt message: %v", err)
- }
- if msg.Flags&signatureFlag != signatureFlag {
- t.Fatalf("signature flag mismatch: have %d, want %d", msg.Flags&signatureFlag, signatureFlag)
- }
- if len(msg.Signature) == 0 {
- t.Fatalf("no signature found for signed message")
- }
-
- out, err := envelope.Open(toKey)
- if err != nil {
- t.Fatalf("failed to open encrypted message: %v", err)
- }
- if !bytes.Equal(out.Payload, payload) {
- t.Errorf("payload mismatch: have 0x%x, want 0x%x", out.Payload, payload)
- }
-
- pubKey := out.Recover()
- if pubKey == nil {
- t.Fatalf("failed to recover public key")
- }
- p1 := elliptic.Marshal(crypto.S256(), fromKey.PublicKey.X, fromKey.PublicKey.Y)
- p2 := elliptic.Marshal(crypto.S256(), pubKey.X, pubKey.Y)
- if !bytes.Equal(p1, p2) {
- t.Fatalf("public key mismatch: have 0x%x, want 0x%x", p2, p1)
- }
-}
diff --git a/whisper/whisperv2/peer.go b/whisper/whisperv2/peer.go
deleted file mode 100644
index 71798408b..000000000
--- a/whisper/whisperv2/peer.go
+++ /dev/null
@@ -1,174 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "fmt"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/log"
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/rlp"
- "gopkg.in/fatih/set.v0"
-)
-
-// peer represents a whisper protocol peer connection.
-type peer struct {
- host *Whisper
- peer *p2p.Peer
- ws p2p.MsgReadWriter
-
- known *set.Set // Messages already known by the peer to avoid wasting bandwidth
-
- quit chan struct{}
-}
-
-// newPeer creates a new whisper peer object, but does not run the handshake itself.
-func newPeer(host *Whisper, remote *p2p.Peer, rw p2p.MsgReadWriter) *peer {
- return &peer{
- host: host,
- peer: remote,
- ws: rw,
- known: set.New(),
- quit: make(chan struct{}),
- }
-}
-
-// start initiates the peer updater, periodically broadcasting the whisper packets
-// into the network.
-func (self *peer) start() {
- go self.update()
- log.Debug(fmt.Sprintf("%v: whisper started", self.peer))
-}
-
-// stop terminates the peer updater, stopping message forwarding to it.
-func (self *peer) stop() {
- close(self.quit)
- log.Debug(fmt.Sprintf("%v: whisper stopped", self.peer))
-}
-
-// handshake sends the protocol initiation status message to the remote peer and
-// verifies the remote status too.
-func (self *peer) handshake() error {
- // Send the handshake status message asynchronously
- errc := make(chan error, 1)
- go func() {
- errc <- p2p.SendItems(self.ws, statusCode, protocolVersion)
- }()
- // Fetch the remote status packet and verify protocol match
- packet, err := self.ws.ReadMsg()
- if err != nil {
- return err
- }
- if packet.Code != statusCode {
- return fmt.Errorf("peer sent %x before status packet", packet.Code)
- }
- s := rlp.NewStream(packet.Payload, uint64(packet.Size))
- if _, err := s.List(); err != nil {
- return fmt.Errorf("bad status message: %v", err)
- }
- peerVersion, err := s.Uint()
- if err != nil {
- return fmt.Errorf("bad status message: %v", err)
- }
- if peerVersion != protocolVersion {
- return fmt.Errorf("protocol version mismatch %d != %d", peerVersion, protocolVersion)
- }
- // Wait until our own status is consumed too
- if err := <-errc; err != nil {
- return fmt.Errorf("failed to send status packet: %v", err)
- }
- return nil
-}
-
-// update executes periodic operations on the peer, including message transmission
-// and expiration.
-func (self *peer) update() {
- // Start the tickers for the updates
- expire := time.NewTicker(expirationCycle)
- transmit := time.NewTicker(transmissionCycle)
-
- // Loop and transmit until termination is requested
- for {
- select {
- case <-expire.C:
- self.expire()
-
- case <-transmit.C:
- if err := self.broadcast(); err != nil {
- log.Info(fmt.Sprintf("%v: broadcast failed: %v", self.peer, err))
- return
- }
-
- case <-self.quit:
- return
- }
- }
-}
-
-// mark marks an envelope known to the peer so that it won't be sent back.
-func (self *peer) mark(envelope *Envelope) {
- self.known.Add(envelope.Hash())
-}
-
-// marked checks if an envelope is already known to the remote peer.
-func (self *peer) marked(envelope *Envelope) bool {
- return self.known.Has(envelope.Hash())
-}
-
-// expire iterates over the envelopes known to the peer and removes those no
-// longer available in the host (i.e. expired) from the known list.
-func (self *peer) expire() {
- // Assemble the list of available envelopes
- available := set.NewNonTS()
- for _, envelope := range self.host.envelopes() {
- available.Add(envelope.Hash())
- }
- // Cross reference availability with known status
- unmark := make(map[common.Hash]struct{})
- self.known.Each(func(v interface{}) bool {
- if !available.Has(v.(common.Hash)) {
- unmark[v.(common.Hash)] = struct{}{}
- }
- return true
- })
- // Dump all known but unavailable
- for hash := range unmark {
- self.known.Remove(hash)
- }
-}
-
-// broadcast iterates over the collection of envelopes and transmits yet unknown
-// ones over the network.
-func (self *peer) broadcast() error {
- // Fetch the envelopes and collect the unknown ones
- envelopes := self.host.envelopes()
- transmit := make([]*Envelope, 0, len(envelopes))
- for _, envelope := range envelopes {
- if !self.marked(envelope) {
- transmit = append(transmit, envelope)
- self.mark(envelope)
- }
- }
- // Transmit the unknown batch (potentially empty)
- if err := p2p.Send(self.ws, messagesCode, transmit); err != nil {
- return err
- }
- log.Trace(fmt.Sprint(self.peer, "broadcasted", len(transmit), "message(s)"))
- return nil
-}
diff --git a/whisper/whisperv2/peer_test.go b/whisper/whisperv2/peer_test.go
deleted file mode 100644
index 87ca5063d..000000000
--- a/whisper/whisperv2/peer_test.go
+++ /dev/null
@@ -1,261 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "testing"
- "time"
-
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/p2p/discover"
-)
-
-type testPeer struct {
- client *Whisper
- stream *p2p.MsgPipeRW
- termed chan struct{}
-}
-
-func startTestPeer() *testPeer {
- // Create a simulated P2P remote peer and data streams to it
- remote := p2p.NewPeer(discover.NodeID{}, "", nil)
- tester, tested := p2p.MsgPipe()
-
- // Create a whisper client and connect with it to the tester peer
- client := New()
- client.Start(nil)
-
- termed := make(chan struct{})
- go func() {
- defer client.Stop()
- defer close(termed)
- defer tested.Close()
-
- client.handlePeer(remote, tested)
- }()
-
- return &testPeer{
- client: client,
- stream: tester,
- termed: termed,
- }
-}
-
-func startTestPeerInited() (*testPeer, error) {
- peer := startTestPeer()
-
- if err := p2p.ExpectMsg(peer.stream, statusCode, []uint64{protocolVersion}); err != nil {
- peer.stream.Close()
- return nil, err
- }
- if err := p2p.SendItems(peer.stream, statusCode, protocolVersion); err != nil {
- peer.stream.Close()
- return nil, err
- }
- return peer, nil
-}
-
-func TestPeerStatusMessage(t *testing.T) {
- tester := startTestPeer()
-
- // Wait for the handshake status message and check it
- if err := p2p.ExpectMsg(tester.stream, statusCode, []uint64{protocolVersion}); err != nil {
- t.Fatalf("status message mismatch: %v", err)
- }
- // Terminate the node
- tester.stream.Close()
-
- select {
- case <-tester.termed:
- case <-time.After(time.Second):
- t.Fatalf("local close timed out")
- }
-}
-
-func TestPeerHandshakeFail(t *testing.T) {
- tester := startTestPeer()
-
- // Wait for and check the handshake
- if err := p2p.ExpectMsg(tester.stream, statusCode, []uint64{protocolVersion}); err != nil {
- t.Fatalf("status message mismatch: %v", err)
- }
- // Send an invalid handshake status and verify disconnect
- if err := p2p.SendItems(tester.stream, messagesCode); err != nil {
- t.Fatalf("failed to send malformed status: %v", err)
- }
- select {
- case <-tester.termed:
- case <-time.After(time.Second):
- t.Fatalf("remote close timed out")
- }
-}
-
-func TestPeerHandshakeSuccess(t *testing.T) {
- tester := startTestPeer()
-
- // Wait for and check the handshake
- if err := p2p.ExpectMsg(tester.stream, statusCode, []uint64{protocolVersion}); err != nil {
- t.Fatalf("status message mismatch: %v", err)
- }
- // Send a valid handshake status and make sure connection stays live
- if err := p2p.SendItems(tester.stream, statusCode, protocolVersion); err != nil {
- t.Fatalf("failed to send status: %v", err)
- }
- select {
- case <-tester.termed:
- t.Fatalf("valid handshake disconnected")
-
- case <-time.After(100 * time.Millisecond):
- }
- // Clean up the test
- tester.stream.Close()
-
- select {
- case <-tester.termed:
- case <-time.After(time.Second):
- t.Fatalf("local close timed out")
- }
-}
-
-func TestPeerSend(t *testing.T) {
- // Start a tester and execute the handshake
- tester, err := startTestPeerInited()
- if err != nil {
- t.Fatalf("failed to start initialized peer: %v", err)
- }
- defer tester.stream.Close()
-
- // Construct a message and inject into the tester
- message := NewMessage([]byte("peer broadcast test message"))
- envelope, err := message.Wrap(DefaultPoW, Options{
- TTL: DefaultTTL,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := tester.client.Send(envelope); err != nil {
- t.Fatalf("failed to send message: %v", err)
- }
- // Check that the message is eventually forwarded
- payload := []interface{}{envelope}
- if err := p2p.ExpectMsg(tester.stream, messagesCode, payload); err != nil {
- t.Fatalf("message mismatch: %v", err)
- }
- // Make sure that even with a re-insert, an empty batch is received
- if err := tester.client.Send(envelope); err != nil {
- t.Fatalf("failed to send message: %v", err)
- }
- if err := p2p.ExpectMsg(tester.stream, messagesCode, []interface{}{}); err != nil {
- t.Fatalf("message mismatch: %v", err)
- }
-}
-
-func TestPeerDeliver(t *testing.T) {
- // Start a tester and execute the handshake
- tester, err := startTestPeerInited()
- if err != nil {
- t.Fatalf("failed to start initialized peer: %v", err)
- }
- defer tester.stream.Close()
-
- // Watch for all inbound messages
- arrived := make(chan struct{}, 1)
- tester.client.Watch(Filter{
- Fn: func(message *Message) {
- arrived <- struct{}{}
- },
- })
- // Construct a message and deliver it to the tester peer
- message := NewMessage([]byte("peer broadcast test message"))
- envelope, err := message.Wrap(DefaultPoW, Options{
- TTL: DefaultTTL,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := p2p.Send(tester.stream, messagesCode, []*Envelope{envelope}); err != nil {
- t.Fatalf("failed to transfer message: %v", err)
- }
- // Check that the message is delivered upstream
- select {
- case <-arrived:
- case <-time.After(time.Second):
- t.Fatalf("message delivery timeout")
- }
- // Check that a resend is not delivered
- if err := p2p.Send(tester.stream, messagesCode, []*Envelope{envelope}); err != nil {
- t.Fatalf("failed to transfer message: %v", err)
- }
- select {
- case <-time.After(2 * transmissionCycle):
- case <-arrived:
- t.Fatalf("repeating message arrived")
- }
-}
-
-func TestPeerMessageExpiration(t *testing.T) {
- // Start a tester and execute the handshake
- tester, err := startTestPeerInited()
- if err != nil {
- t.Fatalf("failed to start initialized peer: %v", err)
- }
- defer tester.stream.Close()
-
- // Fetch the peer instance for later inspection
- tester.client.peerMu.RLock()
- if peers := len(tester.client.peers); peers != 1 {
- t.Fatalf("peer pool size mismatch: have %v, want %v", peers, 1)
- }
- var peer *peer
- for peer = range tester.client.peers {
- break
- }
- tester.client.peerMu.RUnlock()
-
- // Construct a message and pass it through the tester
- message := NewMessage([]byte("peer test message"))
- envelope, err := message.Wrap(DefaultPoW, Options{
- TTL: time.Second,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := tester.client.Send(envelope); err != nil {
- t.Fatalf("failed to send message: %v", err)
- }
- payload := []interface{}{envelope}
- if err := p2p.ExpectMsg(tester.stream, messagesCode, payload); err != nil {
- // A premature empty message may have been broadcast, check the next too
- if err := p2p.ExpectMsg(tester.stream, messagesCode, payload); err != nil {
- t.Fatalf("message mismatch: %v", err)
- }
- }
- // Check that the message is inside the cache
- if !peer.known.Has(envelope.Hash()) {
- t.Fatalf("message not found in cache")
- }
- // Discard messages until expiration and check cache again
- exp := time.Now().Add(time.Second + 2*expirationCycle + 100*time.Millisecond)
- for time.Now().Before(exp) {
- if err := p2p.ExpectMsg(tester.stream, messagesCode, []interface{}{}); err != nil {
- t.Fatalf("message mismatch: %v", err)
- }
- }
- if peer.known.Has(envelope.Hash()) {
- t.Fatalf("message not expired from cache")
- }
-}
diff --git a/whisper/whisperv2/topic.go b/whisper/whisperv2/topic.go
deleted file mode 100644
index 3e2b47bd3..000000000
--- a/whisper/whisperv2/topic.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-// Contains the Whisper protocol Topic element. For formal details please see
-// the specs at https://github.com/ethereum/wiki/wiki/Whisper-PoC-1-Protocol-Spec#topics.
-
-package whisperv2
-
-import "github.com/ethereum/go-ethereum/crypto"
-
-// Topic represents a cryptographically secure, probabilistic partial
-// classification of a message, determined as the first (left) 4 bytes of the
-// SHA3 hash of some arbitrary data given by the original author of the message.
-type Topic [4]byte
-
-// NewTopic creates a topic from the 4 byte prefix of the SHA3 hash of the data.
-//
-// Note, empty topics are considered the wildcard, and cannot be used in messages.
-func NewTopic(data []byte) Topic {
- prefix := [4]byte{}
- copy(prefix[:], crypto.Keccak256(data)[:4])
- return Topic(prefix)
-}
-
-// NewTopics creates a list of topics from a list of binary data elements, by
-// iteratively calling NewTopic on each of them.
-func NewTopics(data ...[]byte) []Topic {
- topics := make([]Topic, len(data))
- for i, element := range data {
- topics[i] = NewTopic(element)
- }
- return topics
-}
-
-// NewTopicFromString creates a topic using the binary data contents of the
-// specified string.
-func NewTopicFromString(data string) Topic {
- return NewTopic([]byte(data))
-}
-
-// NewTopicsFromStrings creates a list of topics from a list of textual data
-// elements, by iteratively calling NewTopicFromString on each of them.
-func NewTopicsFromStrings(data ...string) []Topic {
- topics := make([]Topic, len(data))
- for i, element := range data {
- topics[i] = NewTopicFromString(element)
- }
- return topics
-}
-
-// String converts a topic byte array to a string representation.
-func (self *Topic) String() string {
- return string(self[:])
-}
-
-// topicMatcher is a filter expression to verify if a list of topics contained
-// in an arriving message matches some topic conditions. The topic matcher is
-// built up of a list of conditions, each of which must be satisfied by the
-// corresponding topic in the message. Each condition may require: a) an exact
-// topic match; b) a match from a set of topics; or c) a wild-card matching all.
-//
-// If a message contains more topics than required by the matcher, those beyond
-// the condition count are ignored and assumed to match.
-//
-// Consider the following sample topic matcher:
-// sample := {
-// {TopicA1, TopicA2, TopicA3},
-// {TopicB},
-// nil,
-// {TopicD1, TopicD2}
-// }
-// In order for a message to pass this filter, it should enumerate at least 4
-// topics, the first any of [TopicA1, TopicA2, TopicA3], the second mandatory
-// "TopicB", the third is ignored by the filter and the fourth either "TopicD1"
-// or "TopicD2". If the message contains further topics, the filter will match
-// them too.
-type topicMatcher struct {
- conditions []map[Topic]struct{}
-}
-
-// newTopicMatcher creates a topic matcher from a list of topic conditions.
-func newTopicMatcher(topics ...[]Topic) *topicMatcher {
- matcher := make([]map[Topic]struct{}, len(topics))
- for i, condition := range topics {
- matcher[i] = make(map[Topic]struct{})
- for _, topic := range condition {
- matcher[i][topic] = struct{}{}
- }
- }
- return &topicMatcher{conditions: matcher}
-}
-
-// newTopicMatcherFromBinary creates a topic matcher from a list of binary conditions.
-func newTopicMatcherFromBinary(data ...[][]byte) *topicMatcher {
- topics := make([][]Topic, len(data))
- for i, condition := range data {
- topics[i] = NewTopics(condition...)
- }
- return newTopicMatcher(topics...)
-}
-
-// newTopicMatcherFromStrings creates a topic matcher from a list of textual
-// conditions.
-func newTopicMatcherFromStrings(data ...[]string) *topicMatcher {
- topics := make([][]Topic, len(data))
- for i, condition := range data {
- topics[i] = NewTopicsFromStrings(condition...)
- }
- return newTopicMatcher(topics...)
-}
-
-// Matches checks if a list of topics matches this particular condition set.
-func (self *topicMatcher) Matches(topics []Topic) bool {
- // Mismatch if there aren't enough topics
- if len(self.conditions) > len(topics) {
- return false
- }
- // Check each topic condition for existence (skip wild-cards)
- for i := 0; i < len(topics) && i < len(self.conditions); i++ {
- if len(self.conditions[i]) > 0 {
- if _, ok := self.conditions[i][topics[i]]; !ok {
- return false
- }
- }
- }
- return true
-}
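Note: the removed Topic type is just the first four bytes of the Keccak-256 hash of the topic data. Below is a standalone sketch of that derivation, using golang.org/x/crypto/sha3 instead of the in-tree crypto package so it builds independently of this removal; the expected output matches the test fixtures above.

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

// newTopic mirrors whisperv2.NewTopic: the first 4 bytes of Keccak-256(data).
func newTopic(data []byte) [4]byte {
	h := sha3.NewLegacyKeccak256()
	h.Write(data)
	var topic [4]byte
	copy(topic[:], h.Sum(nil)[:4])
	return topic
}

func main() {
	// "abc" derives the topic 0x4e03657a, the fixture used by the removed
	// topic and filter tests in this diff.
	fmt.Printf("%x\n", newTopic([]byte("abc")))
}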
diff --git a/whisper/whisperv2/topic_test.go b/whisper/whisperv2/topic_test.go
deleted file mode 100644
index bb6568996..000000000
--- a/whisper/whisperv2/topic_test.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2015 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "bytes"
- "testing"
-)
-
-var topicCreationTests = []struct {
- data []byte
- hash [4]byte
-}{
- {hash: [4]byte{0x8f, 0x9a, 0x2b, 0x7d}, data: []byte("test name")},
- {hash: [4]byte{0xf2, 0x6e, 0x77, 0x79}, data: []byte("some other test")},
-}
-
-func TestTopicCreation(t *testing.T) {
- // Create the topics individually
- for i, tt := range topicCreationTests {
- topic := NewTopic(tt.data)
- if !bytes.Equal(topic[:], tt.hash[:]) {
- t.Errorf("binary test %d: hash mismatch: have %v, want %v.", i, topic, tt.hash)
- }
- }
- for i, tt := range topicCreationTests {
- topic := NewTopicFromString(string(tt.data))
- if !bytes.Equal(topic[:], tt.hash[:]) {
- t.Errorf("textual test %d: hash mismatch: have %v, want %v.", i, topic, tt.hash)
- }
- }
- // Create the topics in batches
- binaryData := make([][]byte, len(topicCreationTests))
- for i, tt := range topicCreationTests {
- binaryData[i] = tt.data
- }
- textualData := make([]string, len(topicCreationTests))
- for i, tt := range topicCreationTests {
- textualData[i] = string(tt.data)
- }
-
- topics := NewTopics(binaryData...)
- for i, tt := range topicCreationTests {
- if !bytes.Equal(topics[i][:], tt.hash[:]) {
- t.Errorf("binary batch test %d: hash mismatch: have %v, want %v.", i, topics[i], tt.hash)
- }
- }
- topics = NewTopicsFromStrings(textualData...)
- for i, tt := range topicCreationTests {
- if !bytes.Equal(topics[i][:], tt.hash[:]) {
- t.Errorf("textual batch test %d: hash mismatch: have %v, want %v.", i, topics[i], tt.hash)
- }
- }
-}
-
-var topicMatcherCreationTest = struct {
- binary [][][]byte
- textual [][]string
- matcher []map[[4]byte]struct{}
-}{
- binary: [][][]byte{
- {},
- {
- []byte("Topic A"),
- },
- {
- []byte("Topic B1"),
- []byte("Topic B2"),
- []byte("Topic B3"),
- },
- },
- textual: [][]string{
- {},
- {"Topic A"},
- {"Topic B1", "Topic B2", "Topic B3"},
- },
- matcher: []map[[4]byte]struct{}{
- {},
- {
- {0x25, 0xfc, 0x95, 0x66}: {},
- },
- {
- {0x93, 0x6d, 0xec, 0x09}: {},
- {0x25, 0x23, 0x34, 0xd3}: {},
- {0x6b, 0xc2, 0x73, 0xd1}: {},
- },
- },
-}
-
-func TestTopicMatcherCreation(t *testing.T) {
- test := topicMatcherCreationTest
-
- matcher := newTopicMatcherFromBinary(test.binary...)
- for i, cond := range matcher.conditions {
- for topic := range cond {
- if _, ok := test.matcher[i][topic]; !ok {
- t.Errorf("condition %d; extra topic found: 0x%x", i, topic[:])
- }
- }
- }
- for i, cond := range test.matcher {
- for topic := range cond {
- if _, ok := matcher.conditions[i][topic]; !ok {
- t.Errorf("condition %d; topic not found: 0x%x", i, topic[:])
- }
- }
- }
-
- matcher = newTopicMatcherFromStrings(test.textual...)
- for i, cond := range matcher.conditions {
- for topic := range cond {
- if _, ok := test.matcher[i][topic]; !ok {
- t.Errorf("condition %d; extra topic found: 0x%x", i, topic[:])
- }
- }
- }
- for i, cond := range test.matcher {
- for topic := range cond {
- if _, ok := matcher.conditions[i][topic]; !ok {
- t.Errorf("condition %d; topic not found: 0x%x", i, topic[:])
- }
- }
- }
-}
-
-var topicMatcherTests = []struct {
- filter [][]string
- topics []string
- match bool
-}{
- // Empty topic matcher should match everything
- {
- filter: [][]string{},
- topics: []string{},
- match: true,
- },
- {
- filter: [][]string{},
- topics: []string{"a", "b", "c"},
- match: true,
- },
- // Fixed topic matcher should match strictly, but only on the topic prefix
- {
- filter: [][]string{{"a"}, {"b"}},
- topics: []string{"a"},
- match: false,
- },
- {
- filter: [][]string{{"a"}, {"b"}},
- topics: []string{"a", "b"},
- match: true,
- },
- {
- filter: [][]string{{"a"}, {"b"}},
- topics: []string{"a", "b", "c"},
- match: true,
- },
- // Multi-matcher should match any from a sub-group
- {
- filter: [][]string{{"a1", "a2"}},
- topics: []string{"a"},
- match: false,
- },
- {
- filter: [][]string{{"a1", "a2"}},
- topics: []string{"a1"},
- match: true,
- },
- {
- filter: [][]string{{"a1", "a2"}},
- topics: []string{"a2"},
- match: true,
- },
- // Wild-card condition should match anything
- {
- filter: [][]string{{}, {"b"}},
- topics: []string{"a"},
- match: false,
- },
- {
- filter: [][]string{{}, {"b"}},
- topics: []string{"a", "b"},
- match: true,
- },
- {
- filter: [][]string{{}, {"b"}},
- topics: []string{"b", "b"},
- match: true,
- },
-}
-
-func TestTopicMatcher(t *testing.T) {
- for i, tt := range topicMatcherTests {
- topics := NewTopicsFromStrings(tt.topics...)
-
- matcher := newTopicMatcherFromStrings(tt.filter...)
- if match := matcher.Matches(topics); match != tt.match {
- t.Errorf("test %d: match mismatch: have %v, want %v", i, match, tt.match)
- }
- }
-}
diff --git a/whisper/whisperv2/whisper.go b/whisper/whisperv2/whisper.go
deleted file mode 100644
index e111a3414..000000000
--- a/whisper/whisperv2/whisper.go
+++ /dev/null
@@ -1,378 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "crypto/ecdsa"
- "fmt"
- "sync"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/crypto"
- "github.com/ethereum/go-ethereum/crypto/ecies"
- "github.com/ethereum/go-ethereum/event/filter"
- "github.com/ethereum/go-ethereum/log"
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/rpc"
-
- "gopkg.in/fatih/set.v0"
-)
-
-const (
- statusCode = 0x00
- messagesCode = 0x01
-
- protocolVersion uint64 = 0x02
- protocolName = "shh"
-
- signatureFlag = byte(1 << 7)
- signatureLength = 65
-
- expirationCycle = 800 * time.Millisecond
- transmissionCycle = 300 * time.Millisecond
-)
-
-const (
- DefaultTTL = 50 * time.Second
- DefaultPoW = 50 * time.Millisecond
-)
-
-type MessageEvent struct {
- To *ecdsa.PrivateKey
- From *ecdsa.PublicKey
- Message *Message
-}
-
-// Whisper represents a dark communication interface through the Ethereum
-// network, using its very own P2P communication layer.
-type Whisper struct {
- protocol p2p.Protocol
- filters *filter.Filters
-
- keys map[string]*ecdsa.PrivateKey
-
- messages map[common.Hash]*Envelope // Pool of messages currently tracked by this node
- expirations map[uint32]*set.SetNonTS // Message expiration pool (TODO: something lighter)
- poolMu sync.RWMutex // Mutex to sync the message and expiration pools
-
- peers map[*peer]struct{} // Set of currently active peers
- peerMu sync.RWMutex // Mutex to sync the active peer set
-
- quit chan struct{}
-}
-
-// New creates a Whisper client ready to communicate through the Ethereum P2P
-// network.
-func New() *Whisper {
- whisper := &Whisper{
- filters: filter.New(),
- keys: make(map[string]*ecdsa.PrivateKey),
- messages: make(map[common.Hash]*Envelope),
- expirations: make(map[uint32]*set.SetNonTS),
- peers: make(map[*peer]struct{}),
- quit: make(chan struct{}),
- }
- whisper.filters.Start()
-
- // p2p whisper sub protocol handler
- whisper.protocol = p2p.Protocol{
- Name: protocolName,
- Version: uint(protocolVersion),
- Length: 2,
- Run: whisper.handlePeer,
- }
-
- return whisper
-}
-
-// APIs returns the RPC descriptors the Whisper implementation offers
-func (s *Whisper) APIs() []rpc.API {
- return []rpc.API{
- {
- Namespace: "shh",
- Version: "1.0",
- Service: NewPublicWhisperAPI(s),
- Public: true,
- },
- }
-}
-
-// Protocols returns the whisper sub-protocols run by this particular client.
-func (self *Whisper) Protocols() []p2p.Protocol {
- return []p2p.Protocol{self.protocol}
-}
-
-// Version returns the whisper sub-protocols version number.
-func (self *Whisper) Version() uint {
- return self.protocol.Version
-}
-
-// NewIdentity generates a new cryptographic identity for the client, and injects
-// it into the known identities for message decryption.
-func (self *Whisper) NewIdentity() *ecdsa.PrivateKey {
- key, err := crypto.GenerateKey()
- if err != nil {
- panic(err)
- }
- self.keys[string(crypto.FromECDSAPub(&key.PublicKey))] = key
-
- return key
-}
-
-// HasIdentity checks if the whisper node is configured with the private key
-// of the specified public key.
-func (self *Whisper) HasIdentity(key *ecdsa.PublicKey) bool {
- return self.keys[string(crypto.FromECDSAPub(key))] != nil
-}
-
-// GetIdentity retrieves the private key of the specified public identity.
-func (self *Whisper) GetIdentity(key *ecdsa.PublicKey) *ecdsa.PrivateKey {
- return self.keys[string(crypto.FromECDSAPub(key))]
-}
-
-// Watch installs a new message handler to run in case a matching packet arrives
-// from the whisper network.
-func (self *Whisper) Watch(options Filter) int {
- filter := filterer{
- to: string(crypto.FromECDSAPub(options.To)),
- from: string(crypto.FromECDSAPub(options.From)),
- matcher: newTopicMatcher(options.Topics...),
- fn: func(data interface{}) {
- options.Fn(data.(*Message))
- },
- }
- return self.filters.Install(filter)
-}
-
-// Unwatch removes an installed message handler.
-func (self *Whisper) Unwatch(id int) {
- self.filters.Uninstall(id)
-}
-
-// Send injects a message into the whisper send queue, to be distributed in the
-// network in the coming cycles.
-func (self *Whisper) Send(envelope *Envelope) error {
- return self.add(envelope)
-}
-
-// Start implements node.Service, starting the background data propagation thread
-// of the Whisper protocol.
-func (self *Whisper) Start(*p2p.Server) error {
- log.Info("Whisper started")
- go self.update()
- return nil
-}
-
-// Stop implements node.Service, stopping the background data propagation thread
-// of the Whisper protocol.
-func (self *Whisper) Stop() error {
- close(self.quit)
- log.Info("Whisper stopped")
- return nil
-}
-
-// Messages retrieves all the currently pooled messages matching a filter id.
-func (self *Whisper) Messages(id int) []*Message {
- messages := make([]*Message, 0)
- if filter := self.filters.Get(id); filter != nil {
- for _, envelope := range self.messages {
- if message := self.open(envelope); message != nil {
- if self.filters.Match(filter, createFilter(message, envelope.Topics)) {
- messages = append(messages, message)
- }
- }
- }
- }
- return messages
-}
-
-// handlePeer is called by the underlying P2P layer when the whisper sub-protocol
-// connection is negotiated.
-func (self *Whisper) handlePeer(peer *p2p.Peer, rw p2p.MsgReadWriter) error {
- // Create the new peer and start tracking it
- whisperPeer := newPeer(self, peer, rw)
-
- self.peerMu.Lock()
- self.peers[whisperPeer] = struct{}{}
- self.peerMu.Unlock()
-
- defer func() {
- self.peerMu.Lock()
- delete(self.peers, whisperPeer)
- self.peerMu.Unlock()
- }()
-
- // Run the peer handshake and state updates
- if err := whisperPeer.handshake(); err != nil {
- return err
- }
- whisperPeer.start()
- defer whisperPeer.stop()
-
- // Read and process inbound messages directly to merge into client-global state
- for {
- // Fetch the next packet and decode the contained envelopes
- packet, err := rw.ReadMsg()
- if err != nil {
- return err
- }
- var envelopes []*Envelope
- if err := packet.Decode(&envelopes); err != nil {
- log.Info(fmt.Sprintf("%v: failed to decode envelope: %v", peer, err))
- continue
- }
- // Inject all envelopes into the internal pool
- for _, envelope := range envelopes {
- if err := self.add(envelope); err != nil {
- // TODO Punish peer here. Invalid envelope.
- log.Debug(fmt.Sprintf("%v: failed to pool envelope: %v", peer, err))
- }
- whisperPeer.mark(envelope)
- }
- }
-}
-
-// add inserts a new envelope into the message pool to be distributed within the
-// whisper network. It also inserts the envelope into the expiration pool at the
-// appropriate time-stamp.
-func (self *Whisper) add(envelope *Envelope) error {
- self.poolMu.Lock()
- defer self.poolMu.Unlock()
-
- // short circuit when a received envelope has already expired
- if envelope.Expiry < uint32(time.Now().Unix()) {
- return nil
- }
-
- // Insert the message into the tracked pool
- hash := envelope.Hash()
- if _, ok := self.messages[hash]; ok {
- log.Trace(fmt.Sprintf("whisper envelope already cached: %x\n", hash))
- return nil
- }
- self.messages[hash] = envelope
-
- // Insert the message into the expiration pool for later removal
- if self.expirations[envelope.Expiry] == nil {
- self.expirations[envelope.Expiry] = set.NewNonTS()
- }
- if !self.expirations[envelope.Expiry].Has(hash) {
- self.expirations[envelope.Expiry].Add(hash)
-
- // Notify the local node of a message arrival
- go self.postEvent(envelope)
- }
- log.Trace(fmt.Sprintf("cached whisper envelope %x\n", hash))
- return nil
-}
-
-// postEvent opens an envelope with the configured identities and delivers the
-// message upstream for application processing.
-func (self *Whisper) postEvent(envelope *Envelope) {
- if message := self.open(envelope); message != nil {
- self.filters.Notify(createFilter(message, envelope.Topics), message)
- }
-}
-
-// open tries to decrypt a whisper envelope with all the configured identities,
-// returning the decrypted message with To set to the matching identity. If no
-// keys are configured, open will return the payload as if it were not encrypted.
-func (self *Whisper) open(envelope *Envelope) *Message {
- // Short circuit if no identity is set, and assume clear-text
- if len(self.keys) == 0 {
- if message, err := envelope.Open(nil); err == nil {
- return message
- }
- }
- // Iterate over the keys and try to decrypt the message
- for _, key := range self.keys {
- message, err := envelope.Open(key)
- if err == nil {
- message.To = &key.PublicKey
- return message
- } else if err == ecies.ErrInvalidPublicKey {
- return message
- }
- }
- // Failed to decrypt, don't return anything
- return nil
-}
-
-// createFilter creates a message filter to check against installed handlers.
-func createFilter(message *Message, topics []Topic) filter.Filter {
- matcher := make([][]Topic, len(topics))
- for i, topic := range topics {
- matcher[i] = []Topic{topic}
- }
- return filterer{
- to: string(crypto.FromECDSAPub(message.To)),
- from: string(crypto.FromECDSAPub(message.Recover())),
- matcher: newTopicMatcher(matcher...),
- }
-}
-
-// update loops for the lifetime of the whisper node, updating its internal
-// state by expiring stale messages from the pool.
-func (self *Whisper) update() {
- // Start a ticker to check for expirations
- expire := time.NewTicker(expirationCycle)
-
- // Repeat updates until termination is requested
- for {
- select {
- case <-expire.C:
- self.expire()
-
- case <-self.quit:
- return
- }
- }
-}
-
-// expire iterates over all the expiration timestamps, removing all stale
-// messages from the pools.
-func (self *Whisper) expire() {
- self.poolMu.Lock()
- defer self.poolMu.Unlock()
-
- now := uint32(time.Now().Unix())
- for then, hashSet := range self.expirations {
- // Short circuit if a future time
- if then > now {
- continue
- }
- // Dump all expired messages and remove timestamp
- hashSet.Each(func(v interface{}) bool {
- delete(self.messages, v.(common.Hash))
- return true
- })
- self.expirations[then].Clear()
- }
-}
-
-// envelopes retrieves all the messages currently pooled by the node.
-func (self *Whisper) envelopes() []*Envelope {
- self.poolMu.RLock()
- defer self.poolMu.RUnlock()
-
- envelopes := make([]*Envelope, 0, len(self.messages))
- for _, envelope := range self.messages {
- envelopes = append(envelopes, envelope)
- }
- return envelopes
-}
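Note: tying the pieces together, a sketch of a local subscribe-and-send round trip against the removed Whisper service. It targets a pre-removal checkout and assumes the removed Filter type exposes the Topics and Fn fields consumed by Watch above; the notification is asynchronous, matching the postEvent goroutine in add.

package main

import (
	"fmt"
	"time"

	"github.com/ethereum/go-ethereum/whisper/whisperv2"
)

func main() {
	shh := whisperv2.New()
	shh.Start(nil)
	defer shh.Stop()

	// Watch for any message carrying the "payments" topic, from any sender.
	shh.Watch(whisperv2.Filter{
		Topics: whisperv2.NewFilterTopicsFromStringsFlat("payments"),
		Fn: func(msg *whisperv2.Message) {
			fmt.Printf("got: %s\n", msg.Payload)
		},
	})
	// Wrap an unsigned, unencrypted message with the matching topic and send it.
	msg := whisperv2.NewMessage([]byte("invoice #42"))
	env, err := msg.Wrap(whisperv2.DefaultPoW, whisperv2.Options{
		Topics: whisperv2.NewTopicsFromStrings("payments"),
		TTL:    whisperv2.DefaultTTL,
	})
	if err != nil {
		panic(err)
	}
	if err := shh.Send(env); err != nil {
		panic(err)
	}
	time.Sleep(100 * time.Millisecond) // give the asynchronous filter a moment to fire
}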
diff --git a/whisper/whisperv2/whisper_test.go b/whisper/whisperv2/whisper_test.go
deleted file mode 100644
index 1e0d3f85d..000000000
--- a/whisper/whisperv2/whisper_test.go
+++ /dev/null
@@ -1,216 +0,0 @@
-// Copyright 2014 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package whisperv2
-
-import (
- "testing"
- "time"
-
- "github.com/ethereum/go-ethereum/p2p"
- "github.com/ethereum/go-ethereum/p2p/discover"
-)
-
-func startTestCluster(n int) []*Whisper {
- // Create the batch of simulated peers
- nodes := make([]*p2p.Peer, n)
- for i := 0; i < n; i++ {
- nodes[i] = p2p.NewPeer(discover.NodeID{}, "", nil)
- }
- whispers := make([]*Whisper, n)
- for i := 0; i < n; i++ {
- whispers[i] = New()
- whispers[i].Start(nil)
- }
- // Wire all the peers to the root one
- for i := 1; i < n; i++ {
- src, dst := p2p.MsgPipe()
-
- go whispers[0].handlePeer(nodes[i], src)
- go whispers[i].handlePeer(nodes[0], dst)
- }
- return whispers
-}
-
-func TestSelfMessage(t *testing.T) {
- // Start the single node cluster
- client := startTestCluster(1)[0]
-
- // Start watching for self messages, signal any arrivals
- self := client.NewIdentity()
- done := make(chan struct{})
-
- client.Watch(Filter{
- To: &self.PublicKey,
- Fn: func(msg *Message) {
- close(done)
- },
- })
- // Send a dummy message to oneself
- msg := NewMessage([]byte("self whisper"))
- envelope, err := msg.Wrap(DefaultPoW, Options{
- From: self,
- To: &self.PublicKey,
- TTL: DefaultTTL,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- // Dump the message into the system and wait for it to pop back out
- if err := client.Send(envelope); err != nil {
- t.Fatalf("failed to send self-message: %v", err)
- }
- select {
- case <-done:
- case <-time.After(time.Second):
- t.Fatalf("self-message receive timeout")
- }
-}
-
-func TestDirectMessage(t *testing.T) {
- // Start the sender-recipient cluster
- cluster := startTestCluster(2)
-
- sender := cluster[0]
- senderId := sender.NewIdentity()
-
- recipient := cluster[1]
- recipientId := recipient.NewIdentity()
-
- // Watch for arriving messages on the recipient
- done := make(chan struct{})
- recipient.Watch(Filter{
- To: &recipientId.PublicKey,
- Fn: func(msg *Message) {
- close(done)
- },
- })
- // Send a dummy message from the sender
- msg := NewMessage([]byte("direct whisper"))
- envelope, err := msg.Wrap(DefaultPoW, Options{
- From: senderId,
- To: &recipientId.PublicKey,
- TTL: DefaultTTL,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := sender.Send(envelope); err != nil {
- t.Fatalf("failed to send direct message: %v", err)
- }
- // Wait for an arrival or a timeout
- select {
- case <-done:
- case <-time.After(time.Second):
- t.Fatalf("direct message receive timeout")
- }
-}
-
-func TestAnonymousBroadcast(t *testing.T) {
- testBroadcast(true, t)
-}
-
-func TestIdentifiedBroadcast(t *testing.T) {
- testBroadcast(false, t)
-}
-
-func testBroadcast(anonymous bool, t *testing.T) {
- // Start the single sender multi recipient cluster
- cluster := startTestCluster(3)
-
- sender := cluster[1]
- targets := cluster[1:]
- for _, target := range targets {
- if !anonymous {
- target.NewIdentity()
- }
- }
- // Watch for arriving messages on the recipients
- dones := make([]chan struct{}, len(targets))
- for i := 0; i < len(targets); i++ {
- done := make(chan struct{}) // need for the closure
- dones[i] = done
-
- targets[i].Watch(Filter{
- Topics: NewFilterTopicsFromStringsFlat("broadcast topic"),
- Fn: func(msg *Message) {
- close(done)
- },
- })
- }
- // Send a dummy message from the sender
- msg := NewMessage([]byte("broadcast whisper"))
- envelope, err := msg.Wrap(DefaultPoW, Options{
- Topics: NewTopicsFromStrings("broadcast topic"),
- TTL: DefaultTTL,
- })
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := sender.Send(envelope); err != nil {
- t.Fatalf("failed to send broadcast message: %v", err)
- }
- // Wait for an arrival on each recipient, or timeouts
- timeout := time.After(time.Second)
- for _, done := range dones {
- select {
- case <-done:
- case <-timeout:
- t.Fatalf("broadcast message receive timeout")
- }
- }
-}
-
-func TestMessageExpiration(t *testing.T) {
- // Start the single node cluster and inject a dummy message
- node := startTestCluster(1)[0]
-
- message := NewMessage([]byte("expiring message"))
- envelope, err := message.Wrap(DefaultPoW, Options{TTL: time.Second})
- if err != nil {
- t.Fatalf("failed to wrap message: %v", err)
- }
- if err := node.Send(envelope); err != nil {
- t.Fatalf("failed to inject message: %v", err)
- }
- // Check that the message is inside the cache
- node.poolMu.RLock()
- _, found := node.messages[envelope.Hash()]
- node.poolMu.RUnlock()
-
- if !found {
- t.Fatalf("message not found in cache")
- }
- // Wait for expiration and check cache again
- time.Sleep(time.Second) // wait for expiration
- time.Sleep(2 * expirationCycle) // wait for cleanup cycle
-
- node.poolMu.RLock()
- _, found = node.messages[envelope.Hash()]
- node.poolMu.RUnlock()
- if found {
- t.Fatalf("message not expired from cache")
- }
-
- // Check that adding an expired envelope doesn't do anything.
- node.add(envelope)
- node.poolMu.RLock()
- _, found = node.messages[envelope.Hash()]
- node.poolMu.RUnlock()
- if found {
- t.Fatalf("message was added to cache")
- }
-}
diff --git a/whisper/whisperv5/api.go b/whisper/whisperv5/api.go
index 96c4b0e6c..ee566625c 100644
--- a/whisper/whisperv5/api.go
+++ b/whisper/whisperv5/api.go
@@ -60,32 +60,9 @@ func NewPublicWhisperAPI(w *Whisper) *PublicWhisperAPI {
w: w,
lastUsed: make(map[string]time.Time),
}
-
- go api.run()
return api
}
-// run the api event loop.
-// this loop deletes filters that have not been used within filterTimeout
-func (api *PublicWhisperAPI) run() {
- timeout := time.NewTicker(2 * time.Minute)
- for {
- <-timeout.C
-
- api.mu.Lock()
- for id, lastUsed := range api.lastUsed {
- if time.Since(lastUsed).Seconds() >= filterTimeout {
- delete(api.lastUsed, id)
- if err := api.w.Unsubscribe(id); err != nil {
- log.Error("could not unsubscribe whisper filter", "error", err)
- }
- log.Debug("delete whisper filter (timeout)", "id", id)
- }
- }
- api.mu.Unlock()
- }
-}
-
// Version returns the Whisper sub-protocol version.
func (api *PublicWhisperAPI) Version(ctx context.Context) string {
return ProtocolVersionStr
@@ -562,7 +539,7 @@ func (api *PublicWhisperAPI) NewMessageFilter(req Criteria) (string, error) {
}
if len(req.Topics) > 0 {
- topics = make([][]byte, 1)
+ topics = make([][]byte, 0, len(req.Topics))
for _, topic := range req.Topics {
topics = append(topics, topic[:])
}
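
The NewMessageFilter hunk above replaces make([][]byte, 1) with make([][]byte, 0, len(req.Topics)): a slice created with length 1 already contains one nil element, so the subsequent appends left a spurious nil entry at index 0 of the topic list. A minimal standalone sketch of the difference (illustrative only, not part of the patch):

package main

import "fmt"

func main() {
	topics := [][]byte{[]byte("abcd"), []byte("efgh")}

	// Buggy pattern: length 1 plus append leaves a nil entry at index 0.
	wrong := make([][]byte, 1)
	for _, t := range topics {
		wrong = append(wrong, t)
	}
	fmt.Println(len(wrong), wrong[0] == nil) // 3 true

	// Fixed pattern from the diff: zero length, pre-sized capacity.
	right := make([][]byte, 0, len(topics))
	for _, t := range topics {
		right = append(right, t)
	}
	fmt.Println(len(right), right[0] == nil) // 2 false
}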
diff --git a/whisper/whisperv5/filter.go b/whisper/whisperv5/filter.go
index b5e893e0f..3190334eb 100644
--- a/whisper/whisperv5/filter.go
+++ b/whisper/whisperv5/filter.go
@@ -216,8 +216,12 @@ func (f *Filter) MatchTopic(topic TopicType) bool {
}
func matchSingleTopic(topic TopicType, bt []byte) bool {
- if len(bt) > 4 {
- bt = bt[:4]
+ if len(bt) > TopicLength {
+ bt = bt[:TopicLength]
+ }
+
+ if len(bt) < TopicLength {
+ return false
}
for j, b := range bt {
diff --git a/whisper/whisperv5/filter_test.go b/whisper/whisperv5/filter_test.go
index 72568b94e..01034a351 100644
--- a/whisper/whisperv5/filter_test.go
+++ b/whisper/whisperv5/filter_test.go
@@ -776,6 +776,7 @@ func TestWatchers(t *testing.T) {
func TestVariableTopics(t *testing.T) {
InitSingleTest()
+ const lastTopicByte = 3
var match bool
params, err := generateMessageParams()
if err != nil {
@@ -796,19 +797,52 @@ func TestVariableTopics(t *testing.T) {
}
for i := 0; i < 4; i++ {
- arr := make([]byte, i+1, 4)
- copy(arr, env.Topic[:i+1])
-
- f.Topics[4] = arr
+ env.Topic = BytesToTopic(f.Topics[i])
match = f.MatchEnvelope(env)
if !match {
t.Fatalf("failed MatchEnvelope symmetric with seed %d, step %d.", seed, i)
}
- f.Topics[4][i]++
+ f.Topics[i][lastTopicByte]++
match = f.MatchEnvelope(env)
if match {
t.Fatalf("MatchEnvelope symmetric with seed %d, step %d: false positive.", seed, i)
}
}
}
+
+func TestMatchSingleTopic_ReturnTrue(t *testing.T) {
+ bt := []byte("test")
+ topic := BytesToTopic(bt)
+
+ if !matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_WithTail_ReturnTrue(t *testing.T) {
+ bt := []byte("test with tail")
+ topic := BytesToTopic([]byte("test"))
+
+ if !matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_NotEquals_ReturnFalse(t *testing.T) {
+ bt := []byte("tes")
+ topic := BytesToTopic(bt)
+
+ if matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_InsufficientLength_ReturnFalse(t *testing.T) {
+ bt := []byte("test")
+ topic := BytesToTopic([]byte("not_equal"))
+
+ if matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
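
These tests lean on BytesToTopic keeping only the first TopicLength (4) bytes of its input, which is why a topic built from "test" still matches the longer byte slice "test with tail", while a 3-byte slice is rejected by the new minimum-length check. A rough standalone illustration of that truncate-then-compare behaviour, using local stand-ins rather than the package's types:

package main

import "fmt"

const topicLength = 4

type topicType [topicLength]byte

// bytesToTopic mirrors the assumed behaviour: copy at most the first 4 bytes.
func bytesToTopic(b []byte) (t topicType) {
	copy(t[:], b) // copy stops at min(len(b), 4)
	return t
}

// matchSingleTopic mirrors the patched semantics: truncate long input,
// reject input shorter than a full topic, then compare byte by byte.
func matchSingleTopic(topic topicType, bt []byte) bool {
	if len(bt) > topicLength {
		bt = bt[:topicLength]
	}
	if len(bt) < topicLength {
		return false
	}
	for i, b := range bt {
		if topic[i] != b {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(matchSingleTopic(bytesToTopic([]byte("test")), []byte("test with tail"))) // true
	fmt.Println(matchSingleTopic(bytesToTopic([]byte("test")), []byte("tes")))            // false
}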
diff --git a/whisper/whisperv6/api.go b/whisper/whisperv6/api.go
index 0e8490b41..96e2b17e7 100644
--- a/whisper/whisperv6/api.go
+++ b/whisper/whisperv6/api.go
@@ -36,6 +36,7 @@ const (
filterTimeout = 300 // filters are considered timed out after filterTimeout seconds
)
+// List of errors
var (
ErrSymAsym = errors.New("specify either a symmetric or an asymmetric key")
ErrInvalidSymmetricKey = errors.New("invalid symmetric key")
@@ -60,32 +61,9 @@ func NewPublicWhisperAPI(w *Whisper) *PublicWhisperAPI {
w: w,
lastUsed: make(map[string]time.Time),
}
-
- go api.run()
return api
}
-// run the api event loop.
-// this loop deletes filters that have not been used within filterTimeout
-func (api *PublicWhisperAPI) run() {
- timeout := time.NewTicker(2 * time.Minute)
- for {
- <-timeout.C
-
- api.mu.Lock()
- for id, lastUsed := range api.lastUsed {
- if time.Since(lastUsed).Seconds() >= filterTimeout {
- delete(api.lastUsed, id)
- if err := api.w.Unsubscribe(id); err != nil {
- log.Error("could not unsubscribe whisper filter", "error", err)
- }
- log.Debug("delete whisper filter (timeout)", "id", id)
- }
- }
- api.mu.Unlock()
- }
-}
-
// Version returns the Whisper sub-protocol version.
func (api *PublicWhisperAPI) Version(ctx context.Context) string {
return ProtocolVersionStr
@@ -116,7 +94,7 @@ func (api *PublicWhisperAPI) SetMaxMessageSize(ctx context.Context, size uint32)
return true, api.w.SetMaxMessageSize(size)
}
-// SetMinPow sets the minimum PoW, and notifies the peers.
+// SetMinPoW sets the minimum PoW, and notifies the peers.
func (api *PublicWhisperAPI) SetMinPoW(ctx context.Context, pow float64) (bool, error) {
return true, api.w.SetMinimumPoW(pow)
}
@@ -174,7 +152,7 @@ func (api *PublicWhisperAPI) GetPublicKey(ctx context.Context, id string) (hexut
return crypto.FromECDSAPub(&key.PublicKey), nil
}
-// GetPublicKey returns the private key associated with the given key. The key is the hex
+// GetPrivateKey returns the private key associated with the given key. The key is the hex
// encoded representation of a key in the form specified in section 4.3.6 of ANSI X9.62.
func (api *PublicWhisperAPI) GetPrivateKey(ctx context.Context, id string) (hexutil.Bytes, error) {
key, err := api.w.GetPrivateKey(id)
@@ -218,6 +196,19 @@ func (api *PublicWhisperAPI) DeleteSymKey(ctx context.Context, id string) bool {
return api.w.DeleteSymKey(id)
}
+// MakeLightClient turns the node into a light client, which does not forward
+// any incoming messages and sends only messages that originate from this node.
+func (api *PublicWhisperAPI) MakeLightClient(ctx context.Context) bool {
+ api.w.lightClient = true
+ return api.w.lightClient
+}
+
+// CancelLightClient cancels light client mode.
+func (api *PublicWhisperAPI) CancelLightClient(ctx context.Context) bool {
+ api.w.lightClient = false
+ return !api.w.lightClient
+}
+
//go:generate gencodec -type NewMessage -field-override newMessageOverride -out gen_newmessage_json.go
// NewMessage represents a new whisper message that is posted through the RPC.
@@ -277,7 +268,7 @@ func (api *PublicWhisperAPI) Post(ctx context.Context, req NewMessage) (bool, er
if params.KeySym, err = api.w.GetSymKey(req.SymKeyID); err != nil {
return false, err
}
- if !validateSymmetricKey(params.KeySym) {
+ if !validateDataIntegrity(params.KeySym, aesKeyLength) {
return false, ErrInvalidSymmetricKey
}
}
@@ -383,7 +374,7 @@ func (api *PublicWhisperAPI) Messages(ctx context.Context, crit Criteria) (*rpc.
if err != nil {
return nil, err
}
- if !validateSymmetricKey(key) {
+ if !validateDataIntegrity(key, aesKeyLength) {
return nil, ErrInvalidSymmetricKey
}
filter.KeySym = key
@@ -555,7 +546,7 @@ func (api *PublicWhisperAPI) NewMessageFilter(req Criteria) (string, error) {
if keySym, err = api.w.GetSymKey(req.SymKeyID); err != nil {
return "", err
}
- if !validateSymmetricKey(keySym) {
+ if !validateDataIntegrity(keySym, aesKeyLength) {
return "", ErrInvalidSymmetricKey
}
}
@@ -567,7 +558,7 @@ func (api *PublicWhisperAPI) NewMessageFilter(req Criteria) (string, error) {
}
if len(req.Topics) > 0 {
- topics = make([][]byte, 1)
+ topics = make([][]byte, 0, len(req.Topics))
for _, topic := range req.Topics {
topics = append(topics, topic[:])
}
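
MakeLightClient and CancelLightClient are plain RPC toggles on the node's lightClient flag. Below is a sketch of calling them through the go-ethereum rpc client; the shh_makeLightClient and shh_cancelLightClient method names assume geth's usual mapping of exported PublicWhisperAPI methods into the shh namespace, and the endpoint assumes a local node with Whisper enabled and the shh API exposed over HTTP:

package main

import (
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/rpc"
)

func main() {
	client, err := rpc.Dial("http://127.0.0.1:8545")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	var ok bool
	// Switch the node into light client mode; returns the new flag value.
	if err := client.Call(&ok, "shh_makeLightClient"); err != nil {
		log.Fatal(err)
	}
	fmt.Println("light client mode:", ok)

	// Switch back to full forwarding; returns true when the flag is cleared.
	if err := client.Call(&ok, "shh_cancelLightClient"); err != nil {
		log.Fatal(err)
	}
	fmt.Println("light client mode cancelled:", ok)
}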
diff --git a/whisper/whisperv6/config.go b/whisper/whisperv6/config.go
index d7f817aa2..61419de00 100644
--- a/whisper/whisperv6/config.go
+++ b/whisper/whisperv6/config.go
@@ -16,11 +16,13 @@
package whisperv6
+// Config represents the configuration state of a whisper node.
type Config struct {
MaxMessageSize uint32 `toml:",omitempty"`
MinimumAcceptedPOW float64 `toml:",omitempty"`
}
+// DefaultConfig represents (shocker!) the default configuration.
var DefaultConfig = Config{
MaxMessageSize: DefaultMaxMessageSize,
MinimumAcceptedPOW: DefaultMinimumPoW,
diff --git a/whisper/whisperv6/doc.go b/whisper/whisperv6/doc.go
index da1b4ee5b..066a9766d 100644
--- a/whisper/whisperv6/doc.go
+++ b/whisper/whisperv6/doc.go
@@ -27,6 +27,9 @@ Whisper is a pure identity-based messaging system. Whisper provides a low-level
or prejudiced by the low-level hardware attributes and characteristics,
particularly the notion of singular endpoints.
*/
+
+// Contains the Whisper protocol constant definitions
+
package whisperv6
import (
@@ -34,10 +37,11 @@ import (
"time"
)
+// Whisper protocol parameters
const (
- ProtocolVersion = uint64(6)
- ProtocolVersionStr = "6.0"
- ProtocolName = "shh"
+ ProtocolVersion = uint64(6) // Protocol version number
+ ProtocolVersionStr = "6.0" // The same, as a string
+ ProtocolName = "shh" // Nickname of the protocol in geth
// whisper protocol message codes, according to EIP-627
statusCode = 0 // used by whisper protocol
@@ -48,15 +52,16 @@ const (
p2pMessageCode = 127 // peer-to-peer message (to be consumed by the peer, but not forwarded any further)
NumberOfMessageCodes = 128
- paddingMask = byte(3)
+	SizeMask = byte(3) // mask used to extract the size of the payload size field from the flags
signatureFlag = byte(4)
TopicLength = 4 // in bytes
signatureLength = 65 // in bytes
aesKeyLength = 32 // in bytes
- AESNonceLength = 12 // in bytes
- keyIdSize = 32 // in bytes
- bloomFilterSize = 64 // in bytes
+ aesNonceLength = 12 // in bytes; for more info please see cipher.gcmStandardNonceSize & aesgcm.NonceSize()
+ keyIDSize = 32 // in bytes
+ BloomFilterSize = 64 // in bytes
+ flagsLength = 1
EnvelopeHeaderLength = 20
@@ -64,7 +69,7 @@ const (
DefaultMaxMessageSize = uint32(1024 * 1024)
DefaultMinimumPoW = 0.2
- padSizeLimit = 256 // just an arbitrary number, could be changed without breaking the protocol (must not exceed 2^24)
+ padSizeLimit = 256 // just an arbitrary number, could be changed without breaking the protocol
messageQueueLimit = 1024
expirationCycle = time.Second
diff --git a/whisper/whisperv6/envelope.go b/whisper/whisperv6/envelope.go
index 9ed712b93..c42d1fa8a 100644
--- a/whisper/whisperv6/envelope.go
+++ b/whisper/whisperv6/envelope.go
@@ -77,15 +77,19 @@ func NewEnvelope(ttl uint32, topic TopicType, msg *sentMessage) *Envelope {
// Seal closes the envelope by spending the requested amount of time as a proof
// of work on hashing the data.
func (e *Envelope) Seal(options *MessageParams) error {
- var target, bestBit int
if options.PoW == 0 {
- // adjust for the duration of Seal() execution only if execution time is predefined unconditionally
+ // PoW is not required
+ return nil
+ }
+
+ var target, bestBit int
+ if options.PoW < 0 {
+ // target is not set - the function should run for a period
+ // of time specified in WorkTime param. Since we can predict
+ // the execution time, we can also adjust Expiry.
e.Expiry += options.WorkTime
} else {
target = e.powToFirstBit(options.PoW)
- if target < 1 {
- target = 1
- }
}
buf := make([]byte, 64)
@@ -115,6 +119,8 @@ func (e *Envelope) Seal(options *MessageParams) error {
return nil
}
+// PoW computes (if necessary) and returns the proof of work target
+// of the envelope.
func (e *Envelope) PoW() float64 {
if e.pow == 0 {
e.calculatePoW(0)
@@ -141,7 +147,11 @@ func (e *Envelope) powToFirstBit(pow float64) int {
x *= float64(e.TTL)
bits := gmath.Log2(x)
bits = gmath.Ceil(bits)
- return int(bits)
+ res := int(bits)
+ if res < 1 {
+ res = 1
+ }
+ return res
}
// Hash returns the SHA3 hash of the envelope, calculating it if not yet done.
@@ -198,8 +208,11 @@ func (e *Envelope) OpenSymmetric(key []byte) (msg *ReceivedMessage, err error) {
// Open tries to decrypt an envelope, and populates the message fields in case of success.
func (e *Envelope) Open(watcher *Filter) (msg *ReceivedMessage) {
- // The API interface forbids filters doing both symmetric and
- // asymmetric encryption.
+ if watcher == nil {
+ return nil
+ }
+
+ // The API interface forbids filters doing both symmetric and asymmetric encryption.
if watcher.expectsAsymmetricEncryption() && watcher.expectsSymmetricEncryption() {
return nil
}
@@ -217,7 +230,7 @@ func (e *Envelope) Open(watcher *Filter) (msg *ReceivedMessage) {
}
if msg != nil {
- ok := msg.Validate()
+ ok := msg.ValidateAndParse()
if !ok {
return nil
}
@@ -240,7 +253,7 @@ func (e *Envelope) Bloom() []byte {
// TopicToBloom converts the topic (4 bytes) to the bloom filter (64 bytes)
func TopicToBloom(topic TopicType) []byte {
- b := make([]byte, bloomFilterSize)
+ b := make([]byte, BloomFilterSize)
var index [3]int
for j := 0; j < 3; j++ {
index[j] = int(topic[j])
@@ -256,3 +269,11 @@ func TopicToBloom(topic TopicType) []byte {
}
return b
}
+
+// GetEnvelope retrieves an envelope from the message queue by its hash.
+// It returns nil if the envelope can not be found.
+func (w *Whisper) GetEnvelope(hash common.Hash) *Envelope {
+ w.poolMu.RLock()
+ defer w.poolMu.RUnlock()
+ return w.envelopes[hash]
+}
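
The Seal and powToFirstBit changes move the lower bound on the target into powToFirstBit itself, so every caller gets at least a 1-bit target, while a zero PoW now means "no work required" and a negative PoW means "spend WorkTime and adjust Expiry". The target is roughly ceil(log2(pow * size * TTL)) leading zero bits of the envelope hash. A small worked sketch of that arithmetic, where size merely stands in for the serialized envelope size:

package main

import (
	"fmt"
	"math"
)

// firstBit mirrors the patched powToFirstBit logic: the number of leading
// zero bits the envelope hash must have, never less than 1.
func firstBit(pow float64, size, ttl uint32) int {
	x := pow * float64(size) * float64(ttl)
	res := int(math.Ceil(math.Log2(x)))
	if res < 1 {
		res = 1
	}
	return res
}

func main() {
	// Roughly the default minimum PoW against a 300-byte envelope with a 50s TTL.
	fmt.Println(firstBit(0.2, 300, 50)) // 12

	// A tiny product used to yield a zero or negative bit count before the
	// clamp moved into powToFirstBit; now it bottoms out at 1.
	fmt.Println(firstBit(0.0001, 10, 1)) // 1
}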
diff --git a/whisper/whisperv6/filter.go b/whisper/whisperv6/filter.go
index 2f52dd6b9..2f170ddeb 100644
--- a/whisper/whisperv6/filter.go
+++ b/whisper/whisperv6/filter.go
@@ -26,6 +26,7 @@ import (
"github.com/ethereum/go-ethereum/log"
)
+// Filter represents a Whisper message filter
type Filter struct {
Src *ecdsa.PublicKey // Sender of the message
KeyAsym *ecdsa.PrivateKey // Private Key of recipient
@@ -34,24 +35,34 @@ type Filter struct {
PoW float64 // Proof of work as described in the Whisper spec
AllowP2P bool // Indicates whether this filter is interested in direct peer-to-peer messages
SymKeyHash common.Hash // The Keccak256Hash of the symmetric key, needed for optimization
+ id string // unique identifier
Messages map[common.Hash]*ReceivedMessage
mutex sync.RWMutex
}
+// Filters represents a collection of filters
type Filters struct {
watchers map[string]*Filter
- whisper *Whisper
- mutex sync.RWMutex
+
+ topicMatcher map[TopicType]map[*Filter]struct{} // map a topic to the filters that are interested in being notified when a message matches that topic
+ allTopicsMatcher map[*Filter]struct{} // list all the filters that will be notified of a new message, no matter what its topic is
+
+ whisper *Whisper
+ mutex sync.RWMutex
}
+// NewFilters returns a newly created filter collection
func NewFilters(w *Whisper) *Filters {
return &Filters{
- watchers: make(map[string]*Filter),
- whisper: w,
+ watchers: make(map[string]*Filter),
+ topicMatcher: make(map[TopicType]map[*Filter]struct{}),
+ allTopicsMatcher: make(map[*Filter]struct{}),
+ whisper: w,
}
}
+// Install will add a new filter to the filter collection
func (fs *Filters) Install(watcher *Filter) (string, error) {
if watcher.KeySym != nil && watcher.KeyAsym != nil {
return "", fmt.Errorf("filters must choose between symmetric and asymmetric keys")
@@ -77,37 +88,82 @@ func (fs *Filters) Install(watcher *Filter) (string, error) {
watcher.SymKeyHash = crypto.Keccak256Hash(watcher.KeySym)
}
+ watcher.id = id
fs.watchers[id] = watcher
+ fs.addTopicMatcher(watcher)
return id, err
}
+// Uninstall will remove the filter with the specified id from
+// the filter collection
func (fs *Filters) Uninstall(id string) bool {
fs.mutex.Lock()
defer fs.mutex.Unlock()
if fs.watchers[id] != nil {
+ fs.removeFromTopicMatchers(fs.watchers[id])
delete(fs.watchers, id)
return true
}
return false
}
+// addTopicMatcher adds a filter to the topic matchers.
+// If the filter's Topics array is empty, it will be tried on every topic.
+// Otherwise, it will be tried on the topics specified.
+func (fs *Filters) addTopicMatcher(watcher *Filter) {
+ if len(watcher.Topics) == 0 {
+ fs.allTopicsMatcher[watcher] = struct{}{}
+ } else {
+ for _, t := range watcher.Topics {
+ topic := BytesToTopic(t)
+ if fs.topicMatcher[topic] == nil {
+ fs.topicMatcher[topic] = make(map[*Filter]struct{})
+ }
+ fs.topicMatcher[topic][watcher] = struct{}{}
+ }
+ }
+}
+
+// removeFromTopicMatchers removes a filter from the topic matchers
+func (fs *Filters) removeFromTopicMatchers(watcher *Filter) {
+ delete(fs.allTopicsMatcher, watcher)
+ for _, topic := range watcher.Topics {
+ delete(fs.topicMatcher[BytesToTopic(topic)], watcher)
+ }
+}
+
+// getWatchersByTopic returns a slice containing the filters that
+// match a specific topic
+func (fs *Filters) getWatchersByTopic(topic TopicType) []*Filter {
+ res := make([]*Filter, 0, len(fs.allTopicsMatcher))
+ for watcher := range fs.allTopicsMatcher {
+ res = append(res, watcher)
+ }
+ for watcher := range fs.topicMatcher[topic] {
+ res = append(res, watcher)
+ }
+ return res
+}
+
+// Get returns a filter from the collection with a specific ID
func (fs *Filters) Get(id string) *Filter {
fs.mutex.RLock()
defer fs.mutex.RUnlock()
return fs.watchers[id]
}
+// NotifyWatchers notifies any filter that has declared interest
+// in the envelope's topic.
func (fs *Filters) NotifyWatchers(env *Envelope, p2pMessage bool) {
var msg *ReceivedMessage
fs.mutex.RLock()
defer fs.mutex.RUnlock()
- i := -1 // only used for logging info
- for _, watcher := range fs.watchers {
- i++
+ candidates := fs.getWatchersByTopic(env.Topic)
+ for _, watcher := range candidates {
if p2pMessage && !watcher.AllowP2P {
- log.Trace(fmt.Sprintf("msg [%x], filter [%d]: p2p messages are not allowed", env.Hash(), i))
+ log.Trace(fmt.Sprintf("msg [%x], filter [%s]: p2p messages are not allowed", env.Hash(), watcher.id))
continue
}
@@ -119,10 +175,10 @@ func (fs *Filters) NotifyWatchers(env *Envelope, p2pMessage bool) {
if match {
msg = env.Open(watcher)
if msg == nil {
- log.Trace("processing message: failed to open", "message", env.Hash().Hex(), "filter", i)
+ log.Trace("processing message: failed to open", "message", env.Hash().Hex(), "filter", watcher.id)
}
} else {
- log.Trace("processing message: does not match", "message", env.Hash().Hex(), "filter", i)
+ log.Trace("processing message: does not match", "message", env.Hash().Hex(), "filter", watcher.id)
}
}
@@ -135,20 +191,6 @@ func (fs *Filters) NotifyWatchers(env *Envelope, p2pMessage bool) {
}
}
-func (f *Filter) processEnvelope(env *Envelope) *ReceivedMessage {
- if f.MatchEnvelope(env) {
- msg := env.Open(f)
- if msg != nil {
- return msg
- } else {
- log.Trace("processing envelope: failed to open", "hash", env.Hash().Hex())
- }
- } else {
- log.Trace("processing envelope: does not match", "hash", env.Hash().Hex())
- }
- return nil
-}
-
func (f *Filter) expectsAsymmetricEncryption() bool {
return f.KeyAsym != nil
}
@@ -157,6 +199,8 @@ func (f *Filter) expectsSymmetricEncryption() bool {
return f.KeySym != nil
}
+// Trigger adds a yet-unknown message to the filter's list of
+// received messages.
func (f *Filter) Trigger(msg *ReceivedMessage) {
f.mutex.Lock()
defer f.mutex.Unlock()
@@ -166,6 +210,8 @@ func (f *Filter) Trigger(msg *ReceivedMessage) {
}
}
+// Retrieve will return the list of all received messages associated
+// with the filter.
func (f *Filter) Retrieve() (all []*ReceivedMessage) {
f.mutex.Lock()
defer f.mutex.Unlock()
@@ -181,48 +227,36 @@ func (f *Filter) Retrieve() (all []*ReceivedMessage) {
// MatchMessage checks if the filter matches an already decrypted
// message (i.e. a Message that has already been handled by
-// MatchEnvelope when checked by a previous filter)
+// MatchEnvelope when checked by a previous filter).
+// Topics are not checked here, since this is done by topic matchers.
func (f *Filter) MatchMessage(msg *ReceivedMessage) bool {
if f.PoW > 0 && msg.PoW < f.PoW {
return false
}
if f.expectsAsymmetricEncryption() && msg.isAsymmetricEncryption() {
- return IsPubKeyEqual(&f.KeyAsym.PublicKey, msg.Dst) && f.MatchTopic(msg.Topic)
+ return IsPubKeyEqual(&f.KeyAsym.PublicKey, msg.Dst)
} else if f.expectsSymmetricEncryption() && msg.isSymmetricEncryption() {
- return f.SymKeyHash == msg.SymKeyHash && f.MatchTopic(msg.Topic)
+ return f.SymKeyHash == msg.SymKeyHash
}
return false
}
-// MatchEvelope checks if it's worth decrypting the message. If
+// MatchEnvelope checks if it's worth decrypting the message. If
// it returns `true`, client code is expected to attempt decrypting
// the message and subsequently call MatchMessage.
+// Topics are not checked here, since this is done by topic matchers.
func (f *Filter) MatchEnvelope(envelope *Envelope) bool {
- if f.PoW > 0 && envelope.pow < f.PoW {
- return false
- }
-
- return f.MatchTopic(envelope.Topic)
+ return f.PoW <= 0 || envelope.pow >= f.PoW
}
-func (f *Filter) MatchTopic(topic TopicType) bool {
- if len(f.Topics) == 0 {
- // any topic matches
- return true
- }
-
- for _, bt := range f.Topics {
- if matchSingleTopic(topic, bt) {
- return true
- }
+func matchSingleTopic(topic TopicType, bt []byte) bool {
+ if len(bt) > TopicLength {
+ bt = bt[:TopicLength]
}
- return false
-}
-func matchSingleTopic(topic TopicType, bt []byte) bool {
- if len(bt) > 4 {
- bt = bt[:4]
+ if len(bt) < TopicLength {
+ return false
}
for j, b := range bt {
@@ -233,6 +267,7 @@ func matchSingleTopic(topic TopicType, bt []byte) bool {
return true
}
+// IsPubKeyEqual checks that two public keys are equal
func IsPubKeyEqual(a, b *ecdsa.PublicKey) bool {
if !ValidatePublicKey(a) {
return false
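
Topic checks have moved out of MatchEnvelope and MatchMessage and into the Filters collection: each installed filter is indexed either under every topic it names or, if it names none, in a catch-all set, and NotifyWatchers only visits the union of the two for the envelope's topic. A stripped-down sketch of that index, with local types rather than the package's:

package main

import "fmt"

type filter struct{ id string }

// index buckets topic-specific watchers by topic and keeps topic-less
// watchers in a catch-all set; a lookup returns the union of both.
type index struct {
	byTopic map[[4]byte]map[*filter]struct{}
	all     map[*filter]struct{}
}

func newIndex() *index {
	return &index{
		byTopic: make(map[[4]byte]map[*filter]struct{}),
		all:     make(map[*filter]struct{}),
	}
}

func (ix *index) install(f *filter, topics [][4]byte) {
	if len(topics) == 0 {
		ix.all[f] = struct{}{}
		return
	}
	for _, t := range topics {
		if ix.byTopic[t] == nil {
			ix.byTopic[t] = make(map[*filter]struct{})
		}
		ix.byTopic[t][f] = struct{}{}
	}
}

func (ix *index) candidates(topic [4]byte) []*filter {
	res := make([]*filter, 0, len(ix.all)+len(ix.byTopic[topic]))
	for f := range ix.all {
		res = append(res, f)
	}
	for f := range ix.byTopic[topic] {
		res = append(res, f)
	}
	return res
}

func main() {
	ix := newIndex()
	ix.install(&filter{id: "catch-all"}, nil)
	ix.install(&filter{id: "weather"}, [][4]byte{{0xF, 0x1, 0x2, 0}})

	for _, f := range ix.candidates([4]byte{0xF, 0x1, 0x2, 0}) {
		fmt.Println(f.id) // catch-all and weather, in some order
	}
	for _, f := range ix.candidates([4]byte{9, 9, 9, 9}) {
		fmt.Println(f.id) // only catch-all
	}
}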
diff --git a/whisper/whisperv6/filter_test.go b/whisper/whisperv6/filter_test.go
index e2877b233..0bb7986c3 100644
--- a/whisper/whisperv6/filter_test.go
+++ b/whisper/whisperv6/filter_test.go
@@ -109,7 +109,7 @@ func TestInstallFilters(t *testing.T) {
t.Fatalf("seed %d: failed to install filter: %s", seed, err)
}
tst[i].id = j
- if len(j) != keyIdSize*2 {
+ if len(j) != keyIDSize*2 {
t.Fatalf("seed %d: wrong filter id size [%d]", seed, len(j))
}
}
@@ -303,9 +303,8 @@ func TestMatchEnvelope(t *testing.T) {
t.Fatalf("failed generateMessageParams with seed %d: %s.", seed, err)
}
- params.Topic[0] = 0xFF // ensure mismatch
+ params.Topic[0] = 0xFF // topic mismatch
- // mismatch with pseudo-random data
msg, err := NewSentMessage(params)
if err != nil {
t.Fatalf("failed to create new message with seed %d: %s.", seed, err)
@@ -314,14 +313,6 @@ func TestMatchEnvelope(t *testing.T) {
if err != nil {
t.Fatalf("failed Wrap with seed %d: %s.", seed, err)
}
- match := fsym.MatchEnvelope(env)
- if match {
- t.Fatalf("failed MatchEnvelope symmetric with seed %d.", seed)
- }
- match = fasym.MatchEnvelope(env)
- if match {
- t.Fatalf("failed MatchEnvelope asymmetric with seed %d.", seed)
- }
// encrypt symmetrically
i := mrand.Int() % 4
@@ -337,7 +328,7 @@ func TestMatchEnvelope(t *testing.T) {
}
// symmetric + matching topic: match
- match = fsym.MatchEnvelope(env)
+ match := fsym.MatchEnvelope(env)
if !match {
t.Fatalf("failed MatchEnvelope() symmetric with seed %d.", seed)
}
@@ -396,7 +387,7 @@ func TestMatchEnvelope(t *testing.T) {
// asymmetric + matching topic: match
fasym.Topics[i] = fasym.Topics[i+1]
match = fasym.MatchEnvelope(env)
- if match {
+ if !match {
t.Fatalf("failed MatchEnvelope(asymmetric + matching topic) with seed %d.", seed)
}
@@ -431,7 +422,8 @@ func TestMatchEnvelope(t *testing.T) {
// filter with topic + envelope without topic: mismatch
fasym.Topics = fsym.Topics
match = fasym.MatchEnvelope(env)
- if match {
+ if !match {
+		// topic mismatch should have no effect, as topics are handled by topic matchers
t.Fatalf("failed MatchEnvelope(filter without topic + envelope without topic) with seed %d.", seed)
}
}
@@ -487,7 +479,8 @@ func TestMatchMessageSym(t *testing.T) {
// topic mismatch
f.Topics[index][0]++
- if f.MatchMessage(msg) {
+ if !f.MatchMessage(msg) {
+		// topic mismatch should have no effect, as topics are handled by topic matchers
t.Fatalf("failed MatchEnvelope(topic mismatch) with seed %d.", seed)
}
f.Topics[index][0]--
@@ -580,7 +573,8 @@ func TestMatchMessageAsym(t *testing.T) {
// topic mismatch
f.Topics[index][0]++
- if f.MatchMessage(msg) {
+ if !f.MatchMessage(msg) {
+		// topic mismatch should have no effect, as topics are handled by topic matchers
t.Fatalf("failed MatchEnvelope(topic mismatch) with seed %d.", seed)
}
f.Topics[index][0]--
@@ -800,6 +794,7 @@ func TestWatchers(t *testing.T) {
func TestVariableTopics(t *testing.T) {
InitSingleTest()
+ const lastTopicByte = 3
var match bool
params, err := generateMessageParams()
if err != nil {
@@ -820,19 +815,53 @@ func TestVariableTopics(t *testing.T) {
}
for i := 0; i < 4; i++ {
- arr := make([]byte, i+1, 4)
- copy(arr, env.Topic[:i+1])
-
- f.Topics[4] = arr
+ env.Topic = BytesToTopic(f.Topics[i])
match = f.MatchEnvelope(env)
if !match {
t.Fatalf("failed MatchEnvelope symmetric with seed %d, step %d.", seed, i)
}
- f.Topics[4][i]++
+ f.Topics[i][lastTopicByte]++
match = f.MatchEnvelope(env)
- if match {
- t.Fatalf("MatchEnvelope symmetric with seed %d, step %d: false positive.", seed, i)
+ if !match {
+			// topic mismatch should have no effect, as topics are handled by topic matchers
+ t.Fatalf("MatchEnvelope symmetric with seed %d, step %d.", seed, i)
}
}
}
+
+func TestMatchSingleTopic_ReturnTrue(t *testing.T) {
+ bt := []byte("test")
+ topic := BytesToTopic(bt)
+
+ if !matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_WithTail_ReturnTrue(t *testing.T) {
+ bt := []byte("test with tail")
+ topic := BytesToTopic([]byte("test"))
+
+ if !matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_NotEquals_ReturnFalse(t *testing.T) {
+ bt := []byte("tes")
+ topic := BytesToTopic(bt)
+
+ if matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
+
+func TestMatchSingleTopic_InsufficientLength_ReturnFalse(t *testing.T) {
+ bt := []byte("test")
+ topic := BytesToTopic([]byte("not_equal"))
+
+ if matchSingleTopic(topic, bt) {
+ t.FailNow()
+ }
+}
diff --git a/whisper/whisperv6/gen_criteria_json.go b/whisper/whisperv6/gen_criteria_json.go
index a298396cc..1a428d6df 100644
--- a/whisper/whisperv6/gen_criteria_json.go
+++ b/whisper/whisperv6/gen_criteria_json.go
@@ -10,6 +10,7 @@ import (
var _ = (*criteriaOverride)(nil)
+// MarshalJSON marshals type Criteria to a json string
func (c Criteria) MarshalJSON() ([]byte, error) {
type Criteria struct {
SymKeyID string `json:"symKeyID"`
@@ -29,6 +30,7 @@ func (c Criteria) MarshalJSON() ([]byte, error) {
return json.Marshal(&enc)
}
+// UnmarshalJSON unmarshals type Criteria from a json string
func (c *Criteria) UnmarshalJSON(input []byte) error {
type Criteria struct {
SymKeyID *string `json:"symKeyID"`
diff --git a/whisper/whisperv6/gen_message_json.go b/whisper/whisperv6/gen_message_json.go
index e193ba3e2..6218f5df6 100644
--- a/whisper/whisperv6/gen_message_json.go
+++ b/whisper/whisperv6/gen_message_json.go
@@ -10,6 +10,7 @@ import (
var _ = (*messageOverride)(nil)
+// MarshalJSON marshals type Message to a json string
func (m Message) MarshalJSON() ([]byte, error) {
type Message struct {
Sig hexutil.Bytes `json:"sig,omitempty"`
@@ -35,6 +36,7 @@ func (m Message) MarshalJSON() ([]byte, error) {
return json.Marshal(&enc)
}
+// UnmarshalJSON unmarshals type Message from a json string
func (m *Message) UnmarshalJSON(input []byte) error {
type Message struct {
Sig *hexutil.Bytes `json:"sig,omitempty"`
diff --git a/whisper/whisperv6/gen_newmessage_json.go b/whisper/whisperv6/gen_newmessage_json.go
index 6250579f4..75a1279ae 100644
--- a/whisper/whisperv6/gen_newmessage_json.go
+++ b/whisper/whisperv6/gen_newmessage_json.go
@@ -10,6 +10,7 @@ import (
var _ = (*newMessageOverride)(nil)
+// MarshalJSON marshals type NewMessage to a json string
func (n NewMessage) MarshalJSON() ([]byte, error) {
type NewMessage struct {
SymKeyID string `json:"symKeyID"`
@@ -37,6 +38,7 @@ func (n NewMessage) MarshalJSON() ([]byte, error) {
return json.Marshal(&enc)
}
+// UnmarshalJSON unmarshals type NewMessage from a json string
func (n *NewMessage) UnmarshalJSON(input []byte) error {
type NewMessage struct {
SymKeyID *string `json:"symKeyID"`
diff --git a/whisper/whisperv6/message.go b/whisper/whisperv6/message.go
index f8df50336..b8318cbe8 100644
--- a/whisper/whisperv6/message.go
+++ b/whisper/whisperv6/message.go
@@ -25,6 +25,7 @@ import (
crand "crypto/rand"
"encoding/binary"
"errors"
+ mrand "math/rand"
"strconv"
"github.com/ethereum/go-ethereum/common"
@@ -33,7 +34,8 @@ import (
"github.com/ethereum/go-ethereum/log"
)
-// Options specifies the exact way a message should be wrapped into an Envelope.
+// MessageParams specifies the exact way a message should be wrapped
+// into an Envelope.
type MessageParams struct {
TTL uint32
Src *ecdsa.PrivateKey
@@ -54,7 +56,7 @@ type sentMessage struct {
}
// ReceivedMessage represents a data packet to be received through the
-// Whisper protocol.
+// Whisper protocol and successfully decrypted.
type ReceivedMessage struct {
Raw []byte
@@ -70,7 +72,7 @@ type ReceivedMessage struct {
Dst *ecdsa.PublicKey // Message recipient (identity used to decode the message)
Topic TopicType
- SymKeyHash common.Hash // The Keccak256Hash of the key, associated with the Topic
+ SymKeyHash common.Hash // The Keccak256Hash of the key
EnvelopeHash common.Hash // Message envelope hash to act as a unique id
}
@@ -86,83 +88,62 @@ func (msg *ReceivedMessage) isAsymmetricEncryption() bool {
return msg.Dst != nil
}
-// NewMessage creates and initializes a non-signed, non-encrypted Whisper message.
+// NewSentMessage creates and initializes a non-signed, non-encrypted Whisper message.
func NewSentMessage(params *MessageParams) (*sentMessage, error) {
+ const payloadSizeFieldMaxSize = 4
msg := sentMessage{}
- msg.Raw = make([]byte, 1, len(params.Payload)+len(params.Padding)+signatureLength+padSizeLimit)
+ msg.Raw = make([]byte, 1,
+ flagsLength+payloadSizeFieldMaxSize+len(params.Payload)+len(params.Padding)+signatureLength+padSizeLimit)
msg.Raw[0] = 0 // set all the flags to zero
- err := msg.appendPadding(params)
- if err != nil {
- return nil, err
- }
+ msg.addPayloadSizeField(params.Payload)
msg.Raw = append(msg.Raw, params.Payload...)
- return &msg, nil
+ err := msg.appendPadding(params)
+ return &msg, err
}
-// getSizeOfLength returns the number of bytes necessary to encode the entire size padding (including these bytes)
-func getSizeOfLength(b []byte) (sz int, err error) {
- sz = intSize(len(b)) // first iteration
- sz = intSize(len(b) + sz) // second iteration
- if sz > 3 {
- err = errors.New("oversized padding parameter")
- }
- return sz, err
+// addPayloadSizeField appends the auxiliary field containing the size of the payload
+func (msg *sentMessage) addPayloadSizeField(payload []byte) {
+ fieldSize := getSizeOfPayloadSizeField(payload)
+ field := make([]byte, 4)
+ binary.LittleEndian.PutUint32(field, uint32(len(payload)))
+ field = field[:fieldSize]
+ msg.Raw = append(msg.Raw, field...)
+ msg.Raw[0] |= byte(fieldSize)
}
-// sizeOfIntSize returns minimal number of bytes necessary to encode an integer value
-func intSize(i int) (s int) {
- for s = 1; i >= 256; s++ {
- i /= 256
+// getSizeOfPayloadSizeField returns the number of bytes necessary to encode the size of the payload
+func getSizeOfPayloadSizeField(payload []byte) int {
+ s := 1
+ for i := len(payload); i >= 256; i /= 256 {
+ s++
}
return s
}
-// appendPadding appends the pseudorandom padding bytes and sets the padding flag.
-// The last byte contains the size of padding (thus, its size must not exceed 256).
+// appendPadding appends the padding specified in params.
+// If no padding is provided in params, then random padding is generated.
func (msg *sentMessage) appendPadding(params *MessageParams) error {
- rawSize := len(params.Payload) + 1
- if params.Src != nil {
- rawSize += signatureLength
+ if len(params.Padding) != 0 {
+ // padding data was provided by the Dapp, just use it as is
+ msg.Raw = append(msg.Raw, params.Padding...)
+ return nil
}
- if params.KeySym != nil {
- rawSize += AESNonceLength
+ rawSize := flagsLength + getSizeOfPayloadSizeField(params.Payload) + len(params.Payload)
+ if params.Src != nil {
+ rawSize += signatureLength
}
odd := rawSize % padSizeLimit
-
- if len(params.Padding) != 0 {
- padSize := len(params.Padding)
- padLengthSize, err := getSizeOfLength(params.Padding)
- if err != nil {
- return err
- }
- totalPadSize := padSize + padLengthSize
- buf := make([]byte, 8)
- binary.LittleEndian.PutUint32(buf, uint32(totalPadSize))
- buf = buf[:padLengthSize]
- msg.Raw = append(msg.Raw, buf...)
- msg.Raw = append(msg.Raw, params.Padding...)
- msg.Raw[0] |= byte(padLengthSize) // number of bytes indicating the padding size
- } else if odd != 0 {
- totalPadSize := padSizeLimit - odd
- if totalPadSize > 255 {
- // this algorithm is only valid if padSizeLimit < 256.
- // if padSizeLimit will ever change, please fix the algorithm
- // (please see also ReceivedMessage.extractPadding() function).
- panic("please fix the padding algorithm before releasing new version")
- }
- buf := make([]byte, totalPadSize)
- _, err := crand.Read(buf[1:])
- if err != nil {
- return err
- }
- if totalPadSize > 6 && !validateSymmetricKey(buf) {
- return errors.New("failed to generate random padding of size " + strconv.Itoa(totalPadSize))
- }
- buf[0] = byte(totalPadSize)
- msg.Raw = append(msg.Raw, buf...)
- msg.Raw[0] |= byte(0x1) // number of bytes indicating the padding size
+ paddingSize := padSizeLimit - odd
+ pad := make([]byte, paddingSize)
+ _, err := crand.Read(pad)
+ if err != nil {
+ return err
}
+ if !validateDataIntegrity(pad, paddingSize) {
+ return errors.New("failed to generate random padding of size " + strconv.Itoa(paddingSize))
+ }
+ msg.Raw = append(msg.Raw, pad...)
return nil
}
@@ -175,11 +156,11 @@ func (msg *sentMessage) sign(key *ecdsa.PrivateKey) error {
return nil
}
- msg.Raw[0] |= signatureFlag
+ msg.Raw[0] |= signatureFlag // it is important to set this flag before signing
hash := crypto.Keccak256(msg.Raw)
signature, err := crypto.Sign(hash, key)
if err != nil {
- msg.Raw[0] &= ^signatureFlag // clear the flag
+ msg.Raw[0] &= (0xFF ^ signatureFlag) // clear the flag
return err
}
msg.Raw = append(msg.Raw, signature...)
@@ -201,10 +182,9 @@ func (msg *sentMessage) encryptAsymmetric(key *ecdsa.PublicKey) error {
// encryptSymmetric encrypts a message with a topic key, using AES-GCM-256.
// nonce size should be 12 bytes (see cipher.gcmStandardNonceSize).
func (msg *sentMessage) encryptSymmetric(key []byte) (err error) {
- if !validateSymmetricKey(key) {
- return errors.New("invalid key provided for symmetric encryption")
+ if !validateDataIntegrity(key, aesKeyLength) {
+ return errors.New("invalid key provided for symmetric encryption, size: " + strconv.Itoa(len(key)))
}
-
block, err := aes.NewCipher(key)
if err != nil {
return err
@@ -213,20 +193,46 @@ func (msg *sentMessage) encryptSymmetric(key []byte) (err error) {
if err != nil {
return err
}
-
- // never use more than 2^32 random nonces with a given key
- salt := make([]byte, aesgcm.NonceSize())
- _, err = crand.Read(salt)
+ salt, err := generateSecureRandomData(aesNonceLength) // never use more than 2^32 random nonces with a given key
if err != nil {
return err
- } else if !validateSymmetricKey(salt) {
- return errors.New("crypto/rand failed to generate salt")
}
-
- msg.Raw = append(aesgcm.Seal(nil, salt, msg.Raw, nil), salt...)
+ encrypted := aesgcm.Seal(nil, salt, msg.Raw, nil)
+ msg.Raw = append(encrypted, salt...)
return nil
}
+// generateSecureRandomData generates random data where extra security is required.
+// The purpose of this function is to prevent some bugs in software or in hardware
+// from delivering not-very-random data. This is especially useful for the AES nonce,
+// where true randomness does not really matter, but it is very important to have
+// a unique nonce for every message.
+func generateSecureRandomData(length int) ([]byte, error) {
+ x := make([]byte, length)
+ y := make([]byte, length)
+ res := make([]byte, length)
+
+ _, err := crand.Read(x)
+ if err != nil {
+ return nil, err
+ } else if !validateDataIntegrity(x, length) {
+ return nil, errors.New("crypto/rand failed to generate secure random data")
+ }
+ _, err = mrand.Read(y)
+ if err != nil {
+ return nil, err
+ } else if !validateDataIntegrity(y, length) {
+ return nil, errors.New("math/rand failed to generate secure random data")
+ }
+ for i := 0; i < length; i++ {
+ res[i] = x[i] ^ y[i]
+ }
+ if !validateDataIntegrity(res, length) {
+ return nil, errors.New("failed to generate secure random data")
+ }
+ return res, nil
+}
+
// Wrap bundles the message into an Envelope to transmit over the network.
func (msg *sentMessage) Wrap(options *MessageParams) (envelope *Envelope, err error) {
if options.TTL == 0 {
@@ -258,12 +264,11 @@ func (msg *sentMessage) Wrap(options *MessageParams) (envelope *Envelope, err er
// decryptSymmetric decrypts a message with a topic key, using AES-GCM-256.
// nonce size should be 12 bytes (see cipher.gcmStandardNonceSize).
func (msg *ReceivedMessage) decryptSymmetric(key []byte) error {
- // In v6, symmetric messages are expected to contain the 12-byte
- // "salt" at the end of the payload.
- if len(msg.Raw) < AESNonceLength {
+ // symmetric messages are expected to contain the 12-byte nonce at the end of the payload
+ if len(msg.Raw) < aesNonceLength {
return errors.New("missing salt or invalid payload in symmetric message")
}
- salt := msg.Raw[len(msg.Raw)-AESNonceLength:]
+ salt := msg.Raw[len(msg.Raw)-aesNonceLength:]
block, err := aes.NewCipher(key)
if err != nil {
@@ -273,11 +278,7 @@ func (msg *ReceivedMessage) decryptSymmetric(key []byte) error {
if err != nil {
return err
}
- if len(salt) != aesgcm.NonceSize() {
- log.Error("decrypting the message", "AES salt size", len(salt))
- return errors.New("wrong AES salt size")
- }
- decrypted, err := aesgcm.Open(nil, salt, msg.Raw[:len(msg.Raw)-AESNonceLength], nil)
+ decrypted, err := aesgcm.Open(nil, salt, msg.Raw[:len(msg.Raw)-aesNonceLength], nil)
if err != nil {
return err
}
@@ -295,8 +296,8 @@ func (msg *ReceivedMessage) decryptAsymmetric(key *ecdsa.PrivateKey) error {
return err
}
-// Validate checks the validity and extracts the fields in case of success
-func (msg *ReceivedMessage) Validate() bool {
+// ValidateAndParse checks the message validity and extracts the fields in case of success.
+func (msg *ReceivedMessage) ValidateAndParse() bool {
end := len(msg.Raw)
if end < 1 {
return false
@@ -307,41 +308,32 @@ func (msg *ReceivedMessage) Validate() bool {
if end <= 1 {
return false
}
- msg.Signature = msg.Raw[end:]
+ msg.Signature = msg.Raw[end : end+signatureLength]
msg.Src = msg.SigToPubKey()
if msg.Src == nil {
return false
}
}
- padSize, ok := msg.extractPadding(end)
- if !ok {
- return false
+ beg := 1
+ payloadSize := 0
+ sizeOfPayloadSizeField := int(msg.Raw[0] & SizeMask) // number of bytes indicating the size of payload
+ if sizeOfPayloadSizeField != 0 {
+ payloadSize = int(bytesToUintLittleEndian(msg.Raw[beg : beg+sizeOfPayloadSizeField]))
+ if payloadSize+1 > end {
+ return false
+ }
+ beg += sizeOfPayloadSizeField
+ msg.Payload = msg.Raw[beg : beg+payloadSize]
}
- msg.Payload = msg.Raw[1+padSize : end]
+ beg += payloadSize
+ msg.Padding = msg.Raw[beg:end]
return true
}
-// extractPadding extracts the padding from raw message.
-// although we don't support sending messages with padding size
-// exceeding 255 bytes, such messages are perfectly valid, and
-// can be successfully decrypted.
-func (msg *ReceivedMessage) extractPadding(end int) (int, bool) {
- paddingSize := 0
- sz := int(msg.Raw[0] & paddingMask) // number of bytes indicating the entire size of padding (including these bytes)
- // could be zero -- it means no padding
- if sz != 0 {
- paddingSize = int(bytesToUintLittleEndian(msg.Raw[1 : 1+sz]))
- if paddingSize < sz || paddingSize+1 > end {
- return 0, false
- }
- msg.Padding = msg.Raw[1+sz : 1+paddingSize]
- }
- return paddingSize, true
-}
-
-// Recover retrieves the public key of the message signer.
+// SigToPubKey returns the public key associated with the message's
+// signature.
func (msg *ReceivedMessage) SigToPubKey() *ecdsa.PublicKey {
defer func() { recover() }() // in case of invalid signature
@@ -353,7 +345,7 @@ func (msg *ReceivedMessage) SigToPubKey() *ecdsa.PublicKey {
return pub
}
-// hash calculates the SHA3 checksum of the message flags, payload and padding.
+// hash calculates the SHA3 checksum of the message flags, payload size field, payload and padding.
func (msg *ReceivedMessage) hash() []byte {
if isMessageSigned(msg.Raw[0]) {
sz := len(msg.Raw) - signatureLength
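
The padding scheme is replaced by an explicit payload size field: the low bits of the flags byte (SizeMask) record how many bytes the little-endian payload length occupies, the length field and payload follow, and everything after the payload up to the optional signature is padding. A minimal encode/decode sketch of that layout, with padding and signature omitted and helper names local to the example:

package main

import (
	"encoding/binary"
	"fmt"
)

const sizeMask = byte(3) // lower bits of the flags byte hold the size of the length field

// encode lays out a message the way the patched sentMessage does:
// flags byte, little-endian payload length, then the payload itself.
func encode(payload []byte) []byte {
	fieldSize := 1
	for n := len(payload); n >= 256; n /= 256 {
		fieldSize++
	}
	field := make([]byte, 4)
	binary.LittleEndian.PutUint32(field, uint32(len(payload)))

	raw := []byte{byte(fieldSize)} // flags: only the size-field bits set
	raw = append(raw, field[:fieldSize]...)
	return append(raw, payload...)
}

// decode mirrors the relevant part of ValidateAndParse.
func decode(raw []byte) []byte {
	fieldSize := int(raw[0] & sizeMask)
	if fieldSize == 0 {
		return nil
	}
	var buf [4]byte
	copy(buf[:], raw[1:1+fieldSize])
	size := binary.LittleEndian.Uint32(buf[:])
	return raw[1+fieldSize : 1+fieldSize+int(size)]
}

func main() {
	payload := make([]byte, 300) // 300 needs a 2-byte length field
	raw := encode(payload)
	fmt.Println(raw[0]&sizeMask, len(decode(raw))) // 2 300
}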
diff --git a/whisper/whisperv6/message_test.go b/whisper/whisperv6/message_test.go
index c90bcc01e..0a5c1c853 100644
--- a/whisper/whisperv6/message_test.go
+++ b/whisper/whisperv6/message_test.go
@@ -18,9 +18,12 @@ package whisperv6
import (
"bytes"
+ "crypto/aes"
+ "crypto/cipher"
mrand "math/rand"
"testing"
+ "github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/rlp"
)
@@ -90,8 +93,8 @@ func singleMessageTest(t *testing.T, symmetric bool) {
t.Fatalf("failed to encrypt with seed %d: %s.", seed, err)
}
- if !decrypted.Validate() {
- t.Fatalf("failed to validate with seed %d.", seed)
+ if !decrypted.ValidateAndParse() {
+ t.Fatalf("failed to validate with seed %d, symmetric = %v.", seed, symmetric)
}
if !bytes.Equal(text, decrypted.Payload) {
@@ -206,7 +209,7 @@ func TestEnvelopeOpen(t *testing.T) {
InitSingleTest()
var symmetric bool
- for i := 0; i < 256; i++ {
+ for i := 0; i < 32; i++ {
singleEnvelopeOpenTest(t, symmetric)
symmetric = !symmetric
}
@@ -417,30 +420,6 @@ func TestPadding(t *testing.T) {
}
}
-func TestPaddingAppendedToSymMessages(t *testing.T) {
- params := &MessageParams{
- Payload: make([]byte, 246),
- KeySym: make([]byte, aesKeyLength),
- }
-
- // Simulate a message with a payload just under 256 so that
- // payload + flag + aesnonce > 256. Check that the result
- // is padded on the next 256 boundary.
- msg := sentMessage{}
- msg.Raw = make([]byte, len(params.Payload)+1+AESNonceLength)
-
- err := msg.appendPadding(params)
-
- if err != nil {
- t.Fatalf("Error appending padding to message %v", err)
- return
- }
-
- if len(msg.Raw) != 512 {
- t.Errorf("Invalid size %d != 512", len(msg.Raw))
- }
-}
-
func TestPaddingAppendedToSymMessagesWithSignature(t *testing.T) {
params := &MessageParams{
Payload: make([]byte, 246),
@@ -456,10 +435,11 @@ func TestPaddingAppendedToSymMessagesWithSignature(t *testing.T) {
params.Src = pSrc
// Simulate a message with a payload just under 256 so that
- // payload + flag + aesnonce > 256. Check that the result
+ // payload + flag + signature > 256. Check that the result
// is padded on the next 256 boundary.
msg := sentMessage{}
- msg.Raw = make([]byte, len(params.Payload)+1+AESNonceLength+signatureLength)
+ const payloadSizeFieldMinSize = 1
+ msg.Raw = make([]byte, flagsLength+payloadSizeFieldMinSize+len(params.Payload))
err = msg.appendPadding(params)
@@ -468,7 +448,24 @@ func TestPaddingAppendedToSymMessagesWithSignature(t *testing.T) {
return
}
- if len(msg.Raw) != 512 {
+ if len(msg.Raw) != 512-signatureLength {
t.Errorf("Invalid size %d != 512", len(msg.Raw))
}
}
+
+func TestAesNonce(t *testing.T) {
+ key := hexutil.MustDecode("0x03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31")
+ block, err := aes.NewCipher(key)
+ if err != nil {
+ t.Fatalf("NewCipher failed: %s", err)
+ }
+ aesgcm, err := cipher.NewGCM(block)
+ if err != nil {
+ t.Fatalf("NewGCM failed: %s", err)
+ }
+ // This is the most important single test in this package.
+	// If it fails, whisper will not work.
+ if aesgcm.NonceSize() != aesNonceLength {
+		t.Fatalf("Nonce size is wrong. This is a critical error. Apparently the AES nonce size has changed in a new version of the AES-GCM package. Whisper will not work until this problem is resolved.")
+ }
+}
diff --git a/whisper/whisperv6/peer.go b/whisper/whisperv6/peer.go
index 08071c0f7..2bf1c905b 100644
--- a/whisper/whisperv6/peer.go
+++ b/whisper/whisperv6/peer.go
@@ -19,6 +19,7 @@ package whisperv6
import (
"fmt"
"math"
+ "sync"
"time"
"github.com/ethereum/go-ethereum/common"
@@ -28,7 +29,7 @@ import (
set "gopkg.in/fatih/set.v0"
)
-// peer represents a whisper protocol peer connection.
+// Peer represents a whisper protocol peer connection.
type Peer struct {
host *Whisper
peer *p2p.Peer
@@ -36,7 +37,9 @@ type Peer struct {
trusted bool
powRequirement float64
- bloomFilter []byte // may contain nil in case of full node
+ bloomMu sync.Mutex
+ bloomFilter []byte
+ fullNode bool
known *set.Set // Messages already known by the peer to avoid wasting bandwidth
@@ -53,53 +56,55 @@ func newPeer(host *Whisper, remote *p2p.Peer, rw p2p.MsgReadWriter) *Peer {
powRequirement: 0.0,
known: set.New(),
quit: make(chan struct{}),
+ bloomFilter: MakeFullNodeBloom(),
+ fullNode: true,
}
}
// start initiates the peer updater, periodically broadcasting the whisper packets
// into the network.
-func (p *Peer) start() {
- go p.update()
- log.Trace("start", "peer", p.ID())
+func (peer *Peer) start() {
+ go peer.update()
+ log.Trace("start", "peer", peer.ID())
}
// stop terminates the peer updater, stopping message forwarding to it.
-func (p *Peer) stop() {
- close(p.quit)
- log.Trace("stop", "peer", p.ID())
+func (peer *Peer) stop() {
+ close(peer.quit)
+ log.Trace("stop", "peer", peer.ID())
}
// handshake sends the protocol initiation status message to the remote peer and
// verifies the remote status too.
-func (p *Peer) handshake() error {
+func (peer *Peer) handshake() error {
// Send the handshake status message asynchronously
errc := make(chan error, 1)
go func() {
- pow := p.host.MinPow()
+ pow := peer.host.MinPow()
powConverted := math.Float64bits(pow)
- bloom := p.host.BloomFilter()
- errc <- p2p.SendItems(p.ws, statusCode, ProtocolVersion, powConverted, bloom)
+ bloom := peer.host.BloomFilter()
+ errc <- p2p.SendItems(peer.ws, statusCode, ProtocolVersion, powConverted, bloom)
}()
// Fetch the remote status packet and verify protocol match
- packet, err := p.ws.ReadMsg()
+ packet, err := peer.ws.ReadMsg()
if err != nil {
return err
}
if packet.Code != statusCode {
- return fmt.Errorf("peer [%x] sent packet %x before status packet", p.ID(), packet.Code)
+ return fmt.Errorf("peer [%x] sent packet %x before status packet", peer.ID(), packet.Code)
}
s := rlp.NewStream(packet.Payload, uint64(packet.Size))
_, err = s.List()
if err != nil {
- return fmt.Errorf("peer [%x] sent bad status message: %v", p.ID(), err)
+ return fmt.Errorf("peer [%x] sent bad status message: %v", peer.ID(), err)
}
peerVersion, err := s.Uint()
if err != nil {
- return fmt.Errorf("peer [%x] sent bad status message (unable to decode version): %v", p.ID(), err)
+ return fmt.Errorf("peer [%x] sent bad status message (unable to decode version): %v", peer.ID(), err)
}
if peerVersion != ProtocolVersion {
- return fmt.Errorf("peer [%x]: protocol version mismatch %d != %d", p.ID(), peerVersion, ProtocolVersion)
+ return fmt.Errorf("peer [%x]: protocol version mismatch %d != %d", peer.ID(), peerVersion, ProtocolVersion)
}
// only version is mandatory, subsequent parameters are optional
@@ -107,34 +112,30 @@ func (p *Peer) handshake() error {
if err == nil {
pow := math.Float64frombits(powRaw)
if math.IsInf(pow, 0) || math.IsNaN(pow) || pow < 0.0 {
- return fmt.Errorf("peer [%x] sent bad status message: invalid pow", p.ID())
+ return fmt.Errorf("peer [%x] sent bad status message: invalid pow", peer.ID())
}
- p.powRequirement = pow
+ peer.powRequirement = pow
var bloom []byte
err = s.Decode(&bloom)
if err == nil {
sz := len(bloom)
- if sz != bloomFilterSize && sz != 0 {
- return fmt.Errorf("peer [%x] sent bad status message: wrong bloom filter size %d", p.ID(), sz)
- }
- if isFullNode(bloom) {
- p.bloomFilter = nil
- } else {
- p.bloomFilter = bloom
+ if sz != BloomFilterSize && sz != 0 {
+ return fmt.Errorf("peer [%x] sent bad status message: wrong bloom filter size %d", peer.ID(), sz)
}
+ peer.setBloomFilter(bloom)
}
}
if err := <-errc; err != nil {
- return fmt.Errorf("peer [%x] failed to send status packet: %v", p.ID(), err)
+ return fmt.Errorf("peer [%x] failed to send status packet: %v", peer.ID(), err)
}
return nil
}
// update executes periodic operations on the peer, including message transmission
// and expiration.
-func (p *Peer) update() {
+func (peer *Peer) update() {
// Start the tickers for the updates
expire := time.NewTicker(expirationCycle)
transmit := time.NewTicker(transmissionCycle)
@@ -143,15 +144,15 @@ func (p *Peer) update() {
for {
select {
case <-expire.C:
- p.expire()
+ peer.expire()
case <-transmit.C:
- if err := p.broadcast(); err != nil {
- log.Trace("broadcast failed", "reason", err, "peer", p.ID())
+ if err := peer.broadcast(); err != nil {
+ log.Trace("broadcast failed", "reason", err, "peer", peer.ID())
return
}
- case <-p.quit:
+ case <-peer.quit:
return
}
}
@@ -185,24 +186,24 @@ func (peer *Peer) expire() {
// broadcast iterates over the collection of envelopes and transmits yet unknown
// ones over the network.
-func (p *Peer) broadcast() error {
- envelopes := p.host.Envelopes()
+func (peer *Peer) broadcast() error {
+ envelopes := peer.host.Envelopes()
bundle := make([]*Envelope, 0, len(envelopes))
for _, envelope := range envelopes {
- if !p.marked(envelope) && envelope.PoW() >= p.powRequirement && p.bloomMatch(envelope) {
+ if !peer.marked(envelope) && envelope.PoW() >= peer.powRequirement && peer.bloomMatch(envelope) {
bundle = append(bundle, envelope)
}
}
if len(bundle) > 0 {
// transmit the batch of envelopes
- if err := p2p.Send(p.ws, messagesCode, bundle); err != nil {
+ if err := p2p.Send(peer.ws, messagesCode, bundle); err != nil {
return err
}
// mark envelopes only if they were successfully sent
for _, e := range bundle {
- p.mark(e)
+ peer.mark(e)
}
log.Trace("broadcast", "num. messages", len(bundle))
@@ -210,25 +211,41 @@ func (p *Peer) broadcast() error {
return nil
}
-func (p *Peer) ID() []byte {
- id := p.peer.ID()
+// ID returns a peer's id
+func (peer *Peer) ID() []byte {
+ id := peer.peer.ID()
return id[:]
}
-func (p *Peer) notifyAboutPowRequirementChange(pow float64) error {
+func (peer *Peer) notifyAboutPowRequirementChange(pow float64) error {
i := math.Float64bits(pow)
- return p2p.Send(p.ws, powRequirementCode, i)
+ return p2p.Send(peer.ws, powRequirementCode, i)
}
-func (p *Peer) notifyAboutBloomFilterChange(bloom []byte) error {
- return p2p.Send(p.ws, bloomFilterExCode, bloom)
+func (peer *Peer) notifyAboutBloomFilterChange(bloom []byte) error {
+ return p2p.Send(peer.ws, bloomFilterExCode, bloom)
}
-func (p *Peer) bloomMatch(env *Envelope) bool {
- if p.bloomFilter == nil {
- // no filter - full node, accepts all envelops
- return true
+func (peer *Peer) bloomMatch(env *Envelope) bool {
+ peer.bloomMu.Lock()
+ defer peer.bloomMu.Unlock()
+ return peer.fullNode || BloomFilterMatch(peer.bloomFilter, env.Bloom())
+}
+
+func (peer *Peer) setBloomFilter(bloom []byte) {
+ peer.bloomMu.Lock()
+ defer peer.bloomMu.Unlock()
+ peer.bloomFilter = bloom
+ peer.fullNode = isFullNode(bloom)
+ if peer.fullNode && peer.bloomFilter == nil {
+ peer.bloomFilter = MakeFullNodeBloom()
}
+}
- return bloomFilterMatch(p.bloomFilter, env.Bloom())
+func MakeFullNodeBloom() []byte {
+ bloom := make([]byte, BloomFilterSize)
+ for i := 0; i < BloomFilterSize; i++ {
+ bloom[i] = 0xFF
+ }
+ return bloom
}
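Note on the peer.go changes above: a peer that advertises an empty or nil bloom filter is now normalized through setBloomFilter and MakeFullNodeBloom to an all-0xFF filter, and matching is a plain bitwise containment test. The following standalone Go sketch is illustrative only, with local names and an assumed 64-byte filter size; it is not part of the patch.

package main

import "fmt"

const bloomSize = 64 // assumed size for this sketch; the package uses BloomFilterSize

// matches mirrors the containment test of BloomFilterMatch: every bit set in
// sample must also be set in filter.
func matches(filter, sample []byte) bool {
	if filter == nil {
		return true // a nil filter behaves like a full node and accepts everything
	}
	for i := 0; i < bloomSize; i++ {
		if (filter[i] | sample[i]) != filter[i] {
			return false // sample has a bit the filter lacks
		}
	}
	return true
}

// fullNodeBloom mirrors MakeFullNodeBloom: an all-0xFF filter matches any sample.
func fullNodeBloom() []byte {
	b := make([]byte, bloomSize)
	for i := range b {
		b[i] = 0xFF
	}
	return b
}

func main() {
	sample := make([]byte, bloomSize)
	sample[0], sample[32] = 0x01, 0x80
	fmt.Println(matches(fullNodeBloom(), sample))         // true: full node accepts all
	fmt.Println(matches(make([]byte, bloomSize), sample)) // false: empty filter rejects set bits
}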
diff --git a/whisper/whisperv6/peer_test.go b/whisper/whisperv6/peer_test.go
index 8a65cb714..ec985ae65 100644
--- a/whisper/whisperv6/peer_test.go
+++ b/whisper/whisperv6/peer_test.go
@@ -23,17 +23,19 @@ import (
mrand "math/rand"
"net"
"sync"
+ "sync/atomic"
"testing"
"time"
"github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/p2p/discover"
"github.com/ethereum/go-ethereum/p2p/nat"
)
-var keys []string = []string{
+var keys = []string{
"d49dcf37238dc8a7aac57dc61b9fee68f0a97f062968978b9fafa7d1033d03a9",
"73fd6143c48e80ed3c56ea159fe7494a0b6b393a392227b422f4c3e8f1b54f98",
"119dd32adb1daa7a4c7bf77f847fb28730785aa92947edf42fdd997b54de40dc",
@@ -69,9 +71,8 @@ var keys []string = []string{
"7184c1701569e3a4c4d2ddce691edd983b81e42e09196d332e1ae2f1e062cff4",
}
-const NumNodes = 16 // must not exceed the number of keys (32)
-
type TestData struct {
+ started int64
counter [NumNodes]int
mutex sync.RWMutex
}
@@ -80,24 +81,32 @@ type TestNode struct {
shh *Whisper
id *ecdsa.PrivateKey
server *p2p.Server
- filerId string
+ filerID string
}
+const NumNodes = 8 // must not exceed the number of keys (32)
+
var result TestData
var nodes [NumNodes]*TestNode
-var sharedKey []byte = []byte("some arbitrary data here")
-var sharedTopic TopicType = TopicType{0xF, 0x1, 0x2, 0}
-var expectedMessage []byte = []byte("per rectum ad astra")
+var sharedKey = hexutil.MustDecode("0x03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31")
+var wrongKey = hexutil.MustDecode("0xf91156714d7ec88d3edc1c652c2181dbb3044e8771c683f3b30d33c12b986b11")
+var sharedTopic = TopicType{0xF, 0x1, 0x2, 0}
+var wrongTopic = TopicType{0, 0, 0, 0}
+var expectedMessage = []byte("per aspera ad astra")
+var unexpectedMessage = []byte("per rectum ad astra")
var masterBloomFilter []byte
var masterPow = 0.00000001
-var round int = 1
+var round = 1
+var debugMode = false
+var prevTime time.Time
+var cntPrev int
func TestSimulation(t *testing.T) {
// create a chain of whisper nodes,
// installs the filters with shared (predefined) parameters
initialize(t)
- // each node sends a number of random (undecryptable) messages
+ // each node sends one random (not decryptable) message
for i := 0; i < NumNodes; i++ {
sendMsg(t, false, i)
}
@@ -114,7 +123,6 @@ func TestSimulation(t *testing.T) {
// send new pow and bloom exchange messages
resetParams(t)
- round++
// node #1 sends one expected (decryptable) message
sendMsg(t, true, 1)
@@ -122,11 +130,6 @@ func TestSimulation(t *testing.T) {
// check if each node (except node #0) has received and decrypted exactly one message
checkPropagation(t, false)
- for i := 1; i < NumNodes; i++ {
- time.Sleep(20 * time.Millisecond)
- sendMsg(t, true, i)
- }
-
// check if corresponding protocol-level messages were correctly decoded
checkPowExchangeForNodeZero(t)
checkBloomFilterExchange(t)
@@ -144,10 +147,12 @@ func resetParams(t *testing.T) {
for i := 0; i < NumNodes; i++ {
nodes[i].shh.SetBloomFilter(masterBloomFilter)
}
+
+ round++
}
func initBloom(t *testing.T) {
- masterBloomFilter = make([]byte, bloomFilterSize)
+ masterBloomFilter = make([]byte, BloomFilterSize)
_, err := mrand.Read(masterBloomFilter)
if err != nil {
t.Fatalf("rand failed: %s.", err)
@@ -159,7 +164,7 @@ func initBloom(t *testing.T) {
masterBloomFilter[i] = 0xFF
}
- if !bloomFilterMatch(masterBloomFilter, msgBloom) {
+ if !BloomFilterMatch(masterBloomFilter, msgBloom) {
t.Fatalf("bloom mismatch on initBloom.")
}
}
@@ -173,7 +178,7 @@ func initialize(t *testing.T) {
for i := 0; i < NumNodes; i++ {
var node TestNode
- b := make([]byte, bloomFilterSize)
+ b := make([]byte, BloomFilterSize)
copy(b, masterBloomFilter)
node.shh = New(&DefaultConfig)
node.shh.SetMinimumPoW(masterPow)
@@ -186,7 +191,7 @@ func initialize(t *testing.T) {
topics = append(topics, sharedTopic)
f := Filter{KeySym: sharedKey}
f.Topics = [][]byte{topics[0][:]}
- node.filerId, err = node.shh.Subscribe(&f)
+ node.filerID, err = node.shh.Subscribe(&f)
if err != nil {
t.Fatalf("failed to install the filter: %s.", err)
}
@@ -199,9 +204,9 @@ func initialize(t *testing.T) {
name := common.MakeName("whisper-go", "2.0")
var peers []*discover.Node
if i > 0 {
- peerNodeId := nodes[i-1].id
+ peerNodeID := nodes[i-1].id
peerPort := uint16(port - 1)
- peerNode := discover.PubkeyID(&peerNodeId.PublicKey)
+ peerNode := discover.PubkeyID(&peerNodeID.PublicKey)
peer := discover.NewNode(peerNode, ip, peerPort, peerPort)
peers = append(peers, peer)
}
@@ -223,22 +228,27 @@ func initialize(t *testing.T) {
nodes[i] = &node
}
- for i := 1; i < NumNodes; i++ {
- go nodes[i].server.Start()
+ for i := 0; i < NumNodes; i++ {
+ go startServer(t, nodes[i].server)
}
- // we need to wait until the first node actually starts
- err = nodes[0].server.Start()
+ waitForServersToStart(t)
+}
+
+func startServer(t *testing.T, s *p2p.Server) {
+ err := s.Start()
if err != nil {
t.Fatalf("failed to start the fisrt server.")
}
+
+ atomic.AddInt64(&result.started, 1)
}
func stopServers() {
for i := 0; i < NumNodes; i++ {
n := nodes[i]
if n != nil {
- n.shh.Unsubscribe(n.filerId)
+ n.shh.Unsubscribe(n.filerID)
n.shh.Stop()
n.server.Stop()
}
@@ -250,8 +260,10 @@ func checkPropagation(t *testing.T, includingNodeZero bool) {
return
}
- const cycle = 50
- const iterations = 200
+ prevTime = time.Now()
+ // (cycle * iterations) should not exceed 50 seconds, since TTL=50
+ const cycle = 200 // time in milliseconds
+ const iterations = 250
first := 0
if !includingNodeZero {
@@ -260,35 +272,35 @@ func checkPropagation(t *testing.T, includingNodeZero bool) {
for j := 0; j < iterations; j++ {
for i := first; i < NumNodes; i++ {
- f := nodes[i].shh.GetFilter(nodes[i].filerId)
+ f := nodes[i].shh.GetFilter(nodes[i].filerID)
if f == nil {
- t.Fatalf("failed to get filterId %s from node %d, round %d.", nodes[i].filerId, i, round)
+ t.Fatalf("failed to get filterId %s from node %d, round %d.", nodes[i].filerID, i, round)
}
mail := f.Retrieve()
- if !validateMail(t, i, mail) {
- return
- }
+ validateMail(t, i, mail)
if isTestComplete() {
+ checkTestStatus()
return
}
}
+ checkTestStatus()
time.Sleep(cycle * time.Millisecond)
}
- t.Fatalf("Test was not complete: timeout %d seconds.", iterations*cycle/1000)
-
if !includingNodeZero {
- f := nodes[0].shh.GetFilter(nodes[0].filerId)
+ f := nodes[0].shh.GetFilter(nodes[0].filerID)
if f != nil {
t.Fatalf("node zero received a message with low PoW.")
}
}
+
+ t.Fatalf("Test was not complete (%d round): timeout %d seconds. nodes=%v", round, iterations*cycle/1000, nodes)
}
-func validateMail(t *testing.T, index int, mail []*ReceivedMessage) bool {
+func validateMail(t *testing.T, index int, mail []*ReceivedMessage) {
var cnt int
for _, m := range mail {
if bytes.Equal(m.Payload, expectedMessage) {
@@ -298,14 +310,13 @@ func validateMail(t *testing.T, index int, mail []*ReceivedMessage) bool {
if cnt == 0 {
// no messages received yet: nothing is wrong
- return true
+ return
}
if cnt > 1 {
t.Fatalf("node %d received %d.", index, cnt)
- return false
}
- if cnt > 0 {
+ if cnt == 1 {
result.mutex.Lock()
defer result.mutex.Unlock()
result.counter[index] += cnt
@@ -313,7 +324,28 @@ func validateMail(t *testing.T, index int, mail []*ReceivedMessage) bool {
t.Fatalf("node %d accumulated %d.", index, result.counter[index])
}
}
- return true
+}
+
+func checkTestStatus() {
+ var cnt int
+ var arr [NumNodes]int
+
+ for i := 0; i < NumNodes; i++ {
+ arr[i] = nodes[i].server.PeerCount()
+ envelopes := nodes[i].shh.Envelopes()
+ if len(envelopes) >= NumNodes {
+ cnt++
+ }
+ }
+
+ if debugMode {
+ if cntPrev != cnt {
+ fmt.Printf(" %v \t number of nodes that have received all msgs: %d, number of peers per node: %v \n",
+ time.Since(prevTime), cnt, arr)
+ prevTime = time.Now()
+ cntPrev = cnt
+ }
+ }
}
func isTestComplete() bool {
@@ -328,7 +360,7 @@ func isTestComplete() bool {
for i := 0; i < NumNodes; i++ {
envelopes := nodes[i].shh.Envelopes()
- if len(envelopes) < 2 {
+ if len(envelopes) < NumNodes+1 {
return false
}
}
@@ -343,9 +375,10 @@ func sendMsg(t *testing.T, expected bool, id int) {
opt := MessageParams{KeySym: sharedKey, Topic: sharedTopic, Payload: expectedMessage, PoW: 0.00000001, WorkTime: 1}
if !expected {
- opt.KeySym[0]++
- opt.Topic[0]++
- opt.Payload = opt.Payload[1:]
+ opt.KeySym = wrongKey
+ opt.Topic = wrongTopic
+ opt.Payload = unexpectedMessage
+ opt.Payload[0] = byte(id)
}
msg, err := NewSentMessage(&opt)
@@ -389,20 +422,37 @@ func TestPeerBasic(t *testing.T) {
}
func checkPowExchangeForNodeZero(t *testing.T) {
+ const iterations = 200
+ for j := 0; j < iterations; j++ {
+ lastCycle := (j == iterations-1)
+ ok := checkPowExchangeForNodeZeroOnce(t, lastCycle)
+ if ok {
+ break
+ }
+ time.Sleep(50 * time.Millisecond)
+ }
+}
+
+func checkPowExchangeForNodeZeroOnce(t *testing.T, mustPass bool) bool {
cnt := 0
for i, node := range nodes {
for peer := range node.shh.peers {
if peer.peer.ID() == discover.PubkeyID(&nodes[0].id.PublicKey) {
cnt++
if peer.powRequirement != masterPow {
- t.Fatalf("node %d: failed to set the new pow requirement.", i)
+ if mustPass {
+ t.Fatalf("node %d: failed to set the new pow requirement for node zero.", i)
+ } else {
+ return false
+ }
}
}
}
}
if cnt == 0 {
- t.Fatalf("no matching peers found.")
+ t.Fatalf("looking for node zero: no matching peers found.")
}
+ return true
}
func checkPowExchange(t *testing.T) {
@@ -418,13 +468,47 @@ func checkPowExchange(t *testing.T) {
}
}
-func checkBloomFilterExchange(t *testing.T) {
+func checkBloomFilterExchangeOnce(t *testing.T, mustPass bool) bool {
for i, node := range nodes {
for peer := range node.shh.peers {
- if !bytes.Equal(peer.bloomFilter, masterBloomFilter) {
- t.Fatalf("node %d: failed to exchange bloom filter requirement in round %d. \n%x expected \n%x got",
- i, round, masterBloomFilter, peer.bloomFilter)
+ peer.bloomMu.Lock()
+ equals := bytes.Equal(peer.bloomFilter, masterBloomFilter)
+ peer.bloomMu.Unlock()
+ if !equals {
+ if mustPass {
+ t.Fatalf("node %d: failed to exchange bloom filter requirement in round %d. \n%x expected \n%x got",
+ i, round, masterBloomFilter, peer.bloomFilter)
+ } else {
+ return false
+ }
}
}
}
+
+ return true
+}
+
+func checkBloomFilterExchange(t *testing.T) {
+ const iterations = 200
+ for j := 0; j < iterations; j++ {
+ lastCycle := (j == iterations-1)
+ ok := checkBloomFilterExchangeOnce(t, lastCycle)
+ if ok {
+ break
+ }
+ time.Sleep(50 * time.Millisecond)
+ }
+}
+
+func waitForServersToStart(t *testing.T) {
+ const iterations = 200
+ var started int64
+ for j := 0; j < iterations; j++ {
+ time.Sleep(50 * time.Millisecond)
+ started = atomic.LoadInt64(&result.started)
+ if started == NumNodes {
+ return
+ }
+ }
+ t.Fatalf("Failed to start all the servers, running: %d", started)
}
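The test changes above replace one-shot assertions with bounded polling: checkPowExchangeForNodeZero and checkBloomFilterExchange retry for up to 200 cycles and only fail on the last one, while waitForServersToStart spins on an atomic counter. A minimal, self-contained sketch of that pattern follows; the names are local to the sketch, not the test's API.

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// started counts workers that have finished starting, analogous to result.started.
var started int64

// waitFor polls cond up to iterations times, sleeping cycle between attempts,
// and reports whether the condition became true before the budget ran out.
func waitFor(cond func() bool, iterations int, cycle time.Duration) bool {
	for j := 0; j < iterations; j++ {
		if cond() {
			return true
		}
		time.Sleep(cycle)
	}
	return false
}

func main() {
	for i := 0; i < 8; i++ {
		go func() { atomic.AddInt64(&started, 1) }() // stand-in for a server start
	}
	ok := waitFor(func() bool { return atomic.LoadInt64(&started) == 8 }, 200, 50*time.Millisecond)
	fmt.Println("all servers started:", ok)
}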
diff --git a/whisper/whisperv6/topic.go b/whisper/whisperv6/topic.go
index bf5da01e3..4dd8f283c 100644
--- a/whisper/whisperv6/topic.go
+++ b/whisper/whisperv6/topic.go
@@ -23,11 +23,13 @@ import (
"github.com/ethereum/go-ethereum/common/hexutil"
)
-// Topic represents a cryptographically secure, probabilistic partial
+// TopicType represents a cryptographically secure, probabilistic partial
// classification of a message, determined as the first (left) 4 bytes of the
// SHA3 hash of some arbitrary data given by the original author of the message.
type TopicType [TopicLength]byte
+// BytesToTopic converts from the byte array representation of a topic
+// into the TopicType type.
func BytesToTopic(b []byte) (t TopicType) {
sz := TopicLength
if x := len(b); x < TopicLength {
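The new BytesToTopic comment above documents the conversion into TopicType; below is a short standalone sketch of the behaviour it describes, using local names and assuming the 4-byte topic length used by the package.

package main

import "fmt"

const topicLength = 4 // same length the package uses for TopicType

type topic [topicLength]byte

// bytesToTopic keeps at most the first four bytes of the input and leaves the
// rest of the topic zeroed, mirroring the documented behaviour.
func bytesToTopic(b []byte) (t topic) {
	sz := topicLength
	if len(b) < topicLength {
		sz = len(b)
	}
	copy(t[:], b[:sz])
	return t
}

func main() {
	fmt.Printf("%x\n", bytesToTopic([]byte{0xDE, 0xAD, 0xBE, 0xEF, 0x01})) // deadbeef
	fmt.Printf("%x\n", bytesToTopic([]byte{0xF1}))                         // f1000000
}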
diff --git a/whisper/whisperv6/whisper.go b/whisper/whisperv6/whisper.go
index bc89aadcc..880cced09 100644
--- a/whisper/whisperv6/whisper.go
+++ b/whisper/whisperv6/whisper.go
@@ -19,7 +19,6 @@ package whisperv6
import (
"bytes"
"crypto/ecdsa"
- crand "crypto/rand"
"crypto/sha256"
"fmt"
"math"
@@ -39,6 +38,8 @@ import (
set "gopkg.in/fatih/set.v0"
)
+// Statistics holds several message-related counters for analytics
+// purposes.
type Statistics struct {
messagesCleared int
memoryCleared int
@@ -81,6 +82,8 @@ type Whisper struct {
syncAllowance int // maximum time in seconds allowed to process the whisper-related messages
+ lightClient bool // indicates whether this node is a pure light client (does not forward any messages)
+
statsMu sync.Mutex // guard stats
stats Statistics // Statistics of whisper node
@@ -130,8 +133,8 @@ func New(cfg *Config) *Whisper {
}
// MinPow returns the PoW value required by this node.
-func (w *Whisper) MinPow() float64 {
- val, exist := w.settings.Load(minPowIdx)
+func (whisper *Whisper) MinPow() float64 {
+ val, exist := whisper.settings.Load(minPowIdx)
if !exist || val == nil {
return DefaultMinimumPoW
}
@@ -146,8 +149,8 @@ func (w *Whisper) MinPow() float64 {
// MinPowTolerance returns the value of minimum PoW which is tolerated for a limited
// time after PoW was changed. If sufficient time have elapsed or no change of PoW
// have ever occurred, the return value will be the same as return value of MinPow().
-func (w *Whisper) MinPowTolerance() float64 {
- val, exist := w.settings.Load(minPowToleranceIdx)
+func (whisper *Whisper) MinPowTolerance() float64 {
+ val, exist := whisper.settings.Load(minPowToleranceIdx)
if !exist || val == nil {
return DefaultMinimumPoW
}
@@ -158,8 +161,8 @@ func (w *Whisper) MinPowTolerance() float64 {
// The nodes are required to send only messages that match the advertised bloom filter.
// If a message does not match the bloom, it will tantamount to spam, and the peer will
// be disconnected.
-func (w *Whisper) BloomFilter() []byte {
- val, exist := w.settings.Load(bloomFilterIdx)
+func (whisper *Whisper) BloomFilter() []byte {
+ val, exist := whisper.settings.Load(bloomFilterIdx)
if !exist || val == nil {
return nil
}
@@ -170,8 +173,8 @@ func (w *Whisper) BloomFilter() []byte {
// time after new bloom was advertised to the peers. If sufficient time have elapsed
// or no change of bloom filter have ever occurred, the return value will be the same
// as return value of BloomFilter().
-func (w *Whisper) BloomFilterTolerance() []byte {
- val, exist := w.settings.Load(bloomFilterToleranceIdx)
+func (whisper *Whisper) BloomFilterTolerance() []byte {
+ val, exist := whisper.settings.Load(bloomFilterToleranceIdx)
if !exist || val == nil {
return nil
}
@@ -179,24 +182,24 @@ func (w *Whisper) BloomFilterTolerance() []byte {
}
// MaxMessageSize returns the maximum accepted message size.
-func (w *Whisper) MaxMessageSize() uint32 {
- val, _ := w.settings.Load(maxMsgSizeIdx)
+func (whisper *Whisper) MaxMessageSize() uint32 {
+ val, _ := whisper.settings.Load(maxMsgSizeIdx)
return val.(uint32)
}
// Overflow returns an indication if the message queue is full.
-func (w *Whisper) Overflow() bool {
- val, _ := w.settings.Load(overflowIdx)
+func (whisper *Whisper) Overflow() bool {
+ val, _ := whisper.settings.Load(overflowIdx)
return val.(bool)
}
// APIs returns the RPC descriptors the Whisper implementation offers
-func (w *Whisper) APIs() []rpc.API {
+func (whisper *Whisper) APIs() []rpc.API {
return []rpc.API{
{
Namespace: ProtocolName,
Version: ProtocolVersionStr,
- Service: NewPublicWhisperAPI(w),
+ Service: NewPublicWhisperAPI(whisper),
Public: true,
},
}
@@ -204,77 +207,77 @@ func (w *Whisper) APIs() []rpc.API {
// RegisterServer registers MailServer interface.
// MailServer will process all the incoming messages with p2pRequestCode.
-func (w *Whisper) RegisterServer(server MailServer) {
- w.mailServer = server
+func (whisper *Whisper) RegisterServer(server MailServer) {
+ whisper.mailServer = server
}
// Protocols returns the whisper sub-protocols ran by this particular client.
-func (w *Whisper) Protocols() []p2p.Protocol {
- return []p2p.Protocol{w.protocol}
+func (whisper *Whisper) Protocols() []p2p.Protocol {
+ return []p2p.Protocol{whisper.protocol}
}
// Version returns the whisper sub-protocols version number.
-func (w *Whisper) Version() uint {
- return w.protocol.Version
+func (whisper *Whisper) Version() uint {
+ return whisper.protocol.Version
}
// SetMaxMessageSize sets the maximal message size allowed by this node
-func (w *Whisper) SetMaxMessageSize(size uint32) error {
+func (whisper *Whisper) SetMaxMessageSize(size uint32) error {
if size > MaxMessageSize {
return fmt.Errorf("message size too large [%d>%d]", size, MaxMessageSize)
}
- w.settings.Store(maxMsgSizeIdx, size)
+ whisper.settings.Store(maxMsgSizeIdx, size)
return nil
}
// SetBloomFilter sets the new bloom filter
-func (w *Whisper) SetBloomFilter(bloom []byte) error {
- if len(bloom) != bloomFilterSize {
+func (whisper *Whisper) SetBloomFilter(bloom []byte) error {
+ if len(bloom) != BloomFilterSize {
return fmt.Errorf("invalid bloom filter size: %d", len(bloom))
}
- b := make([]byte, bloomFilterSize)
+ b := make([]byte, BloomFilterSize)
copy(b, bloom)
- w.settings.Store(bloomFilterIdx, b)
- w.notifyPeersAboutBloomFilterChange(b)
+ whisper.settings.Store(bloomFilterIdx, b)
+ whisper.notifyPeersAboutBloomFilterChange(b)
go func() {
// allow some time before all the peers have processed the notification
- time.Sleep(time.Duration(w.syncAllowance) * time.Second)
- w.settings.Store(bloomFilterToleranceIdx, b)
+ time.Sleep(time.Duration(whisper.syncAllowance) * time.Second)
+ whisper.settings.Store(bloomFilterToleranceIdx, b)
}()
return nil
}
// SetMinimumPoW sets the minimal PoW required by this node
-func (w *Whisper) SetMinimumPoW(val float64) error {
+func (whisper *Whisper) SetMinimumPoW(val float64) error {
if val < 0.0 {
return fmt.Errorf("invalid PoW: %f", val)
}
- w.settings.Store(minPowIdx, val)
- w.notifyPeersAboutPowRequirementChange(val)
+ whisper.settings.Store(minPowIdx, val)
+ whisper.notifyPeersAboutPowRequirementChange(val)
go func() {
// allow some time before all the peers have processed the notification
- time.Sleep(time.Duration(w.syncAllowance) * time.Second)
- w.settings.Store(minPowToleranceIdx, val)
+ time.Sleep(time.Duration(whisper.syncAllowance) * time.Second)
+ whisper.settings.Store(minPowToleranceIdx, val)
}()
return nil
}
-// SetMinimumPoW sets the minimal PoW in test environment
-func (w *Whisper) SetMinimumPowTest(val float64) {
- w.settings.Store(minPowIdx, val)
- w.notifyPeersAboutPowRequirementChange(val)
- w.settings.Store(minPowToleranceIdx, val)
+// SetMinimumPowTest sets the minimal PoW in test environment
+func (whisper *Whisper) SetMinimumPowTest(val float64) {
+ whisper.settings.Store(minPowIdx, val)
+ whisper.notifyPeersAboutPowRequirementChange(val)
+ whisper.settings.Store(minPowToleranceIdx, val)
}
-func (w *Whisper) notifyPeersAboutPowRequirementChange(pow float64) {
- arr := w.getPeers()
+func (whisper *Whisper) notifyPeersAboutPowRequirementChange(pow float64) {
+ arr := whisper.getPeers()
for _, p := range arr {
err := p.notifyAboutPowRequirementChange(pow)
if err != nil {
@@ -287,8 +290,8 @@ func (w *Whisper) notifyPeersAboutPowRequirementChange(pow float64) {
}
}
-func (w *Whisper) notifyPeersAboutBloomFilterChange(bloom []byte) {
- arr := w.getPeers()
+func (whisper *Whisper) notifyPeersAboutBloomFilterChange(bloom []byte) {
+ arr := whisper.getPeers()
for _, p := range arr {
err := p.notifyAboutBloomFilterChange(bloom)
if err != nil {
@@ -301,23 +304,23 @@ func (w *Whisper) notifyPeersAboutBloomFilterChange(bloom []byte) {
}
}
-func (w *Whisper) getPeers() []*Peer {
- arr := make([]*Peer, len(w.peers))
+func (whisper *Whisper) getPeers() []*Peer {
+ arr := make([]*Peer, len(whisper.peers))
i := 0
- w.peerMu.Lock()
- for p := range w.peers {
+ whisper.peerMu.Lock()
+ for p := range whisper.peers {
arr[i] = p
i++
}
- w.peerMu.Unlock()
+ whisper.peerMu.Unlock()
return arr
}
// getPeer retrieves peer by ID
-func (w *Whisper) getPeer(peerID []byte) (*Peer, error) {
- w.peerMu.Lock()
- defer w.peerMu.Unlock()
- for p := range w.peers {
+func (whisper *Whisper) getPeer(peerID []byte) (*Peer, error) {
+ whisper.peerMu.Lock()
+ defer whisper.peerMu.Unlock()
+ for p := range whisper.peers {
id := p.peer.ID()
if bytes.Equal(peerID, id[:]) {
return p, nil
@@ -328,8 +331,8 @@ func (w *Whisper) getPeer(peerID []byte) (*Peer, error) {
// AllowP2PMessagesFromPeer marks specific peer trusted,
// which will allow it to send historic (expired) messages.
-func (w *Whisper) AllowP2PMessagesFromPeer(peerID []byte) error {
- p, err := w.getPeer(peerID)
+func (whisper *Whisper) AllowP2PMessagesFromPeer(peerID []byte) error {
+ p, err := whisper.getPeer(peerID)
if err != nil {
return err
}
@@ -342,8 +345,8 @@ func (w *Whisper) AllowP2PMessagesFromPeer(peerID []byte) error {
// request and respond with a number of peer-to-peer messages (possibly expired),
// which are not supposed to be forwarded any further.
// The whisper protocol is agnostic of the format and contents of envelope.
-func (w *Whisper) RequestHistoricMessages(peerID []byte, envelope *Envelope) error {
- p, err := w.getPeer(peerID)
+func (whisper *Whisper) RequestHistoricMessages(peerID []byte, envelope *Envelope) error {
+ p, err := whisper.getPeer(peerID)
if err != nil {
return err
}
@@ -352,22 +355,22 @@ func (w *Whisper) RequestHistoricMessages(peerID []byte, envelope *Envelope) err
}
// SendP2PMessage sends a peer-to-peer message to a specific peer.
-func (w *Whisper) SendP2PMessage(peerID []byte, envelope *Envelope) error {
- p, err := w.getPeer(peerID)
+func (whisper *Whisper) SendP2PMessage(peerID []byte, envelope *Envelope) error {
+ p, err := whisper.getPeer(peerID)
if err != nil {
return err
}
- return w.SendP2PDirect(p, envelope)
+ return whisper.SendP2PDirect(p, envelope)
}
// SendP2PDirect sends a peer-to-peer message to a specific peer.
-func (w *Whisper) SendP2PDirect(peer *Peer, envelope *Envelope) error {
+func (whisper *Whisper) SendP2PDirect(peer *Peer, envelope *Envelope) error {
return p2p.Send(peer.ws, p2pMessageCode, envelope)
}
// NewKeyPair generates a new cryptographic identity for the client, and injects
// it into the known identities for message decryption. Returns ID of the new key pair.
-func (w *Whisper) NewKeyPair() (string, error) {
+func (whisper *Whisper) NewKeyPair() (string, error) {
key, err := crypto.GenerateKey()
if err != nil || !validatePrivateKey(key) {
key, err = crypto.GenerateKey() // retry once
@@ -384,55 +387,55 @@ func (w *Whisper) NewKeyPair() (string, error) {
return "", fmt.Errorf("failed to generate ID: %s", err)
}
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
- if w.privateKeys[id] != nil {
+ if whisper.privateKeys[id] != nil {
return "", fmt.Errorf("failed to generate unique ID")
}
- w.privateKeys[id] = key
+ whisper.privateKeys[id] = key
return id, nil
}
// DeleteKeyPair deletes the specified key if it exists.
-func (w *Whisper) DeleteKeyPair(key string) bool {
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
+func (whisper *Whisper) DeleteKeyPair(key string) bool {
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
- if w.privateKeys[key] != nil {
- delete(w.privateKeys, key)
+ if whisper.privateKeys[key] != nil {
+ delete(whisper.privateKeys, key)
return true
}
return false
}
// AddKeyPair imports an asymmetric private key and returns its identifier.
-func (w *Whisper) AddKeyPair(key *ecdsa.PrivateKey) (string, error) {
+func (whisper *Whisper) AddKeyPair(key *ecdsa.PrivateKey) (string, error) {
id, err := GenerateRandomID()
if err != nil {
return "", fmt.Errorf("failed to generate ID: %s", err)
}
- w.keyMu.Lock()
- w.privateKeys[id] = key
- w.keyMu.Unlock()
+ whisper.keyMu.Lock()
+ whisper.privateKeys[id] = key
+ whisper.keyMu.Unlock()
return id, nil
}
// HasKeyPair checks if the whisper node is configured with the private key
// of the specified public pair.
-func (w *Whisper) HasKeyPair(id string) bool {
- w.keyMu.RLock()
- defer w.keyMu.RUnlock()
- return w.privateKeys[id] != nil
+func (whisper *Whisper) HasKeyPair(id string) bool {
+ whisper.keyMu.RLock()
+ defer whisper.keyMu.RUnlock()
+ return whisper.privateKeys[id] != nil
}
// GetPrivateKey retrieves the private key of the specified identity.
-func (w *Whisper) GetPrivateKey(id string) (*ecdsa.PrivateKey, error) {
- w.keyMu.RLock()
- defer w.keyMu.RUnlock()
- key := w.privateKeys[id]
+func (whisper *Whisper) GetPrivateKey(id string) (*ecdsa.PrivateKey, error) {
+ whisper.keyMu.RLock()
+ defer whisper.keyMu.RUnlock()
+ key := whisper.privateKeys[id]
if key == nil {
return nil, fmt.Errorf("invalid id")
}
@@ -441,12 +444,11 @@ func (w *Whisper) GetPrivateKey(id string) (*ecdsa.PrivateKey, error) {
// GenerateSymKey generates a random symmetric key and stores it under id,
// which is then returned. Will be used in the future for session key exchange.
-func (w *Whisper) GenerateSymKey() (string, error) {
- key := make([]byte, aesKeyLength)
- _, err := crand.Read(key)
+func (whisper *Whisper) GenerateSymKey() (string, error) {
+ key, err := generateSecureRandomData(aesKeyLength)
if err != nil {
return "", err
- } else if !validateSymmetricKey(key) {
+ } else if !validateDataIntegrity(key, aesKeyLength) {
return "", fmt.Errorf("error in GenerateSymKey: crypto/rand failed to generate random data")
}
@@ -455,18 +457,18 @@ func (w *Whisper) GenerateSymKey() (string, error) {
return "", fmt.Errorf("failed to generate ID: %s", err)
}
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
- if w.symKeys[id] != nil {
+ if whisper.symKeys[id] != nil {
return "", fmt.Errorf("failed to generate unique ID")
}
- w.symKeys[id] = key
+ whisper.symKeys[id] = key
return id, nil
}
// AddSymKeyDirect stores the key, and returns its id.
-func (w *Whisper) AddSymKeyDirect(key []byte) (string, error) {
+func (whisper *Whisper) AddSymKeyDirect(key []byte) (string, error) {
if len(key) != aesKeyLength {
return "", fmt.Errorf("wrong key size: %d", len(key))
}
@@ -476,23 +478,23 @@ func (w *Whisper) AddSymKeyDirect(key []byte) (string, error) {
return "", fmt.Errorf("failed to generate ID: %s", err)
}
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
- if w.symKeys[id] != nil {
+ if whisper.symKeys[id] != nil {
return "", fmt.Errorf("failed to generate unique ID")
}
- w.symKeys[id] = key
+ whisper.symKeys[id] = key
return id, nil
}
// AddSymKeyFromPassword generates the key from password, stores it, and returns its id.
-func (w *Whisper) AddSymKeyFromPassword(password string) (string, error) {
+func (whisper *Whisper) AddSymKeyFromPassword(password string) (string, error) {
id, err := GenerateRandomID()
if err != nil {
return "", fmt.Errorf("failed to generate ID: %s", err)
}
- if w.HasSymKey(id) {
+ if whisper.HasSymKey(id) {
return "", fmt.Errorf("failed to generate unique ID")
}
@@ -503,81 +505,81 @@ func (w *Whisper) AddSymKeyFromPassword(password string) (string, error) {
return "", err
}
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
// double check is necessary, because deriveKeyMaterial() is very slow
- if w.symKeys[id] != nil {
+ if whisper.symKeys[id] != nil {
return "", fmt.Errorf("critical error: failed to generate unique ID")
}
- w.symKeys[id] = derived
+ whisper.symKeys[id] = derived
return id, nil
}
// HasSymKey returns true if there is a key associated with the given id.
// Otherwise returns false.
-func (w *Whisper) HasSymKey(id string) bool {
- w.keyMu.RLock()
- defer w.keyMu.RUnlock()
- return w.symKeys[id] != nil
+func (whisper *Whisper) HasSymKey(id string) bool {
+ whisper.keyMu.RLock()
+ defer whisper.keyMu.RUnlock()
+ return whisper.symKeys[id] != nil
}
// DeleteSymKey deletes the key associated with the name string if it exists.
-func (w *Whisper) DeleteSymKey(id string) bool {
- w.keyMu.Lock()
- defer w.keyMu.Unlock()
- if w.symKeys[id] != nil {
- delete(w.symKeys, id)
+func (whisper *Whisper) DeleteSymKey(id string) bool {
+ whisper.keyMu.Lock()
+ defer whisper.keyMu.Unlock()
+ if whisper.symKeys[id] != nil {
+ delete(whisper.symKeys, id)
return true
}
return false
}
// GetSymKey returns the symmetric key associated with the given id.
-func (w *Whisper) GetSymKey(id string) ([]byte, error) {
- w.keyMu.RLock()
- defer w.keyMu.RUnlock()
- if w.symKeys[id] != nil {
- return w.symKeys[id], nil
+func (whisper *Whisper) GetSymKey(id string) ([]byte, error) {
+ whisper.keyMu.RLock()
+ defer whisper.keyMu.RUnlock()
+ if whisper.symKeys[id] != nil {
+ return whisper.symKeys[id], nil
}
return nil, fmt.Errorf("non-existent key ID")
}
// Subscribe installs a new message handler used for filtering, decrypting
// and subsequent storing of incoming messages.
-func (w *Whisper) Subscribe(f *Filter) (string, error) {
- s, err := w.filters.Install(f)
+func (whisper *Whisper) Subscribe(f *Filter) (string, error) {
+ s, err := whisper.filters.Install(f)
if err == nil {
- w.updateBloomFilter(f)
+ whisper.updateBloomFilter(f)
}
return s, err
}
// updateBloomFilter recalculates the new value of bloom filter,
// and informs the peers if necessary.
-func (w *Whisper) updateBloomFilter(f *Filter) {
- aggregate := make([]byte, bloomFilterSize)
+func (whisper *Whisper) updateBloomFilter(f *Filter) {
+ aggregate := make([]byte, BloomFilterSize)
for _, t := range f.Topics {
top := BytesToTopic(t)
b := TopicToBloom(top)
aggregate = addBloom(aggregate, b)
}
- if !bloomFilterMatch(w.BloomFilter(), aggregate) {
+ if !BloomFilterMatch(whisper.BloomFilter(), aggregate) {
// existing bloom filter must be updated
- aggregate = addBloom(w.BloomFilter(), aggregate)
- w.SetBloomFilter(aggregate)
+ aggregate = addBloom(whisper.BloomFilter(), aggregate)
+ whisper.SetBloomFilter(aggregate)
}
}
// GetFilter returns the filter by id.
-func (w *Whisper) GetFilter(id string) *Filter {
- return w.filters.Get(id)
+func (whisper *Whisper) GetFilter(id string) *Filter {
+ return whisper.filters.Get(id)
}
// Unsubscribe removes an installed message handler.
-func (w *Whisper) Unsubscribe(id string) error {
- ok := w.filters.Uninstall(id)
+func (whisper *Whisper) Unsubscribe(id string) error {
+ ok := whisper.filters.Uninstall(id)
if !ok {
return fmt.Errorf("Unsubscribe: Invalid ID")
}
@@ -586,12 +588,9 @@ func (w *Whisper) Unsubscribe(id string) error {
// Send injects a message into the whisper send queue, to be distributed in the
// network in the coming cycles.
-func (w *Whisper) Send(envelope *Envelope) error {
- ok, err := w.add(envelope)
- if err != nil {
- return err
- }
- if !ok {
+func (whisper *Whisper) Send(envelope *Envelope) error {
+ ok, err := whisper.add(envelope, false)
+ if err == nil && !ok {
return fmt.Errorf("failed to add envelope")
}
return err
@@ -599,13 +598,13 @@ func (w *Whisper) Send(envelope *Envelope) error {
// Start implements node.Service, starting the background data propagation thread
// of the Whisper protocol.
-func (w *Whisper) Start(*p2p.Server) error {
+func (whisper *Whisper) Start(*p2p.Server) error {
log.Info("started whisper v." + ProtocolVersionStr)
- go w.update()
+ go whisper.update()
numCPU := runtime.NumCPU()
for i := 0; i < numCPU; i++ {
- go w.processQueue()
+ go whisper.processQueue()
}
return nil
@@ -613,26 +612,26 @@ func (w *Whisper) Start(*p2p.Server) error {
// Stop implements node.Service, stopping the background data propagation thread
// of the Whisper protocol.
-func (w *Whisper) Stop() error {
- close(w.quit)
+func (whisper *Whisper) Stop() error {
+ close(whisper.quit)
log.Info("whisper stopped")
return nil
}
// HandlePeer is called by the underlying P2P layer when the whisper sub-protocol
// connection is negotiated.
-func (wh *Whisper) HandlePeer(peer *p2p.Peer, rw p2p.MsgReadWriter) error {
+func (whisper *Whisper) HandlePeer(peer *p2p.Peer, rw p2p.MsgReadWriter) error {
// Create the new peer and start tracking it
- whisperPeer := newPeer(wh, peer, rw)
+ whisperPeer := newPeer(whisper, peer, rw)
- wh.peerMu.Lock()
- wh.peers[whisperPeer] = struct{}{}
- wh.peerMu.Unlock()
+ whisper.peerMu.Lock()
+ whisper.peers[whisperPeer] = struct{}{}
+ whisper.peerMu.Unlock()
defer func() {
- wh.peerMu.Lock()
- delete(wh.peers, whisperPeer)
- wh.peerMu.Unlock()
+ whisper.peerMu.Lock()
+ delete(whisper.peers, whisperPeer)
+ whisper.peerMu.Unlock()
}()
// Run the peer handshake and state updates
@@ -642,11 +641,11 @@ func (wh *Whisper) HandlePeer(peer *p2p.Peer, rw p2p.MsgReadWriter) error {
whisperPeer.start()
defer whisperPeer.stop()
- return wh.runMessageLoop(whisperPeer, rw)
+ return whisper.runMessageLoop(whisperPeer, rw)
}
// runMessageLoop reads and processes inbound messages directly to merge into client-global state.
-func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
+func (whisper *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
for {
// fetch the next packet
packet, err := rw.ReadMsg()
@@ -654,7 +653,7 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
log.Warn("message loop", "peer", p.peer.ID(), "err", err)
return err
}
- if packet.Size > wh.MaxMessageSize() {
+ if packet.Size > whisper.MaxMessageSize() {
log.Warn("oversized message received", "peer", p.peer.ID())
return errors.New("oversized message received")
}
@@ -673,7 +672,7 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
trouble := false
for _, env := range envelopes {
- cached, err := wh.add(env)
+ cached, err := whisper.add(env, whisper.lightClient)
if err != nil {
trouble = true
log.Error("bad envelope received, peer will be disconnected", "peer", p.peer.ID(), "err", err)
@@ -702,7 +701,7 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
case bloomFilterExCode:
var bloom []byte
err := packet.Decode(&bloom)
- if err == nil && len(bloom) != bloomFilterSize {
+ if err == nil && len(bloom) != BloomFilterSize {
err = fmt.Errorf("wrong bloom filter size %d", len(bloom))
}
@@ -710,11 +709,7 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
log.Warn("failed to decode bloom filter exchange message, peer will be disconnected", "peer", p.peer.ID(), "err", err)
return errors.New("invalid bloom filter exchange message")
}
- if isFullNode(bloom) {
- p.bloomFilter = nil
- } else {
- p.bloomFilter = bloom
- }
+ p.setBloomFilter(bloom)
case p2pMessageCode:
// peer-to-peer message, sent directly to peer bypassing PoW checks, etc.
// this message is not supposed to be forwarded to other peers, and
@@ -726,17 +721,17 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
log.Warn("failed to decode direct message, peer will be disconnected", "peer", p.peer.ID(), "err", err)
return errors.New("invalid direct message")
}
- wh.postEvent(&envelope, true)
+ whisper.postEvent(&envelope, true)
}
case p2pRequestCode:
// Must be processed if mail server is implemented. Otherwise ignore.
- if wh.mailServer != nil {
+ if whisper.mailServer != nil {
var request Envelope
if err := packet.Decode(&request); err != nil {
log.Warn("failed to decode p2p request message, peer will be disconnected", "peer", p.peer.ID(), "err", err)
return errors.New("invalid p2p request")
}
- wh.mailServer.DeliverMail(p, &request)
+ whisper.mailServer.DeliverMail(p, &request)
}
default:
// New message types might be implemented in the future versions of Whisper.
@@ -750,128 +745,127 @@ func (wh *Whisper) runMessageLoop(p *Peer, rw p2p.MsgReadWriter) error {
// add inserts a new envelope into the message pool to be distributed within the
// whisper network. It also inserts the envelope into the expiration pool at the
// appropriate time-stamp. In case of error, connection should be dropped.
-func (wh *Whisper) add(envelope *Envelope) (bool, error) {
+// param isP2P indicates whether the message is peer-to-peer (should not be forwarded).
+func (whisper *Whisper) add(envelope *Envelope, isP2P bool) (bool, error) {
now := uint32(time.Now().Unix())
sent := envelope.Expiry - envelope.TTL
if sent > now {
if sent-DefaultSyncAllowance > now {
return false, fmt.Errorf("envelope created in the future [%x]", envelope.Hash())
- } else {
- // recalculate PoW, adjusted for the time difference, plus one second for latency
- envelope.calculatePoW(sent - now + 1)
}
+ // recalculate PoW, adjusted for the time difference, plus one second for latency
+ envelope.calculatePoW(sent - now + 1)
}
if envelope.Expiry < now {
if envelope.Expiry+DefaultSyncAllowance*2 < now {
return false, fmt.Errorf("very old message")
- } else {
- log.Debug("expired envelope dropped", "hash", envelope.Hash().Hex())
- return false, nil // drop envelope without error
}
+ log.Debug("expired envelope dropped", "hash", envelope.Hash().Hex())
+ return false, nil // drop envelope without error
}
- if uint32(envelope.size()) > wh.MaxMessageSize() {
+ if uint32(envelope.size()) > whisper.MaxMessageSize() {
return false, fmt.Errorf("huge messages are not allowed [%x]", envelope.Hash())
}
- if envelope.PoW() < wh.MinPow() {
+ if envelope.PoW() < whisper.MinPow() {
// maybe the value was recently changed, and the peers did not adjust yet.
// in this case the previous value is retrieved by MinPowTolerance()
// for a short period of peer synchronization.
- if envelope.PoW() < wh.MinPowTolerance() {
+ if envelope.PoW() < whisper.MinPowTolerance() {
return false, fmt.Errorf("envelope with low PoW received: PoW=%f, hash=[%v]", envelope.PoW(), envelope.Hash().Hex())
}
}
- if !bloomFilterMatch(wh.BloomFilter(), envelope.Bloom()) {
+ if !BloomFilterMatch(whisper.BloomFilter(), envelope.Bloom()) {
// maybe the value was recently changed, and the peers did not adjust yet.
// in this case the previous value is retrieved by BloomFilterTolerance()
// for a short period of peer synchronization.
- if !bloomFilterMatch(wh.BloomFilterTolerance(), envelope.Bloom()) {
+ if !BloomFilterMatch(whisper.BloomFilterTolerance(), envelope.Bloom()) {
return false, fmt.Errorf("envelope does not match bloom filter, hash=[%v], bloom: \n%x \n%x \n%x",
- envelope.Hash().Hex(), wh.BloomFilter(), envelope.Bloom(), envelope.Topic)
+ envelope.Hash().Hex(), whisper.BloomFilter(), envelope.Bloom(), envelope.Topic)
}
}
hash := envelope.Hash()
- wh.poolMu.Lock()
- _, alreadyCached := wh.envelopes[hash]
+ whisper.poolMu.Lock()
+ _, alreadyCached := whisper.envelopes[hash]
if !alreadyCached {
- wh.envelopes[hash] = envelope
- if wh.expirations[envelope.Expiry] == nil {
- wh.expirations[envelope.Expiry] = set.NewNonTS()
+ whisper.envelopes[hash] = envelope
+ if whisper.expirations[envelope.Expiry] == nil {
+ whisper.expirations[envelope.Expiry] = set.NewNonTS()
}
- if !wh.expirations[envelope.Expiry].Has(hash) {
- wh.expirations[envelope.Expiry].Add(hash)
+ if !whisper.expirations[envelope.Expiry].Has(hash) {
+ whisper.expirations[envelope.Expiry].Add(hash)
}
}
- wh.poolMu.Unlock()
+ whisper.poolMu.Unlock()
if alreadyCached {
log.Trace("whisper envelope already cached", "hash", envelope.Hash().Hex())
} else {
log.Trace("cached whisper envelope", "hash", envelope.Hash().Hex())
- wh.statsMu.Lock()
- wh.stats.memoryUsed += envelope.size()
- wh.statsMu.Unlock()
- wh.postEvent(envelope, false) // notify the local node about the new message
- if wh.mailServer != nil {
- wh.mailServer.Archive(envelope)
+ whisper.statsMu.Lock()
+ whisper.stats.memoryUsed += envelope.size()
+ whisper.statsMu.Unlock()
+ whisper.postEvent(envelope, isP2P) // notify the local node about the new message
+ if whisper.mailServer != nil {
+ whisper.mailServer.Archive(envelope)
}
}
return true, nil
}
// postEvent queues the message for further processing.
-func (w *Whisper) postEvent(envelope *Envelope, isP2P bool) {
+func (whisper *Whisper) postEvent(envelope *Envelope, isP2P bool) {
if isP2P {
- w.p2pMsgQueue <- envelope
+ whisper.p2pMsgQueue <- envelope
} else {
- w.checkOverflow()
- w.messageQueue <- envelope
+ whisper.checkOverflow()
+ whisper.messageQueue <- envelope
}
}
// checkOverflow checks if message queue overflow occurs and reports it if necessary.
-func (w *Whisper) checkOverflow() {
- queueSize := len(w.messageQueue)
+func (whisper *Whisper) checkOverflow() {
+ queueSize := len(whisper.messageQueue)
if queueSize == messageQueueLimit {
- if !w.Overflow() {
- w.settings.Store(overflowIdx, true)
+ if !whisper.Overflow() {
+ whisper.settings.Store(overflowIdx, true)
log.Warn("message queue overflow")
}
} else if queueSize <= messageQueueLimit/2 {
- if w.Overflow() {
- w.settings.Store(overflowIdx, false)
+ if whisper.Overflow() {
+ whisper.settings.Store(overflowIdx, false)
log.Warn("message queue overflow fixed (back to normal)")
}
}
}
// processQueue delivers the messages to the watchers during the lifetime of the whisper node.
-func (w *Whisper) processQueue() {
+func (whisper *Whisper) processQueue() {
var e *Envelope
for {
select {
- case <-w.quit:
+ case <-whisper.quit:
return
- case e = <-w.messageQueue:
- w.filters.NotifyWatchers(e, false)
+ case e = <-whisper.messageQueue:
+ whisper.filters.NotifyWatchers(e, false)
- case e = <-w.p2pMsgQueue:
- w.filters.NotifyWatchers(e, true)
+ case e = <-whisper.p2pMsgQueue:
+ whisper.filters.NotifyWatchers(e, true)
}
}
}
// update loops for the lifetime of the whisper node, updating its internal
// state by expiring stale messages from the pool.
-func (w *Whisper) update() {
+func (whisper *Whisper) update() {
// Start a ticker to check for expirations
expire := time.NewTicker(expirationCycle)
@@ -879,9 +873,9 @@ func (w *Whisper) update() {
for {
select {
case <-expire.C:
- w.expire()
+ whisper.expire()
- case <-w.quit:
+ case <-whisper.quit:
return
}
}
@@ -889,75 +883,57 @@ func (w *Whisper) update() {
// expire iterates over all the expiration timestamps, removing all stale
// messages from the pools.
-func (w *Whisper) expire() {
- w.poolMu.Lock()
- defer w.poolMu.Unlock()
+func (whisper *Whisper) expire() {
+ whisper.poolMu.Lock()
+ defer whisper.poolMu.Unlock()
- w.statsMu.Lock()
- defer w.statsMu.Unlock()
- w.stats.reset()
+ whisper.statsMu.Lock()
+ defer whisper.statsMu.Unlock()
+ whisper.stats.reset()
now := uint32(time.Now().Unix())
- for expiry, hashSet := range w.expirations {
+ for expiry, hashSet := range whisper.expirations {
if expiry < now {
// Dump all expired messages and remove timestamp
hashSet.Each(func(v interface{}) bool {
- sz := w.envelopes[v.(common.Hash)].size()
- delete(w.envelopes, v.(common.Hash))
- w.stats.messagesCleared++
- w.stats.memoryCleared += sz
- w.stats.memoryUsed -= sz
+ sz := whisper.envelopes[v.(common.Hash)].size()
+ delete(whisper.envelopes, v.(common.Hash))
+ whisper.stats.messagesCleared++
+ whisper.stats.memoryCleared += sz
+ whisper.stats.memoryUsed -= sz
return true
})
- w.expirations[expiry].Clear()
- delete(w.expirations, expiry)
+ whisper.expirations[expiry].Clear()
+ delete(whisper.expirations, expiry)
}
}
}
// Stats returns the whisper node statistics.
-func (w *Whisper) Stats() Statistics {
- w.statsMu.Lock()
- defer w.statsMu.Unlock()
+func (whisper *Whisper) Stats() Statistics {
+ whisper.statsMu.Lock()
+ defer whisper.statsMu.Unlock()
- return w.stats
+ return whisper.stats
}
// Envelopes retrieves all the messages currently pooled by the node.
-func (w *Whisper) Envelopes() []*Envelope {
- w.poolMu.RLock()
- defer w.poolMu.RUnlock()
+func (whisper *Whisper) Envelopes() []*Envelope {
+ whisper.poolMu.RLock()
+ defer whisper.poolMu.RUnlock()
- all := make([]*Envelope, 0, len(w.envelopes))
- for _, envelope := range w.envelopes {
+ all := make([]*Envelope, 0, len(whisper.envelopes))
+ for _, envelope := range whisper.envelopes {
all = append(all, envelope)
}
return all
}
-// Messages iterates through all currently floating envelopes
-// and retrieves all the messages, that this filter could decrypt.
-func (w *Whisper) Messages(id string) []*ReceivedMessage {
- result := make([]*ReceivedMessage, 0)
- w.poolMu.RLock()
- defer w.poolMu.RUnlock()
-
- if filter := w.filters.Get(id); filter != nil {
- for _, env := range w.envelopes {
- msg := filter.processEnvelope(env)
- if msg != nil {
- result = append(result, msg)
- }
- }
- }
- return result
-}
-
// isEnvelopeCached checks if envelope with specific hash has already been received and cached.
-func (w *Whisper) isEnvelopeCached(hash common.Hash) bool {
- w.poolMu.Lock()
- defer w.poolMu.Unlock()
+func (whisper *Whisper) isEnvelopeCached(hash common.Hash) bool {
+ whisper.poolMu.Lock()
+ defer whisper.poolMu.Unlock()
- _, exist := w.envelopes[hash]
+ _, exist := whisper.envelopes[hash]
return exist
}
@@ -983,9 +959,16 @@ func validatePrivateKey(k *ecdsa.PrivateKey) bool {
return ValidatePublicKey(&k.PublicKey)
}
-// validateSymmetricKey returns false if the key contains all zeros
-func validateSymmetricKey(k []byte) bool {
- return len(k) > 0 && !containsOnlyZeros(k)
+// validateDataIntegrity returns false if the data have the wrong size or contain only zeros,
+// which is the simplest and most common bug.
+func validateDataIntegrity(k []byte, expectedSize int) bool {
+ if len(k) != expectedSize {
+ return false
+ }
+ if expectedSize > 3 && containsOnlyZeros(k) {
+ return false
+ }
+ return true
}
// containsOnlyZeros checks if the data contain only zeros.
@@ -1019,12 +1002,11 @@ func BytesToUintBigEndian(b []byte) (res uint64) {
// GenerateRandomID generates a random string, which is then returned to be used as a key id
func GenerateRandomID() (id string, err error) {
- buf := make([]byte, keyIdSize)
- _, err = crand.Read(buf)
+ buf, err := generateSecureRandomData(keyIDSize)
if err != nil {
return "", err
}
- if !validateSymmetricKey(buf) {
+ if !validateDataIntegrity(buf, keyIDSize) {
return "", fmt.Errorf("error in generateRandomID: crypto/rand failed to generate random data")
}
id = common.Bytes2Hex(buf)
@@ -1043,13 +1025,12 @@ func isFullNode(bloom []byte) bool {
return true
}
-func bloomFilterMatch(filter, sample []byte) bool {
+func BloomFilterMatch(filter, sample []byte) bool {
if filter == nil {
- // full node, accepts all messages
return true
}
- for i := 0; i < bloomFilterSize; i++ {
+ for i := 0; i < BloomFilterSize; i++ {
f := filter[i]
s := sample[i]
if (f | s) != f {
@@ -1061,8 +1042,8 @@ func bloomFilterMatch(filter, sample []byte) bool {
}
func addBloom(a, b []byte) []byte {
- c := make([]byte, bloomFilterSize)
- for i := 0; i < bloomFilterSize; i++ {
+ c := make([]byte, BloomFilterSize)
+ for i := 0; i < BloomFilterSize; i++ {
c[i] = a[i] | b[i]
}
return c
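Several whisper.go methods above (SetMinimumPoW, SetBloomFilter) store the new value immediately but only promote it to the corresponding *Tolerance setting after syncAllowance seconds, so envelopes judged against the previous requirement are still accepted while peers catch up. The toy sketch below illustrates that grace-window idea; the type, names, and durations are invented for the sketch and are not the package's API.

package main

import (
	"fmt"
	"sync"
	"time"
)

// powSettings is a stand-in for the minPowIdx / minPowToleranceIdx pair: the
// new requirement applies at once, while the old one stays acceptable for a
// grace period so peers that have not processed the notification yet are not
// penalized.
type powSettings struct {
	mu        sync.RWMutex
	current   float64
	tolerated float64
}

func (s *powSettings) set(val float64, grace time.Duration) {
	s.mu.Lock()
	s.current = val
	s.mu.Unlock()
	go func() {
		time.Sleep(grace) // analogous to the syncAllowance delay
		s.mu.Lock()
		s.tolerated = val
		s.mu.Unlock()
	}()
}

func (s *powSettings) accept(pow float64) bool {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return pow >= s.current || pow >= s.tolerated
}

func main() {
	s := &powSettings{current: 0.2, tolerated: 0.2}
	s.set(2.0, 100*time.Millisecond)
	fmt.Println(s.accept(0.5)) // true: still inside the grace window
	time.Sleep(150 * time.Millisecond)
	fmt.Println(s.accept(0.5)) // false: the tolerance has caught up
}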
diff --git a/whisper/whisperv6/whisper_test.go b/whisper/whisperv6/whisper_test.go
index fa14acb1b..7fe256309 100644
--- a/whisper/whisperv6/whisper_test.go
+++ b/whisper/whisperv6/whisper_test.go
@@ -75,13 +75,9 @@ func TestWhisperBasic(t *testing.T) {
if len(mail) != 0 {
t.Fatalf("failed w.Envelopes().")
}
- m := w.Messages("non-existent")
- if len(m) != 0 {
- t.Fatalf("failed w.Messages.")
- }
derived := pbkdf2.Key([]byte(peerID), nil, 65356, aesKeyLength, sha256.New)
- if !validateSymmetricKey(derived) {
+ if !validateDataIntegrity(derived, aesKeyLength) {
t.Fatalf("failed validateSymmetricKey with param = %v.", derived)
}
if containsOnlyZeros(derived) {
@@ -448,24 +444,12 @@ func TestWhisperSymKeyManagement(t *testing.T) {
if !w.HasSymKey(id2) {
t.Fatalf("HasSymKey(id2) failed.")
}
- if k1 == nil {
- t.Fatalf("k1 does not exist.")
- }
- if k2 == nil {
- t.Fatalf("k2 does not exist.")
+ if !validateDataIntegrity(k2, aesKeyLength) {
+ t.Fatalf("key validation failed.")
}
if !bytes.Equal(k1, k2) {
t.Fatalf("k1 != k2.")
}
- if len(k1) != aesKeyLength {
- t.Fatalf("wrong length of k1.")
- }
- if len(k2) != aesKeyLength {
- t.Fatalf("wrong length of k2.")
- }
- if !validateSymmetricKey(k2) {
- t.Fatalf("key validation failed.")
- }
}
func TestExpiry(t *testing.T) {
@@ -605,7 +589,7 @@ func TestCustomization(t *testing.T) {
}
// check w.messages()
- id, err := w.Subscribe(f)
+ _, err = w.Subscribe(f)
if err != nil {
t.Fatalf("failed subscribe with seed %d: %s.", seed, err)
}
@@ -614,11 +598,6 @@ func TestCustomization(t *testing.T) {
if len(mail) > 0 {
t.Fatalf("received premature mail")
}
-
- mail = w.Messages(id)
- if len(mail) != 2 {
- t.Fatalf("failed to get whisper messages")
- }
}
func TestSymmetricSendCycle(t *testing.T) {
@@ -847,11 +826,11 @@ func TestSymmetricSendKeyMismatch(t *testing.T) {
func TestBloom(t *testing.T) {
topic := TopicType{0, 0, 255, 6}
b := TopicToBloom(topic)
- x := make([]byte, bloomFilterSize)
+ x := make([]byte, BloomFilterSize)
x[0] = byte(1)
x[32] = byte(1)
- x[bloomFilterSize-1] = byte(128)
- if !bloomFilterMatch(x, b) || !bloomFilterMatch(b, x) {
+ x[BloomFilterSize-1] = byte(128)
+ if !BloomFilterMatch(x, b) || !BloomFilterMatch(b, x) {
t.Fatalf("bloom filter does not match the mask")
}
@@ -863,11 +842,11 @@ func TestBloom(t *testing.T) {
if err != nil {
t.Fatalf("math rand error")
}
- if !bloomFilterMatch(b, b) {
+ if !BloomFilterMatch(b, b) {
t.Fatalf("bloom filter does not match self")
}
x = addBloom(x, b)
- if !bloomFilterMatch(x, b) {
+ if !BloomFilterMatch(x, b) {
t.Fatalf("bloom filter does not match combined bloom")
}
if !isFullNode(nil) {
@@ -877,16 +856,16 @@ func TestBloom(t *testing.T) {
if isFullNode(x) {
t.Fatalf("isFullNode false positive")
}
- for i := 0; i < bloomFilterSize; i++ {
+ for i := 0; i < BloomFilterSize; i++ {
b[i] = byte(255)
}
if !isFullNode(b) {
t.Fatalf("isFullNode false negative")
}
- if bloomFilterMatch(x, b) {
+ if BloomFilterMatch(x, b) {
t.Fatalf("bloomFilterMatch false positive")
}
- if !bloomFilterMatch(b, x) {
+ if !BloomFilterMatch(b, x) {
t.Fatalf("bloomFilterMatch false negative")
}
@@ -900,7 +879,7 @@ func TestBloom(t *testing.T) {
t.Fatalf("failed to set bloom filter: %s", err)
}
f = w.BloomFilter()
- if !bloomFilterMatch(f, x) || !bloomFilterMatch(x, f) {
+ if !BloomFilterMatch(f, x) || !BloomFilterMatch(x, f) {
t.Fatalf("retireved wrong bloom filter")
}
}
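TestBloom above exercises the now-exported BloomFilterMatch together with addBloom; the key property is that aggregation is a byte-wise OR, so the combined bloom always contains each of its inputs, which is what updateBloomFilter relies on when folding a filter's topics into the advertised bloom. A small standalone sketch with a shortened filter size and local names:

package main

import "fmt"

const size = 8 // shortened for the sketch; the package uses BloomFilterSize

// orBloom mirrors addBloom: aggregation is a byte-wise OR.
func orBloom(a, b []byte) []byte {
	c := make([]byte, size)
	for i := 0; i < size; i++ {
		c[i] = a[i] | b[i]
	}
	return c
}

// contains mirrors BloomFilterMatch for non-nil filters.
func contains(filter, sample []byte) bool {
	for i := 0; i < size; i++ {
		if (filter[i] | sample[i]) != filter[i] {
			return false
		}
	}
	return true
}

func main() {
	x := make([]byte, size)
	b := make([]byte, size)
	x[0], b[3] = 0x01, 0x20

	combined := orBloom(x, b)
	fmt.Println(contains(combined, x), contains(combined, b)) // true true: the aggregate covers both inputs
	fmt.Println(contains(x, b))                               // false: x alone does not cover b
}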