张蕾 / Geth-Modification · Commits

Commit dd708c16 (Unverified)
Authored Mar 26, 2018 by Péter Szilágyi, committed by GitHub on Mar 26, 2018

Merge pull request #16319 from rjl493456442/dump_preimages

cmd: implement preimage dump and import cmds

Parents: 7c131f4d 495bdb0c
Showing 4 changed files with 160 additions and 4 deletions (+160 −4):

  cmd/geth/chaincmd.go  +61 −1
  cmd/geth/main.go       +2 −0
  cmd/utils/cmd.go      +91 −3
  ethdb/database.go      +6 −0
cmd/geth/chaincmd.go

@@ -95,6 +95,34 @@ Requires a first argument of the file to write to.
Optional second and third arguments control the first and
last block to write. In this mode, the file will be appended
if already existing.`,
	}
	importPreimagesCommand = cli.Command{
		Action:    utils.MigrateFlags(importPreimages),
		Name:      "import-preimages",
		Usage:     "Import the preimage database from an RLP stream",
		ArgsUsage: "<datafile>",
		Flags: []cli.Flag{
			utils.DataDirFlag,
			utils.CacheFlag,
			utils.LightModeFlag,
		},
		Category: "BLOCKCHAIN COMMANDS",
		Description: `
The import-preimages command imports hash preimages from an RLP encoded stream.`,
	}
	exportPreimagesCommand = cli.Command{
		Action:    utils.MigrateFlags(exportPreimages),
		Name:      "export-preimages",
		Usage:     "Export the preimage database into an RLP stream",
		ArgsUsage: "<dumpfile>",
		Flags: []cli.Flag{
			utils.DataDirFlag,
			utils.CacheFlag,
			utils.LightModeFlag,
		},
		Category: "BLOCKCHAIN COMMANDS",
		Description: `
The export-preimages command export hash preimages to an RLP encoded stream`,
	}
	copydbCommand = cli.Command{
		Action: utils.MigrateFlags(copyDb),

@@ -299,7 +327,39 @@ func exportChain(ctx *cli.Context) error {
	if err != nil {
		utils.Fatalf("Export error: %v\n", err)
	}
-	fmt.Printf("Export done in %v", time.Since(start))
+	fmt.Printf("Export done in %v\n", time.Since(start))
	return nil
}

// importPreimages imports preimage data from the specified file.
func importPreimages(ctx *cli.Context) error {
	if len(ctx.Args()) < 1 {
		utils.Fatalf("This command requires an argument.")
	}
	stack := makeFullNode(ctx)
	diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)

	start := time.Now()
	if err := utils.ImportPreimages(diskdb, ctx.Args().First()); err != nil {
		utils.Fatalf("Export error: %v\n", err)
	}
	fmt.Printf("Export done in %v\n", time.Since(start))
	return nil
}

// exportPreimages dumps the preimage data to specified json file in streaming way.
func exportPreimages(ctx *cli.Context) error {
	if len(ctx.Args()) < 1 {
		utils.Fatalf("This command requires an argument.")
	}
	stack := makeFullNode(ctx)
	diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)

	start := time.Now()
	if err := utils.ExportPreimages(diskdb, ctx.Args().First()); err != nil {
		utils.Fatalf("Export error: %v\n", err)
	}
	fmt.Printf("Export done in %v\n", time.Since(start))
	return nil
}
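As an aside, the command wiring above follows the standard urfave/cli v1 pattern: an Action receives the parsed *cli.Context and reads its first positional argument. A minimal, self-contained sketch of that pattern outside the geth codebase (the command name and messages below are illustrative, not part of this change):

package main

import (
	"fmt"
	"os"

	"gopkg.in/urfave/cli.v1"
)

func main() {
	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			// Illustrative command mirroring the import-preimages shape.
			Name:      "demo-preimages",
			Usage:     "Print the data file that would be imported",
			ArgsUsage: "<datafile>",
			Action: func(ctx *cli.Context) error {
				if len(ctx.Args()) < 1 {
					return fmt.Errorf("this command requires an argument")
				}
				fmt.Println("would import preimages from", ctx.Args().First())
				return nil
			},
		},
	}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

In geth itself, utils.MigrateFlags additionally wraps the action so that flags set at the global level are copied into the command's own context before it runs.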
cmd/geth/main.go

@@ -155,6 +155,8 @@ func init() {
		initCommand,
		importCommand,
		exportCommand,
		importPreimagesCommand,
		exportPreimagesCommand,
		copydbCommand,
		removedbCommand,
		dumpCommand,
cmd/utils/cmd.go

@@ -27,8 +27,11 @@ import (
	"strings"
	"syscall"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/ethdb"
	"github.com/ethereum/go-ethereum/internal/debug"
	"github.com/ethereum/go-ethereum/log"
	"github.com/ethereum/go-ethereum/node"

@@ -105,6 +108,8 @@ func ImportChain(chain *core.BlockChain, fn string) error {
	}
	log.Info("Importing blockchain", "file", fn)

	// Open the file handle and potentially unwrap the gzip stream
	fh, err := os.Open(fn)
	if err != nil {
		return err

@@ -180,8 +185,12 @@ func missingBlocks(chain *core.BlockChain, blocks []*types.Block) []*types.Block
	return nil
}

// ExportChain exports a blockchain into the specified file, truncating any data
// already present in the file.
func ExportChain(blockchain *core.BlockChain, fn string) error {
	log.Info("Exporting blockchain", "file", fn)

	// Open the file handle and potentially wrap with a gzip stream
	fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm)
	if err != nil {
		return err

@@ -193,7 +202,7 @@ func ExportChain(blockchain *core.BlockChain, fn string) error {
		writer = gzip.NewWriter(writer)
		defer writer.(*gzip.Writer).Close()
	}
	// Iterate over the blocks and export them
	if err := blockchain.Export(writer); err != nil {
		return err
	}

@@ -202,9 +211,12 @@ func ExportChain(blockchain *core.BlockChain, fn string) error {
	return nil
}

// ExportAppendChain exports a blockchain into the specified file, appending to
// the file if data already exists in it.
func ExportAppendChain(blockchain *core.BlockChain, fn string, first uint64, last uint64) error {
	log.Info("Exporting blockchain", "file", fn)

	// TODO verify mode perms
	// Open the file handle and potentially wrap with a gzip stream
	fh, err := os.OpenFile(fn, os.O_CREATE|os.O_APPEND|os.O_WRONLY, os.ModePerm)
	if err != nil {
		return err

@@ -216,10 +228,86 @@ func ExportAppendChain(blockchain *core.BlockChain, fn string, first uint64, las
		writer = gzip.NewWriter(writer)
		defer writer.(*gzip.Writer).Close()
	}
	// Iterate over the blocks and export them
	if err := blockchain.ExportN(writer, first, last); err != nil {
		return err
	}
	log.Info("Exported blockchain to", "file", fn)
	return nil
}

// ImportPreimages imports a batch of exported hash preimages into the database.
func ImportPreimages(db *ethdb.LDBDatabase, fn string) error {
	log.Info("Importing preimages", "file", fn)

	// Open the file handle and potentially unwrap the gzip stream
	fh, err := os.Open(fn)
	if err != nil {
		return err
	}
	defer fh.Close()

	var reader io.Reader = fh
	if strings.HasSuffix(fn, ".gz") {
		if reader, err = gzip.NewReader(reader); err != nil {
			return err
		}
	}
	stream := rlp.NewStream(reader, 0)

	// Import the preimages in batches to prevent disk trashing
	preimages := make(map[common.Hash][]byte)

	for {
		// Read the next entry and ensure it's not junk
		var blob []byte

		if err := stream.Decode(&blob); err != nil {
			if err == io.EOF {
				break
			}
			return err
		}
		// Accumulate the preimages and flush when enough ws gathered
		preimages[crypto.Keccak256Hash(blob)] = common.CopyBytes(blob)
		if len(preimages) > 1024 {
			if err := core.WritePreimages(db, 0, preimages); err != nil {
				return err
			}
			preimages = make(map[common.Hash][]byte)
		}
	}
	// Flush the last batch preimage data
	if len(preimages) > 0 {
		return core.WritePreimages(db, 0, preimages)
	}
	return nil
}

// ExportPreimages exports all known hash preimages into the specified file,
// truncating any data already present in the file.
func ExportPreimages(db *ethdb.LDBDatabase, fn string) error {
	log.Info("Exporting preimages", "file", fn)

	// Open the file handle and potentially wrap with a gzip stream
	fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm)
	if err != nil {
		return err
	}
	defer fh.Close()

	var writer io.Writer = fh
	if strings.HasSuffix(fn, ".gz") {
		writer = gzip.NewWriter(writer)
		defer writer.(*gzip.Writer).Close()
	}
	// Iterate over the preimages and export them
	it := db.NewIteratorWithPrefix([]byte("secure-key-"))
	for it.Next() {
		if err := rlp.Encode(writer, it.Value()); err != nil {
			return err
		}
	}
	log.Info("Exported preimages", "file", fn)
	return nil
}
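The two helpers can also be driven programmatically rather than through the CLI. A minimal round-trip sketch, assuming LevelDB-backed chain databases opened via ethdb.NewLDBDatabase; the paths, cache size and handle count below are placeholders, not values from this change:

package main

import (
	"log"

	"github.com/ethereum/go-ethereum/cmd/utils"
	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	// Open the source chain database (placeholder path, cache, handles).
	src, err := ethdb.NewLDBDatabase("/tmp/src-chaindata", 128, 1024)
	if err != nil {
		log.Fatal(err)
	}
	defer src.Close()

	// Dump every "secure-key-" preimage into a gzipped RLP stream.
	if err := utils.ExportPreimages(src, "preimages.rlp.gz"); err != nil {
		log.Fatal(err)
	}

	// Re-import the dump into a second database.
	dst, err := ethdb.NewLDBDatabase("/tmp/dst-chaindata", 128, 1024)
	if err != nil {
		log.Fatal(err)
	}
	defer dst.Close()

	if err := utils.ImportPreimages(dst, "preimages.rlp.gz"); err != nil {
		log.Fatal(err)
	}
}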
ethdb/database.go

@@ -29,6 +29,7 @@ import (
	"github.com/syndtr/goleveldb/leveldb/filter"
	"github.com/syndtr/goleveldb/leveldb/iterator"
	"github.com/syndtr/goleveldb/leveldb/opt"
	"github.com/syndtr/goleveldb/leveldb/util"
)

var OpenFileLimit = 64

@@ -121,6 +122,11 @@ func (db *LDBDatabase) NewIterator() iterator.Iterator {
	return db.db.NewIterator(nil, nil)
}

// NewIteratorWithPrefix returns a iterator to iterate over subset of database content with a particular prefix.
func (db *LDBDatabase) NewIteratorWithPrefix(prefix []byte) iterator.Iterator {
	return db.db.NewIterator(util.BytesPrefix(prefix), nil)
}

func (db *LDBDatabase) Close() {
	// Stop the metrics collection to avoid internal database races
	db.quitLock.Lock()
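And a short sketch of the new iterator helper in isolation, walking the "secure-key-" namespace that ExportPreimages reads from; the database path and the decision to print each entry are illustrative assumptions:

package main

import (
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/ethdb"
)

func main() {
	// Placeholder path; in geth this would be the node's chaindata directory.
	db, err := ethdb.NewLDBDatabase("/tmp/chaindata", 128, 1024)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Iterate only the keys stored under the "secure-key-" prefix, i.e. the
	// trie hash preimages that export-preimages dumps.
	it := db.NewIteratorWithPrefix([]byte("secure-key-"))
	defer it.Release()
	for it.Next() {
		fmt.Printf("key=%x preimage=%x\n", it.Key(), it.Value())
	}
}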