Commit 1cc2f080, authored Sep 16, 2015 by Jeffrey Wilcke
Merge pull request #1784 from karalabe/standard-sync-stats

eth, rpc: standardize the chain sync progress counters

Parents: e9a80518, d4d3fc6a
Showing 8 changed files with 1174 additions and 251 deletions (+1174 -251)
eth/downloader/downloader.go       +140 -48
eth/downloader/downloader_test.go  +394 -4
eth/downloader/queue.go            +15  -2
jsre/ethereum_js.go                +609 -180
rpc/api/admin.go                   +0   -12
rpc/api/admin_js.go                +0   -4
rpc/api/eth.go                     +15  -0
rpc/api/utils.go                   +1   -1
eth/downloader/downloader.go (view file @ 1cc2f080)
@@ -130,10 +130,9 @@ type Downloader struct {
 	interrupt int32 // Atomic boolean to signal termination

 	// Statistics
-	importStart time.Time // Instance when the last blocks were taken from the cache
-	importQueue []*Block  // Previously taken blocks to check import progress
-	importDone  int       // Number of taken blocks already imported from the last batch
-	importLock  sync.Mutex
+	syncStatsOrigin uint64       // Origin block number where syncing started at
+	syncStatsHeight uint64       // Highest block number known when syncing started
+	syncStatsLock   sync.RWMutex // Lock protecting the sync stats fields

 	// Callbacks
 	hasBlock hashCheckFn // Checks if a block is present in the chain

@@ -161,6 +160,7 @@ type Downloader struct {
 	cancelLock sync.RWMutex // Lock to protect the cancel channel in delivers

 	// Testing hooks
+	syncInitHook    func(uint64, uint64)  // Method to call upon initiating a new sync run
 	bodyFetchHook   func([]*types.Header) // Method to call upon starting a block body fetch
 	chainInsertHook func([]*Block)        // Method to call upon inserting a chain of blocks (possibly in multiple invocations)
 }
@@ -192,27 +192,14 @@ func New(mux *event.TypeMux, hasBlock hashCheckFn, getBlock blockRetrievalFn, he
 	}
 }

-// Stats retrieves the current status of the downloader.
-func (d *Downloader) Stats() (pending int, cached int, importing int, estimate time.Duration) {
-	// Fetch the download status
-	pending, cached = d.queue.Size()
-
-	// Figure out the import progress
-	d.importLock.Lock()
-	defer d.importLock.Unlock()
-
-	for len(d.importQueue) > 0 && d.hasBlock(d.importQueue[0].RawBlock.Hash()) {
-		d.importQueue = d.importQueue[1:]
-		d.importDone++
-	}
-	importing = len(d.importQueue)
-
-	// Make an estimate on the total sync
-	estimate = 0
-	if d.importDone > 0 {
-		estimate = time.Since(d.importStart) / time.Duration(d.importDone) * time.Duration(pending+cached+importing)
-	}
-	return
+// Boundaries retrieves the synchronisation boundaries, specifically the origin
+// block where synchronisation started at (may have failed/suspended) and the
+// latest known block which the synchonisation targets.
+func (d *Downloader) Boundaries() (uint64, uint64) {
+	d.syncStatsLock.RLock()
+	defer d.syncStatsLock.RUnlock()
+
+	return d.syncStatsOrigin, d.syncStatsHeight
 }

 // Synchronising returns whether the downloader is currently retrieving blocks.
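The new accessor only reports the two boundary numbers, so anything that wants a percentage has to combine them with its own notion of the current head. Below is a minimal sketch of such a consumer; the fakeSync type and the block numbers are illustrative assumptions, not part of this commit.

package main

import "fmt"

// fakeSync stands in for the real Downloader; only the Boundaries signature matters here.
type fakeSync struct{ origin, height uint64 }

func (f *fakeSync) Boundaries() (uint64, uint64) { return f.origin, f.height }

func main() {
	d := &fakeSync{origin: 1200, height: 2000}

	// currentBlock would normally come from the local chain's head.
	for _, currentBlock := range []uint64{1200, 1600, 2000} {
		origin, height := d.Boundaries()
		done, total := currentBlock-origin, height-origin
		fmt.Printf("synced %d/%d blocks (%.0f%%) since origin #%d\n",
			done, total, 100*float64(done)/float64(total), origin)
	}
}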
@@ -333,14 +320,29 @@ func (d *Downloader) syncWithPeer(p *peer, hash common.Hash, td *big.Int) (err e
 	switch {
 	case p.version == eth61:
-		// Old eth/61, use forward, concurrent hash and block retrieval algorithm
-		number, err := d.findAncestor61(p)
+		// Look up the sync boundaries: the common ancestor and the target block
+		latest, err := d.fetchHeight61(p)
+		if err != nil {
+			return err
+		}
+		origin, err := d.findAncestor61(p)
 		if err != nil {
 			return err
 		}
+		d.syncStatsLock.Lock()
+		if d.syncStatsHeight <= origin || d.syncStatsOrigin > origin {
+			d.syncStatsOrigin = origin
+		}
+		d.syncStatsHeight = latest
+		d.syncStatsLock.Unlock()
+
+		// Initiate the sync using a concurrent hash and block retrieval algorithm
+		if d.syncInitHook != nil {
+			d.syncInitHook(origin, latest)
+		}
 		errc := make(chan error, 2)
-		go func() { errc <- d.fetchHashes61(p, td, number+1) }()
-		go func() { errc <- d.fetchBlocks61(number+1) }()
+		go func() { errc <- d.fetchHashes61(p, td, origin+1) }()
+		go func() { errc <- d.fetchBlocks61(origin+1) }()

 		// If any fetcher fails, cancel the other
 		if err := <-errc; err != nil {
@@ -351,14 +353,29 @@ func (d *Downloader) syncWithPeer(p *peer, hash common.Hash, td *big.Int) (err e
 		return <-errc

 	case p.version >= eth62:
-		// New eth/62, use forward, concurrent header and block body retrieval algorithm
-		number, err := d.findAncestor(p)
+		// Look up the sync boundaries: the common ancestor and the target block
+		latest, err := d.fetchHeight(p)
+		if err != nil {
+			return err
+		}
+		origin, err := d.findAncestor(p)
 		if err != nil {
 			return err
 		}
+		d.syncStatsLock.Lock()
+		if d.syncStatsHeight <= origin || d.syncStatsOrigin > origin {
+			d.syncStatsOrigin = origin
+		}
+		d.syncStatsHeight = latest
+		d.syncStatsLock.Unlock()
+
+		// Initiate the sync using a concurrent hash and block retrieval algorithm
+		if d.syncInitHook != nil {
+			d.syncInitHook(origin, latest)
+		}
 		errc := make(chan error, 2)
-		go func() { errc <- d.fetchHeaders(p, td, number+1) }()
-		go func() { errc <- d.fetchBodies(number+1) }()
+		go func() { errc <- d.fetchHeaders(p, td, origin+1) }()
+		go func() { errc <- d.fetchBodies(origin+1) }()

 		// If any fetcher fails, cancel the other
 		if err := <-errc; err != nil {
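The syncStats update above encodes the rule the later boundary tests rely on: the origin only moves when the new common ancestor falls outside the previously tracked window (the old sync already finished below it, or the chain was reverted under it), while the target height is always refreshed. A self-contained worked example of that rule, with made-up block numbers:

package main

import "fmt"

// updateBoundaries applies the same rule the new syncWithPeer code uses: keep the
// previous origin while the new common ancestor still falls inside the tracked
// window, otherwise reset it; the target height is always refreshed.
func updateBoundaries(statsOrigin, statsHeight, origin, latest uint64) (uint64, uint64) {
	if statsHeight <= origin || statsOrigin > origin {
		statsOrigin = origin
	}
	statsHeight = latest
	return statsOrigin, statsHeight
}

func main() {
	// Fresh sync: nothing tracked yet, so the origin jumps to the common ancestor.
	fmt.Println(updateBoundaries(0, 0, 500, 900)) // 500 900
	// Retry after a failure around block 700: the original origin 500 is kept.
	fmt.Println(updateBoundaries(500, 900, 700, 900)) // 500 900
	// Fork or head revert below the old origin: the origin is reset downwards.
	fmt.Println(updateBoundaries(500, 900, 300, 800)) // 300 800
}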
@@ -401,6 +418,50 @@ func (d *Downloader) Terminate() {
 	d.cancel()
 }

+// fetchHeight61 retrieves the head block of the remote peer to aid in estimating
+// the total time a pending synchronisation would take.
+func (d *Downloader) fetchHeight61(p *peer) (uint64, error) {
+	glog.V(logger.Debug).Infof("%v: retrieving remote chain height", p)
+
+	// Request the advertised remote head block and wait for the response
+	go p.getBlocks([]common.Hash{p.head})
+
+	timeout := time.After(blockSoftTTL)
+	for {
+		select {
+		case <-d.cancelCh:
+			return 0, errCancelBlockFetch
+
+		case <-d.headerCh:
+			// Out of bounds eth/62 block headers received, ignore them
+
+		case <-d.bodyCh:
+			// Out of bounds eth/62 block bodies received, ignore them
+
+		case <-d.hashCh:
+			// Out of bounds hashes received, ignore them
+
+		case blockPack := <-d.blockCh:
+			// Discard anything not from the origin peer
+			if blockPack.peerId != p.id {
+				glog.V(logger.Debug).Infof("Received blocks from incorrect peer(%s)", blockPack.peerId)
+				break
+			}
+			// Make sure the peer actually gave something valid
+			blocks := blockPack.blocks
+			if len(blocks) != 1 {
+				glog.V(logger.Debug).Infof("%v: invalid number of head blocks: %d != 1", p, len(blocks))
+				return 0, errBadPeer
+			}
+			return blocks[0].NumberU64(), nil
+
+		case <-timeout:
+			glog.V(logger.Debug).Infof("%v: head block timeout", p)
+			return 0, errTimeout
+		}
+	}
+}
+
 // findAncestor61 tries to locate the common ancestor block of the local chain and
 // a remote peers blockchain. In the general case when our node was in sync and
 // on the correct chain, checking the top N blocks should already get us a match.
@@ -776,6 +837,50 @@ func (d *Downloader) fetchBlocks61(from uint64) error {
 	}
 }

+// fetchHeight retrieves the head header of the remote peer to aid in estimating
+// the total time a pending synchronisation would take.
+func (d *Downloader) fetchHeight(p *peer) (uint64, error) {
+	glog.V(logger.Debug).Infof("%v: retrieving remote chain height", p)
+
+	// Request the advertised remote head block and wait for the response
+	go p.getRelHeaders(p.head, 1, 0, false)
+
+	timeout := time.After(headerTTL)
+	for {
+		select {
+		case <-d.cancelCh:
+			return 0, errCancelBlockFetch
+
+		case headerPack := <-d.headerCh:
+			// Discard anything not from the origin peer
+			if headerPack.peerId != p.id {
+				glog.V(logger.Debug).Infof("Received headers from incorrect peer(%s)", headerPack.peerId)
+				break
+			}
+			// Make sure the peer actually gave something valid
+			headers := headerPack.headers
+			if len(headers) != 1 {
+				glog.V(logger.Debug).Infof("%v: invalid number of head headers: %d != 1", p, len(headers))
+				return 0, errBadPeer
+			}
+			return headers[0].Number.Uint64(), nil
+
+		case <-d.bodyCh:
+			// Out of bounds block bodies received, ignore them
+
+		case <-d.hashCh:
+			// Out of bounds eth/61 hashes received, ignore them
+
+		case <-d.blockCh:
+			// Out of bounds eth/61 blocks received, ignore them
+
+		case <-timeout:
+			glog.V(logger.Debug).Infof("%v: head header timeout", p)
+			return 0, errTimeout
+		}
+	}
+}
+
 // findAncestor tries to locate the common ancestor block of the local chain and
 // a remote peers blockchain. In the general case when our node was in sync and
 // on the correct chain, checking the top N blocks should already get us a match.
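Both height fetchers share the same shape: fire a single request, then select in a loop, discarding replies from the wrong peer, draining deliveries that belong to the other protocol version, and giving up on a timeout. A stripped-down, hypothetical sketch of that loop (toy channel and reply types, not the real downloader ones):

package main

import (
	"errors"
	"fmt"
	"time"
)

// reply is a toy stand-in for the blockPack/headerPack deliveries.
type reply struct {
	peerID string
	height uint64
}

// waitForHeight mimics the fetchHeight select loop: one outstanding request,
// keep reading until the origin peer answers, ignore unrelated traffic, time out.
func waitForHeight(want string, replies <-chan reply, unrelated <-chan struct{}, ttl time.Duration) (uint64, error) {
	timeout := time.After(ttl)
	for {
		select {
		case r := <-replies:
			if r.peerID != want {
				continue // discard anything not from the origin peer
			}
			return r.height, nil
		case <-unrelated:
			// out-of-bounds deliveries meant for other fetchers, ignore them
		case <-timeout:
			return 0, errors.New("head request timed out")
		}
	}
}

func main() {
	replies := make(chan reply, 2)
	replies <- reply{peerID: "other", height: 10}
	replies <- reply{peerID: "origin", height: 42}

	height, err := waitForHeight("origin", replies, make(chan struct{}), time.Second)
	fmt.Println(height, err) // 42 <nil>
}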
@@ -973,7 +1078,7 @@ func (d *Downloader) fetchHeaders(p *peer, td *big.Int, from uint64) error {
 			// Otherwise insert all the new headers, aborting in case of junk
 			glog.V(logger.Detail).Infof("%v: inserting %d headers from #%d", p, len(headerPack.headers), from)

-			inserts := d.queue.Insert(headerPack.headers)
+			inserts := d.queue.Insert(headerPack.headers, from)
 			if len(inserts) != len(headerPack.headers) {
 				glog.V(logger.Debug).Infof("%v: stale headers", p)
 				return errBadPeer
@@ -1203,16 +1308,10 @@ func (d *Downloader) process() {
 			d.process()
 		}
 	}()
-	// Release the lock upon exit (note, before checking for reentry!), and set
-	// the import statistics to zero.
-	defer func() {
-		d.importLock.Lock()
-		d.importQueue = nil
-		d.importDone = 0
-		d.importLock.Unlock()
-
-		atomic.StoreInt32(&d.processing, 0)
-	}()
+	// Release the lock upon exit (note, before checking for reentry!)
+	defer atomic.StoreInt32(&d.processing, 0)

 	// Repeat the processing as long as there are blocks to import
 	for {
 		// Fetch the next batch of blocks
@@ -1223,13 +1322,6 @@ func (d *Downloader) process() {
 		if d.chainInsertHook != nil {
 			d.chainInsertHook(blocks)
 		}
-		// Reset the import statistics
-		d.importLock.Lock()
-		d.importStart = time.Now()
-		d.importQueue = blocks
-		d.importDone = 0
-		d.importLock.Unlock()
-
 		// Actually import the blocks
 		glog.V(logger.Debug).Infof("Inserting chain with %d blocks (#%v - #%v)\n", len(blocks), blocks[0].RawBlock.Number(), blocks[len(blocks)-1].RawBlock.Number())
 		for len(blocks) != 0 {
eth/downloader/downloader_test.go (view file @ 1cc2f080)
@@ -20,6 +20,7 @@ import (
 	"errors"
 	"fmt"
 	"math/big"
+	"sync"
 	"sync/atomic"
 	"testing"
 	"time"
@@ -99,6 +100,8 @@ type downloadTester struct {
 	peerHashes   map[string][]common.Hash                // Hash chain belonging to different test peers
 	peerBlocks   map[string]map[common.Hash]*types.Block // Blocks belonging to different test peers
 	peerChainTds map[string]map[common.Hash]*big.Int     // Total difficulties of the blocks in the peer chains
+
+	lock sync.RWMutex
 }

 // newTester creates a new downloader test mocker.
@@ -118,8 +121,8 @@ func newTester() *downloadTester {
 // sync starts synchronizing with a remote peer, blocking until it completes.
 func (dl *downloadTester) sync(id string, td *big.Int) error {
+	dl.lock.RLock()
 	hash := dl.peerHashes[id][0]
 	// If no particular TD was requested, load from the peer's blockchain
 	if td == nil {
 		td = big.NewInt(1)

@@ -127,8 +130,9 @@ func (dl *downloadTester) sync(id string, td *big.Int) error {
 			td = diff
 		}
 	}
+	dl.lock.RUnlock()
+
 	err := dl.downloader.synchronise(id, hash, td)
 	for {
 		// If the queue is empty and processing stopped, break
 		hashes, blocks := dl.downloader.queue.Size()
@@ -143,26 +147,41 @@ func (dl *downloadTester) sync(id string, td *big.Int) error {
 // hasBlock checks if a block is present in the testers canonical chain.
 func (dl *downloadTester) hasBlock(hash common.Hash) bool {
+	dl.lock.RLock()
+	defer dl.lock.RUnlock()
+
 	return dl.getBlock(hash) != nil
 }

 // getBlock retrieves a block from the testers canonical chain.
 func (dl *downloadTester) getBlock(hash common.Hash) *types.Block {
+	dl.lock.RLock()
+	defer dl.lock.RUnlock()
+
 	return dl.ownBlocks[hash]
 }

 // headBlock retrieves the current head block from the canonical chain.
 func (dl *downloadTester) headBlock() *types.Block {
+	dl.lock.RLock()
+	defer dl.lock.RUnlock()
+
 	return dl.getBlock(dl.ownHashes[len(dl.ownHashes)-1])
 }

 // getTd retrieves the block's total difficulty from the canonical chain.
 func (dl *downloadTester) getTd(hash common.Hash) *big.Int {
+	dl.lock.RLock()
+	defer dl.lock.RUnlock()
+
 	return dl.ownChainTd[hash]
 }

 // insertChain injects a new batch of blocks into the simulated chain.
 func (dl *downloadTester) insertChain(blocks types.Blocks) (int, error) {
+	dl.lock.Lock()
+	defer dl.lock.Unlock()
+
 	for i, block := range blocks {
 		if _, ok := dl.ownBlocks[block.ParentHash()]; !ok {
 			return i, errors.New("unknown parent")
@@ -183,9 +202,12 @@ func (dl *downloadTester) newPeer(id string, version int, hashes []common.Hash,
 // specific delay time on processing the network packets sent to it, simulating
 // potentially slow network IO.
 func (dl *downloadTester) newSlowPeer(id string, version int, hashes []common.Hash, blocks map[common.Hash]*types.Block, delay time.Duration) error {
+	dl.lock.Lock()
+	defer dl.lock.Unlock()
+
 	err := dl.downloader.RegisterPeer(id, version, hashes[0],
 		dl.peerGetRelHashesFn(id, delay), dl.peerGetAbsHashesFn(id, delay), dl.peerGetBlocksFn(id, delay),
-		nil, dl.peerGetAbsHeadersFn(id, delay), dl.peerGetBodiesFn(id, delay))
+		dl.peerGetRelHeadersFn(id, delay), dl.peerGetAbsHeadersFn(id, delay), dl.peerGetBodiesFn(id, delay))
 	if err == nil {
 		// Assign the owned hashes and blocks to the peer (deep copy)
 		dl.peerHashes[id] = make([]common.Hash, len(hashes))
@@ -207,6 +229,9 @@ func (dl *downloadTester) newSlowPeer(id string, version int, hashes []common.Ha
 // dropPeer simulates a hard peer removal from the connection pool.
 func (dl *downloadTester) dropPeer(id string) {
+	dl.lock.Lock()
+	defer dl.lock.Unlock()
+
 	delete(dl.peerHashes, id)
 	delete(dl.peerBlocks, id)
 	delete(dl.peerChainTds, id)
@@ -221,6 +246,9 @@ func (dl *downloadTester) peerGetRelHashesFn(id string, delay time.Duration) fun
 	return func(head common.Hash) error {
 		time.Sleep(delay)
+		dl.lock.RLock()
+		defer dl.lock.RUnlock()
+
 		// Gather the next batch of hashes
 		hashes := dl.peerHashes[id]
 		result := make([]common.Hash, 0, MaxHashFetch)
@@ -250,6 +278,9 @@ func (dl *downloadTester) peerGetAbsHashesFn(id string, delay time.Duration) fun
 	return func(head uint64, count int) error {
 		time.Sleep(delay)
+		dl.lock.RLock()
+		defer dl.lock.RUnlock()
+
 		// Gather the next batch of hashes
 		hashes := dl.peerHashes[id]
 		result := make([]common.Hash, 0, count)
@@ -271,6 +302,10 @@ func (dl *downloadTester) peerGetAbsHashesFn(id string, delay time.Duration) fun
 func (dl *downloadTester) peerGetBlocksFn(id string, delay time.Duration) func([]common.Hash) error {
 	return func(hashes []common.Hash) error {
 		time.Sleep(delay)
+
+		dl.lock.RLock()
+		defer dl.lock.RUnlock()
+
 		blocks := dl.peerBlocks[id]
 		result := make([]*types.Block, 0, len(hashes))
 		for _, hash := range hashes {
@@ -284,6 +319,27 @@ func (dl *downloadTester) peerGetBlocksFn(id string, delay time.Duration) func([
 	}
 }

+// peerGetRelHeadersFn constructs a GetBlockHeaders function based on a hashed
+// origin; associated with a particular peer in the download tester. The returned
+// function can be used to retrieve batches of headers from the particular peer.
+func (dl *downloadTester) peerGetRelHeadersFn(id string, delay time.Duration) func(common.Hash, int, int, bool) error {
+	return func(origin common.Hash, amount int, skip int, reverse bool) error {
+		// Find the canonical number of the hash
+		dl.lock.RLock()
+		number := uint64(0)
+		for num, hash := range dl.peerHashes[id] {
+			if hash == origin {
+				number = uint64(len(dl.peerHashes[id]) - num - 1)
+				break
+			}
+		}
+		dl.lock.RUnlock()
+
+		// Use the absolute header fetcher to satisfy the query
+		return dl.peerGetAbsHeadersFn(id, delay)(number, amount, skip, reverse)
+	}
+}
+
 // peerGetAbsHeadersFn constructs a GetBlockHeaders function based on a numbered
 // origin; associated with a particular peer in the download tester. The returned
 // function can be used to retrieve batches of headers from the particular peer.
@@ -291,6 +347,9 @@ func (dl *downloadTester) peerGetAbsHeadersFn(id string, delay time.Duration) fu
 	return func(origin uint64, amount int, skip int, reverse bool) error {
 		time.Sleep(delay)
+		dl.lock.RLock()
+		defer dl.lock.RUnlock()
+
 		// Gather the next batch of hashes
 		hashes := dl.peerHashes[id]
 		blocks := dl.peerBlocks[id]
@@ -315,6 +374,10 @@ func (dl *downloadTester) peerGetAbsHeadersFn(id string, delay time.Duration) fu
 func (dl *downloadTester) peerGetBodiesFn(id string, delay time.Duration) func([]common.Hash) error {
 	return func(hashes []common.Hash) error {
 		time.Sleep(delay)
+
+		dl.lock.RLock()
+		defer dl.lock.RUnlock()
+
 		blocks := dl.peerBlocks[id]
 		transactions := make([][]*types.Transaction, 0, len(hashes))
@@ -384,13 +447,23 @@ func testThrottling(t *testing.T, protocol int) {
 		errc <- tester.sync("peer", nil)
 	}()
 	// Iteratively take some blocks, always checking the retrieval count
-	for len(tester.ownBlocks) < targetBlocks+1 {
+	for {
+		// Check the retrieval count synchronously (! reason for this ugly block)
+		tester.lock.RLock()
+		retrieved := len(tester.ownBlocks)
+		tester.lock.RUnlock()
+		if retrieved >= targetBlocks+1 {
+			break
+		}
 		// Wait a bit for sync to throttle itself
 		var cached int
 		for start := time.Now(); time.Since(start) < time.Second; {
 			time.Sleep(25 * time.Millisecond)

+			tester.downloader.queue.lock.RLock()
 			cached = len(tester.downloader.queue.blockPool)
+			tester.downloader.queue.lock.RUnlock()
 			if cached == blockCacheLimit || len(tester.ownBlocks)+cached+int(atomic.LoadUint32(&blocked)) == targetBlocks+1 {
 				break
 			}
@@ -583,6 +656,67 @@ func testEmptyBlockShortCircuit(t *testing.T, protocol int) {
 	}
 }

+// Tests that headers are enqueued continuously, preventing malicious nodes from
+// stalling the downloader by feeding gapped header chains.
+func TestMissingHeaderAttack62(t *testing.T) { testMissingHeaderAttack(t, 62) }
+func TestMissingHeaderAttack63(t *testing.T) { testMissingHeaderAttack(t, 63) }
+func TestMissingHeaderAttack64(t *testing.T) { testMissingHeaderAttack(t, 64) }
+
+func testMissingHeaderAttack(t *testing.T, protocol int) {
+	// Create a small enough block chain to download
+	targetBlocks := blockCacheLimit - 15
+	hashes, blocks := makeChain(targetBlocks, 0, genesis)
+
+	tester := newTester()
+
+	// Attempt a full sync with an attacker feeding gapped headers
+	tester.newPeer("attack", protocol, hashes, blocks)
+	missing := targetBlocks / 2
+	delete(tester.peerBlocks["attack"], hashes[missing])
+
+	if err := tester.sync("attack", nil); err == nil {
+		t.Fatalf("succeeded attacker synchronisation")
+	}
+	// Synchronise with the valid peer and make sure sync succeeds
+	tester.newPeer("valid", protocol, hashes, blocks)
+	if err := tester.sync("valid", nil); err != nil {
+		t.Fatalf("failed to synchronise blocks: %v", err)
+	}
+	if imported := len(tester.ownBlocks); imported != len(hashes) {
+		t.Fatalf("synchronised block mismatch: have %v, want %v", imported, len(hashes))
+	}
+}
+
+// Tests that if requested headers are shifted (i.e. first is missing), the queue
+// detects the invalid numbering.
+func TestShiftedHeaderAttack62(t *testing.T) { testShiftedHeaderAttack(t, 62) }
+func TestShiftedHeaderAttack63(t *testing.T) { testShiftedHeaderAttack(t, 63) }
+func TestShiftedHeaderAttack64(t *testing.T) { testShiftedHeaderAttack(t, 64) }
+
+func testShiftedHeaderAttack(t *testing.T, protocol int) {
+	// Create a small enough block chain to download
+	targetBlocks := blockCacheLimit - 15
+	hashes, blocks := makeChain(targetBlocks, 0, genesis)
+
+	tester := newTester()
+
+	// Attempt a full sync with an attacker feeding shifted headers
+	tester.newPeer("attack", protocol, hashes, blocks)
+	delete(tester.peerBlocks["attack"], hashes[len(hashes)-2])
+
+	if err := tester.sync("attack", nil); err == nil {
+		t.Fatalf("succeeded attacker synchronisation")
+	}
+	// Synchronise with the valid peer and make sure sync succeeds
+	tester.newPeer("valid", protocol, hashes, blocks)
+	if err := tester.sync("valid", nil); err != nil {
+		t.Fatalf("failed to synchronise blocks: %v", err)
+	}
+	if imported := len(tester.ownBlocks); imported != len(hashes) {
+		t.Fatalf("synchronised block mismatch: have %v, want %v", imported, len(hashes))
+	}
+}
+
 // Tests that if a peer sends an invalid body for a requested block, it gets
 // dropped immediately by the downloader.
 func TestInvalidBlockBodyAttack62(t *testing.T) { testInvalidBlockBodyAttack(t, 62) }
@@ -727,3 +861,259 @@ func testBlockBodyAttackerDropping(t *testing.T, protocol int) {
 		}
 	}
 }
+
+// Tests that synchronisation boundaries (origin block number and highest block
+// number) is tracked and updated correctly.
+func TestSyncBoundaries61(t *testing.T) { testSyncBoundaries(t, 61) }
+func TestSyncBoundaries62(t *testing.T) { testSyncBoundaries(t, 62) }
+func TestSyncBoundaries63(t *testing.T) { testSyncBoundaries(t, 63) }
+func TestSyncBoundaries64(t *testing.T) { testSyncBoundaries(t, 64) }
+
+func testSyncBoundaries(t *testing.T, protocol int) {
+	// Create a small enough block chain to download
+	targetBlocks := blockCacheLimit - 15
+	hashes, blocks := makeChain(targetBlocks, 0, genesis)
+
+	// Set a sync init hook to catch boundary changes
+	starting := make(chan struct{})
+	progress := make(chan struct{})
+
+	tester := newTester()
+	tester.downloader.syncInitHook = func(origin, latest uint64) {
+		starting <- struct{}{}
+		<-progress
+	}
+	// Retrieve the sync boundaries and ensure they are zero (pristine sync)
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != 0 {
+		t.Fatalf("Pristine boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, 0)
+	}
+	// Synchronise half the blocks and check initial boundaries
+	tester.newPeer("peer-half", protocol, hashes[targetBlocks/2:], blocks)
+	pending := new(sync.WaitGroup)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("peer-half", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(targetBlocks/2+1) {
+		t.Fatalf("Initial boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, targetBlocks/2+1)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+
+	// Synchronise all the blocks and check continuation boundaries
+	tester.newPeer("peer-full", protocol, hashes, blocks)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("peer-full", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != uint64(targetBlocks/2+1) || latest != uint64(targetBlocks) {
+		t.Fatalf("Completing boundary mismatch: have %v/%v, want %v/%v", origin, latest, targetBlocks/2+1, targetBlocks)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+}
+
+// Tests that synchronisation boundaries (origin block number and highest block
+// number) is tracked and updated correctly in case of a fork (or manual head
+// revertal).
+func TestForkedSyncBoundaries61(t *testing.T) { testForkedSyncBoundaries(t, 61) }
+func TestForkedSyncBoundaries62(t *testing.T) { testForkedSyncBoundaries(t, 62) }
+func TestForkedSyncBoundaries63(t *testing.T) { testForkedSyncBoundaries(t, 63) }
+func TestForkedSyncBoundaries64(t *testing.T) { testForkedSyncBoundaries(t, 64) }
+
+func testForkedSyncBoundaries(t *testing.T, protocol int) {
+	// Create a forked chain to simulate origin revertal
+	common, fork := MaxHashFetch, 2*MaxHashFetch
+	hashesA, hashesB, blocksA, blocksB := makeChainFork(common+fork, fork, genesis)
+
+	// Set a sync init hook to catch boundary changes
+	starting := make(chan struct{})
+	progress := make(chan struct{})
+
+	tester := newTester()
+	tester.downloader.syncInitHook = func(origin, latest uint64) {
+		starting <- struct{}{}
+		<-progress
+	}
+	// Retrieve the sync boundaries and ensure they are zero (pristine sync)
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != 0 {
+		t.Fatalf("Pristine boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, 0)
+	}
+	// Synchronise with one of the forks and check boundaries
+	tester.newPeer("fork A", protocol, hashesA, blocksA)
+	pending := new(sync.WaitGroup)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("fork A", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(len(hashesA)-1) {
+		t.Fatalf("Initial boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, len(hashesA)-1)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+
+	// Simulate a successful sync above the fork
+	tester.downloader.syncStatsOrigin = tester.downloader.syncStatsHeight
+
+	// Synchronise with the second fork and check boundary resets
+	tester.newPeer("fork B", protocol, hashesB, blocksB)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("fork B", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != uint64(common) || latest != uint64(len(hashesB)-1) {
+		t.Fatalf("Forking boundary mismatch: have %v/%v, want %v/%v", origin, latest, common, len(hashesB)-1)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+}
+
+// Tests that if synchronisation is aborted due to some failure, then the boundary
+// origin is not updated in the next sync cycle, as it should be considered the
+// continuation of the previous sync and not a new instance.
+func TestFailedSyncBoundaries61(t *testing.T) { testFailedSyncBoundaries(t, 61) }
+func TestFailedSyncBoundaries62(t *testing.T) { testFailedSyncBoundaries(t, 62) }
+func TestFailedSyncBoundaries63(t *testing.T) { testFailedSyncBoundaries(t, 63) }
+func TestFailedSyncBoundaries64(t *testing.T) { testFailedSyncBoundaries(t, 64) }
+
+func testFailedSyncBoundaries(t *testing.T, protocol int) {
+	// Create a small enough block chain to download
+	targetBlocks := blockCacheLimit - 15
+	hashes, blocks := makeChain(targetBlocks, 0, genesis)
+
+	// Set a sync init hook to catch boundary changes
+	starting := make(chan struct{})
+	progress := make(chan struct{})
+
+	tester := newTester()
+	tester.downloader.syncInitHook = func(origin, latest uint64) {
+		starting <- struct{}{}
+		<-progress
+	}
+	// Retrieve the sync boundaries and ensure they are zero (pristine sync)
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != 0 {
+		t.Fatalf("Pristine boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, 0)
+	}
+	// Attempt a full sync with a faulty peer
+	tester.newPeer("faulty", protocol, hashes, blocks)
+	missing := targetBlocks / 2
+	delete(tester.peerBlocks["faulty"], hashes[missing])
+
+	pending := new(sync.WaitGroup)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("faulty", nil); err == nil {
+			t.Fatalf("succeeded faulty synchronisation")
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(targetBlocks) {
+		t.Fatalf("Initial boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, targetBlocks)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+
+	// Synchronise with a good peer and check that the boundary origin remind the same after a failure
+	tester.newPeer("valid", protocol, hashes, blocks)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("valid", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(targetBlocks) {
+		t.Fatalf("Completing boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, targetBlocks)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+}
+
+// Tests that if an attacker fakes a chain height, after the attack is detected,
+// the boundary height is successfully reduced at the next sync invocation.
+func TestFakedSyncBoundaries61(t *testing.T) { testFakedSyncBoundaries(t, 61) }
+func TestFakedSyncBoundaries62(t *testing.T) { testFakedSyncBoundaries(t, 62) }
+func TestFakedSyncBoundaries63(t *testing.T) { testFakedSyncBoundaries(t, 63) }
+func TestFakedSyncBoundaries64(t *testing.T) { testFakedSyncBoundaries(t, 64) }
+
+func testFakedSyncBoundaries(t *testing.T, protocol int) {
+	// Create a small block chain
+	targetBlocks := blockCacheLimit - 15
+	hashes, blocks := makeChain(targetBlocks+3, 0, genesis)
+
+	// Set a sync init hook to catch boundary changes
+	starting := make(chan struct{})
+	progress := make(chan struct{})
+
+	tester := newTester()
+	tester.downloader.syncInitHook = func(origin, latest uint64) {
+		starting <- struct{}{}
+		<-progress
+	}
+	// Retrieve the sync boundaries and ensure they are zero (pristine sync)
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != 0 {
+		t.Fatalf("Pristine boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, 0)
+	}
+	// Create and sync with an attacker that promises a higher chain than available
+	tester.newPeer("attack", protocol, hashes, blocks)
+	for i := 1; i < 3; i++ {
+		delete(tester.peerBlocks["attack"], hashes[i])
+	}
+	pending := new(sync.WaitGroup)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("attack", nil); err == nil {
+			t.Fatalf("succeeded attacker synchronisation")
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(targetBlocks+3) {
+		t.Fatalf("Initial boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, targetBlocks+3)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+
+	// Synchronise with a good peer and check that the boundary height has been reduced to the true value
+	tester.newPeer("valid", protocol, hashes[3:], blocks)
+	pending.Add(1)
+
+	go func() {
+		defer pending.Done()
+		if err := tester.sync("valid", nil); err != nil {
+			t.Fatalf("failed to synchronise blocks: %v", err)
+		}
+	}()
+	<-starting
+	if origin, latest := tester.downloader.Boundaries(); origin != 0 || latest != uint64(targetBlocks) {
+		t.Fatalf("Initial boundary mismatch: have %v/%v, want %v/%v", origin, latest, 0, targetBlocks)
+	}
+	progress <- struct{}{}
+	pending.Wait()
+}
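All four boundary tests use the same handshake to read the counters at a deterministic point: the injected syncInitHook blocks the downloader on a starting channel until the test has called Boundaries(), then a send on progress lets the sync continue. A minimal stand-alone illustration of that handshake (hard-coded numbers, no real downloader involved):

package main

import "fmt"

func main() {
	starting := make(chan struct{})
	progress := make(chan struct{})
	done := make(chan struct{})

	// The hook the tests install: block the sync until the test has looked at the boundaries.
	syncInitHook := func(origin, latest uint64) {
		starting <- struct{}{} // a sync run has been initiated
		<-progress             // wait until the test releases it
	}

	go func() {
		// stands in for the downloader kicking off a sync run
		syncInitHook(0, 128)
		fmt.Println("sync continues")
		close(done)
	}()

	<-starting
	fmt.Println("test reads Boundaries() here, knowing the counters are set")
	progress <- struct{}{}
	<-done
}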
eth/downloader/queue.go (view file @ 1cc2f080)
@@ -57,6 +57,7 @@ type queue struct {
 	headerPool  map[common.Hash]*types.Header // [eth/62] Pending headers, mapping from their hashes
 	headerQueue *prque.Prque                  // [eth/62] Priority queue of the headers to fetch the bodies for
+	headerHead  common.Hash                   // [eth/62] Hash of the last queued header to verify order

 	pendPool map[string]*fetchRequest // Currently pending block retrieval operations
@@ -91,6 +92,7 @@ func (q *queue) Reset() {
 	q.headerPool = make(map[common.Hash]*types.Header)
 	q.headerQueue.Reset()
+	q.headerHead = common.Hash{}

 	q.pendPool = make(map[string]*fetchRequest)
@@ -186,7 +188,7 @@ func (q *queue) Insert61(hashes []common.Hash, fifo bool) []common.Hash {
...
@@ -186,7 +188,7 @@ func (q *queue) Insert61(hashes []common.Hash, fifo bool) []common.Hash {
// Insert adds a set of headers for the download queue for scheduling, returning
// Insert adds a set of headers for the download queue for scheduling, returning
// the new headers encountered.
// the new headers encountered.
func
(
q
*
queue
)
Insert
(
headers
[]
*
types
.
Header
)
[]
*
types
.
Header
{
func
(
q
*
queue
)
Insert
(
headers
[]
*
types
.
Header
,
from
uint64
)
[]
*
types
.
Header
{
q
.
lock
.
Lock
()
q
.
lock
.
Lock
()
defer
q
.
lock
.
Unlock
()
defer
q
.
lock
.
Unlock
()
@@ -196,13 +198,24 @@ func (q *queue) Insert(headers []*types.Header) []*types.Header {
 		// Make sure no duplicate requests are executed
 		hash := header.Hash()
 		if _, ok := q.headerPool[hash]; ok {
-			glog.V(logger.Warn).Infof("Header %x already scheduled", hash)
+			glog.V(logger.Warn).Infof("Header #%d [%x] already scheduled", header.Number.Uint64(), hash[:4])
 			continue
 		}
+		// Make sure chain order is honored and preserved throughout
+		if header.Number == nil || header.Number.Uint64() != from {
+			glog.V(logger.Warn).Infof("Header #%v [%x] broke chain ordering, expected %d", header.Number, hash[:4], from)
+			break
+		}
+		if q.headerHead != (common.Hash{}) && q.headerHead != header.ParentHash {
+			glog.V(logger.Warn).Infof("Header #%v [%x] broke chain ancestry", header.Number, hash[:4])
+			break
+		}
 		// Queue the header for body retrieval
 		inserts = append(inserts, header)
 		q.headerPool[hash] = header
 		q.headerQueue.Push(header, -float32(header.Number.Uint64()))
+		q.headerHead = hash
+		from++
 	}
 	return inserts
 }
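The two new checks are what makes the missing/shifted header attacks in the tests above fail fast: headers must be numbered contiguously starting at from, and each must link to the previously accepted hash. A hypothetical, stand-alone sketch of the same idea (toy header type with string hashes, not the real types.Header):

package main

import "fmt"

// header is a toy stand-in for *types.Header; hashes are plain strings here.
type header struct {
	number uint64
	hash   string
	parent string
}

// accept mirrors the ordering check added to queue.Insert: contiguous numbering
// starting at from, and every header must link to the previously accepted one.
func accept(headers []header, from uint64) []header {
	var accepted []header
	lastHash := ""
	for _, h := range headers {
		if h.number != from {
			fmt.Printf("header #%d broke chain ordering, expected %d\n", h.number, from)
			break
		}
		if lastHash != "" && h.parent != lastHash {
			fmt.Printf("header #%d broke chain ancestry\n", h.number)
			break
		}
		accepted = append(accepted, h)
		lastHash = h.hash
		from++
	}
	return accepted
}

func main() {
	headers := []header{
		{number: 5, hash: "e", parent: "d"},
		{number: 6, hash: "f", parent: "e"},
		{number: 8, hash: "h", parent: "g"}, // gap: #7 is missing, scheduling stops here
	}
	fmt.Println(len(accept(headers, 5))) // 2
}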
jsre/ethereum_js.go (view file @ 1cc2f080)
@@ -650,7 +650,7 @@ module.exports = SolidityTypeBytes;
   You should have received a copy of the GNU Lesser General Public License
   along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
 */
 /**
  * @file coder.js
  * @author Marek Kotewicz <marek@ethdev.com>
  * @date 2015

@@ -680,7 +680,7 @@ var SolidityCoder = function (types) {
  *
  * @method _requireType
  * @param {String} type
  * @returns {SolidityType}
  * @throws {Error} throws if no matching type is found
  */
 SolidityCoder.prototype._requireType = function (type) {

@@ -726,7 +726,7 @@ SolidityCoder.prototype.encodeParams = function (types, params) {
         return acc + solidityType.staticPartLength(types[index]);
     }, 0);
     var result = this.encodeMultiWithOffset(types, solidityTypes, encodeds, dynamicOffset);
     return result;
 };

@@ -751,7 +751,7 @@ SolidityCoder.prototype.encodeMultiWithOffset = function (types, solidityTypes,
         // TODO: figure out nested arrays
     });
     types.forEach(function (type, i) {
         if (isDynamic(i)) {
             var e = self.encodeWithOffset(types[i], solidityTypes[i], encodeds[i], dynamicOffset);

@@ -771,7 +771,7 @@ SolidityCoder.prototype.encodeWithOffset = function (type, solidityType, encoded
         var nestedName = solidityType.nestedName(type);
         var nestedStaticPartLength = solidityType.staticPartLength(nestedName);
         var result = encoded[0];
         (function () {
             var previousLength = 2; // in int
             if (solidityType.isDynamicArray(nestedName)) {

@@ -781,7 +781,7 @@ SolidityCoder.prototype.encodeWithOffset = function (type, solidityType, encoded
             }
         }
     })();
     // first element is length, skip it
     (function () {
         for (var i = 0; i < encoded.length - 1; i++) {

@@ -792,7 +792,7 @@ SolidityCoder.prototype.encodeWithOffset = function (type, solidityType, encoded
         return result;
     })();
 } else if (solidityType.isStaticArray(type)) {
     return (function () {
         var nestedName = solidityType.nestedName(type);

@@ -805,7 +805,7 @@ SolidityCoder.prototype.encodeWithOffset = function (type, solidityType, encoded
         var previousLength = 0; // in int
         for (var i = 0; i < encoded.length; i++) {
             // calculate length of previous item
             previousLength += +(encoded[i - 1] || [])[0] || 0;
             result += f.formatInputInt(offset + i * nestedStaticPartLength + previousLength * 32).encode();
         }
     })();

@@ -848,7 +848,7 @@ SolidityCoder.prototype.decodeParam = function (type, bytes) {
 SolidityCoder.prototype.decodeParams = function (types, bytes) {
     var solidityTypes = this.getSolidityTypes(types);
     var offsets = this.getOffsets(types, solidityTypes);
     return solidityTypes.map(function (solidityType, index) {
         return solidityType.decode(bytes, offsets[index], types[index], index);
     });

@@ -856,10 +856,10 @@ SolidityCoder.prototype.decodeParams = function (types, bytes) {
 SolidityCoder.prototype.getOffsets = function (types, solidityTypes) {
     var lengths = solidityTypes.map(function (solidityType, index) {
         return solidityType.staticPartLength(types[index]);
         // get length
     });
     for (var i = 0; i < lengths.length; i++) {
         // sum with length of previous element
         var previous = (lengths[i - 1] || 0);

@@ -938,7 +938,7 @@ module.exports = SolidityTypeDynamicBytes;
   You should have received a copy of the GNU Lesser General Public License
   along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
 */
 /**
  * @file formatters.js
  * @author Marek Kotewicz <marek@ethdev.com>
  * @date 2015

@@ -1002,7 +1002,7 @@ var formatInputDynamicBytes = function (value) {
  * @returns {SolidityParam}
  */
 var formatInputString = function (value) {
-    var result = utils.fromAscii(value).substr(2);
+    var result = utils.fromUtf8(value).substr(2);
     var length = result.length / 2;
     var l = Math.floor((result.length + 63) / 64);
     result = utils.padRight(result, l * 64);

@@ -1082,7 +1082,7 @@ var formatOutputUInt = function (param) {
  * @returns {BigNumber} input bytes formatted to real
  */
 var formatOutputReal = function (param) {
     return formatOutputInt(param).dividedBy(new BigNumber(2).pow(128));
 };
 /**

@@ -1093,7 +1093,7 @@ var formatOutputReal = function (param) {
  * @returns {BigNumber} input bytes formatted to ureal
  */
 var formatOutputUReal = function (param) {
     return formatOutputUInt(param).dividedBy(new BigNumber(2).pow(128));
 };
 /**

@@ -1139,7 +1139,7 @@ var formatOutputDynamicBytes = function (param) {
  */
 var formatOutputString = function (param) {
     var length = (new BigNumber(param.dynamicPart().slice(0, 64), 16)).toNumber() * 2;
-    return utils.toAscii(param.dynamicPart().substr(64, length));
+    return utils.toUtf8(param.dynamicPart().substr(64, length));
 };
 /**

@@ -1228,7 +1228,7 @@ module.exports = SolidityTypeInt;
   You should have received a copy of the GNU Lesser General Public License
   along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
 */
 /**
  * @file param.js
  * @author Marek Kotewicz <marek@ethdev.com>
  * @date 2015

@@ -1247,7 +1247,7 @@ var SolidityParam = function (value, offset) {
 /**
  * This method should be used to get length of params's dynamic part
  *
  * @method dynamicPartLength
  * @returns {Number} length of dynamic part (in bytes)
  */

@@ -1275,7 +1275,7 @@ SolidityParam.prototype.withOffset = function (offset) {
  * @param {SolidityParam} result of combination
  */
 SolidityParam.prototype.combine = function (param) {
     return new SolidityParam(this.value + param.value);
 };
 /**

@@ -1307,8 +1307,8 @@ SolidityParam.prototype.offsetAsBytes = function () {
  */
 SolidityParam.prototype.staticPart = function () {
     if (!this.isDynamic()) {
         return this.value;
     }
     return this.offsetAsBytes();
 };

@@ -1340,7 +1340,7 @@ SolidityParam.prototype.encode = function () {
  * @returns {String}
  */
 SolidityParam.encodeList = function (params) {
     // updating offsets
     var totalOffset = params.length * 32;
     var offsetParams = params.map(function (param) {

@@ -1466,13 +1466,13 @@ SolidityType.prototype.staticPartLength = function (name) {
 /**
  * Should be used to determine if type is dynamic array
  * eg:
  * "type[]" => true
  * "type[4]" => false
  *
  * @method isDynamicArray
  * @param {String} name
  * @return {Bool} true if the type is dynamic array
  */
 SolidityType.prototype.isDynamicArray = function (name) {
     var nestedTypes = this.nestedTypes(name);

@@ -1481,13 +1481,13 @@ SolidityType.prototype.isDynamicArray = function (name) {
 /**
  * Should be used to determine if type is static array
  * eg:
  * "type[]" => false
  * "type[4]" => true
  *
  * @method isStaticArray
  * @param {String} name
  * @return {Bool} true if the type is static array
  */
 SolidityType.prototype.isStaticArray = function (name) {
     var nestedTypes = this.nestedTypes(name);

@@ -1496,7 +1496,7 @@ SolidityType.prototype.isStaticArray = function (name) {
 /**
  * Should return length of static array
  * eg.
  * "int[32]" => 32
  * "int256[14]" => 14
  * "int[2][3]" => 3

@@ -1571,7 +1571,7 @@ SolidityType.prototype.nestedTypes = function (name) {
  * Should be used to encode the value
  *
  * @method encode
  * @param {Object} value
  * @param {String} name
  * @return {String} encoded value
  */

@@ -1585,7 +1585,7 @@ SolidityType.prototype.encode = function (value, name) {
         var result = [];
         result.push(f.formatInputInt(length).encode());
         value.forEach(function (v) {
             result.push(self.encode(v, nestedName));
         });

@@ -1659,12 +1659,12 @@ SolidityType.prototype.decode = function (bytes, offset, name) {
         return result;
     })();
 } else if (this.isDynamicType(name)) {
     return (function () {
         var dynamicOffset = parseInt('0x' + bytes.substr(offset * 2, 64)); // in bytes
         var length = parseInt('0x' + bytes.substr(dynamicOffset * 2, 64)); // in bytes
         var roundedLength = Math.floor((length + 31) / 32); // in int
         return self._outputFormatter(new SolidityParam(bytes.substr(dynamicOffset * 2, (1 + roundedLength) * 64), 0));
})();
})();
}
}
...
@@ -1697,7 +1697,7 @@ var SolidityType = require('./type');
...
@@ -1697,7 +1697,7 @@ var SolidityType = require('./type');
*/
*/
var SolidityTypeUInt = function () {
var SolidityTypeUInt = function () {
this._inputFormatter = f.formatInputInt;
this._inputFormatter = f.formatInputInt;
this._outputFormatter = f.formatOutputInt;
this._outputFormatter = f.formatOutputUInt;
};
};
SolidityTypeUInt.prototype = new SolidityType({});
SolidityTypeUInt.prototype = new SolidityType({});
...
@@ -1787,13 +1787,13 @@ if (typeof XMLHttpRequest === 'undefined') {
...
@@ -1787,13 +1787,13 @@ if (typeof XMLHttpRequest === 'undefined') {
/**
/**
* Utils
* Utils
*
*
* @module utils
* @module utils
*/
*/
/**
/**
* Utility functions
* Utility functions
*
*
* @class [utils] config
* @class [utils] config
* @constructor
* @constructor
*/
*/
...
@@ -1860,7 +1860,7 @@ module.exports = {
...
@@ -1860,7 +1860,7 @@ module.exports = {
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file sha3.js
* @file sha3.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -1876,7 +1876,7 @@ module.exports = function (str, isNew) {
...
@@ -1876,7 +1876,7 @@ module.exports = function (str, isNew) {
console.warn('new usage: \'web3.sha3("hello")\'');
console.warn('new usage: \'web3.sha3("hello")\'');
console.warn('see https://github.com/ethereum/web3.js/pull/205');
console.warn('see https://github.com/ethereum/web3.js/pull/205');
console.warn('if you need to hash hex value, you can do \'sha3("0xfff", true)\'');
console.warn('if you need to hash hex value, you can do \'sha3("0xfff", true)\'');
str = utils.toAscii(str);
str = utils.toUtf8(str);
}
}
return sha3(str, {
return sha3(str, {
...
@@ -1885,7 +1885,7 @@ module.exports = function (str, isNew) {
...
@@ -1885,7 +1885,7 @@ module.exports = function (str, isNew) {
};
};
},{"./utils":20,"crypto-js/sha3":4
7
}],20:[function(require,module,exports){
},{"./utils":20,"crypto-js/sha3":4
8
}],20:[function(require,module,exports){
/*
/*
This file is part of ethereum.js.
This file is part of ethereum.js.
...
@@ -1902,7 +1902,7 @@ module.exports = function (str, isNew) {
...
@@ -1902,7 +1902,7 @@ module.exports = function (str, isNew) {
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file utils.js
* @file utils.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -1910,19 +1910,20 @@ module.exports = function (str, isNew) {
...
@@ -1910,19 +1910,20 @@ module.exports = function (str, isNew) {
/**
/**
* Utils
* Utils
*
*
* @module utils
* @module utils
*/
*/
/**
/**
* Utility functions
* Utility functions
*
*
* @class [utils] utils
* @class [utils] utils
* @constructor
* @constructor
*/
*/
var BigNumber = require('bignumber.js');
var BigNumber = require('bignumber.js');
var utf8 = require('utf8');
var unitMap = {
var unitMap = {
'wei': '1',
'wei': '1',
...
@@ -1977,9 +1978,30 @@ var padRight = function (string, chars, sign) {
...
@@ -1977,9 +1978,30 @@ var padRight = function (string, chars, sign) {
return string + (new Array(chars - string.length + 1).join(sign ? sign : "0"));
return string + (new Array(chars - string.length + 1).join(sign ? sign : "0"));
};
};
/**
/**
* Should be called to get sting from it's hex representation
* Should be called to get utf8 from it's hex representation
* TODO: it should be called toUTF8
*
* @method toUtf8
* @param {String} string in hex
* @returns {String} ascii string representation of hex value
*/
var toUtf8 = function(hex) {
// Find termination
var str = "";
var i = 0, l = hex.length;
if (hex.substring(0, 2) === '0x') {
i = 2;
}
for (; i < l; i+=2) {
var code = parseInt(hex.substr(i, 2), 16);
str += String.fromCharCode(code);
}
return utf8.decode(str);
};
/**
* Should be called to get ascii from it's hex representation
*
*
* @method toAscii
* @method toAscii
* @param {String} string in hex
* @param {String} string in hex
...
@@ -1997,40 +2019,44 @@ var toAscii = function(hex) {
...
@@ -1997,40 +2019,44 @@ var toAscii = function(hex) {
str += String.fromCharCode(code);
str += String.fromCharCode(code);
}
}
return decodeURIComponent(escape(str)); // jshint ignore:line
return str;
};
};
/**
/**
* Shold be called to get hex representation (prefixed by 0x) of ascii string
* Shold be called to get hex representation (prefixed by 0x) of utf8 string
*
*
* @method toHexNative
* @method fromUtf8
* @param {String} string
* @param {String} string
* @param {Number} optional padding
* @returns {String} hex representation of input string
* @returns {String} hex representation of input string
*/
*/
var toHexNative = function(str) {
var fromUtf8 = function(str) {
str = unescape(encodeURIComponent(str)); // jshint ignore:line
str = utf8.encode(str);
var hex = "";
var hex = "";
for(var i = 0; i < str.length; i++) {
for(var i = 0; i < str.length; i++) {
var n = str.charCodeAt(i).toString(16);
var n = str.charCodeAt(i).toString(16);
hex += n.length < 2 ? '0' + n : n;
hex += n.length < 2 ? '0' + n : n;
}
}
return hex;
return "0x" + hex;
};
};
/**
/**
* Shold be called to get hex representation (prefixed by 0x) of ascii string
* Shold be called to get hex representation (prefixed by 0x) of ascii string
*
*
* @method fromAscii
* @method fromAscii
* @param {String} string
* @param {String} string
* @param {Number} optional padding
* @param {Number} optional padding
* @returns {String} hex representation of input string
* @returns {String} hex representation of input string
*/
*/
var fromAscii = function(str, pad) {
pad = pad === undefined ? 0 : pad;
var hex = toHexNative(str);
while (hex.length < pad*2)
hex += "00";
return "0x" + hex;
var fromAscii = function(str) {
var hex = "";
for(var i = 0; i < str.length; i++) {
var code = str.charCodeAt(i);
var n = code.toString(16);
hex += n.length < 2 ? '0' + n : n;
}
return "0x" + hex;
};
};
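For reference, a minimal round-trip sketch of the new UTF-8 helpers added above (an annotation, not part of the diff; it assumes the bundled module is required as utils, and the values are illustrative):

var utils = require('./utils/utils');   // assumption: bundled path as in this file
var hex = utils.fromUtf8('héllo');      // "0x68c3a96c6c6f" – multi-byte chars go through utf8.encode()
var str = utils.toUtf8(hex);            // "héllo" – bytes decoded back via utf8.decode()
var old = utils.fromAscii('héllo');     // "0x68e96c6c6f" – one byte per char code, no UTF-8 handling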
...
@@ -2052,13 +2078,13 @@ var transformToFullName = function (json) {
...
@@ -2052,13 +2078,13 @@ var transformToFullName = function (json) {
/**
/**
* Should be called to get display name of contract function
* Should be called to get display name of contract function
*
*
* @method extractDisplayName
* @method extractDisplayName
* @param {String} name of function/event
* @param {String} name of function/event
* @returns {String} display name for function/event eg. multiply(uint256) -> multiply
* @returns {String} display name for function/event eg. multiply(uint256) -> multiply
*/
*/
var extractDisplayName = function (name) {
var extractDisplayName = function (name) {
var length = name.indexOf('(');
var length = name.indexOf('(');
return length !== -1 ? name.substr(0, length) : name;
return length !== -1 ? name.substr(0, length) : name;
};
};
...
@@ -2113,7 +2139,7 @@ var toHex = function (val) {
...
@@ -2113,7 +2139,7 @@ var toHex = function (val) {
return fromDecimal(val);
return fromDecimal(val);
if (isObject(val))
if (isObject(val))
return fromAscii(JSON.stringify(val));
return fromUtf8(JSON.stringify(val));
// if its a negative number, pass it through fromDecimal
// if its a negative number, pass it through fromDecimal
if (isString(val)) {
if (isString(val)) {
...
@@ -2156,7 +2182,7 @@ var getValueOfUnit = function (unit) {
...
@@ -2156,7 +2182,7 @@ var getValueOfUnit = function (unit) {
* - -- microether szabo micro
* - -- microether szabo micro
* - -- milliether finney milli
* - -- milliether finney milli
* - ether -- --
* - ether -- --
* - kether einstein grand
* - kether einstein grand
* - mether
* - mether
* - gether
* - gether
* - tether
* - tether
...
@@ -2169,7 +2195,7 @@ var getValueOfUnit = function (unit) {
...
@@ -2169,7 +2195,7 @@ var getValueOfUnit = function (unit) {
var fromWei = function(number, unit) {
var fromWei = function(number, unit) {
var returnValue = toBigNumber(number).dividedBy(getValueOfUnit(unit));
var returnValue = toBigNumber(number).dividedBy(getValueOfUnit(unit));
return isBigNumber(number) ? returnValue : returnValue.toString(10);
return isBigNumber(number) ? returnValue : returnValue.toString(10);
};
};
/**
/**
...
@@ -2178,12 +2204,12 @@ var fromWei = function(number, unit) {
...
@@ -2178,12 +2204,12 @@ var fromWei = function(number, unit) {
* Possible units are:
* Possible units are:
* SI Short SI Full Effigy Other
* SI Short SI Full Effigy Other
* - kwei femtoether ada
* - kwei femtoether ada
* - mwei picoether babbage
* - mwei picoether babbage
* - gwei nanoether shannon nano
* - gwei nanoether shannon nano
* - -- microether szabo micro
* - -- microether szabo micro
* - -- milliether finney milli
* - -- milliether finney milli
* - ether -- --
* - ether -- --
* - kether einstein grand
* - kether einstein grand
* - mether
* - mether
* - gether
* - gether
* - tether
* - tether
...
@@ -2196,7 +2222,7 @@ var fromWei = function(number, unit) {
...
@@ -2196,7 +2222,7 @@ var fromWei = function(number, unit) {
var toWei = function(number, unit) {
var toWei = function(number, unit) {
var returnValue = toBigNumber(number).times(getValueOfUnit(unit));
var returnValue = toBigNumber(number).times(getValueOfUnit(unit));
return isBigNumber(number) ? returnValue : returnValue.toString(10);
return isBigNumber(number) ? returnValue : returnValue.toString(10);
};
};
/**
/**
...
@@ -2215,7 +2241,7 @@ var toBigNumber = function(number) {
...
@@ -2215,7 +2241,7 @@ var toBigNumber = function(number) {
if (isString(number) && (number.indexOf('0x') === 0 || number.indexOf('-0x') === 0)) {
if (isString(number) && (number.indexOf('0x') === 0 || number.indexOf('-0x') === 0)) {
return new BigNumber(number.replace('0x',''), 16);
return new BigNumber(number.replace('0x',''), 16);
}
}
return new BigNumber(number.toString(10), 10);
return new BigNumber(number.toString(10), 10);
};
};
...
@@ -2242,7 +2268,7 @@ var toTwosComplement = function (number) {
...
@@ -2242,7 +2268,7 @@ var toTwosComplement = function (number) {
* @return {Boolean}
* @return {Boolean}
*/
*/
var isStrictAddress = function (address) {
var isStrictAddress = function (address) {
return /^0x[0-9a-f]{40}$/.test(address);
return /^0x[0-9a-f]{40}$/i.test(address);
};
};
/**
/**
...
@@ -2253,7 +2279,7 @@ var isStrictAddress = function (address) {
...
@@ -2253,7 +2279,7 @@ var isStrictAddress = function (address) {
* @return {Boolean}
* @return {Boolean}
*/
*/
var isAddress = function (address) {
var isAddress = function (address) {
return /^(0x)?[0-9a-f]{40}$/.test(address);
return /^(0x)?[0-9a-f]{40}$/i.test(address);
};
};
/**
/**
...
@@ -2267,7 +2293,7 @@ var toAddress = function (address) {
...
@@ -2267,7 +2293,7 @@ var toAddress = function (address) {
if (isStrictAddress(address)) {
if (isStrictAddress(address)) {
return address;
return address;
}
}
if (/^[0-9a-f]{40}$/.test(address)) {
if (/^[0-9a-f]{40}$/.test(address)) {
return '0x' + address;
return '0x' + address;
}
}
...
@@ -2281,7 +2307,7 @@ var toAddress = function (address) {
...
@@ -2281,7 +2307,7 @@ var toAddress = function (address) {
*
*
* @method isBigNumber
* @method isBigNumber
* @param {Object}
* @param {Object}
* @return {Boolean}
* @return {Boolean}
*/
*/
var isBigNumber = function (object) {
var isBigNumber = function (object) {
return object instanceof BigNumber ||
return object instanceof BigNumber ||
...
@@ -2290,7 +2316,7 @@ var isBigNumber = function (object) {
...
@@ -2290,7 +2316,7 @@ var isBigNumber = function (object) {
/**
/**
* Returns true if object is string, otherwise false
* Returns true if object is string, otherwise false
*
*
* @method isString
* @method isString
* @param {Object}
* @param {Object}
* @return {Boolean}
* @return {Boolean}
...
@@ -2341,12 +2367,12 @@ var isBoolean = function (object) {
...
@@ -2341,12 +2367,12 @@ var isBoolean = function (object) {
* @return {Boolean}
* @return {Boolean}
*/
*/
var isArray = function (object) {
var isArray = function (object) {
return object instanceof Array;
return object instanceof Array;
};
};
/**
/**
* Returns true if given string is valid json object
* Returns true if given string is valid json object
*
*
* @method isJson
* @method isJson
* @param {String}
* @param {String}
* @return {Boolean}
* @return {Boolean}
...
@@ -2365,7 +2391,9 @@ module.exports = {
...
@@ -2365,7 +2391,9 @@ module.exports = {
toHex: toHex,
toHex: toHex,
toDecimal: toDecimal,
toDecimal: toDecimal,
fromDecimal: fromDecimal,
fromDecimal: fromDecimal,
toUtf8: toUtf8,
toAscii: toAscii,
toAscii: toAscii,
fromUtf8: fromUtf8,
fromAscii: fromAscii,
fromAscii: fromAscii,
transformToFullName: transformToFullName,
transformToFullName: transformToFullName,
extractDisplayName: extractDisplayName,
extractDisplayName: extractDisplayName,
...
@@ -2386,10 +2414,9 @@ module.exports = {
...
@@ -2386,10 +2414,9 @@ module.exports = {
isJson: isJson
isJson: isJson
};
};
},{"bignumber.js":"bignumber.js","utf8":50}],21:[function(require,module,exports){
},{"bignumber.js":"bignumber.js"}],21:[function(require,module,exports){
module.exports={
module.exports={
"version": "0.1
2.1
"
"version": "0.1
3.0
"
}
}
},{}],22:[function(require,module,exports){
},{}],22:[function(require,module,exports){
...
@@ -2426,6 +2453,7 @@ var db = require('./web3/methods/db');
...
@@ -2426,6 +2453,7 @@ var db = require('./web3/methods/db');
var shh = require('./web3/methods/shh');
var shh = require('./web3/methods/shh');
var watches = require('./web3/methods/watches');
var watches = require('./web3/methods/watches');
var Filter = require('./web3/filter');
var Filter = require('./web3/filter');
var IsSyncing = require('./web3/syncing');
var utils = require('./utils/utils');
var utils = require('./utils/utils');
var formatters = require('./web3/formatters');
var formatters = require('./web3/formatters');
var RequestManager = require('./web3/requestmanager');
var RequestManager = require('./web3/requestmanager');
...
@@ -2480,6 +2508,10 @@ web3.version = {};
...
@@ -2480,6 +2508,10 @@ web3.version = {};
web3.version.api = version.version;
web3.version.api = version.version;
web3.eth = {};
web3.eth = {};
web3.eth.isSyncing = function (callback) {
return new IsSyncing(callback);
};
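A usage sketch for the helper wired up above (assumption: a provider is already set on this web3 instance; the callback semantics follow the IsSyncing implementation further down in this diff):

var sync = web3.eth.isSyncing(function (error, result) {
    if (error) { return console.error(error); }
    if (result === true) {
        // first notification after syncing starts
    } else if (result) {
        // progress object, e.g. { startingBlock: 900, currentBlock: 902, highestBlock: 1108 }
        console.log('synced ' + result.currentBlock + ' of ' + result.highestBlock);
    } else {
        // syncing stopped (eth_syncing returns false again)
        sync.stopWatching();
    }
});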
/*jshint maxparams:4 */
/*jshint maxparams:4 */
web3.eth.filter = function (fil, callback) {
web3.eth.filter = function (fil, callback) {
return new Filter(fil, watches.eth(), formatters.outputLogFormatter, callback);
return new Filter(fil, watches.eth(), formatters.outputLogFormatter, callback);
...
@@ -2499,14 +2531,16 @@ web3.setProvider = function (provider) {
...
@@ -2499,14 +2531,16 @@ web3.setProvider = function (provider) {
web3.isConnected = function(){
web3.isConnected = function(){
return (this.currentProvider && this.currentProvider.isConnected());
return (this.currentProvider && this.currentProvider.isConnected());
};
};
web3.reset = function () {
web3.reset = function (keepIsSyncing) {
RequestManager.getInstance().reset();
RequestManager.getInstance().reset(keepIsSyncing);
c.defaultBlock = 'latest';
c.defaultBlock = 'latest';
c.defaultAccount = undefined;
c.defaultAccount = undefined;
};
};
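The new optional flag threaded through reset() above can be exercised like this (a sketch, not part of the diff):

web3.reset(true);  // uninstalls filters and polls but keeps 'syncPoll_*' entries, so isSyncing() keeps reporting
web3.reset();      // uninstalls everything, including sync polls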
web3.toHex = utils.toHex;
web3.toHex = utils.toHex;
web3.toAscii = utils.toAscii;
web3.toAscii = utils.toAscii;
web3.toUtf8 = utils.toUtf8;
web3.fromAscii = utils.fromAscii;
web3.fromAscii = utils.fromAscii;
web3.fromUtf8 = utils.fromUtf8;
web3.toDecimal = utils.toDecimal;
web3.toDecimal = utils.toDecimal;
web3.fromDecimal = utils.fromDecimal;
web3.fromDecimal = utils.fromDecimal;
web3.toBigNumber = utils.toBigNumber;
web3.toBigNumber = utils.toBigNumber;
...
@@ -2569,7 +2603,7 @@ setupMethods(web3.shh, shh.methods);
...
@@ -2569,7 +2603,7 @@ setupMethods(web3.shh, shh.methods);
module.exports = web3;
module.exports = web3;
},{"./utils/config":18,"./utils/sha3":19,"./utils/utils":20,"./version.json":21,"./web3/batch":24,"./web3/filter":28,"./web3/formatters":29,"./web3/method":35,"./web3/methods/db":36,"./web3/methods/eth":37,"./web3/methods/net":38,"./web3/methods/shh":39,"./web3/methods/watches":40,"./web3/property":42,"./web3/requestmanager":43}],23:[function(require,module,exports){
},{"./utils/config":18,"./utils/sha3":19,"./utils/utils":20,"./version.json":21,"./web3/batch":24,"./web3/filter":28,"./web3/formatters":29,"./web3/method":35,"./web3/methods/db":36,"./web3/methods/eth":37,"./web3/methods/net":38,"./web3/methods/shh":39,"./web3/methods/watches":40,"./web3/property":42,"./web3/requestmanager":43
,"./web3/syncing":44
}],23:[function(require,module,exports){
/*
/*
This file is part of ethereum.js.
This file is part of ethereum.js.
...
@@ -2586,7 +2620,7 @@ module.exports = web3;
...
@@ -2586,7 +2620,7 @@ module.exports = web3;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file allevents.js
* @file allevents.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2014
* @date 2014
...
@@ -2675,7 +2709,7 @@ module.exports = AllSolidityEvents;
...
@@ -2675,7 +2709,7 @@ module.exports = AllSolidityEvents;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file batch.js
* @file batch.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -2720,7 +2754,7 @@ Batch.prototype.execute = function () {
...
@@ -2720,7 +2754,7 @@ Batch.prototype.execute = function () {
requests[index].callback(null, (requests[index].format ? requests[index].format(result.result) : result.result));
requests[index].callback(null, (requests[index].format ? requests[index].format(result.result) : result.result));
}
}
});
});
});
});
};
};
module.exports = Batch;
module.exports = Batch;
...
@@ -2743,13 +2777,13 @@ module.exports = Batch;
...
@@ -2743,13 +2777,13 @@ module.exports = Batch;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file contract.js
* @file contract.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2014
* @date 2014
*/
*/
var web3 = require('../web3');
var web3 = require('../web3');
var utils = require('../utils/utils');
var utils = require('../utils/utils');
var coder = require('../solidity/coder');
var coder = require('../solidity/coder');
var SolidityEvent = require('./event');
var SolidityEvent = require('./event');
...
@@ -2806,7 +2840,7 @@ var addEventsToContract = function (contract, abi) {
...
@@ -2806,7 +2840,7 @@ var addEventsToContract = function (contract, abi) {
var All = new AllEvents(events, contract.address);
var All = new AllEvents(events, contract.address);
All.attachToContract(contract);
All.attachToContract(contract);
events.map(function (json) {
events.map(function (json) {
return new SolidityEvent(json, contract.address);
return new SolidityEvent(json, contract.address);
}).forEach(function (e) {
}).forEach(function (e) {
...
@@ -2846,7 +2880,7 @@ var checkForContractAddress = function(contract, abi, callback){
...
@@ -2846,7 +2880,7 @@ var checkForContractAddress = function(contract, abi, callback){
// stop watching after 50 blocks (timeout)
// stop watching after 50 blocks (timeout)
if(count > 50) {
if(count > 50) {
filter.stopWatching();
filter.stopWatching();
callbackFired = true;
callbackFired = true;
...
@@ -2866,7 +2900,7 @@ var checkForContractAddress = function(contract, abi, callback){
...
@@ -2866,7 +2900,7 @@ var checkForContractAddress = function(contract, abi, callback){
if(callbackFired)
if(callbackFired)
return;
return;
filter.stopWatching();
filter.stopWatching();
callbackFired = true;
callbackFired = true;
...
@@ -2910,7 +2944,7 @@ var ContractFactory = function (abi) {
...
@@ -2910,7 +2944,7 @@ var ContractFactory = function (abi) {
/**
/**
* Should be called to create new contract on a blockchain
* Should be called to create new contract on a blockchain
*
*
* @method new
* @method new
* @param {Any} contract constructor param1 (optional)
* @param {Any} contract constructor param1 (optional)
* @param {Any} contract constructor param2 (optional)
* @param {Any} contract constructor param2 (optional)
...
@@ -2984,10 +3018,10 @@ ContractFactory.prototype.at = function (address, callback) {
...
@@ -2984,10 +3018,10 @@ ContractFactory.prototype.at = function (address, callback) {
// attach functions
// attach functions
addFunctionsToContract(contract, this.abi);
addFunctionsToContract(contract, this.abi);
addEventsToContract(contract, this.abi);
addEventsToContract(contract, this.abi);
if (callback) {
if (callback) {
callback(null, contract);
callback(null, contract);
}
}
return contract;
return contract;
};
};
...
@@ -3022,7 +3056,7 @@ module.exports = contract;
...
@@ -3022,7 +3056,7 @@ module.exports = contract;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file errors.js
* @file errors.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -3062,7 +3096,7 @@ module.exports = {
...
@@ -3062,7 +3096,7 @@ module.exports = {
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file event.js
* @file event.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2014
* @date 2014
...
@@ -3132,7 +3166,7 @@ SolidityEvent.prototype.signature = function () {
...
@@ -3132,7 +3166,7 @@ SolidityEvent.prototype.signature = function () {
/**
/**
* Should be used to encode indexed params and options to one final object
* Should be used to encode indexed params and options to one final object
*
*
* @method encode
* @method encode
* @param {Object} indexed
* @param {Object} indexed
* @param {Object} options
* @param {Object} options
...
@@ -3163,7 +3197,7 @@ SolidityEvent.prototype.encode = function (indexed, options) {
...
@@ -3163,7 +3197,7 @@ SolidityEvent.prototype.encode = function (indexed, options) {
if (value === undefined || value === null) {
if (value === undefined || value === null) {
return null;
return null;
}
}
if (utils.isArray(value)) {
if (utils.isArray(value)) {
return value.map(function (v) {
return value.map(function (v) {
return '0x' + coder.encodeParam(i.type, v);
return '0x' + coder.encodeParam(i.type, v);
...
@@ -3185,17 +3219,17 @@ SolidityEvent.prototype.encode = function (indexed, options) {
...
@@ -3185,17 +3219,17 @@ SolidityEvent.prototype.encode = function (indexed, options) {
* @return {Object} result object with decoded indexed && not indexed params
* @return {Object} result object with decoded indexed && not indexed params
*/
*/
SolidityEvent.prototype.decode = function (data) {
SolidityEvent.prototype.decode = function (data) {
data.data = data.data || '';
data.data = data.data || '';
data.topics = data.topics || [];
data.topics = data.topics || [];
var argTopics = this._anonymous ? data.topics : data.topics.slice(1);
var argTopics = this._anonymous ? data.topics : data.topics.slice(1);
var indexedData = argTopics.map(function (topics) { return topics.slice(2); }).join("");
var indexedData = argTopics.map(function (topics) { return topics.slice(2); }).join("");
var indexedParams = coder.decodeParams(this.types(true), indexedData);
var indexedParams = coder.decodeParams(this.types(true), indexedData);
var notIndexedData = data.data.slice(2);
var notIndexedData = data.data.slice(2);
var notIndexedParams = coder.decodeParams(this.types(false), notIndexedData);
var notIndexedParams = coder.decodeParams(this.types(false), notIndexedData);
var result = formatters.outputLogFormatter(data);
var result = formatters.outputLogFormatter(data);
result.event = this.displayName();
result.event = this.displayName();
result.address = data.address;
result.address = data.address;
...
@@ -3230,7 +3264,7 @@ SolidityEvent.prototype.execute = function (indexed, options, callback) {
...
@@ -3230,7 +3264,7 @@ SolidityEvent.prototype.execute = function (indexed, options, callback) {
indexed = {};
indexed = {};
}
}
}
}
var o = this.encode(indexed, options);
var o = this.encode(indexed, options);
var formatter = this.decode.bind(this);
var formatter = this.decode.bind(this);
return new Filter(o, watches.eth(), formatter, callback);
return new Filter(o, watches.eth(), formatter, callback);
...
@@ -3301,7 +3335,7 @@ var toTopic = function(value){
...
@@ -3301,7 +3335,7 @@ var toTopic = function(value){
if(value.indexOf('0x') === 0)
if(value.indexOf('0x') === 0)
return value;
return value;
else
else
return utils.fromAscii(value);
return utils.fromUtf8(value);
};
};
/// This method should be called on options object, to verify deprecated properties && lazy load dynamic ones
/// This method should be called on options object, to verify deprecated properties && lazy load dynamic ones
...
@@ -3311,7 +3345,7 @@ var getOptions = function (options) {
...
@@ -3311,7 +3345,7 @@ var getOptions = function (options) {
if (utils.isString(options)) {
if (utils.isString(options)) {
return options;
return options;
}
}
options = options || {};
options = options || {};
...
@@ -3327,8 +3361,8 @@ var getOptions = function (options) {
...
@@ -3327,8 +3361,8 @@ var getOptions = function (options) {
to: options.to,
to: options.to,
address: options.address,
address: options.address,
fromBlock: formatters.inputBlockNumberFormatter(options.fromBlock),
fromBlock: formatters.inputBlockNumberFormatter(options.fromBlock),
toBlock: formatters.inputBlockNumberFormatter(options.toBlock)
toBlock: formatters.inputBlockNumberFormatter(options.toBlock)
};
};
};
};
/**
/**
...
@@ -3336,7 +3370,7 @@ Adds the callback and sets up the methods, to iterate over the results.
...
@@ -3336,7 +3370,7 @@ Adds the callback and sets up the methods, to iterate over the results.
@method getLogsAtStart
@method getLogsAtStart
@param {Object} self
@param {Object} self
@param {funciton}
@param {funciton}
*/
*/
var getLogsAtStart = function(self, callback){
var getLogsAtStart = function(self, callback){
// call getFilterLogs for the first watch callback start
// call getFilterLogs for the first watch callback start
...
@@ -3371,12 +3405,14 @@ var pollFilter = function(self) {
...
@@ -3371,12 +3405,14 @@ var pollFilter = function(self) {
});
});
}
}
messages.forEach(function (message) {
message = self.formatter ? self.formatter(message) : message;
self.callbacks.forEach(function (callback) {
callback(null, message);
});
});
};
if(utils.isArray(messages)) {
messages.forEach(function (message) {
message = self.formatter ? self.formatter(message) : message;
self.callbacks.forEach(function (callback) {
callback(null, message);
});
});
}
};
RequestManager.getInstance().startPolling({
RequestManager.getInstance().startPolling({
...
@@ -3396,6 +3432,7 @@ var Filter = function (options, methods, formatter, callback) {
...
@@ -3396,6 +3432,7 @@ var Filter = function (options, methods, formatter, callback) {
this.implementation = implementation;
this.implementation = implementation;
this.filterId = null;
this.filterId = null;
this.callbacks = [];
this.callbacks = [];
this.getLogsCallbacks = [];
this.pollFilters = [];
this.pollFilters = [];
this.formatter = formatter;
this.formatter = formatter;
this.implementation.newFilter(this.options, function(error, id){
this.implementation.newFilter(this.options, function(error, id){
...
@@ -3406,6 +3443,13 @@ var Filter = function (options, methods, formatter, callback) {
...
@@ -3406,6 +3443,13 @@ var Filter = function (options, methods, formatter, callback) {
} else {
} else {
self.filterId = id;
self.filterId = id;
// check if there are get pending callbacks as a consequence
// of calling get() with filterId unassigned.
self.getLogsCallbacks.forEach(function (cb){
self.get(cb);
});
self.getLogsCallbacks = [];
// get filter logs for the already existing watch calls
// get filter logs for the already existing watch calls
self.callbacks.forEach(function(cb){
self.callbacks.forEach(function(cb){
getLogsAtStart(self, cb);
getLogsAtStart(self, cb);
...
@@ -3444,16 +3488,25 @@ Filter.prototype.stopWatching = function () {
...
@@ -3444,16 +3488,25 @@ Filter.prototype.stopWatching = function () {
Filter.prototype.get = function (callback) {
Filter.prototype.get = function (callback) {
var self = this;
var self = this;
if (utils.isFunction(callback)) {
if (utils.isFunction(callback)) {
this.implementation.getLogs(this.filterId, function(err, res){
if (err) {
callback(err);
} else {
callback(null, res.map(function (log) {
return self.formatter ? self.formatter(log) : log;
}));
}
});
if (this.filterId === null) {
// If filterId is not set yet, call it back
// when newFilter() assigns it.
this.getLogsCallbacks.push(callback);
} else {
this.implementation.getLogs(this.filterId, function(err, res){
if (err) {
callback(err);
} else {
callback(null, res.map(function (log) {
return self.formatter ? self.formatter(log) : log;
}));
}
});
}
} else {
} else {
if (this.filterId === null) {
throw new Error('Filter ID Error: filter().get() can\'t be chained synchronous, please provide a callback for the get() method.');
}
var logs = this.implementation.getLogs(this.filterId);
var logs = this.implementation.getLogs(this.filterId);
return logs.map(function (log) {
return logs.map(function (log) {
return self.formatter ? self.formatter(log) : log;
return self.formatter ? self.formatter(log) : log;
...
@@ -3483,7 +3536,7 @@ module.exports = Filter;
...
@@ -3483,7 +3536,7 @@ module.exports = Filter;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file formatters.js
* @file formatters.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @author Fabian Vogelsteller <fabian@ethdev.com>
* @author Fabian Vogelsteller <fabian@ethdev.com>
...
@@ -3550,7 +3603,7 @@ var inputCallFormatter = function (options){
...
@@ -3550,7 +3603,7 @@ var inputCallFormatter = function (options){
options[key] = utils.fromDecimal(options[key]);
options[key] = utils.fromDecimal(options[key]);
});
});
return options;
return options;
};
};
/**
/**
...
@@ -3575,12 +3628,12 @@ var inputTransactionFormatter = function (options){
...
@@ -3575,12 +3628,12 @@ var inputTransactionFormatter = function (options){
options[key] = utils.fromDecimal(options[key]);
options[key] = utils.fromDecimal(options[key]);
});
});
return options;
return options;
};
};
/**
/**
* Formats the output of a transaction to its proper values
* Formats the output of a transaction to its proper values
*
*
* @method outputTransactionFormatter
* @method outputTransactionFormatter
* @param {Object} tx
* @param {Object} tx
* @returns {Object}
* @returns {Object}
...
@@ -3599,7 +3652,7 @@ var outputTransactionFormatter = function (tx){
...
@@ -3599,7 +3652,7 @@ var outputTransactionFormatter = function (tx){
/**
/**
* Formats the output of a transaction receipt to its proper values
* Formats the output of a transaction receipt to its proper values
*
*
* @method outputTransactionReceiptFormatter
* @method outputTransactionReceiptFormatter
* @param {Object} receipt
* @param {Object} receipt
* @returns {Object}
* @returns {Object}
...
@@ -3625,7 +3678,7 @@ var outputTransactionReceiptFormatter = function (receipt){
...
@@ -3625,7 +3678,7 @@ var outputTransactionReceiptFormatter = function (receipt){
* Formats the output of a block to its proper values
* Formats the output of a block to its proper values
*
*
* @method outputBlockFormatter
* @method outputBlockFormatter
* @param {Object} block
* @param {Object} block
* @returns {Object}
* @returns {Object}
*/
*/
var outputBlockFormatter = function(block) {
var outputBlockFormatter = function(block) {
...
@@ -3653,7 +3706,7 @@ var outputBlockFormatter = function(block) {
...
@@ -3653,7 +3706,7 @@ var outputBlockFormatter = function(block) {
/**
/**
* Formats the output of a log
* Formats the output of a log
*
*
* @method outputLogFormatter
* @method outputLogFormatter
* @param {Object} log object
* @param {Object} log object
* @returns {Object} log
* @returns {Object} log
...
@@ -3690,10 +3743,10 @@ var inputPostFormatter = function(post) {
...
@@ -3690,10 +3743,10 @@ var inputPostFormatter = function(post) {
// format the following options
// format the following options
post.topics = post.topics.map(function(topic){
post.topics = post.topics.map(function(topic){
return utils.fromAscii(topic);
return utils.fromUtf8(topic);
});
});
return post;
return post;
};
};
/**
/**
...
@@ -3710,7 +3763,7 @@ var outputPostFormatter = function(post){
...
@@ -3710,7 +3763,7 @@ var outputPostFormatter = function(post){
post.ttl = utils.toDecimal(post.ttl);
post.ttl = utils.toDecimal(post.ttl);
post.workProved = utils.toDecimal(post.workProved);
post.workProved = utils.toDecimal(post.workProved);
post.payloadRaw = post.payload;
post.payloadRaw = post.payload;
post.payload = utils.toAscii(post.payload);
post.payload = utils.toUtf8(post.payload);
if (utils.isJson(post.payload)) {
if (utils.isJson(post.payload)) {
post.payload = JSON.parse(post.payload);
post.payload = JSON.parse(post.payload);
...
@@ -3721,7 +3774,7 @@ var outputPostFormatter = function(post){
...
@@ -3721,7 +3774,7 @@ var outputPostFormatter = function(post){
post.topics = [];
post.topics = [];
}
}
post.topics = post.topics.map(function(topic){
post.topics = post.topics.map(function(topic){
return utils.toAscii(topic);
return utils.toUtf8(topic);
});
});
return post;
return post;
...
@@ -3739,6 +3792,16 @@ var inputAddressFormatter = function (address) {
...
@@ -3739,6 +3792,16 @@ var inputAddressFormatter = function (address) {
throw 'invalid address';
throw 'invalid address';
};
};
var outputSyncingFormatter = function(result) {
result.startingBlock = utils.toDecimal(result.startingBlock);
result.currentBlock = utils.toDecimal(result.currentBlock);
result.highestBlock = utils.toDecimal(result.highestBlock);
return result;
};
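The formatter simply converts the hex quantities returned by eth_syncing into decimals; an illustrative input/output pair (annotation, not part of the diff):

outputSyncingFormatter({ startingBlock: '0x384', currentBlock: '0x386', highestBlock: '0x454' });
// => { startingBlock: 900, currentBlock: 902, highestBlock: 1108 }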
module.exports = {
module.exports = {
inputDefaultBlockNumberFormatter: inputDefaultBlockNumberFormatter,
inputDefaultBlockNumberFormatter: inputDefaultBlockNumberFormatter,
inputBlockNumberFormatter: inputBlockNumberFormatter,
inputBlockNumberFormatter: inputBlockNumberFormatter,
...
@@ -3751,7 +3814,8 @@ module.exports = {
...
@@ -3751,7 +3814,8 @@ module.exports = {
outputTransactionReceiptFormatter: outputTransactionReceiptFormatter,
outputTransactionReceiptFormatter: outputTransactionReceiptFormatter,
outputBlockFormatter: outputBlockFormatter,
outputBlockFormatter: outputBlockFormatter,
outputLogFormatter: outputLogFormatter,
outputLogFormatter: outputLogFormatter,
outputPostFormatter: outputPostFormatter
outputPostFormatter: outputPostFormatter,
outputSyncingFormatter: outputSyncingFormatter
};
};
...
@@ -3869,8 +3933,8 @@ SolidityFunction.prototype.call = function () {
...
@@ -3869,8 +3933,8 @@ SolidityFunction.prototype.call = function () {
if (!callback) {
if (!callback) {
var output = web3.eth.call(payload, defaultBlock);
var output = web3.eth.call(payload, defaultBlock);
return this.unpackOutput(output);
return this.unpackOutput(output);
}
}
var self = this;
var self = this;
web3.eth.call(payload, defaultBlock, function (error, output) {
web3.eth.call(payload, defaultBlock, function (error, output) {
callback(error, self.unpackOutput(output));
callback(error, self.unpackOutput(output));
...
@@ -3944,11 +4008,11 @@ SolidityFunction.prototype.request = function () {
...
@@ -3944,11 +4008,11 @@ SolidityFunction.prototype.request = function () {
var callback = this.extractCallback(args);
var callback = this.extractCallback(args);
var payload = this.toPayload(args);
var payload = this.toPayload(args);
var format = this.unpackOutput.bind(this);
var format = this.unpackOutput.bind(this);
return {
return {
method: this._constant ? 'eth_call' : 'eth_sendTransaction',
method: this._constant ? 'eth_call' : 'eth_sendTransaction',
callback: callback,
callback: callback,
params: [payload],
params: [payload],
format: format
format: format
};
};
};
};
...
@@ -4079,7 +4143,7 @@ HttpProvider.prototype.send = function (payload) {
...
@@ -4079,7 +4143,7 @@ HttpProvider.prototype.send = function (payload) {
try {
try {
result = JSON.parse(result);
result = JSON.parse(result);
} catch(e) {
} catch(e) {
throw errors.InvalidResponse(request.responseText);
throw errors.InvalidResponse(request.responseText);
}
}
return result;
return result;
...
@@ -4093,7 +4157,7 @@ HttpProvider.prototype.send = function (payload) {
...
@@ -4093,7 +4157,7 @@ HttpProvider.prototype.send = function (payload) {
* @param {Function} callback triggered on end with (err, result)
* @param {Function} callback triggered on end with (err, result)
*/
*/
HttpProvider.prototype.sendAsync = function (payload, callback) {
HttpProvider.prototype.sendAsync = function (payload, callback) {
var request = this.prepareRequest(true);
var request = this.prepareRequest(true);
request.onreadystatechange = function() {
request.onreadystatechange = function() {
if (request.readyState === 4) {
if (request.readyState === 4) {
...
@@ -4103,13 +4167,13 @@ HttpProvider.prototype.sendAsync = function (payload, callback) {
...
@@ -4103,13 +4167,13 @@ HttpProvider.prototype.sendAsync = function (payload, callback) {
try {
try {
result = JSON.parse(result);
result = JSON.parse(result);
} catch(e) {
} catch(e) {
error = errors.InvalidResponse(request.responseText);
error = errors.InvalidResponse(request.responseText);
}
}
callback(error, result);
callback(error, result);
}
}
};
};
try {
try {
request.send(JSON.stringify(payload));
request.send(JSON.stringify(payload));
} catch(error) {
} catch(error) {
...
@@ -4157,7 +4221,7 @@ module.exports = HttpProvider;
...
@@ -4157,7 +4221,7 @@ module.exports = HttpProvider;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file iban.js
* @file iban.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -4289,7 +4353,7 @@ Iban.isValid = function (iban) {
...
@@ -4289,7 +4353,7 @@ Iban.isValid = function (iban) {
* @returns {Boolean} true if it is, otherwise false
* @returns {Boolean} true if it is, otherwise false
*/
*/
Iban.prototype.isValid = function () {
Iban.prototype.isValid = function () {
return /^XE[0-9]{2}(ETH[0-9A-Z]{13}|[0-9A-Z]{30})$/.test(this._iban) &&
return /^XE[0-9]{2}(ETH[0-9A-Z]{13}|[0-9A-Z]{30,31})$/.test(this._iban) &&
mod9710(iso13616Prepare(this._iban)) === 1;
mod9710(iso13616Prepare(this._iban)) === 1;
};
};
...
@@ -4357,7 +4421,7 @@ Iban.prototype.address = function () {
...
@@ -4357,7 +4421,7 @@ Iban.prototype.address = function () {
var base36 = this._iban.substr(4);
var base36 = this._iban.substr(4);
var asBn = new BigNumber(base36, 36);
var asBn = new BigNumber(base36, 36);
return padLeft(asBn.toString(16), 20);
return padLeft(asBn.toString(16), 20);
}
}
return '';
return '';
};
};
...
@@ -4401,9 +4465,9 @@ var errorTimeout = function (method, id) {
...
@@ -4401,9 +4465,9 @@ var errorTimeout = function (method, id) {
var err = {
var err = {
"jsonrpc": "2.0",
"jsonrpc": "2.0",
"error": {
"error": {
"code": -32603,
"code": -32603,
"message": "IPC Request timed out for method \'" + method + "\'"
"message": "IPC Request timed out for method \'" + method + "\'"
},
},
"id": id
"id": id
};
};
return JSON.stringify(err);
return JSON.stringify(err);
...
@@ -4413,7 +4477,7 @@ var IpcProvider = function (path, net) {
...
@@ -4413,7 +4477,7 @@ var IpcProvider = function (path, net) {
var _this = this;
var _this = this;
this.responseCallbacks = {};
this.responseCallbacks = {};
this.path = path;
this.path = path;
this.connection = net.connect({path: this.path});
this.connection = net.connect({path: this.path});
this.connection.on('error', function(e){
this.connection.on('error', function(e){
...
@@ -4423,7 +4487,7 @@ var IpcProvider = function (path, net) {
...
@@ -4423,7 +4487,7 @@ var IpcProvider = function (path, net) {
this.connection.on('end', function(){
this.connection.on('end', function(){
_this._timeout();
_this._timeout();
});
});
// LISTEN FOR CONNECTION RESPONSES
// LISTEN FOR CONNECTION RESPONSES
...
@@ -4462,7 +4526,7 @@ Will parse the response and make an array out of it.
...
@@ -4462,7 +4526,7 @@ Will parse the response and make an array out of it.
IpcProvider.prototype._parseResponse = function(data) {
IpcProvider.prototype._parseResponse = function(data) {
var _this = this,
var _this = this,
returnValues = [];
returnValues = [];
// DE-CHUNKER
// DE-CHUNKER
var dechunkedData = data
var dechunkedData = data
.replace(/\}\{/g,'}|--|{') // }{
.replace(/\}\{/g,'}|--|{') // }{
...
@@ -4566,7 +4630,7 @@ IpcProvider.prototype.send = function (payload) {
...
@@ -4566,7 +4630,7 @@ IpcProvider.prototype.send = function (payload) {
try {
try {
result = JSON.parse(data);
result = JSON.parse(data);
} catch(e) {
} catch(e) {
throw errors.InvalidResponse(data);
throw errors.InvalidResponse(data);
}
}
return result;
return result;
...
@@ -4743,7 +4807,7 @@ Method.prototype.extractCallback = function (args) {
...
@@ -4743,7 +4807,7 @@ Method.prototype.extractCallback = function (args) {
/**
/**
* Should be called to check if the number of arguments is correct
* Should be called to check if the number of arguments is correct
*
*
* @method validateArgs
* @method validateArgs
* @param {Array} arguments
* @param {Array} arguments
* @throws {Error} if it is not
* @throws {Error} if it is not
...
@@ -4756,7 +4820,7 @@ Method.prototype.validateArgs = function (args) {
...
@@ -4756,7 +4820,7 @@ Method.prototype.validateArgs = function (args) {
/**
/**
* Should be called to format input args of method
* Should be called to format input args of method
*
*
* @method formatInput
* @method formatInput
* @param {Array}
* @param {Array}
* @return {Array}
* @return {Array}
...
@@ -4784,7 +4848,7 @@ Method.prototype.formatOutput = function (result) {
...
@@ -4784,7 +4848,7 @@ Method.prototype.formatOutput = function (result) {
/**
/**
* Should attach function to method
* Should attach function to method
*
*
* @method attachToObject
* @method attachToObject
* @param {Object}
* @param {Object}
* @param {Function}
* @param {Function}
...
@@ -4798,7 +4862,7 @@ Method.prototype.attachToObject = function (obj) {
...
@@ -4798,7 +4862,7 @@ Method.prototype.attachToObject = function (obj) {
obj[name[0]] = obj[name[0]] || {};
obj[name[0]] = obj[name[0]] || {};
obj[name[0]][name[1]] = func;
obj[name[0]][name[1]] = func;
} else {
} else {
obj[name[0]] = func;
obj[name[0]] = func;
}
}
};
};
...
@@ -5185,6 +5249,11 @@ var properties = [
...
@@ -5185,6 +5249,11 @@ var properties = [
getter: 'eth_hashrate',
getter: 'eth_hashrate',
outputFormatter: utils.toDecimal
outputFormatter: utils.toDecimal
}),
}),
new Property({
name: 'syncing',
getter: 'eth_syncing',
outputFormatter: formatters.outputSyncingFormatter
}),
new Property({
new Property({
name: 'gasPrice',
name: 'gasPrice',
getter: 'eth_gasPrice',
getter: 'eth_gasPrice',
...
@@ -5284,8 +5353,8 @@ var Method = require('../method');
...
@@ -5284,8 +5353,8 @@ var Method = require('../method');
var formatters = require('../formatters');
var formatters = require('../formatters');
var post = new Method({
var post = new Method({
name: 'post',
name: 'post',
call: 'shh_post',
call: 'shh_post',
params: 1,
params: 1,
inputFormatter: [formatters.inputPostFormatter]
inputFormatter: [formatters.inputPostFormatter]
});
});
...
@@ -5460,7 +5529,7 @@ module.exports = {
...
@@ -5460,7 +5529,7 @@ module.exports = {
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file namereg.js
* @file namereg.js
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
* @date 2015
...
@@ -5516,7 +5585,7 @@ var Property = function (options) {
...
@@ -5516,7 +5585,7 @@ var Property = function (options) {
/**
/**
* Should be called to format input args of method
* Should be called to format input args of method
*
*
* @method formatInput
* @method formatInput
* @param {Array}
* @param {Array}
* @return {Array}
* @return {Array}
...
@@ -5551,7 +5620,7 @@ Property.prototype.extractCallback = function (args) {
...
@@ -5551,7 +5620,7 @@ Property.prototype.extractCallback = function (args) {
/**
/**
* Should attach function to method
* Should attach function to method
*
*
* @method attachToObject
* @method attachToObject
* @param {Object}
* @param {Object}
* @param {Function}
* @param {Function}
...
@@ -5568,7 +5637,7 @@ Property.prototype.attachToObject = function (obj) {
...
@@ -5568,7 +5637,7 @@ Property.prototype.attachToObject = function (obj) {
obj = obj[names[0]];
obj = obj[names[0]];
name = names[1];
name = names[1];
}
}
Object.defineProperty(obj, name, proto);
Object.defineProperty(obj, name, proto);
var toAsyncName = function (prefix, name) {
var toAsyncName = function (prefix, name) {
...
@@ -5648,7 +5717,7 @@ module.exports = Property;
...
@@ -5648,7 +5717,7 @@ module.exports = Property;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/**
* @file requestmanager.js
* @file requestmanager.js
* @author Jeffrey Wilcke <jeff@ethdev.com>
* @author Jeffrey Wilcke <jeff@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
* @author Marek Kotewicz <marek@ethdev.com>
...
@@ -5730,7 +5799,7 @@ RequestManager.prototype.sendAsync = function (data, callback) {
...
@@ -5730,7 +5799,7 @@ RequestManager.prototype.sendAsync = function (data, callback) {
if (err) {
if (err) {
return callback(err);
return callback(err);
}
}
if (!Jsonrpc.getInstance().isValidResponse(result)) {
if (!Jsonrpc.getInstance().isValidResponse(result)) {
return callback(errors.InvalidResponse(result));
return callback(errors.InvalidResponse(result));
}
}
...
@@ -5763,7 +5832,7 @@ RequestManager.prototype.sendBatch = function (data, callback) {
...
@@ -5763,7 +5832,7 @@ RequestManager.prototype.sendBatch = function (data, callback) {
}
}
callback(err, results);
callback(err, results);
});
});
};
};
/**
/**
...
@@ -5811,11 +5880,15 @@ RequestManager.prototype.stopPolling = function (pollId) {
...
@@ -5811,11 +5880,15 @@ RequestManager.prototype.stopPolling = function (pollId) {
*
*
* @method reset
* @method reset
*/
*/
RequestManager.prototype.reset = function () {
RequestManager.prototype.reset = function (keepIsSyncing) {
for (var key in this.polls) {
for (var key in this.polls) {
this.polls[key].uninstall();
// remove all polls, except sync polls,
// they need to be removed manually by calling syncing.stopWatching()
if(!keepIsSyncing || key.indexOf('syncPoll_') === -1) {
this.polls[key].uninstall();
delete this.polls[key];
}
}
}
this.polls = {};
if (this.timeout) {
if (this.timeout) {
clearTimeout(this.timeout);
clearTimeout(this.timeout);
...
@@ -5843,10 +5916,10 @@ RequestManager.prototype.poll = function () {
...
@@ -5843,10 +5916,10 @@ RequestManager.prototype.poll = function () {
}
}
var pollsData = [];
var pollsData = [];
var pollsKeys = [];
var pollsIds = [];
for (var key in this.polls) {
for (var key in this.polls) {
pollsData.push(this.polls[key].data);
pollsData.push(this.polls[key].data);
pollsKeys.push(key);
pollsIds.push(key);
}
}
if (pollsData.length === 0) {
if (pollsData.length === 0) {
...
@@ -5855,8 +5928,17 @@ RequestManager.prototype.poll = function () {
...
@@ -5855,8 +5928,17 @@ RequestManager.prototype.poll = function () {
var payload = Jsonrpc.getInstance().toBatchPayload(pollsData);
var payload = Jsonrpc.getInstance().toBatchPayload(pollsData);
// map the request id to they poll id
var pollsIdMap = {};
payload.forEach(function(load, index){
pollsIdMap[load.id] = pollsIds[index];
});
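// Note (annotation, not part of the diff): the map built above lets the response handler
// below look polls up by JSON-RPC id instead of by array index (the old pollsKeys[index]
// approach), so batch results still reach the right callback if the provider reorders them.
// Illustrative shape only: pollsIdMap = { 12: 'syncPoll_742', 13: '0x1' }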
var self = this;
var self = this;
this.provider.sendAsync(payload, function (error, results) {
this.provider.sendAsync(payload, function (error, results) {
// TODO: console log?
// TODO: console log?
if (error) {
if (error) {
return;
return;
...
@@ -5865,25 +5947,23 @@ RequestManager.prototype.poll = function () {
...
@@ -5865,25 +5947,23 @@ RequestManager.prototype.poll = function () {
if (!utils.isArray(results)) {
if (!utils.isArray(results)) {
throw errors.InvalidResponse(results);
throw errors.InvalidResponse(results);
}
}
results.map(function (result) {
var id = pollsIdMap[result.id];
results.map(function (result, index) {
var key = pollsKeys[index];
// make sure the filter is still installed after arrival of the request
// make sure the filter is still installed after arrival of the request
if (self.polls[key]) {
if (self.polls[id]) {
result.callback = self.polls[key].callback;
result.callback = self.polls[id].callback;
return result;
return result;
} else
} else
return false;
return false;
}).filter(function (result) {
}).filter(function (result) {
return !!result;
return !!result;
}).filter(function (result) {
}).filter(function (result) {
var valid = Jsonrpc.getInstance().isValidResponse(result);
var valid = Jsonrpc.getInstance().isValidResponse(result);
if (!valid) {
if (!valid) {
result.callback(errors.InvalidResponse(result));
result.callback(errors.InvalidResponse(result));
}
}
return valid;
return valid;
}).filter(function (result) {
return utils.isArray(result.result) && result.result.length > 0;
}).forEach(function (result) {
}).forEach(function (result) {
result.callback(null, result.result);
result.callback(null, result.result);
});
});
...
@@ -5910,7 +5990,110 @@ module.exports = RequestManager;
...
@@ -5910,7 +5990,110 @@ module.exports = RequestManager;
You should have received a copy of the GNU Lesser General Public License
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
*/
/**
/** @file syncing.js
* @authors:
* Fabian Vogelsteller <fabian@ethdev.com>
* @date 2015
*/
var RequestManager = require('./requestmanager');
var Method = require('./method');
var formatters = require('./formatters');
var utils = require('../utils/utils');
/**
Adds the callback and sets up the methods, to iterate over the results.
@method pollSyncing
@param {Object} self
*/
var pollSyncing = function(self) {
var lastSyncState = false;
var onMessage = function (error, sync) {
if (error) {
return self.callbacks.forEach(function (callback) {
callback(error);
});
}
if(utils.isObject(sync))
sync = self.implementation.outputFormatter(sync);
self.callbacks.forEach(function (callback) {
if(lastSyncState !== sync) {
// call the callback with true first so the app can stop anything, before receiving the sync data
if(!lastSyncState && utils.isObject(sync))
callback(null, true);
// call on the next CPU cycle, so the actions of the sync stop can be processes first
setTimeout(function() {
callback(null, sync);
}, 1);
lastSyncState = sync;
}
});
};
RequestManager.getInstance().startPolling({
method: self.implementation.call,
params: [],
}, self.pollId, onMessage, self.stopWatching.bind(self));
};
var IsSyncing = function (callback) {
this.pollId = 'syncPoll_'+ Math.floor(Math.random() * 1000);
this.callbacks = [];
this.implementation = new Method({
name: 'isSyncing',
call: 'eth_syncing',
params: 0,
outputFormatter: formatters.outputSyncingFormatter
});
this.addCallback(callback);
pollSyncing(this);
return this;
};
IsSyncing.prototype.addCallback = function (callback) {
if(callback)
this.callbacks.push(callback);
return this;
};
IsSyncing.prototype.stopWatching = function () {
RequestManager.getInstance().stopPolling(this.pollId);
this.callbacks = [];
};
module.exports = IsSyncing;
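// Illustrative usage sketch (not part of the commit): assuming the eth module exposes this as
// web3.eth.isSyncing and an HTTP provider is configured, a dapp can react to sync start,
// progress and completion. The provider URL below is an assumption for the example.
var Web3 = require('web3');
var web3 = new Web3(new Web3.providers.HttpProvider('http://localhost:8545'));
var watcher = web3.eth.isSyncing(function (error, sync) {
    if (error) { return console.error(error); }
    if (sync === true) {
        console.log('sync started');        // fired once before the first sync object arrives
    } else if (sync) {
        console.log(sync.startingBlock, sync.currentBlock, sync.highestBlock);
    } else {
        console.log('sync finished');
        watcher.stopWatching();             // stop polling eth_syncing
    }
});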
},{"../utils/utils":20,"./formatters":29,"./method":35,"./requestmanager":43}],45:[function(require,module,exports){
/*
This file is part of ethereum.js.
ethereum.js is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ethereum.js is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with ethereum.js. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * @file transfer.js
 * @author Marek Kotewicz <marek@ethdev.com>
 * @date 2015
...
@@ -5932,7 +6115,7 @@ var exchangeAbi = require('../contracts/SmartExchange.json');
 * @param {Function} callback, callback
 */
var transfer = function (from, to, value, callback) {
    var iban = new Iban(to);
    if (!iban.isValid()) {
        throw new Error('invalid iban address');
    }
...
@@ -5940,7 +6123,7 @@ var transfer = function (from, to, value, callback) {
    if (iban.isDirect()) {
        return transferToAddress(from, iban.address(), value, callback);
    }
    if (!callback) {
        var address = namereg.addr(iban.institution());
        return deposit(from, address, value, iban.client());
...
@@ -5949,7 +6132,7 @@ var transfer = function (from, to, value, callback) {
    namereg.addr(iban.institution(), function (err, address) {
        return deposit(from, address, value, iban.client(), callback);
    });
};
/**
...
@@ -5990,9 +6173,9 @@ var deposit = function (from, to, value, client, callback) {
module.exports = transfer;
-},{"../contracts/SmartExchange.json":3,"../web3":22,"./contract":25,"./iban":32,"./namereg":41}],45:[function(require,module,exports){
+},{"../contracts/SmartExchange.json":3,"../web3":22,"./contract":25,"./iban":32,"./namereg":41}],46:[function(require,module,exports){
-},{}],46:[function(require,module,exports){
+},{}],47:[function(require,module,exports){
;(function (root, factory) {
    if (typeof exports === "object") {
        // CommonJS
...
@@ -6735,7 +6918,7 @@ module.exports = transfer;
    return CryptoJS;
}));
-},{}],47:[function(require,module,exports){
+},{}],48:[function(require,module,exports){
;(function (root, factory, undef) {
    if (typeof exports === "object") {
        // CommonJS
...
@@ -7059,7 +7242,7 @@ module.exports = transfer;
    return CryptoJS.SHA3;
}));
-},{"./core":46,"./x64-core":48}],48:[function(require,module,exports){
+},{"./core":47,"./x64-core":49}],49:[function(require,module,exports){
;(function (root, factory) {
    if (typeof exports === "object") {
        // CommonJS
...
@@ -7364,7 +7547,253 @@ module.exports = transfer;
    return CryptoJS;
}));
-},{"./core":46}],"bignumber.js":[function(require,module,exports){
+},{"./core":47}],50:[function(require,module,exports){
/*! https://mths.be/utf8js v2.0.0 by @mathias */
;(function(root) {
// Detect free variables 'exports'
var freeExports = typeof exports == 'object' && exports;
// Detect free variable 'module'
var freeModule = typeof module == 'object' && module &&
module.exports == freeExports && module;
// Detect free variable 'global', from Node.js or Browserified code,
// and use it as 'root'
var freeGlobal = typeof global == 'object' && global;
if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) {
root = freeGlobal;
}
/*--------------------------------------------------------------------------*/
var stringFromCharCode = String.fromCharCode;
// Taken from https://mths.be/punycode
function ucs2decode(string) {
var output = [];
var counter = 0;
var length = string.length;
var value;
var extra;
while (counter < length) {
value = string.charCodeAt(counter++);
if (value >= 0xD800 && value <= 0xDBFF && counter < length) {
// high surrogate, and there is a next character
extra = string.charCodeAt(counter++);
if ((extra & 0xFC00) == 0xDC00) { // low surrogate
output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
} else {
// unmatched surrogate; only append this code unit, in case the next
// code unit is the high surrogate of a surrogate pair
output.push(value);
counter--;
}
} else {
output.push(value);
}
}
return output;
}
// Taken from https://mths.be/punycode
function ucs2encode(array) {
var length = array.length;
var index = -1;
var value;
var output = '';
while (++index < length) {
value = array[index];
if (value > 0xFFFF) {
value -= 0x10000;
output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800);
value = 0xDC00 | value & 0x3FF;
}
output += stringFromCharCode(value);
}
return output;
}
function checkScalarValue(codePoint) {
if (codePoint >= 0xD800 && codePoint <= 0xDFFF) {
throw Error(
'Lone surrogate U+' + codePoint.toString(16).toUpperCase() +
' is not a scalar value'
);
}
}
/*--------------------------------------------------------------------------*/
function createByte(codePoint, shift) {
return stringFromCharCode(((codePoint >> shift) & 0x3F) | 0x80);
}
function encodeCodePoint(codePoint) {
if ((codePoint & 0xFFFFFF80) == 0) { // 1-byte sequence
return stringFromCharCode(codePoint);
}
var symbol = '';
if ((codePoint & 0xFFFFF800) == 0) { // 2-byte sequence
symbol = stringFromCharCode(((codePoint >> 6) & 0x1F) | 0xC0);
}
else if ((codePoint & 0xFFFF0000) == 0) { // 3-byte sequence
checkScalarValue(codePoint);
symbol = stringFromCharCode(((codePoint >> 12) & 0x0F) | 0xE0);
symbol += createByte(codePoint, 6);
}
else if ((codePoint & 0xFFE00000) == 0) { // 4-byte sequence
symbol = stringFromCharCode(((codePoint >> 18) & 0x07) | 0xF0);
symbol += createByte(codePoint, 12);
symbol += createByte(codePoint, 6);
}
symbol += stringFromCharCode((codePoint & 0x3F) | 0x80);
return symbol;
}
function utf8encode(string) {
var codePoints = ucs2decode(string);
var length = codePoints.length;
var index = -1;
var codePoint;
var byteString = '';
while (++index < length) {
codePoint = codePoints[index];
byteString += encodeCodePoint(codePoint);
}
return byteString;
}
/*--------------------------------------------------------------------------*/
function readContinuationByte() {
if (byteIndex >= byteCount) {
throw Error('Invalid byte index');
}
var continuationByte = byteArray[byteIndex] & 0xFF;
byteIndex++;
if ((continuationByte & 0xC0) == 0x80) {
return continuationByte & 0x3F;
}
// If we end up here, it’s not a continuation byte
throw Error('Invalid continuation byte');
}
function decodeSymbol() {
var byte1;
var byte2;
var byte3;
var byte4;
var codePoint;
if (byteIndex > byteCount) {
throw Error('Invalid byte index');
}
if (byteIndex == byteCount) {
return false;
}
// Read first byte
byte1 = byteArray[byteIndex] & 0xFF;
byteIndex++;
// 1-byte sequence (no continuation bytes)
if ((byte1 & 0x80) == 0) {
return byte1;
}
// 2-byte sequence
if ((byte1 & 0xE0) == 0xC0) {
var byte2 = readContinuationByte();
codePoint = ((byte1 & 0x1F) << 6) | byte2;
if (codePoint >= 0x80) {
return codePoint;
} else {
throw Error('Invalid continuation byte');
}
}
// 3-byte sequence (may include unpaired surrogates)
if ((byte1 & 0xF0) == 0xE0) {
byte2 = readContinuationByte();
byte3 = readContinuationByte();
codePoint = ((byte1 & 0x0F) << 12) | (byte2 << 6) | byte3;
if (codePoint >= 0x0800) {
checkScalarValue(codePoint);
return codePoint;
} else {
throw Error('Invalid continuation byte');
}
}
// 4-byte sequence
if ((byte1 & 0xF8) == 0xF0) {
byte2 = readContinuationByte();
byte3 = readContinuationByte();
byte4 = readContinuationByte();
codePoint = ((byte1 & 0x0F) << 0x12) | (byte2 << 0x0C) |
(byte3 << 0x06) | byte4;
if (codePoint >= 0x010000 && codePoint <= 0x10FFFF) {
return codePoint;
}
}
throw Error('Invalid UTF-8 detected');
}
var byteArray;
var byteCount;
var byteIndex;
function utf8decode(byteString) {
byteArray = ucs2decode(byteString);
byteCount = byteArray.length;
byteIndex = 0;
var codePoints = [];
var tmp;
while ((tmp = decodeSymbol()) !== false) {
codePoints.push(tmp);
}
return ucs2encode(codePoints);
}
/*--------------------------------------------------------------------------*/
var utf8 = {
'version': '2.0.0',
'encode': utf8encode,
'decode': utf8decode
};
// Some AMD build optimizers, like r.js, check for specific condition patterns
// like the following:
if (
typeof define == 'function' &&
typeof define.amd == 'object' &&
define.amd
) {
define(function() {
return utf8;
});
} else if (freeExports && !freeExports.nodeType) {
if (freeModule) { // in Node.js or RingoJS v0.8.0+
freeModule.exports = utf8;
} else { // in Narwhal or RingoJS v0.7.0-
var object = {};
var hasOwnProperty = object.hasOwnProperty;
for (var key in utf8) {
hasOwnProperty.call(utf8, key) && (freeExports[key] = utf8[key]);
}
}
} else { // in Rhino or a web browser
root.utf8 = utf8;
}
}(this));
},{}],"bignumber.js":[function(require,module,exports){
'use strict';
module.exports = BigNumber; // jshint ignore:line
...
@@ -7391,6 +7820,6 @@ if (typeof window !== 'undefined' && typeof window.web3 === 'undefined') {
module.exports = web3;
-},{"./lib/web3":22,"./lib/web3/contract":25,"./lib/web3/httpprovider":31,"./lib/web3/iban":32,"./lib/web3/ipcprovider":33,"./lib/web3/namereg":41,"./lib/web3/transfer":44}]},{},["web3"])
+},{"./lib/web3":22,"./lib/web3/contract":25,"./lib/web3/httpprovider":31,"./lib/web3/iban":32,"./lib/web3/ipcprovider":33,"./lib/web3/namereg":41,"./lib/web3/transfer":45}]},{},["web3"])
//# sourceMappingURL=web3-light.js.map
`
rpc/api/admin.go
View file @
1cc2f080
...
@@ -55,7 +55,6 @@ var (
	"admin_exportChain":     (*adminApi).ExportChain,
	"admin_importChain":     (*adminApi).ImportChain,
	"admin_verbosity":       (*adminApi).Verbosity,
-	"admin_chainSyncStatus": (*adminApi).ChainSyncStatus,
	"admin_setSolc":         (*adminApi).SetSolc,
	"admin_datadir":         (*adminApi).DataDir,
	"admin_startRPC":        (*adminApi).StartRPC,
...
@@ -232,17 +231,6 @@ func (self *adminApi) Verbosity(req *shared.Request) (interface{}, error) {
	return true, nil
}

-func (self *adminApi) ChainSyncStatus(req *shared.Request) (interface{}, error) {
-	pending, cached, importing, estimate := self.ethereum.Downloader().Stats()
-	return map[string]interface{}{
-		"blocksAvailable":        pending,
-		"blocksWaitingForImport": cached,
-		"importing":              importing,
-		"estimate":               estimate.String(),
-	}, nil
-}
-
func (self *adminApi) SetSolc(req *shared.Request) (interface{}, error) {
	args := new(SetSolcArgs)
	if err := self.coder.Decode(req.Params, &args); err != nil {
...
View file @
1cc2f080
...
@@ -143,10 +143,6 @@ web3._extend({
...
@@ -143,10 +143,6 @@ web3._extend({
new web3._extend.Property({
new web3._extend.Property({
name: 'datadir',
name: 'datadir',
getter: 'admin_datadir'
getter: 'admin_datadir'
}),
new web3._extend.Property({
name: 'chainSyncStatus',
getter: 'admin_chainSyncStatus'
})
})
]
]
});
});
...
...
rpc/api/eth.go
View file @
1cc2f080
...
@@ -55,6 +55,7 @@ var (
	"eth_protocolVersion": (*ethApi).ProtocolVersion,
	"eth_coinbase":        (*ethApi).Coinbase,
	"eth_mining":          (*ethApi).IsMining,
+	"eth_syncing":         (*ethApi).IsSyncing,
	"eth_gasPrice":        (*ethApi).GasPrice,
	"eth_getStorage":      (*ethApi).GetStorage,
	"eth_storageAt":       (*ethApi).GetStorage,
...
@@ -166,6 +167,20 @@ func (self *ethApi) IsMining(req *shared.Request) (interface{}, error) {
	return self.xeth.IsMining(), nil
}

+func (self *ethApi) IsSyncing(req *shared.Request) (interface{}, error) {
+	current := self.ethereum.ChainManager().CurrentBlock().NumberU64()
+	origin, height := self.ethereum.Downloader().Boundaries()
+
+	if current < height {
+		return map[string]interface{}{
+			"startingBlock": newHexNum(big.NewInt(int64(origin)).Bytes()),
+			"currentBlock":  newHexNum(big.NewInt(int64(current)).Bytes()),
+			"highestBlock":  newHexNum(big.NewInt(int64(height)).Bytes()),
+		}, nil
+	}
+	return false, nil
+}
+
func (self *ethApi) GasPrice(req *shared.Request) (interface{}, error) {
	return newHexNum(self.xeth.DefaultGasPrice().Bytes()), nil
}
...
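// Illustration (not part of the commit): exercising the new eth_syncing endpoint through the
// embedded web3 provider; the node URL, request id and block numbers are made-up examples.
var Web3 = require('web3');
var provider = new Web3.providers.HttpProvider('http://localhost:8545');
provider.sendAsync({ jsonrpc: '2.0', id: 1, method: 'eth_syncing', params: [] }, function (err, res) {
    // While the node is catching up, res.result holds hex-encoded block numbers, e.g.
    //   { startingBlock: '0x384', currentBlock: '0x386', highestBlock: '0x454' }
    // Once the current block reaches the highest known block, res.result is plain false.
    console.log(err || res.result);
});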
rpc/api/utils.go
View file @
1cc2f080
...
@@ -32,7 +32,6 @@ var (
	AutoCompletion = map[string][]string{
		"admin": []string{
			"addPeer",
-			"chainSyncStatus",
			"datadir",
			"exportChain",
			"getContractInfo",
...
@@ -99,6 +98,7 @@ var (
			"sendRawTransaction",
			"sendTransaction",
			"sign",
+			"syncing",
		},
		"miner": []string{
			"hashrate",
...