[services/wallet] Several changes in API after feedback (#1517)

[services/wallet] Several changes in API after feedback

- Timestamp from block header stored in blocks table and added to each transfers
- From field is computed from signature to offload this computation from client side
- `history` event is added back, so that client can ignore historical blocks when watching
only for new blocks
- Block number and timestamp are marshalled in hex, consistent with Ethereum data structures
This commit is contained in:
Dmitry Shulyak 2019-07-10 12:08:43 +03:00 committed by GitHub
parent 03cf452b8f
commit e20648ecc7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 153 additions and 115 deletions

View File

@ -46,82 +46,36 @@ Returns available transfers in a given range.
List of objects like:
```json
[
{
"type": "erc20",
"address": "0x5dc6108dc6296b052bbd33000553afe0ea576b5e",
"blockNumber": 5687981,
"blockhash": "0xcc4553f125be0bc6cc974518368145fcf1344f41e5de238205db0a1c185ea2fc",
"type": "eth",
"address": "0xd448dbe70b62304fc157319e00a041eea238c5eb",
"blockNumber": "0x1",
"blockhash": "0x2c8f84bc61572e82b39c7dc6bf067d7e71e5d53e745b1174e1b7df4938df0053",
"timestamp": "0x2332",
"transaction": {
"nonce": "0x57",
"gasPrice": "0x3b9aca00",
"gas": "0x44ba8",
"to": "0xc55cf4b03948d7ebc8b9e8bad92643703811d162",
"value": "0x0",
"input": "0xcae9ca5100000000000000000000000039d16cdb56b5a6a89e1a397a13fe48034694316e0000000000000000000000000000000000000000000000015af1d78b58c40000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000449134709e00000000000000000000000000000000000000000000000000000000000000010000000000000000000000005dc6108dc6296b052bbd33000553afe0ea576b5e00000000000000000000000000000000000000000000000000000000",
"v": "0x29",
"r": "0x124587e9c1d16d8bd02fda1221aefbfca8e2f4cd6300ed2077ebf736789179ab",
"s": "0x4309fddc1226dacb877488221a439c4f97d77dc2c3f5c8ea51f34f42417d3bda",
"hash": "0x259dd45c9c4d52137f32b7787e6e1fb6c9faf70ba40b8137bf66ba03abc0da00"
"nonce": "0x0",
"gasPrice": "0x1",
"gas": "0x5208",
"to": "0xd448dbe70b62304fc157319e00a041eea238c5eb",
"value": "0x16345785d8a0000",
"input": "0x",
"v": "0xa95",
"r": "0x73159b07b55d810b3898b60a0e3aed87e59e097be6bcae508a9b60e3e1f0ec3a",
"s": "0x2b58524c9b96228e1e996a1e236a52e4a10beb54aad7c9ee1bf36b613f4d9cfb",
"hash": "0x23da761563d8aa59398649df43a89a9ae3a7497861313674c401481b7400e8f9"
},
"from": "0x27bc544041e129501a6e6fb3c54cf6f12970b1e3",
"receipt": {
"root": "0x",
"status": "0x1",
"cumulativeGasUsed": "0x389e1e",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000200000000020000000000000000000000000000000000004000000000000000200000000000000020000000000008000000000000000000000000000000000000000000000000020000000000002000000800000000100000000000000010000000000000000000400000000000000001000000000040000000400000000400000000020000000000000008000000000020000000010000000002000000000000020000000002000000000000000000000000000000000200000000000000000020000010000000000000000000000400000000000000000000000000000000000000",
"logs": [
{
"address": "0xc55cf4b03948d7ebc8b9e8bad92643703811d162",
"topics": [
"0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925",
"0x0000000000000000000000005dc6108dc6296b052bbd33000553afe0ea576b5e",
"0x00000000000000000000000039d16cdb56b5a6a89e1a397a13fe48034694316e"
],
"data": "0x0000000000000000000000000000000000000000000000015af1d78b58c40000",
"blockNumber": "0x56caad",
"transactionHash": "0x259dd45c9c4d52137f32b7787e6e1fb6c9faf70ba40b8137bf66ba03abc0da00",
"transactionIndex": "0x10",
"blockHash": "0xcc4553f125be0bc6cc974518368145fcf1344f41e5de238205db0a1c185ea2fc",
"logIndex": "0xd",
"removed": false
},
{
"address": "0xc55cf4b03948d7ebc8b9e8bad92643703811d162",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x0000000000000000000000005dc6108dc6296b052bbd33000553afe0ea576b5e",
"0x000000000000000000000000ee55b1661fd24c4760d92026cedb252a5a0f2a4e"
],
"data": "0x0000000000000000000000000000000000000000000000015af1d78b58c40000",
"blockNumber": "0x56caad",
"transactionHash": "0x259dd45c9c4d52137f32b7787e6e1fb6c9faf70ba40b8137bf66ba03abc0da00",
"transactionIndex": "0x10",
"blockHash": "0xcc4553f125be0bc6cc974518368145fcf1344f41e5de238205db0a1c185ea2fc",
"logIndex": "0xe",
"removed": false
},
{
"address": "0x39d16cdb56b5a6a89e1a397a13fe48034694316e",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x0000000000000000000000000000000000000000000000000000000000000000",
"0x0000000000000000000000005dc6108dc6296b052bbd33000553afe0ea576b5e",
"0x0000000000000000000000000000000000000000000000000000000000000044"
],
"data": "0x",
"blockNumber": "0x56caad",
"transactionHash": "0x259dd45c9c4d52137f32b7787e6e1fb6c9faf70ba40b8137bf66ba03abc0da00",
"transactionIndex": "0x10",
"blockHash": "0xcc4553f125be0bc6cc974518368145fcf1344f41e5de238205db0a1c185ea2fc",
"logIndex": "0xf",
"removed": false
}
],
"transactionHash": "0x259dd45c9c4d52137f32b7787e6e1fb6c9faf70ba40b8137bf66ba03abc0da00",
"cumulativeGasUsed": "0x5208",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"logs": [],
"transactionHash": "0x23da761563d8aa59398649df43a89a9ae3a7497861313674c401481b7400e8f9",
"contractAddress": "0x0000000000000000000000000000000000000000",
"gasUsed": "0x34f42"
"gasUsed": "0x5208"
}
}
]
@ -226,3 +180,21 @@ Client expected to request new transfers from received block and replace transfe
}
}
```
3. `history` signal
Emitted when historical transfers were downloaded. The block number refers to the first block where historical transfers
were found.
```json
{
"type": "wallet",
"event": {
"type": "history",
"blockNumber": 0,
"accounts": [
"0x42c8f505b4006d417dd4e0ba0e880692986adbd8"
]
}
}
```

View File

@ -68,7 +68,7 @@ func (c *ethHistoricalCommand) Run(ctx context.Context) (err error) {
if len(transfers) > 0 {
// we download all or nothing
c.feed.Send(Event{
Type: EventNewBlock,
Type: EventNewHistory,
BlockNumber: c.from,
Accounts: []common.Address{c.address},
})
@ -123,7 +123,7 @@ func (c *erc20HistoricalCommand) Run(ctx context.Context) (err error) {
if len(transfers) > 0 {
log.Debug("erc20 downloader imported transfers", "len", len(transfers), "time", time.Since(start))
c.feed.Send(Event{
Type: EventNewBlock,
Type: EventNewHistory,
BlockNumber: c.iterator.Header().Number,
Accounts: []common.Address{c.address},
})
@ -313,7 +313,7 @@ func (c *controlCommand) fastIndex(ctx context.Context, to *DBHeader) error {
for _, address := range c.accounts {
erc20 := &erc20HistoricalCommand{
db: c.db,
erc20: NewERC20TransfersDownloader(c.client, []common.Address{address}),
erc20: NewERC20TransfersDownloader(c.client, []common.Address{address}, types.NewEIP155Signer(c.chain)),
client: c.client,
feed: c.feed,
address: address,
@ -442,6 +442,7 @@ func headersFromTransfers(transfers []Transfer) []*DBHeader {
rst = append(rst, &DBHeader{
Hash: transfers[i].BlockHash,
Number: transfers[i].BlockNumber,
Timestamp: transfers[i].Timestamp,
})
}
return rst

View File

@ -43,7 +43,7 @@ func (s *NewBlocksSuite) SetupTest() {
s.cmd = &newBlocksTransfersCommand{
db: s.db,
accounts: []common.Address{s.address},
erc20: NewERC20TransfersDownloader(s.backend.Client, []common.Address{s.address}),
erc20: NewERC20TransfersDownloader(s.backend.Client, []common.Address{s.address}, s.backend.Signer),
eth: &ETHTransferDownloader{
client: s.backend.Client,
signer: s.backend.Signer,

View File

@ -17,6 +17,7 @@ import (
// DBHeader is the subset of an on-chain block header that the wallet
// service persists in the blocks table.
type DBHeader struct {
	// Number is the block number.
	Number *big.Int
	// Hash is the block hash.
	Hash common.Hash
	// Timestamp is the block time copied from the on-chain header.
	Timestamp uint64
	// Head is true if the block was a head at the time it was pulled from chain.
	Head bool
}
@ -25,15 +26,14 @@ func toDBHeader(header *types.Header) *DBHeader {
return &DBHeader{
Hash: header.Hash(),
Number: header.Number,
Timestamp: header.Time,
}
}
// toHead converts an on-chain header into a DBHeader marked as a chain head.
// It delegates to toDBHeader so hash, number and timestamp are populated
// consistently, then sets the Head flag.
func toHead(header *types.Header) *DBHeader {
	dbheader := toDBHeader(header)
	dbheader.Head = true
	return dbheader
}
// SyncOption is used to specify that application processed transfers for that block.
@ -76,7 +76,7 @@ func (i *SQLBigInt) Scan(value interface{}) error {
// Value implements driver.Valuer. It stores the big.Int as an SQLite INTEGER
// and fails explicitly when the value does not fit into an int64 instead of
// silently truncating it.
func (i *SQLBigInt) Value() (driver.Value, error) {
	if !(*big.Int)(i).IsInt64() {
		return nil, errors.New("not an int64")
	}
	return (*big.Int)(i).Int64(), nil
}
@ -168,12 +168,6 @@ func (db *Database) GetTransfers(start, end *big.Int) (rst []Transfer, err error
return query.Scan(rows)
}
// SaveHeader stores a single header.
// The timestamp must be persisted alongside number and hash: the blocks
// table declares timestamp NOT NULL, and transfers report the block time
// from this column. This keeps the insert consistent with SaveHeaders.
func (db *Database) SaveHeader(header *types.Header) error {
	_, err := db.db.Exec("INSERT INTO blocks(number, hash, timestamp) VALUES (?, ?, ?)",
		(*SQLBigInt)(header.Number), header.Hash(), header.Time)
	return err
}
// SaveHeaders stores a list of headers atomically.
func (db *Database) SaveHeaders(headers []*types.Header) (err error) {
var (
@ -184,7 +178,7 @@ func (db *Database) SaveHeaders(headers []*types.Header) (err error) {
if err != nil {
return
}
insert, err = tx.Prepare("INSERT INTO blocks(number, hash) VALUES (?,?)")
insert, err = tx.Prepare("INSERT INTO blocks(number, hash, timestamp) VALUES (?, ?, ?)")
if err != nil {
return
}
@ -197,7 +191,7 @@ func (db *Database) SaveHeaders(headers []*types.Header) (err error) {
}()
for _, h := range headers {
_, err = insert.Exec((*SQLBigInt)(h.Number), h.Hash())
_, err = insert.Exec((*SQLBigInt)(h.Number), h.Hash(), h.Time)
if err != nil {
return
}
@ -302,12 +296,12 @@ func deleteHeaders(creator statementCreator, headers []*DBHeader) error {
}
func insertHeaders(creator statementCreator, headers []*DBHeader) error {
insert, err := creator.Prepare("INSERT OR IGNORE INTO blocks(hash, number, head) VALUES (?, ?, ?)")
insert, err := creator.Prepare("INSERT OR IGNORE INTO blocks(hash, number, timestamp, head) VALUES (?, ?, ?, ?)")
if err != nil {
return err
}
for _, h := range headers {
_, err = insert.Exec(h.Hash, (*SQLBigInt)(h.Number), h.Head)
_, err = insert.Exec(h.Hash, (*SQLBigInt)(h.Number), h.Timestamp, h.Head)
if err != nil {
return err
}
@ -316,12 +310,12 @@ func insertHeaders(creator statementCreator, headers []*DBHeader) error {
}
func insertTransfers(creator statementCreator, transfers []Transfer) error {
insert, err := creator.Prepare("INSERT OR IGNORE INTO transfers(hash, blk_hash, address, tx, receipt, type) VALUES (?, ?, ?, ?, ?, ?)")
insert, err := creator.Prepare("INSERT OR IGNORE INTO transfers(hash, blk_hash, address, tx, sender, receipt, type) VALUES (?, ?, ?, ?, ?, ?, ?)")
if err != nil {
return err
}
for _, t := range transfers {
_, err = insert.Exec(t.ID, t.BlockHash, t.Address, &JSONBlob{t.Transaction}, &JSONBlob{t.Receipt}, t.Type)
_, err = insert.Exec(t.ID, t.BlockHash, t.Address, &JSONBlob{t.Transaction}, t.From, &JSONBlob{t.Receipt}, t.Type)
if err != nil {
return err
}

View File

@ -30,7 +30,7 @@ func TestDBGetHeaderByNumber(t *testing.T) {
Difficulty: big.NewInt(1),
Time: 1,
}
require.NoError(t, db.SaveHeader(header))
require.NoError(t, db.SaveHeaders([]*types.Header{header}))
rst, err := db.GetHeaderByNumber(header.Number)
require.NoError(t, err)
require.Equal(t, header.Hash(), rst.Hash)
@ -52,7 +52,7 @@ func TestDBHeaderExists(t *testing.T) {
Difficulty: big.NewInt(1),
Time: 1,
}
require.NoError(t, db.SaveHeader(header))
require.NoError(t, db.SaveHeaders([]*types.Header{header}))
rst, err := db.HeaderExists(header.Hash())
require.NoError(t, err)
require.True(t, rst)
@ -104,10 +104,10 @@ func TestDBReorgTransfers(t *testing.T) {
originalTX := types.NewTransaction(1, common.Address{1}, nil, 10, big.NewInt(10), nil)
replacedTX := types.NewTransaction(2, common.Address{1}, nil, 10, big.NewInt(10), nil)
require.NoError(t, db.ProcessTranfers([]Transfer{
{ethTransfer, common.Hash{1}, *originalTX.To(), original.Number, original.Hash, originalTX, rcpt},
{ethTransfer, common.Hash{1}, *originalTX.To(), original.Number, original.Hash, 100, originalTX, common.Address{1}, rcpt},
}, nil, []*DBHeader{original}, nil, 0))
require.NoError(t, db.ProcessTranfers([]Transfer{
{ethTransfer, common.Hash{2}, *replacedTX.To(), replaced.Number, replaced.Hash, replacedTX, rcpt},
{ethTransfer, common.Hash{2}, *replacedTX.To(), replaced.Number, replaced.Hash, 100, replacedTX, common.Address{1}, rcpt},
}, nil, []*DBHeader{replaced}, []*DBHeader{original}, 0))
all, err := db.GetTransfers(big.NewInt(0), nil)
@ -166,8 +166,7 @@ func TestDBLatestSynced(t *testing.T) {
Difficulty: big.NewInt(1),
Time: 1,
}
require.NoError(t, db.SaveHeader(h1))
require.NoError(t, db.SaveHeader(h2))
require.NoError(t, db.SaveHeaders([]*types.Header{h1, h2}))
require.NoError(t, db.SaveSyncedHeader(address, h1, ethSync))
require.NoError(t, db.SaveSyncedHeader(address, h2, ethSync))

View File

@ -3,12 +3,14 @@ package wallet
import (
"context"
"encoding/binary"
"encoding/json"
"errors"
"math/big"
"time"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethclient"
@ -38,7 +40,54 @@ type Transfer struct {
Address common.Address `json:"address"`
BlockNumber *big.Int `json:"blockNumber"`
BlockHash common.Hash `json:"blockhash"`
Timestamp uint64 `json:"timestamp"`
Transaction *types.Transaction `json:"transaction"`
// From is derived from tx signature in order to offload this computation from UI component.
From common.Address `json:"from"`
Receipt *types.Receipt `json:"receipt"`
}
// MarshalJSON implements json.Marshaler. It encodes the transfer through
// transferMarshaling so that blockNumber and timestamp are rendered as hex
// strings, consistent with types.Transaction and types.Receipt.
func (t Transfer) MarshalJSON() ([]byte, error) {
	return json.Marshal(transferMarshaling{
		Type:        t.Type,
		Address:     t.Address,
		BlockNumber: (*hexutil.Big)(t.BlockNumber),
		BlockHash:   t.BlockHash,
		Timestamp:   hexutil.Uint64(t.Timestamp),
		Transaction: t.Transaction,
		From:        t.From,
		Receipt:     t.Receipt,
	})
}
// UnmarshalJSON implements json.Unmarshaler. It decodes through
// transferMarshaling so hex-encoded blockNumber and timestamp are accepted,
// then copies every field onto the receiver.
//
// Bug fix: the original assigned From and Receipt in the wrong direction
// (m.From = t.From, m.Receipt = t.Receipt), writing receiver values into the
// discarded local and leaving t.From and t.Receipt unset after decoding.
func (t *Transfer) UnmarshalJSON(input []byte) error {
	m := transferMarshaling{}
	if err := json.Unmarshal(input, &m); err != nil {
		return err
	}
	t.Type = m.Type
	t.Address = m.Address
	t.BlockNumber = (*big.Int)(m.BlockNumber)
	t.BlockHash = m.BlockHash
	t.Timestamp = uint64(m.Timestamp)
	t.Transaction = m.Transaction
	t.From = m.From
	t.Receipt = m.Receipt
	return nil
}
// transferMarshaling ensures that all integers will be marshalled with hexutil
// to be consistent with types.Transaction and types.Receipt. It mirrors
// Transfer field-for-field, substituting *hexutil.Big for *big.Int and
// hexutil.Uint64 for uint64 so blockNumber and timestamp encode as hex strings.
type transferMarshaling struct {
	Type        TransferType       `json:"type"`
	Address     common.Address     `json:"address"`
	BlockNumber *hexutil.Big       `json:"blockNumber"`
	BlockHash   common.Hash        `json:"blockhash"`
	Timestamp   hexutil.Uint64     `json:"timestamp"`
	Transaction *types.Transaction `json:"transaction"`
	// From is derived from tx signature in order to offload this computation from UI component.
	From    common.Address `json:"from"`
	Receipt *types.Receipt `json:"receipt"`
}
@ -121,7 +170,10 @@ func (d *ETHTransferDownloader) getTransfersInBlock(ctx context.Context, blk *ty
Address: *address,
BlockNumber: blk.Number(),
BlockHash: blk.Hash(),
Transaction: tx, Receipt: receipt})
Timestamp: blk.Time(),
Transaction: tx,
From: from,
Receipt: receipt})
}
}
@ -130,12 +182,13 @@ func (d *ETHTransferDownloader) getTransfersInBlock(ctx context.Context, blk *ty
}
// NewERC20TransfersDownloader returns new instance.
func NewERC20TransfersDownloader(client *ethclient.Client, accounts []common.Address) *ERC20TransfersDownloader {
func NewERC20TransfersDownloader(client *ethclient.Client, accounts []common.Address, signer types.Signer) *ERC20TransfersDownloader {
signature := crypto.Keccak256Hash([]byte(erc20TransferEventSignature))
return &ERC20TransfersDownloader{
client: client,
accounts: accounts,
signature: signature,
signer: signer,
}
}
@ -146,6 +199,9 @@ type ERC20TransfersDownloader struct {
// hash of the Transfer event signature
signature common.Hash
// signer is used to derive tx sender from tx signature
signer types.Signer
}
func (d *ERC20TransfersDownloader) paddedAddress(address common.Address) common.Hash {
@ -169,12 +225,22 @@ func (d *ERC20TransfersDownloader) transferFromLog(parent context.Context, log t
if err != nil {
return Transfer{}, err
}
from, err := types.Sender(d.signer, tx)
if err != nil {
return Transfer{}, err
}
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
receipt, err := d.client.TransactionReceipt(ctx, log.TxHash)
cancel()
if err != nil {
return Transfer{}, err
}
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
blk, err := d.client.BlockByHash(ctx, log.BlockHash)
cancel()
if err != nil {
return Transfer{}, err
}
// TODO(dshulyak) what is the max number of logs?
index := [4]byte{}
binary.BigEndian.PutUint32(index[:], uint32(log.Index))
@ -186,7 +252,9 @@ func (d *ERC20TransfersDownloader) transferFromLog(parent context.Context, log t
BlockNumber: new(big.Int).SetUint64(log.BlockNumber),
BlockHash: log.BlockHash,
Transaction: tx,
From: from,
Receipt: receipt,
Timestamp: blk.Time(),
}, nil
}

View File

@ -138,6 +138,7 @@ type ERC20TransferSuite struct {
func (s *ERC20TransferSuite) SetupTest() {
var err error
s.signer = types.NewEIP155Signer(big.NewInt(1337))
s.identity, err = crypto.GenerateKey()
s.Require().NoError(err)
s.faucet, err = crypto.GenerateKey()
@ -150,7 +151,7 @@ func (s *ERC20TransferSuite) SetupTest() {
client, err := node.Attach()
s.Require().NoError(err)
s.ethclient = ethclient.NewClient(client)
s.downloader = NewERC20TransfersDownloader(s.ethclient, []common.Address{crypto.PubkeyToAddress(s.identity.PublicKey)})
s.downloader = NewERC20TransfersDownloader(s.ethclient, []common.Address{crypto.PubkeyToAddress(s.identity.PublicKey)}, s.signer)
_, tx, contract, err := erc20.DeployERC20Transfer(bind.NewKeyedTransactor(s.faucet), s.ethclient)
s.Require().NoError(err)
@ -159,7 +160,6 @@ func (s *ERC20TransferSuite) SetupTest() {
_, err = bind.WaitMined(timeout, s.ethclient, tx)
s.Require().NoError(err)
s.contract = contract
s.signer = types.NewEIP155Signer(big.NewInt(1337))
}
func (s *ERC20TransferSuite) TestNoEvents() {

View File

@ -25,7 +25,7 @@ func bindata_read(data []byte, name string) ([]byte, error) {
return buf.Bytes(), nil
}
var __0001_transfers_down_db_sql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x28\x29\x4a\xcc\x2b\x4e\x4b\x2d\x2a\xb6\xe6\x42\x12\x4d\xca\xc9\x4f\xce\x2e\xb6\xe6\x02\x04\x00\x00\xff\xff\x27\x4d\x7a\xa1\x29\x00\x00\x00")
var __0001_transfers_down_db_sql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x28\x29\x4a\xcc\x2b\x4e\x4b\x2d\x2a\xb6\xe6\x42\x12\x4d\xca\xc9\x4f\xce\x46\x15\x4a\x4c\x4e\xce\x2f\xcd\x2b\x29\x8e\x2f\xc9\x8f\x87\x49\x03\x02\x00\x00\xff\xff\xe1\x80\x1c\xac\x48\x00\x00\x00")
func _0001_transfers_down_db_sql() ([]byte, error) {
return bindata_read(
@ -34,7 +34,7 @@ func _0001_transfers_down_db_sql() ([]byte, error) {
)
}
var __0001_transfers_up_db_sql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x91\xd1\x72\xaa\x30\x10\x86\xef\xf3\x14\x7b\x29\x33\x79\x83\x73\x15\x60\xd1\xcc\xc9\x49\x4e\x43\xa8\xf5\x8a\x41\x4c\xab\xa3\x06\x4a\x60\xa6\xbe\x7d\x07\x01\xad\xad\xe3\xf4\x32\xbb\xd9\xdd\xef\xff\xff\x48\x23\x33\x08\x86\x85\x02\x81\x27\x20\x95\x01\x7c\xe1\xa9\x49\xa1\x6d\x0a\xe7\x5f\x6d\xe3\x61\x46\xb6\x85\xdf\xc2\x33\xd3\xd1\x82\x69\xc8\x24\x7f\xca\x90\x92\x62\xb3\x69\xac\xf7\x97\x7a\x3f\x2b\x33\x21\x28\x59\x1f\xf6\xf9\xcd\xc8\xb5\xd5\x7e\x40\x28\x54\x48\x49\x63\x4b\xbb\xab\xdb\xf1\xd5\x9e\x6a\x7b\xe7\x77\xa2\x34\xf2\xb9\x84\xbf\xb8\x9a\x4d\x4b\x03\xd0\x98\xa0\x46\x19\x61\x0a\xeb\x43\x55\xee\xfd\x6c\xa8\x2b\x09\x31\x0a\x34\x08\x11\x4b\x23\x16\x23\x25\x91\x92\xa9\xd1\x8c\x4b\x03\x9d\xdb\xbd\x77\x36\x9f\x64\xe5\x95\x3b\xaf\xcb\x27\x19\x83\x2c\x38\xef\xa2\x63\x31\x20\xc1\x1f\x42\x1e\x98\x34\xdc\xff\xee\xd0\x7f\xcd\xff\x31\xbd\xea\xb1\x29\x71\xdd\x71\x6d\x1b\x08\xf9\xbc\xa7\x18\xaf\x5c\x25\x6e\x6d\xb1\x81\x50\x29\x01\x31\x26\x2c\x13\x06\x12\x26\x52\x24\x01\x2c\xb9\x59\xa8\xcc\x80\x56\x4b\x1e\x3f\xc6\x28\xca\xb2\xea\x5c\xeb\xf3\xb6\xca\x2f\x48\x8f\xf3\xb9\xc5\xba\xf6\xfc\xc9\x95\xc0\xa5\xf9\x69\xfe\x30\x71\xcf\xfe\xa9\xf3\xab\x00\x8e\x45\x5d\xef\xdc\x5b\xef\xff\x48\x38\x20\x4f\x44\x53\x0e\x63\x93\x7e\x39\xdd\xa7\xf1\x19\x00\x00\xff\xff\xfc\x91\xad\x60\xb2\x02\x00\x00")
var __0001_transfers_up_db_sql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x91\xcf\x8e\x9b\x30\x10\xc6\xef\x7e\x8a\x39\x06\x89\x37\xe8\xc9\xc0\x90\x58\x75\xed\xd6\x98\xa6\x39\x21\x02\x6e\x83\x12\x0c\xc5\x20\x6d\xde\x7e\x45\x80\xfc\xd9\x8d\xa2\xbd\xce\x7c\x33\xdf\x6f\xe6\x0b\x15\x52\x8d\xa0\x69\xc0\x11\x58\x0c\x42\x6a\xc0\x3f\x2c\xd1\x09\xf4\x5d\x6e\xdd\x5f\xd3\x39\x58\x91\x43\xee\x0e\xf0\x9b\xaa\x70\x43\x15\xa4\x82\xfd\x4a\xd1\x27\x79\x59\x76\xc6\xb9\x6b\x7d\x9c\x15\x29\xe7\x3e\xd9\x9f\x8e\xd9\xc3\xc8\xad\xd5\xbf\x41\xc0\x65\xe0\x13\x67\x6c\x69\xba\x27\x8a\xce\x14\xa6\x6a\xfb\x59\xd6\x9f\x5b\xf3\x44\x14\x4b\x85\x6c\x2d\xe0\x3b\xee\x56\x8b\x9b\x07\x0a\x63\x54\x28\x42\x4c\x60\x7f\x6a\x8a\xa3\x5b\x4d\x75\x29\x20\x42\x8e\x1a\x21\xa4\x49\x48\x23\xf4\x49\x28\x45\xa2\x15\x65\x42\xc3\x60\xab\xff\x83\xc9\x96\x7b\xb3\xc6\x5e\xd6\x65\xcb\x7d\xd3\xbd\x70\xd9\xe5\xcf\x45\x8f\x78\xdf\x08\x79\xf1\xbd\xc9\xff\xe3\xeb\x7e\x2a\xf6\x83\xaa\xdd\x88\xed\x13\x3b\xd4\x7b\xd3\x41\xc0\xd6\x23\xc5\xec\x72\xf7\xa9\xaa\x36\xae\xcf\xeb\x16\x52\x91\xb0\xb5\xc0\x68\x91\xde\x34\x07\x93\x97\x10\x48\xc9\x21\xc2\x98\xa6\x5c\x43\x4c\x79\x82\xc4\x83\x2d\xd3\x1b\x99\x6a\x50\x72\xcb\xa2\xd7\xa8\x79\x51\x34\x83\xed\x5d\xd6\x37\xd9\x15\xfb\x75\xb8\x8f\xe8\xb7\x9e\x3b\xdb\x02\x98\xd0\x9f\x03\x9a\x26\x9e\x45\xb4\x74\xbe\x14\x52\x9d\xb7\x6d\x65\xff\x8d\x19\xcd\x84\x13\xf2\x42\xb4\x64\x35\x37\xfd\x3b\xeb\x31\xb1\xf7\x00\x00\x00\xff\xff\x76\x37\x2b\x31\xef\x02\x00\x00")
func _0001_transfers_up_db_sql() ([]byte, error) {
return bindata_read(

View File

@ -3,6 +3,7 @@ hash VARCHAR UNIQUE,
address VARCHAR NOT NULL,
blk_hash VARCHAR NOT NULL,
tx BLOB,
sender VARCHAR NOT NULL,
receipt BLOB,
type VARCHAR NOT NULL,
FOREIGN KEY(blk_hash) REFERENCES blocks(hash) ON DELETE CASCADE,
@ -12,6 +13,7 @@ CONSTRAINT unique_transfer_on_hash_address UNIQUE (hash,address)
CREATE TABLE IF NOT EXISTS blocks (
hash VARCHAR PRIMARY KEY,
number BIGINT UNIQUE NOT NULL,
timestamp UNSIGNED BIGINT NOT NULL,
head BOOL DEFAULT FALSE
) WITHOUT ROWID;

View File

@ -80,6 +80,7 @@ func (r *Reactor) Start() error {
return errors.New("already running")
}
r.group = NewGroup(context.Background())
signer := types.NewEIP155Signer(r.chain)
// TODO(dshulyak) to support adding accounts in runtime implement keyed group
// and export private api to start downloaders from accounts
// private api should have access only to reactor
@ -91,9 +92,9 @@ func (r *Reactor) Start() error {
eth: &ETHTransferDownloader{
client: r.client,
accounts: r.accounts,
signer: types.NewEIP155Signer(r.chain),
signer: signer,
},
erc20: NewERC20TransfersDownloader(r.client, r.accounts),
erc20: NewERC20TransfersDownloader(r.client, r.accounts, signer),
feed: r.feed,
safetyDepth: reorgSafetyDepth,
}

View File

@ -9,7 +9,7 @@ import (
"github.com/ethereum/go-ethereum/core/types"
)
const baseTransfersQuery = "SELECT transfers.hash, type, blocks.hash, blocks.number, address, tx, receipt FROM transfers JOIN blocks ON blk_hash = blocks.hash"
const baseTransfersQuery = "SELECT transfers.hash, type, blocks.hash, blocks.number, blocks.timestamp, address, tx, sender, receipt FROM transfers JOIN blocks ON blk_hash = blocks.hash"
func newTransfersQuery() *transfersQuery {
buf := bytes.NewBuffer(nil)
@ -75,8 +75,9 @@ func (q *transfersQuery) Scan(rows *sql.Rows) (rst []Transfer, err error) {
Receipt: &types.Receipt{},
}
err = rows.Scan(
&transfer.ID, &transfer.Type, &transfer.BlockHash, (*SQLBigInt)(transfer.BlockNumber), &transfer.Address,
&JSONBlob{transfer.Transaction}, &JSONBlob{transfer.Receipt})
&transfer.ID, &transfer.Type, &transfer.BlockHash,
(*SQLBigInt)(transfer.BlockNumber), &transfer.Timestamp, &transfer.Address,
&JSONBlob{transfer.Transaction}, &transfer.From, &JSONBlob{transfer.Receipt})
if err != nil {
return nil, err
}

View File

@ -106,8 +106,8 @@ func (s *TransfersSuite) TestHistoricalTransfers() {
if err != nil {
return err
}
if len(all) >= 30 {
return fmt.Errorf("waiting for atleast 30 transfers")
if len(all) < 30 {
return fmt.Errorf("waiting for atleast 30 transfers, got %d", len(all))
}
return nil
}, 30*time.Second, 1*time.Second))