[wallet] Fix multiple notifications on a single erc20 transfer
commit d39ca7fea4 (parent d65946e9c0)
@@ -166,7 +166,7 @@ func (s *Service) transactionsHandler(payload TransactionEvent) {
     limit := 20
     if payload.BlockNumber != nil {
         for _, address := range payload.Accounts {
-            if payload.BlockNumber.Cmp(payload.MaxKnownBlocks[address]) == 1 {
+            if payload.BlockNumber.Cmp(payload.MaxKnownBlocks[address]) >= 0 {
                 log.Info("Handled transfer for address", "info", address)
                 transfers, err := s.walletDB.GetTransfersByAddressAndBlock(address, payload.BlockNumber, int64(limit))
                 if err != nil {
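In the updated watcher (next hunk) maxKnownBlocks is advanced before the event is published, so by the time transactionsHandler runs the payload's block number can already equal MaxKnownBlocks[address]; the strict `== 1` test would then drop the notification, hence the change to `>= 0`. For reference, big.Int.Cmp returns -1, 0 or +1; a minimal sketch with made-up values:

package main

import (
    "fmt"
    "math/big"
)

func main() {
    blockNumber := big.NewInt(100) // block carried by the event
    maxKnown := big.NewInt(100)    // already recorded as the max known block for the address

    // Cmp returns -1 if x < y, 0 if x == y, +1 if x > y.
    fmt.Println(blockNumber.Cmp(maxKnown) == 1) // false: the old strict check skips the equal case
    fmt.Println(blockNumber.Cmp(maxKnown) >= 0) // true: the new check still handles it
}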
@@ -235,14 +235,26 @@ func (s *Service) StartWalletWatcher() {
                 return
             case event := <-events:
                 if event.Type == wallet.EventNewBlock && len(maxKnownBlocks) > 0 {
-                    s.transmitter.publisher.Send(TransactionEvent{
-                        Type:                      string(event.Type),
-                        BlockNumber:               event.BlockNumber,
-                        Accounts:                  event.Accounts,
-                        NewTransactionsPerAccount: event.NewTransactionsPerAccount,
-                        ERC20:                     event.ERC20,
-                        MaxKnownBlocks:            maxKnownBlocks,
-                    })
+                    newBlocks := false
+                    for _, address := range event.Accounts {
+                        if _, ok := maxKnownBlocks[address]; !ok {
+                            newBlocks = true
+                            maxKnownBlocks[address] = event.BlockNumber
+                        } else if event.BlockNumber.Cmp(maxKnownBlocks[address]) == 1 {
+                            maxKnownBlocks[address] = event.BlockNumber
+                            newBlocks = true
+                        }
+                    }
+                    if newBlocks {
+                        s.transmitter.publisher.Send(TransactionEvent{
+                            Type:                      string(event.Type),
+                            BlockNumber:               event.BlockNumber,
+                            Accounts:                  event.Accounts,
+                            NewTransactionsPerAccount: event.NewTransactionsPerAccount,
+                            ERC20:                     event.ERC20,
+                            MaxKnownBlocks:            maxKnownBlocks,
+                        })
+                    }
                 } else if event.Type == wallet.EventMaxKnownBlock {
                     for _, address := range event.Accounts {
                         if _, ok := maxKnownBlocks[address]; !ok {
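The watcher now records the newest block per account first and publishes only when at least one account actually advanced, so the same block can no longer trigger repeated TransactionEvents. A stripped-down sketch of the same gating idea (the helper and types here are illustrative, not part of the service):

package main

import (
    "fmt"
    "math/big"
)

// advance records blockNumber for every account and reports whether any of them
// saw a block newer than what was recorded before. It mirrors the gating added
// to StartWalletWatcher; the helper itself is hypothetical.
func advance(maxKnown map[string]*big.Int, accounts []string, blockNumber *big.Int) bool {
    newBlocks := false
    for _, addr := range accounts {
        prev, ok := maxKnown[addr]
        if !ok || blockNumber.Cmp(prev) == 1 {
            maxKnown[addr] = blockNumber
            newBlocks = true
        }
    }
    return newBlocks
}

func main() {
    maxKnown := map[string]*big.Int{"0xabc": big.NewInt(10)}
    fmt.Println(advance(maxKnown, []string{"0xabc"}, big.NewInt(11))) // true: block 11 is new
    fmt.Println(advance(maxKnown, []string{"0xabc"}, big.NewInt(11))) // false: same block again, nothing would be sent
}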
@@ -990,9 +990,27 @@ func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error)
     for _, address := range c.accounts {
         ethHeaders := ethHeadersByAddress[address]
         erc20Headers := erc20HeadersByAddress[address]

         allHeaders := append(ethHeaders, erc20Headers...)
-        foundHeaders[address] = allHeaders
+        uniqHeadersByHash := map[common.Hash]*DBHeader{}
+        for _, header := range allHeaders {
+            uniqHeader, ok := uniqHeadersByHash[header.Hash]
+            if ok {
+                if len(header.Erc20Transfers) > 0 {
+                    uniqHeader.Erc20Transfers = append(uniqHeader.Erc20Transfers, header.Erc20Transfers...)
+                }
+                uniqHeadersByHash[header.Hash] = uniqHeader
+            } else {
+                uniqHeadersByHash[header.Hash] = header
+            }
+        }
+
+        uniqHeaders := []*DBHeader{}
+        for _, header := range uniqHeadersByHash {
+            uniqHeaders = append(uniqHeaders, header)
+        }
+
+        foundHeaders[address] = uniqHeaders

         for _, header := range allHeaders {
             if header.Number.Cmp(maxBlockNumber) == 1 {
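Each ERC20 log used to yield its own DBHeader, so a single block could appear several times in allHeaders and be saved and announced more than once. The new loop keeps one header per block hash and concatenates the transfer lists before saving. A self-contained sketch of that merge, with simplified, hypothetical types:

package main

import "fmt"

type transfer struct{ id string }

type dbHeader struct {
    hash      string
    transfers []*transfer
}

// dedupByHash keeps one header per block hash and merges their transfer lists,
// mirroring the uniqHeadersByHash loop in findAndCheckBlockRangeCommand.Run.
func dedupByHash(headers []*dbHeader) []*dbHeader {
    byHash := map[string]*dbHeader{}
    for _, h := range headers {
        if existing, ok := byHash[h.hash]; ok {
            existing.transfers = append(existing.transfers, h.transfers...)
            continue
        }
        byHash[h.hash] = h
    }
    uniq := make([]*dbHeader, 0, len(byHash))
    for _, h := range byHash {
        uniq = append(uniq, h)
    }
    return uniq
}

func main() {
    headers := []*dbHeader{
        {hash: "0x1", transfers: []*transfer{{id: "a"}}},
        {hash: "0x1", transfers: []*transfer{{id: "b"}}},
    }
    fmt.Println(len(dedupByHash(headers))) // 1 header, carrying both transfers
}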
@@ -1000,8 +1018,8 @@ func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error)
             }
         }

-        log.Debug("saving headers", "len", len(allHeaders), "address")
-        err = c.db.ProcessBlocks(address, newFromByAddress[address], c.toByAddress[address], allHeaders)
+        log.Debug("saving headers", "len", len(uniqHeaders), "address")
+        err = c.db.ProcessBlocks(address, newFromByAddress[address], c.toByAddress[address], uniqHeaders)
         if err != nil {
             return err
         }
@@ -15,12 +15,12 @@ import (

 // DBHeader fields from header that are stored in database.
 type DBHeader struct {
     Number    *big.Int
     Hash      common.Hash
     Timestamp uint64
-    Erc20Transfer *Transfer
+    Erc20Transfers []*Transfer
     Network   uint64
     Address   common.Address
     // Head is true if the block was a head at the time it was pulled from chain.
     Head bool
     // Loaded is true if trasfers from this block has been already fetched
@@ -765,23 +765,25 @@ func insertBlocksWithTransactions(creator statementCreator, account common.Addre
         if err != nil {
             return err
         }
-        if header.Erc20Transfer != nil {
-            res, err := updateTx.Exec(&JSONBlob{header.Erc20Transfer.Log}, network, account, header.Erc20Transfer.ID)
-            if err != nil {
-                return err
-            }
-            affected, err := res.RowsAffected()
-            if err != nil {
-                return err
-            }
-            if affected > 0 {
-                continue
-            }
+        if len(header.Erc20Transfers) > 0 {
+            for _, transfer := range header.Erc20Transfers {
+                res, err := updateTx.Exec(&JSONBlob{transfer.Log}, network, account, transfer.ID)
+                if err != nil {
+                    return err
+                }
+                affected, err := res.RowsAffected()
+                if err != nil {
+                    return err
+                }
+                if affected > 0 {
+                    continue
+                }

-            _, err = insertTx.Exec(network, account, account, header.Erc20Transfer.ID, (*SQLBigInt)(header.Number), header.Hash, erc20Transfer, header.Erc20Transfer.Timestamp, &JSONBlob{header.Erc20Transfer.Log})
-            if err != nil {
-                log.Error("error saving erc20transfer", "err", err)
-                return err
-            }
+                _, err = insertTx.Exec(network, account, account, transfer.ID, (*SQLBigInt)(header.Number), header.Hash, erc20Transfer, transfer.Timestamp, &JSONBlob{transfer.Log})
+                if err != nil {
+                    log.Error("error saving erc20transfer", "err", err)
+                    return err
+                }
+            }
         }
     }
 }
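Persisting now loops over header.Erc20Transfers, and each transfer keeps the existing behaviour: try the UPDATE first and only INSERT when RowsAffected reports that nothing was touched. A minimal sketch of that update-then-insert pattern against database/sql (statements and arguments are placeholders, not the wallet schema):

package sketch

import "database/sql"

// updateOrInsert tries an UPDATE first and, when no row was touched, falls back
// to an INSERT - the same RowsAffected pattern the commit now applies once per
// ERC20 transfer in a block.
func updateOrInsert(updateTx, insertTx *sql.Stmt, id string, payload []byte) error {
    res, err := updateTx.Exec(payload, id)
    if err != nil {
        return err
    }
    affected, err := res.RowsAffected()
    if err != nil {
        return err
    }
    if affected > 0 {
        return nil // row already existed and was updated
    }
    _, err = insertTx.Exec(id, payload)
    return err
}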
@@ -296,7 +296,7 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
         header := &DBHeader{
             Number: big.NewInt(int64(l.BlockNumber)),
             Hash:   l.BlockHash,
-            Erc20Transfer: &Transfer{
+            Erc20Transfers: []*Transfer{{
                 Address:     address,
                 BlockNumber: big.NewInt(int64(l.BlockNumber)),
                 BlockHash:   l.BlockHash,
@@ -304,9 +304,7 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
                 From:   address,
                 Loaded: false,
                 Type:   erc20Transfer,
-                Log:    &l,
-            },
-        }
+                Log:    &l}}}

         concurrent.Add(func(ctx context.Context) error {
             concurrent.PushHeader(header)
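The dense `&l}}}` ending is just Go's composite-literal elision: `Erc20Transfers: []*Transfer{{ ... }}` builds a one-element slice of pointers, and the three closing braces end the Transfer, the slice, and the DBHeader. A tiny sketch with placeholder types:

package main

import "fmt"

type item struct{ name string }

type holder struct{ items []*item }

func main() {
    // []*item{{...}} is shorthand for []*item{&item{...}}: a one-element slice
    // whose single entry points at the inner struct literal, so the trailing
    // braces close the struct, the slice, and the enclosing literal in one go.
    h := holder{items: []*item{{name: "erc20 transfer"}}}
    fmt.Println(len(h.items), h.items[0].name) // 1 erc20 transfer
}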