package transfer

import (
	"database/sql"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/log"

	"github.com/status-im/status-go/services/wallet/bigint"
)
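
// BlockRangeDAOer defines the storage operations for the eth and token block
// ranges tracked per chain and account.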
type BlockRangeDAOer interface {
	getBlockRange(chainID uint64, address common.Address) (blockRange *ethTokensBlockRanges, exists bool, err error)
	getBlockRanges(chainID uint64, addresses []common.Address) (blockRanges map[common.Address]*ethTokensBlockRanges, err error)
	upsertRange(chainID uint64, account common.Address, newBlockRange *ethTokensBlockRanges) (err error)
	updateTokenRange(chainID uint64, account common.Address, newBlockRange *BlockRange) (err error)
	upsertEthRange(chainID uint64, account common.Address, newBlockRange *BlockRange) (err error)
}

type BlockRangeSequentialDAO struct {
	db *sql.DB
}
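
// BlockRange describes the span of blocks already scanned for an account on a
// given chain. A nil field means the corresponding block is not known yet.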
type BlockRange struct {
	Start      *big.Int // Block of first transfer
	FirstKnown *big.Int // Oldest scanned block
	LastKnown  *big.Int // Last scanned block
}

func NewBlockRange() *BlockRange {
	return &BlockRange{Start: nil, FirstKnown: nil, LastKnown: nil}
}

type ethTokensBlockRanges struct {
	eth              *BlockRange
	tokens           *BlockRange
	balanceCheckHash string
}

func newEthTokensBlockRanges() *ethTokensBlockRanges {
	return &ethTokensBlockRanges{eth: NewBlockRange(), tokens: NewBlockRange()}
}
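
// scanRanges reads rows of the blocks_ranges_sequential table into a map keyed
// by account address, converting NULL columns into nil *big.Int fields.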
func scanRanges(rows *sql.Rows) (map[common.Address]*ethTokensBlockRanges, error) {
	blockRanges := make(map[common.Address]*ethTokensBlockRanges)
	for rows.Next() {
		efk := &bigint.NilableSQLBigInt{}
		elk := &bigint.NilableSQLBigInt{}
		es := &bigint.NilableSQLBigInt{}
		tfk := &bigint.NilableSQLBigInt{}
		tlk := &bigint.NilableSQLBigInt{}
		ts := &bigint.NilableSQLBigInt{}
		addressB := []byte{}
		blockRange := newEthTokensBlockRanges()
		err := rows.Scan(&addressB, es, efk, elk, ts, tfk, tlk, &blockRange.balanceCheckHash)
		if err != nil {
			return nil, err
		}
		address := common.BytesToAddress(addressB)
		blockRanges[address] = blockRange

		if !es.IsNil() {
			blockRanges[address].eth.Start = big.NewInt(es.Int64())
		}
		if !efk.IsNil() {
			blockRanges[address].eth.FirstKnown = big.NewInt(efk.Int64())
		}
		if !elk.IsNil() {
			blockRanges[address].eth.LastKnown = big.NewInt(elk.Int64())
		}
		if !ts.IsNil() {
			blockRanges[address].tokens.Start = big.NewInt(ts.Int64())
		}
		if !tfk.IsNil() {
			blockRanges[address].tokens.FirstKnown = big.NewInt(tfk.Int64())
		}
		if !tlk.IsNil() {
			blockRanges[address].tokens.LastKnown = big.NewInt(tlk.Int64())
		}
	}
	return blockRanges, nil
}
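
// getBlockRange loads the stored eth and token block ranges for a single account.
// When no row exists, an empty ethTokensBlockRanges is returned and exists is false.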
func (b *BlockRangeSequentialDAO) getBlockRange(chainID uint64, address common.Address) (blockRange *ethTokensBlockRanges, exists bool, err error) {
	query := `SELECT address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash FROM blocks_ranges_sequential
		WHERE address = ?
		AND network_id = ?`

	rows, err := b.db.Query(query, address, chainID)
	if err != nil {
		return
	}
	defer rows.Close()

	ranges, err := scanRanges(rows)
	if err != nil {
		return nil, false, err
	}

	blockRange, exists = ranges[address]
	if !exists {
		blockRange = newEthTokensBlockRanges()
	}

	return blockRange, exists, nil
}
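
// getBlockRanges loads the stored block ranges for several accounts at once,
// keyed by address; accounts without a stored row are simply absent from the map.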
func (b *BlockRangeSequentialDAO) getBlockRanges(chainID uint64, addresses []common.Address) (blockRanges map[common.Address]*ethTokensBlockRanges, err error) {
	blockRanges = make(map[common.Address]*ethTokensBlockRanges)

	addressesPlaceholder := ""
	for i := 0; i < len(addresses); i++ {
		addressesPlaceholder += "?"
		if i < len(addresses)-1 {
			addressesPlaceholder += ","
		}
	}

	query := "SELECT address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash FROM blocks_ranges_sequential WHERE address IN (" +
		addressesPlaceholder + ") AND network_id = ?"

	params := []interface{}{}
	for _, address := range addresses {
		params = append(params, address)
	}
	params = append(params, chainID)

	rows, err := b.db.Query(query, params...)
	if err != nil {
		return
	}
	defer rows.Close()

	return scanRanges(rows)
}

func (b *BlockRangeSequentialDAO) deleteRange(account common.Address) error {
	log.Debug("delete blocks range", "account", account)

	delete, err := b.db.Prepare(`DELETE FROM blocks_ranges_sequential WHERE address = ?`)
	if err != nil {
		log.Error("Failed to prepare deletion of sequential block range", "error", err)
		return err
	}

	_, err = delete.Exec(account)
	return err
}
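
// upsertRange merges newBlockRange into the stored eth and token ranges for the
// account and writes the result back, inserting a row if none exists yet.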
func (b *BlockRangeSequentialDAO) upsertRange(chainID uint64, account common.Address, newBlockRange *ethTokensBlockRanges) (err error) {
	ethTokensBlockRange, exists, err := b.getBlockRange(chainID, account)
	if err != nil {
		return err
	}

	ethBlockRange := prepareUpdatedBlockRange(ethTokensBlockRange.eth, newBlockRange.eth)
	tokensBlockRange := prepareUpdatedBlockRange(ethTokensBlockRange.tokens, newBlockRange.tokens)

	log.Debug("upsert eth and tokens blocks range",
		"account", account, "chainID", chainID,
		"eth.start", ethBlockRange.Start,
		"eth.first", ethBlockRange.FirstKnown,
		"eth.last", ethBlockRange.LastKnown,
		"tokens.first", tokensBlockRange.FirstKnown,
		"tokens.last", tokensBlockRange.LastKnown,
		"hash", newBlockRange.balanceCheckHash)

	var query *sql.Stmt
	if exists {
		query, err = b.db.Prepare(`UPDATE blocks_ranges_sequential SET
			blk_start = ?,
			blk_first = ?,
			blk_last = ?,
			token_blk_start = ?,
			token_blk_first = ?,
			token_blk_last = ?,
			balance_check_hash = ?
			WHERE network_id = ? AND address = ?`)
	} else {
		query, err = b.db.Prepare(`INSERT INTO blocks_ranges_sequential
			(blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash, network_id, address) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`)
	}

	if err != nil {
		return err
	}

	_, err = query.Exec((*bigint.SQLBigInt)(ethBlockRange.Start), (*bigint.SQLBigInt)(ethBlockRange.FirstKnown), (*bigint.SQLBigInt)(ethBlockRange.LastKnown),
		(*bigint.SQLBigInt)(tokensBlockRange.Start), (*bigint.SQLBigInt)(tokensBlockRange.FirstKnown), (*bigint.SQLBigInt)(tokensBlockRange.LastKnown), newBlockRange.balanceCheckHash, chainID, account)

	return err
}
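
// upsertEthRange merges newBlockRange into the stored eth range only and persists
// it, inserting a row when the account has none yet.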
func (b *BlockRangeSequentialDAO) upsertEthRange(chainID uint64, account common.Address,
	newBlockRange *BlockRange) (err error) {

	ethTokensBlockRange, exists, err := b.getBlockRange(chainID, account)
	if err != nil {
		return err
	}

	blockRange := prepareUpdatedBlockRange(ethTokensBlockRange.eth, newBlockRange)

	log.Debug("upsert eth blocks range", "account", account, "chainID", chainID,
		"start", blockRange.Start,
		"first", blockRange.FirstKnown,
		"last", blockRange.LastKnown,
		"old hash", ethTokensBlockRange.balanceCheckHash)

	var query *sql.Stmt

	if exists {
		query, err = b.db.Prepare(`UPDATE blocks_ranges_sequential SET
			blk_start = ?,
			blk_first = ?,
			blk_last = ?
			WHERE network_id = ? AND address = ?`)
	} else {
		query, err = b.db.Prepare(`INSERT INTO blocks_ranges_sequential
			(blk_start, blk_first, blk_last, network_id, address) VALUES (?, ?, ?, ?, ?)`)
	}

	if err != nil {
		return err
	}

	_, err = query.Exec((*bigint.SQLBigInt)(blockRange.Start), (*bigint.SQLBigInt)(blockRange.FirstKnown), (*bigint.SQLBigInt)(blockRange.LastKnown), chainID, account)

	return err
}
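
// updateTokenRange merges newBlockRange into the stored token range and updates
// the existing row; unlike the upsert methods it does not insert a missing row.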
func (b *BlockRangeSequentialDAO) updateTokenRange(chainID uint64, account common.Address,
	newBlockRange *BlockRange) (err error) {

	ethTokensBlockRange, _, err := b.getBlockRange(chainID, account)
	if err != nil {
		return err
	}

	blockRange := prepareUpdatedBlockRange(ethTokensBlockRange.tokens, newBlockRange)

	log.Debug("update tokens blocks range",
		"first", blockRange.FirstKnown,
		"last", blockRange.LastKnown)

	update, err := b.db.Prepare(`UPDATE blocks_ranges_sequential SET token_blk_start = ?, token_blk_first = ?, token_blk_last = ? WHERE network_id = ? AND address = ?`)
	if err != nil {
		return err
	}

	_, err = update.Exec((*bigint.SQLBigInt)(blockRange.Start), (*bigint.SQLBigInt)(blockRange.FirstKnown),
		(*bigint.SQLBigInt)(blockRange.LastKnown), chainID, account)

	return err
}
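
// prepareUpdatedBlockRange merges newBlockRange into blockRange in place so the
// result covers the widest window seen so far: Start only moves to a greater
// block, FirstKnown only to an older one and LastKnown only to a newer one.
//
// Illustrative sketch (made-up values, not part of the production flow):
//
//	stored := &BlockRange{FirstKnown: big.NewInt(100), LastKnown: big.NewInt(200)}
//	scanned := &BlockRange{FirstKnown: big.NewInt(50), LastKnown: big.NewInt(250)}
//	merged := prepareUpdatedBlockRange(stored, scanned)
//	// merged.FirstKnown is now 50 and merged.LastKnown is 250.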
func prepareUpdatedBlockRange(blockRange, newBlockRange *BlockRange) *BlockRange {
	if newBlockRange != nil {
		// Overwrite the start block if there was none yet or if the new one is newer (greater).
		// The start of history can only be refined towards a greater block, since no transfers
		// exist before it, but concurrent block range checks may find a later block that also
		// matches the start-block criteria (nonce is zero, balances are equal).
		if newBlockRange.Start != nil && (blockRange.Start == nil || blockRange.Start.Cmp(newBlockRange.Start) < 0) {
			blockRange.Start = newBlockRange.Start
		}

		// Overwrite the first known block if there was none yet or if the new one is older
		if (blockRange.FirstKnown == nil && newBlockRange.FirstKnown != nil) ||
			(blockRange.FirstKnown != nil && newBlockRange.FirstKnown != nil && blockRange.FirstKnown.Cmp(newBlockRange.FirstKnown) > 0) {
			blockRange.FirstKnown = newBlockRange.FirstKnown
		}

		// Overwrite the last known block if there was none yet or if the new one is newer
		if (blockRange.LastKnown == nil && newBlockRange.LastKnown != nil) ||
			(blockRange.LastKnown != nil && newBlockRange.LastKnown != nil && blockRange.LastKnown.Cmp(newBlockRange.LastKnown) < 0) {
			blockRange.LastKnown = newBlockRange.LastKnown
		}
	}

	return blockRange
}