[#4202] Detect balance and erc20 transfers by a single contract call (#4508)

Roman Volosovskyi 2024-01-19 16:57:04 +01:00 committed by GitHub
parent 81f4c86086
commit 9c0526f7d9
13 changed files with 887 additions and 284 deletions

View File

@ -1 +1 @@
0.172.5
0.172.6

View File

@ -0,0 +1,275 @@
// Code generated - DO NOT EDIT.
// This file is a generated binding and any manual changes will be lost.
package balancechecker
import (
"errors"
"math/big"
"strings"
ethereum "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/event"
)
// Reference imports to suppress errors if they are not otherwise used.
var (
_ = errors.New
_ = big.NewInt
_ = strings.NewReader
_ = ethereum.NotFound
_ = bind.Bind
_ = common.Big1
_ = types.BloomLookup
_ = event.NewSubscription
_ = abi.ConvertType
)
// BalanceCheckerMetaData contains all meta data concerning the BalanceChecker contract.
var BalanceCheckerMetaData = &bind.MetaData{
ABI: "[{\"inputs\":[{\"internalType\":\"address[]\",\"name\":\"users\",\"type\":\"address[]\"},{\"internalType\":\"address[]\",\"name\":\"tokens\",\"type\":\"address[]\"}],\"name\":\"balancesHash\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"},{\"internalType\":\"bytes32[]\",\"name\":\"\",\"type\":\"bytes32[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"user\",\"type\":\"address\"},{\"internalType\":\"address[]\",\"name\":\"tokens\",\"type\":\"address[]\"}],\"name\":\"balancesPerAddress\",\"outputs\":[{\"internalType\":\"uint256[]\",\"name\":\"\",\"type\":\"uint256[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"user\",\"type\":\"address\"},{\"internalType\":\"address\",\"name\":\"token\",\"type\":\"address\"}],\"name\":\"tokenBalance\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]",
}
// BalanceCheckerABI is the input ABI used to generate the binding from.
// Deprecated: Use BalanceCheckerMetaData.ABI instead.
var BalanceCheckerABI = BalanceCheckerMetaData.ABI
// BalanceChecker is an auto generated Go binding around an Ethereum contract.
type BalanceChecker struct {
BalanceCheckerCaller // Read-only binding to the contract
BalanceCheckerTransactor // Write-only binding to the contract
BalanceCheckerFilterer // Log filterer for contract events
}
// BalanceCheckerCaller is an auto generated read-only Go binding around an Ethereum contract.
type BalanceCheckerCaller struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
// BalanceCheckerTransactor is an auto generated write-only Go binding around an Ethereum contract.
type BalanceCheckerTransactor struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
// BalanceCheckerFilterer is an auto generated log filtering Go binding around an Ethereum contract events.
type BalanceCheckerFilterer struct {
contract *bind.BoundContract // Generic contract wrapper for the low level calls
}
// BalanceCheckerSession is an auto generated Go binding around an Ethereum contract,
// with pre-set call and transact options.
type BalanceCheckerSession struct {
Contract *BalanceChecker // Generic contract binding to set the session for
CallOpts bind.CallOpts // Call options to use throughout this session
TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session
}
// BalanceCheckerCallerSession is an auto generated read-only Go binding around an Ethereum contract,
// with pre-set call options.
type BalanceCheckerCallerSession struct {
Contract *BalanceCheckerCaller // Generic contract caller binding to set the session for
CallOpts bind.CallOpts // Call options to use throughout this session
}
// BalanceCheckerTransactorSession is an auto generated write-only Go binding around an Ethereum contract,
// with pre-set transact options.
type BalanceCheckerTransactorSession struct {
Contract *BalanceCheckerTransactor // Generic contract transactor binding to set the session for
TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session
}
// BalanceCheckerRaw is an auto generated low-level Go binding around an Ethereum contract.
type BalanceCheckerRaw struct {
Contract *BalanceChecker // Generic contract binding to access the raw methods on
}
// BalanceCheckerCallerRaw is an auto generated low-level read-only Go binding around an Ethereum contract.
type BalanceCheckerCallerRaw struct {
Contract *BalanceCheckerCaller // Generic read-only contract binding to access the raw methods on
}
// BalanceCheckerTransactorRaw is an auto generated low-level write-only Go binding around an Ethereum contract.
type BalanceCheckerTransactorRaw struct {
Contract *BalanceCheckerTransactor // Generic write-only contract binding to access the raw methods on
}
// NewBalanceChecker creates a new instance of BalanceChecker, bound to a specific deployed contract.
func NewBalanceChecker(address common.Address, backend bind.ContractBackend) (*BalanceChecker, error) {
contract, err := bindBalanceChecker(address, backend, backend, backend)
if err != nil {
return nil, err
}
return &BalanceChecker{BalanceCheckerCaller: BalanceCheckerCaller{contract: contract}, BalanceCheckerTransactor: BalanceCheckerTransactor{contract: contract}, BalanceCheckerFilterer: BalanceCheckerFilterer{contract: contract}}, nil
}
// NewBalanceCheckerCaller creates a new read-only instance of BalanceChecker, bound to a specific deployed contract.
func NewBalanceCheckerCaller(address common.Address, caller bind.ContractCaller) (*BalanceCheckerCaller, error) {
contract, err := bindBalanceChecker(address, caller, nil, nil)
if err != nil {
return nil, err
}
return &BalanceCheckerCaller{contract: contract}, nil
}
// NewBalanceCheckerTransactor creates a new write-only instance of BalanceChecker, bound to a specific deployed contract.
func NewBalanceCheckerTransactor(address common.Address, transactor bind.ContractTransactor) (*BalanceCheckerTransactor, error) {
contract, err := bindBalanceChecker(address, nil, transactor, nil)
if err != nil {
return nil, err
}
return &BalanceCheckerTransactor{contract: contract}, nil
}
// NewBalanceCheckerFilterer creates a new log filterer instance of BalanceChecker, bound to a specific deployed contract.
func NewBalanceCheckerFilterer(address common.Address, filterer bind.ContractFilterer) (*BalanceCheckerFilterer, error) {
contract, err := bindBalanceChecker(address, nil, nil, filterer)
if err != nil {
return nil, err
}
return &BalanceCheckerFilterer{contract: contract}, nil
}
// bindBalanceChecker binds a generic wrapper to an already deployed contract.
func bindBalanceChecker(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
parsed, err := BalanceCheckerMetaData.GetAbi()
if err != nil {
return nil, err
}
return bind.NewBoundContract(address, *parsed, caller, transactor, filterer), nil
}
// Call invokes the (constant) contract method with params as input values and
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
func (_BalanceChecker *BalanceCheckerRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error {
return _BalanceChecker.Contract.BalanceCheckerCaller.contract.Call(opts, result, method, params...)
}
// Transfer initiates a plain transaction to move funds to the contract, calling
// its default method if one is available.
func (_BalanceChecker *BalanceCheckerRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
return _BalanceChecker.Contract.BalanceCheckerTransactor.contract.Transfer(opts)
}
// Transact invokes the (paid) contract method with params as input values.
func (_BalanceChecker *BalanceCheckerRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {
return _BalanceChecker.Contract.BalanceCheckerTransactor.contract.Transact(opts, method, params...)
}
// Call invokes the (constant) contract method with params as input values and
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
func (_BalanceChecker *BalanceCheckerCallerRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error {
return _BalanceChecker.Contract.contract.Call(opts, result, method, params...)
}
// Transfer initiates a plain transaction to move funds to the contract, calling
// its default method if one is available.
func (_BalanceChecker *BalanceCheckerTransactorRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
return _BalanceChecker.Contract.contract.Transfer(opts)
}
// Transact invokes the (paid) contract method with params as input values.
func (_BalanceChecker *BalanceCheckerTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {
return _BalanceChecker.Contract.contract.Transact(opts, method, params...)
}
// BalancesHash is a free data retrieval call binding the contract method 0x23c0503e.
//
// Solidity: function balancesHash(address[] users, address[] tokens) view returns(uint256, bytes32[])
func (_BalanceChecker *BalanceCheckerCaller) BalancesHash(opts *bind.CallOpts, users []common.Address, tokens []common.Address) (*big.Int, [][32]byte, error) {
var out []interface{}
err := _BalanceChecker.contract.Call(opts, &out, "balancesHash", users, tokens)
if err != nil {
return *new(*big.Int), *new([][32]byte), err
}
out0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)
out1 := *abi.ConvertType(out[1], new([][32]byte)).(*[][32]byte)
return out0, out1, err
}
// BalancesHash is a free data retrieval call binding the contract method 0x23c0503e.
//
// Solidity: function balancesHash(address[] users, address[] tokens) view returns(uint256, bytes32[])
func (_BalanceChecker *BalanceCheckerSession) BalancesHash(users []common.Address, tokens []common.Address) (*big.Int, [][32]byte, error) {
return _BalanceChecker.Contract.BalancesHash(&_BalanceChecker.CallOpts, users, tokens)
}
// BalancesHash is a free data retrieval call binding the contract method 0x23c0503e.
//
// Solidity: function balancesHash(address[] users, address[] tokens) view returns(uint256, bytes32[])
func (_BalanceChecker *BalanceCheckerCallerSession) BalancesHash(users []common.Address, tokens []common.Address) (*big.Int, [][32]byte, error) {
return _BalanceChecker.Contract.BalancesHash(&_BalanceChecker.CallOpts, users, tokens)
}
// BalancesPerAddress is a free data retrieval call binding the contract method 0x1a55d770.
//
// Solidity: function balancesPerAddress(address user, address[] tokens) view returns(uint256[])
func (_BalanceChecker *BalanceCheckerCaller) BalancesPerAddress(opts *bind.CallOpts, user common.Address, tokens []common.Address) ([]*big.Int, error) {
var out []interface{}
err := _BalanceChecker.contract.Call(opts, &out, "balancesPerAddress", user, tokens)
if err != nil {
return *new([]*big.Int), err
}
out0 := *abi.ConvertType(out[0], new([]*big.Int)).(*[]*big.Int)
return out0, err
}
// BalancesPerAddress is a free data retrieval call binding the contract method 0x1a55d770.
//
// Solidity: function balancesPerAddress(address user, address[] tokens) view returns(uint256[])
func (_BalanceChecker *BalanceCheckerSession) BalancesPerAddress(user common.Address, tokens []common.Address) ([]*big.Int, error) {
return _BalanceChecker.Contract.BalancesPerAddress(&_BalanceChecker.CallOpts, user, tokens)
}
// BalancesPerAddress is a free data retrieval call binding the contract method 0x1a55d770.
//
// Solidity: function balancesPerAddress(address user, address[] tokens) view returns(uint256[])
func (_BalanceChecker *BalanceCheckerCallerSession) BalancesPerAddress(user common.Address, tokens []common.Address) ([]*big.Int, error) {
return _BalanceChecker.Contract.BalancesPerAddress(&_BalanceChecker.CallOpts, user, tokens)
}
// TokenBalance is a free data retrieval call binding the contract method 0x1049334f.
//
// Solidity: function tokenBalance(address user, address token) view returns(uint256)
func (_BalanceChecker *BalanceCheckerCaller) TokenBalance(opts *bind.CallOpts, user common.Address, token common.Address) (*big.Int, error) {
var out []interface{}
err := _BalanceChecker.contract.Call(opts, &out, "tokenBalance", user, token)
if err != nil {
return *new(*big.Int), err
}
out0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)
return out0, err
}
// TokenBalance is a free data retrieval call binding the contract method 0x1049334f.
//
// Solidity: function tokenBalance(address user, address token) view returns(uint256)
func (_BalanceChecker *BalanceCheckerSession) TokenBalance(user common.Address, token common.Address) (*big.Int, error) {
return _BalanceChecker.Contract.TokenBalance(&_BalanceChecker.CallOpts, user, token)
}
// TokenBalance is a free data retrieval call binding the contract method 0x1049334f.
//
// Solidity: function tokenBalance(address user, address token) view returns(uint256)
func (_BalanceChecker *BalanceCheckerCallerSession) TokenBalance(user common.Address, token common.Address) (*big.Int, error) {
return _BalanceChecker.Contract.TokenBalance(&_BalanceChecker.CallOpts, user, token)
}

View File

@ -0,0 +1,46 @@
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.22 <0.9.0;
// ERC20 contract interface
abstract contract Token {
function balanceOf(address) public view virtual returns (uint);
}
contract BalanceChecker {
function tokenBalance(
address user,
address token
) public view returns (uint) {
return Token(token).balanceOf(user);
}
function balancesPerAddress(
address user,
address[] memory tokens
) public view returns (uint[] memory) {
uint[] memory addrBalances = new uint[](
tokens.length + 1
);
for (uint i = 0; i < tokens.length; i++) {
addrBalances[i] = tokenBalance(user, tokens[i]);
}
addrBalances[tokens.length] = user.balance;
return addrBalances;
}
function balancesHash(
address[] calldata users,
address[] calldata tokens
) external view returns (uint256, bytes32[] memory) {
bytes32[] memory addrBalances = new bytes32[](users.length);
for (uint i = 0; i < users.length; i++) {
addrBalances[i] = keccak256(
abi.encodePacked(balancesPerAddress(users[i], tokens))
);
}
return (block.number, addrBalances);
}
}
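
For reference, each entry balancesHash returns is keccak256 over the packed uint256 balances that balancesPerAddress produces: the token balances in order, followed by the account's native ETH balance. Below is a minimal off-chain Go sketch of that digest, assuming abi.encodePacked pads each uint256 array element to 32 bytes; balancesDigest is a hypothetical helper added here for illustration, not part of this change.

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

// balancesDigest mirrors keccak256(abi.encodePacked(balancesPerAddress(user, tokens))):
// every token balance in order, then the native ETH balance, each as a 32-byte word.
func balancesDigest(tokenBalances []*big.Int, ethBalance *big.Int) common.Hash {
	packed := make([]byte, 0, (len(tokenBalances)+1)*32)
	for _, b := range tokenBalances {
		packed = append(packed, common.LeftPadBytes(b.Bytes(), 32)...)
	}
	packed = append(packed, common.LeftPadBytes(ethBalance.Bytes(), 32)...)
	return crypto.Keccak256Hash(packed)
}

func main() {
	h := balancesDigest([]*big.Int{big.NewInt(1), big.NewInt(0)}, big.NewInt(42))
	fmt.Println(h.Hex()) // flips whenever any of the balances changes
}

Any change in any of those balances flips the per-account hash, which is what lets detectTransfers (further down in this diff) decide with a single eth_call whether an account needs a full block scan.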

View File

@ -0,0 +1,25 @@
package balancechecker
import (
"errors"
"github.com/ethereum/go-ethereum/common"
)
var errorNotAvailableOnChainID = errors.New("BalanceChecker not available for chainID")
var contractDataByChainID = map[uint64]common.Address{
1: common.HexToAddress("0x040EA8bFE441597849A9456182fa46D38B75BC05"), // mainnet
5: common.HexToAddress("0xA5522A3194B78Dd231b64d0ccd6deA6156DCa7C8"), // goerli
10: common.HexToAddress("0x55bD303eA3D50FC982A8a5b43972d7f38D129bbF"), // optimism
42161: common.HexToAddress("0x55bD303eA3D50FC982A8a5b43972d7f38D129bbF"), // arbitrum
777333: common.HexToAddress("0x0000000000000000000000000000000010777333"), // unit tests
}
func ContractAddress(chainID uint64) (common.Address, error) {
contract, exists := contractDataByChainID[chainID]
if !exists {
return *new(common.Address), errorNotAvailableOnChainID
}
return contract, nil
}
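
A minimal usage sketch tying the pieces above together: resolve the deployed address for a chain, build the generated binding, and fetch the per-account hashes the same way detectTransfers below does. The RPC endpoint and the user address are placeholders, not values from this change.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethclient"

	"github.com/status-im/status-go/contracts/balancechecker"
)

func main() {
	const chainID = uint64(1) // mainnet entry in contractDataByChainID

	contractAddr, err := balancechecker.ContractAddress(chainID)
	if err != nil {
		log.Fatal(err)
	}

	// Placeholder endpoint; any bind.ContractBackend for the matching chain works.
	backend, err := ethclient.Dial("https://mainnet.example.invalid")
	if err != nil {
		log.Fatal(err)
	}

	checker, err := balancechecker.NewBalanceChecker(contractAddr, backend)
	if err != nil {
		log.Fatal(err)
	}

	users := []common.Address{common.HexToAddress("0x1234")} // placeholder account
	tokens := []common.Address{}                             // with no tokens, the hash still covers the ETH balance

	blockNum, hashes, err := checker.BalancesHash(&bind.CallOpts{Context: context.Background()}, users, tokens)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("block", blockNum, "hash for users[0]:", common.BytesToHash(hashes[0][:]).Hex())
}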

View File

@ -7,6 +7,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/status-im/status-go/contracts/directory"
"github.com/status-im/status-go/contracts/ethscan"
"github.com/status-im/status-go/contracts/balancechecker"
"github.com/status-im/status-go/contracts/hop"
hopBridge "github.com/status-im/status-go/contracts/hop/bridge"
hopSwap "github.com/status-im/status-go/contracts/hop/swap"
@ -197,6 +198,22 @@ func (c *ContractMaker) NewEthScan(chainID uint64) (*ethscan.BalanceScanner, uin
return scanner, contractCreatedAt, err
}
func (c *ContractMaker) NewBalanceChecker(chainID uint64) (*balancechecker.BalanceChecker, error) {
contractAddr, err := balancechecker.ContractAddress(chainID)
if err != nil {
return nil, err
}
backend, err := c.RPCClient.EthClient(chainID)
if err != nil {
return nil, err
}
return balancechecker.NewBalanceChecker(
contractAddr,
backend,
)
}
func (c *ContractMaker) NewHopL2SaddlSwap(chainID uint64, symbol string) (*hopSwap.HopSwap, error) {
contractAddr, err := hop.L2SaddleSwapContractAddress(chainID, symbol)
if err != nil {

View File

@ -713,7 +713,7 @@ func (c *ClientWithFallback) PendingTransactionCount(ctx context.Context) (uint,
}
func (c *ClientWithFallback) CallContract(ctx context.Context, msg ethereum.CallMsg, blockNumber *big.Int) ([]byte, error) {
rpcstats.CountCall("eth_CallContract")
rpcstats.CountCall("eth_CallContract_" + msg.To.String())
data, err := c.makeCallSingleReturn(
func() (any, error) { return c.main.CallContract(ctx, msg, blockNumber) },

View File

@ -96,7 +96,7 @@ type ManagerInterface interface {
type Manager struct {
db *sql.DB
RPCClient *rpc.Client
contractMaker *contracts.ContractMaker
ContractMaker *contracts.ContractMaker
networkManager *network.Manager
stores []store // Set on init, not changed afterwards
communityTokensDB *communitytokens.Database
@ -161,7 +161,7 @@ func NewTokenManager(
return &Manager{
db: db,
RPCClient: RPCClient,
contractMaker: maker,
ContractMaker: maker,
networkManager: networkManager,
communityManager: communityManager,
stores: stores,
@ -502,7 +502,7 @@ func (tm *Manager) GetList() []*List {
}
func (tm *Manager) DiscoverToken(ctx context.Context, chainID uint64, address common.Address) (*Token, error) {
caller, err := tm.contractMaker.NewERC20(chainID, address)
caller, err := tm.ContractMaker.NewERC20(chainID, address)
if err != nil {
return nil, err
}
@ -693,7 +693,7 @@ func (tm *Manager) GetBalancesAtByChain(parent context.Context, clients map[uint
// Keep the reference to the client. DO NOT USE A LOOP, the client will be overridden in the coroutine
client := clients[clientIdx]
ethScanContract, availableAtBlock, err := tm.contractMaker.NewEthScan(client.NetworkID())
ethScanContract, availableAtBlock, err := tm.ContractMaker.NewEthScan(client.NetworkID())
if err != nil {
log.Error("error scanning contract", "err", err)
return nil, err

View File

@ -20,7 +20,7 @@ func setupTestTokenDB(t *testing.T) (*Manager, func()) {
return &Manager{
db: db,
RPCClient: nil,
contractMaker: nil,
ContractMaker: nil,
networkManager: nil,
stores: nil,
communityTokensDB: nil,

View File

@ -31,8 +31,9 @@ func NewBlockRange() *BlockRange {
}
type ethTokensBlockRanges struct {
eth *BlockRange
tokens *BlockRange
eth *BlockRange
tokens *BlockRange
balanceCheckHash string
}
func newEthTokensBlockRanges() *ethTokensBlockRanges {
@ -40,7 +41,7 @@ func newEthTokensBlockRanges() *ethTokensBlockRanges {
}
func (b *BlockRangeSequentialDAO) getBlockRange(chainID uint64, address common.Address) (blockRange *ethTokensBlockRanges, err error) {
query := `SELECT blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last FROM blocks_ranges_sequential
query := `SELECT blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash FROM blocks_ranges_sequential
WHERE address = ?
AND network_id = ?`
@ -53,8 +54,14 @@ func (b *BlockRangeSequentialDAO) getBlockRange(chainID uint64, address common.A
blockRange = &ethTokensBlockRanges{}
if rows.Next() {
blockRange = newEthTokensBlockRanges()
err = rows.Scan((*bigint.SQLBigInt)(blockRange.eth.Start), (*bigint.SQLBigInt)(blockRange.eth.FirstKnown), (*bigint.SQLBigInt)(blockRange.eth.LastKnown),
(*bigint.SQLBigInt)(blockRange.tokens.Start), (*bigint.SQLBigInt)(blockRange.tokens.FirstKnown), (*bigint.SQLBigInt)(blockRange.tokens.LastKnown))
err = rows.Scan((*bigint.SQLBigInt)(blockRange.eth.Start),
(*bigint.SQLBigInt)(blockRange.eth.FirstKnown),
(*bigint.SQLBigInt)(blockRange.eth.LastKnown),
(*bigint.SQLBigInt)(blockRange.tokens.Start),
(*bigint.SQLBigInt)(blockRange.tokens.FirstKnown),
(*bigint.SQLBigInt)(blockRange.tokens.LastKnown),
&blockRange.balanceCheckHash,
)
if err != nil {
return nil, err
}
@ -88,16 +95,16 @@ func (b *BlockRangeSequentialDAO) upsertRange(chainID uint64, account common.Add
log.Debug("update eth and tokens blocks range", "account", account, "chainID", chainID,
"eth.start", ethBlockRange.Start, "eth.first", ethBlockRange.FirstKnown, "eth.last", ethBlockRange.LastKnown,
"tokens.start", tokensBlockRange.Start, "tokens.first", ethBlockRange.FirstKnown, "eth.last", ethBlockRange.LastKnown)
"tokens.start", tokensBlockRange.Start, "tokens.first", ethBlockRange.FirstKnown, "eth.last", ethBlockRange.LastKnown, "hash", newBlockRange.balanceCheckHash)
upsert, err := b.db.Prepare(`REPLACE INTO blocks_ranges_sequential
(network_id, address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`)
(network_id, address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`)
if err != nil {
return err
}
_, err = upsert.Exec(chainID, account, (*bigint.SQLBigInt)(ethBlockRange.Start), (*bigint.SQLBigInt)(ethBlockRange.FirstKnown), (*bigint.SQLBigInt)(ethBlockRange.LastKnown),
(*bigint.SQLBigInt)(tokensBlockRange.Start), (*bigint.SQLBigInt)(tokensBlockRange.FirstKnown), (*bigint.SQLBigInt)(tokensBlockRange.LastKnown))
(*bigint.SQLBigInt)(tokensBlockRange.Start), (*bigint.SQLBigInt)(tokensBlockRange.FirstKnown), (*bigint.SQLBigInt)(tokensBlockRange.LastKnown), newBlockRange.balanceCheckHash)
return err
}
@ -113,10 +120,10 @@ func (b *BlockRangeSequentialDAO) upsertEthRange(chainID uint64, account common.
blockRange := prepareUpdatedBlockRange(chainID, account, ethTokensBlockRange.eth, newBlockRange)
log.Debug("update eth blocks range", "account", account, "chainID", chainID,
"start", blockRange.Start, "first", blockRange.FirstKnown, "last", blockRange.LastKnown)
"start", blockRange.Start, "first", blockRange.FirstKnown, "last", blockRange.LastKnown, "old hash", ethTokensBlockRange.balanceCheckHash)
upsert, err := b.db.Prepare(`REPLACE INTO blocks_ranges_sequential
(network_id, address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`)
(network_id, address, blk_start, blk_first, blk_last, token_blk_start, token_blk_first, token_blk_last, balance_check_hash) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`)
if err != nil {
return err
}
@ -126,7 +133,7 @@ func (b *BlockRangeSequentialDAO) upsertEthRange(chainID uint64, account common.
}
_, err = upsert.Exec(chainID, account, (*bigint.SQLBigInt)(blockRange.Start), (*bigint.SQLBigInt)(blockRange.FirstKnown), (*bigint.SQLBigInt)(blockRange.LastKnown),
(*bigint.SQLBigInt)(ethTokensBlockRange.tokens.Start), (*bigint.SQLBigInt)(ethTokensBlockRange.tokens.FirstKnown), (*bigint.SQLBigInt)(ethTokensBlockRange.tokens.LastKnown))
(*bigint.SQLBigInt)(ethTokensBlockRange.tokens.Start), (*bigint.SQLBigInt)(ethTokensBlockRange.tokens.FirstKnown), (*bigint.SQLBigInt)(ethTokensBlockRange.tokens.LastKnown), ethTokensBlockRange.balanceCheckHash)
return err
}
@ -142,7 +149,7 @@ func (b *BlockRangeSequentialDAO) updateTokenRange(chainID uint64, account commo
blockRange := prepareUpdatedBlockRange(chainID, account, ethTokensBlockRange.tokens, newBlockRange)
log.Debug("update tokens blocks range", "account", account, "chainID", chainID,
"start", blockRange.Start, "first", blockRange.FirstKnown, "last", blockRange.LastKnown)
"start", blockRange.Start, "first", blockRange.FirstKnown, "last", blockRange.LastKnown, "old hash", ethTokensBlockRange.balanceCheckHash)
update, err := b.db.Prepare(`UPDATE blocks_ranges_sequential SET token_blk_start = ?, token_blk_first = ?, token_blk_last = ? WHERE network_id = ? AND address = ?`)
if err != nil {

View File

@ -6,10 +6,12 @@ import (
"sync"
"time"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/log"
"github.com/status-im/status-go/contracts"
nodetypes "github.com/status-im/status-go/eth-node/types"
"github.com/status-im/status-go/multiaccounts/accounts"
"github.com/status-im/status-go/rpc/chain"
@ -56,6 +58,8 @@ func (ec *errorCounter) Error() error {
type findNewBlocksCommand struct {
*findBlocksCommand
contractMaker *contracts.ContractMaker
iteration int
}
func (c *findNewBlocksCommand) Command() async.Command {
@ -68,27 +72,122 @@ func (c *findNewBlocksCommand) Command() async.Command {
}.Run
}
func (c *findNewBlocksCommand) Run(parent context.Context) (err error) {
headNum, err := getHeadBlockNumber(parent, c.chainClient)
var requestTimeout = 20 * time.Second
func (c *findNewBlocksCommand) detectTransfers(parent context.Context, accounts []common.Address) (*big.Int, []common.Address, error) {
bc, err := c.contractMaker.NewBalanceChecker(c.chainClient.NetworkID())
if err != nil {
log.Error("findNewBlocksCommand getHeadBlockNumber", "error", err, "chain", c.chainClient.NetworkID())
log.Error("findNewBlocksCommand error creating balance checker", "error", err, "chain", c.chainClient.NetworkID())
return nil, nil, err
}
tokens, err := c.tokenManager.GetTokens(c.chainClient.NetworkID())
if err != nil {
return nil, nil, err
}
tokenAddresses := []common.Address{}
nilAddress := common.Address{}
for _, token := range tokens {
if token.Address != nilAddress {
tokenAddresses = append(tokenAddresses, token.Address)
}
}
log.Info("findNewBlocksCommand detectTransfers", "cnt", len(tokenAddresses), "addresses", tokenAddresses)
ctx, cancel := context.WithTimeout(parent, requestTimeout)
defer cancel()
blockNum, hashes, err := bc.BalancesHash(&bind.CallOpts{Context: ctx}, c.accounts, tokenAddresses)
if err != nil {
log.Error("findNewBlocksCommand can't get balances hashes", "error", err)
return nil, nil, err
}
addressesToCheck := []common.Address{}
for idx, account := range accounts {
blockRange, err := c.blockRangeDAO.getBlockRange(c.chainClient.NetworkID(), account)
if err != nil {
log.Error("findNewBlocksCommand can't block range", "error", err, "account", account, "chain", c.chainClient.NetworkID())
return nil, nil, err
}
if blockRange.eth == nil {
blockRange.eth = NewBlockRange()
blockRange.tokens = NewBlockRange()
}
if blockRange.eth.FirstKnown == nil {
blockRange.eth.FirstKnown = blockNum
}
if blockRange.eth.LastKnown == nil {
blockRange.eth.LastKnown = blockNum
}
checkHash := common.BytesToHash(hashes[idx][:])
log.Debug("findNewBlocksCommand comparing hashes", "account", account, "network", c.chainClient.NetworkID(), "old hash", blockRange.balanceCheckHash, "new hash", checkHash.String())
if checkHash.String() != blockRange.balanceCheckHash {
addressesToCheck = append(addressesToCheck, account)
}
blockRange.balanceCheckHash = checkHash.String()
err = c.blockRangeDAO.upsertRange(c.chainClient.NetworkID(), account, blockRange)
if err != nil {
log.Error("findNewBlocksCommand can't update balance check", "error", err, "account", account, "chain", c.chainClient.NetworkID())
return nil, nil, err
}
}
return blockNum, addressesToCheck, nil
}
var nonceCheckIntervalIterations = 30
var logsCheckIntervalIterations = 5
func (c *findNewBlocksCommand) Run(parent context.Context) error {
mnemonicWasNotShown, err := c.accountsDB.GetMnemonicWasNotShown()
if err != nil {
c.error = err
return err
}
// In case this is the first check, skip it, history fetching will do it
if c.fromBlockNumber.Cmp(headNum) >= 0 {
accounts := []common.Address{}
for _, account := range c.accounts {
if mnemonicWasNotShown {
acc, err := c.accountsDB.GetAccountByAddress(nodetypes.Address(account))
if err != nil {
c.error = err
return err
}
if acc.AddressWasNotShown {
log.Info("skip findNewBlocksCommand, mnemonic has not been shown and the address has not been shared yet", "address", account)
continue
}
}
accounts = append(accounts, account)
}
if len(accounts) == 0 {
return nil
}
c.findAndSaveEthBlocks(parent, c.fromBlockNumber, headNum)
c.findAndSaveTokenBlocks(parent, c.fromBlockNumber, headNum)
headNum, accountsWithDetectedChanges, err := c.detectTransfers(parent, accounts)
if err != nil {
log.Error("findNewBlocksCommand error on transfer detection", "error", err, "chain", c.chainClient.NetworkID())
return err
}
if len(accountsWithDetectedChanges) != 0 || c.iteration%nonceCheckIntervalIterations == 0 {
c.findAndSaveEthBlocks(parent, c.fromBlockNumber, headNum, accounts)
}
if len(accountsWithDetectedChanges) != 0 || c.iteration%logsCheckIntervalIterations == 0 {
c.findAndSaveTokenBlocks(parent, c.fromBlockNumber, headNum)
}
c.fromBlockNumber = headNum
c.iteration++
return nil
}
func (c *findNewBlocksCommand) findAndSaveEthBlocks(parent context.Context, fromNum, headNum *big.Int) {
func (c *findNewBlocksCommand) findAndSaveEthBlocks(parent context.Context, fromNum, headNum *big.Int, accounts []common.Address) {
// Check ETH transfers for each account independently
mnemonicWasNotShown, err := c.accountsDB.GetMnemonicWasNotShown()
if err != nil {
@ -96,7 +195,7 @@ func (c *findNewBlocksCommand) findAndSaveEthBlocks(parent context.Context, from
return
}
for _, account := range c.accounts {
for _, account := range accounts {
if mnemonicWasNotShown {
acc, err := c.accountsDB.GetAccountByAddress(nodetypes.Address(account))
if err != nil {
@ -366,7 +465,7 @@ func (c *findBlocksCommand) ERC20ScanByBalance(parent context.Context, account c
}
func (c *findBlocksCommand) checkERC20Tail(parent context.Context, account common.Address) ([]*DBHeader, error) {
log.Debug("checkERC20Tail", "account", account, "to block", c.startBlockNumber, "from", c.resFromBlock.Number)
log.Info("checkERC20Tail", "account", account, "to block", c.startBlockNumber, "from", c.resFromBlock.Number)
tokens, err := c.tokenManager.GetTokens(c.chainClient.NetworkID())
if err != nil {
return nil, err
@ -752,6 +851,7 @@ func newLoadBlocksAndTransfersCommand(accounts []common.Address, db *Database, a
blocksLoadedCh: make(chan []*DBHeader, 100),
omitHistory: omitHistory,
errorCounter: *newErrorCounter("loadBlocksAndTransfersCommand"),
contractMaker: tokenManager.ContractMaker,
}
}
@ -770,6 +870,7 @@ type loadBlocksAndTransfersCommand struct {
tokenManager *token.Manager
blocksLoadedCh chan []*DBHeader
omitHistory bool
contractMaker *contracts.ContractMaker
// Not to be set by the caller
transfersLoaded map[common.Address]bool // For event RecentHistoryReady to be sent only once per account during app lifetime
@ -985,6 +1086,7 @@ func (c *loadBlocksAndTransfersCommand) startFetchingNewBlocks(ctx context.Conte
blocksLoadedCh: blocksLoadedCh,
defaultNodeBlockChunkSize: DefaultNodeBlockChunkSize,
},
contractMaker: c.contractMaker,
}
group := async.NewGroup(ctx)
group.Add(newBlocksCmd.Command())

View File

@ -24,8 +24,11 @@ import (
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/rpc"
"github.com/status-im/status-go/appdatabase"
"github.com/status-im/status-go/contracts"
"github.com/status-im/status-go/contracts/balancechecker"
"github.com/status-im/status-go/contracts/ethscan"
"github.com/status-im/status-go/contracts/ierc20"
ethtypes "github.com/status-im/status-go/eth-node/types"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/server"
"github.com/status-im/status-go/services/wallet/async"
@ -47,14 +50,14 @@ import (
type TestClient struct {
t *testing.T
// [][block, newBalance, nonceDiff]
balances [][]int
outgoingERC20Transfers []testERC20Transfer
incomingERC20Transfers []testERC20Transfer
outgoingERC1155SingleTransfers []testERC20Transfer
incomingERC1155SingleTransfers []testERC20Transfer
balanceHistory map[uint64]*big.Int
tokenBalanceHistory map[common.Address]map[uint64]*big.Int
nonceHistory map[uint64]uint64
balances map[common.Address][][]int
outgoingERC20Transfers map[common.Address][]testERC20Transfer
incomingERC20Transfers map[common.Address][]testERC20Transfer
outgoingERC1155SingleTransfers map[common.Address][]testERC20Transfer
incomingERC1155SingleTransfers map[common.Address][]testERC20Transfer
balanceHistory map[common.Address]map[uint64]*big.Int
tokenBalanceHistory map[common.Address]map[common.Address]map[uint64]*big.Int
nonceHistory map[common.Address]map[uint64]uint64
traceAPICalls bool
printPreparedData bool
rw sync.RWMutex
@ -130,7 +133,7 @@ func (tc *TestClient) BlockByNumber(ctx context.Context, number *big.Int) (*type
func (tc *TestClient) NonceAt(ctx context.Context, account common.Address, blockNumber *big.Int) (uint64, error) {
tc.incCounter("NonceAt")
nonce := tc.nonceHistory[blockNumber.Uint64()]
nonce := tc.nonceHistory[account][blockNumber.Uint64()]
if tc.traceAPICalls {
tc.t.Log("NonceAt", blockNumber, "result:", nonce)
}
@ -165,21 +168,39 @@ func (tc *TestClient) FilterLogs(ctx context.Context, q ethereum.FilterQuery) ([
}
if len(to) > 0 {
allTransfers = append(allTransfers, tc.incomingERC1155SingleTransfers...)
for _, addressHash := range to {
address := &common.Address{}
address.SetBytes(addressHash.Bytes())
allTransfers = append(allTransfers, tc.incomingERC1155SingleTransfers[*address]...)
}
}
if len(from) > 0 {
allTransfers = append(allTransfers, tc.outgoingERC1155SingleTransfers...)
for _, addressHash := range from {
address := &common.Address{}
address.SetBytes(addressHash.Bytes())
allTransfers = append(allTransfers, tc.outgoingERC1155SingleTransfers[*address]...)
}
}
}
if slices.Contains(signatures, erc20TransferSignature) {
from := q.Topics[1]
to := q.Topics[2]
if len(to) > 0 {
allTransfers = append(allTransfers, tc.incomingERC20Transfers...)
for _, addressHash := range to {
address := &common.Address{}
address.SetBytes(addressHash.Bytes())
allTransfers = append(allTransfers, tc.incomingERC20Transfers[*address]...)
}
}
if len(from) > 0 {
allTransfers = append(allTransfers, tc.outgoingERC20Transfers...)
for _, addressHash := range from {
address := &common.Address{}
address.SetBytes(addressHash.Bytes())
allTransfers = append(allTransfers, tc.outgoingERC20Transfers[*address]...)
}
}
}
@ -210,24 +231,33 @@ func (tc *TestClient) FilterLogs(ctx context.Context, q ethereum.FilterQuery) ([
return logs, nil
}
func (tc *TestClient) getBalance(address common.Address, blockNumber *big.Int) *big.Int {
balance := tc.balanceHistory[address][blockNumber.Uint64()]
if balance == nil {
balance = big.NewInt(0)
}
return balance
}
func (tc *TestClient) BalanceAt(ctx context.Context, account common.Address, blockNumber *big.Int) (*big.Int, error) {
tc.incCounter("BalanceAt")
balance := tc.balanceHistory[blockNumber.Uint64()]
balance := tc.getBalance(account, blockNumber)
if tc.traceAPICalls {
tc.t.Log("BalanceAt", blockNumber, "result:", balance)
tc.t.Log("BalanceAt", blockNumber, "account:", account, "result:", balance)
}
return balance, nil
}
func (tc *TestClient) tokenBalanceAt(token common.Address, blockNumber *big.Int) *big.Int {
balance := tc.tokenBalanceHistory[token][blockNumber.Uint64()]
func (tc *TestClient) tokenBalanceAt(account common.Address, token common.Address, blockNumber *big.Int) *big.Int {
balance := tc.tokenBalanceHistory[account][token][blockNumber.Uint64()]
if balance == nil {
balance = big.NewInt(0)
}
if tc.traceAPICalls {
tc.t.Log("tokenBalanceAt", token, blockNumber, "result:", balance)
tc.t.Log("tokenBalanceAt", token, blockNumber, "account:", account, "result:", balance)
}
return balance
}
@ -286,6 +316,7 @@ func (tc *TestClient) ToBigInt() *big.Int {
}
var ethscanAddress = common.HexToAddress("0x0000000000000000000000000000000000777333")
var balanceCheckAddress = common.HexToAddress("0x0000000000000000000000000000000010777333")
func (tc *TestClient) CodeAt(ctx context.Context, contract common.Address, blockNumber *big.Int) ([]byte, error) {
tc.incCounter("CodeAt")
@ -293,7 +324,7 @@ func (tc *TestClient) CodeAt(ctx context.Context, contract common.Address, block
tc.t.Log("CodeAt", contract, blockNumber)
}
if ethscanAddress == contract {
if ethscanAddress == contract || balanceCheckAddress == contract {
return []byte{1}, nil
}
@ -320,10 +351,11 @@ func (tc *TestClient) CallContract(ctx context.Context, call ethereum.CallMsg, b
return nil, err
}
account := args[0].(common.Address)
tokens := args[1].([]common.Address)
balances := []*big.Int{}
for _, token := range tokens {
balances = append(balances, tc.tokenBalanceAt(token, blockNumber))
balances = append(balances, tc.tokenBalanceAt(account, token, blockNumber))
}
results := []ethscan.BalanceScannerResult{}
for _, balance := range balances {
@ -343,14 +375,24 @@ func (tc *TestClient) CallContract(ctx context.Context, call ethereum.CallMsg, b
}
if *call.To == tokenTXXAddress || *call.To == tokenTXYAddress {
balance := tc.tokenBalanceAt(*call.To, blockNumber)
parsed, err := abi.JSON(strings.NewReader(ierc20.IERC20ABI))
if err != nil {
return nil, err
}
method := parsed.Methods["balanceOf"]
params := call.Data[len(method.ID):]
args, err := method.Inputs.Unpack(params)
if err != nil {
tc.t.Log("ERROR on unpacking", err)
return nil, err
}
account := args[0].(common.Address)
balance := tc.tokenBalanceAt(account, *call.To, blockNumber)
output, err := method.Outputs.Pack(balance)
if err != nil {
tc.t.Log("ERROR on packing ERC20 balance", err)
@ -360,29 +402,79 @@ func (tc *TestClient) CallContract(ctx context.Context, call ethereum.CallMsg, b
return output, nil
}
if *call.To == balanceCheckAddress {
parsed, err := abi.JSON(strings.NewReader(balancechecker.BalanceCheckerABI))
if err != nil {
return nil, err
}
method := parsed.Methods["balancesHash"]
params := call.Data[len(method.ID):]
args, err := method.Inputs.Unpack(params)
if err != nil {
tc.t.Log("ERROR on unpacking", err)
return nil, err
}
addresses := args[0].([]common.Address)
tokens := args[1].([]common.Address)
bn := big.NewInt(int64(tc.currentBlock))
hashes := [][32]byte{}
for _, address := range addresses {
balance := tc.getBalance(address, big.NewInt(int64(tc.currentBlock)))
balanceBytes := balance.Bytes()
for _, token := range tokens {
balance := tc.tokenBalanceAt(address, token, bn)
balanceBytes = append(balanceBytes, balance.Bytes()...)
}
hash := [32]byte{}
for i, b := range ethtypes.BytesToHash(balanceBytes).Bytes() {
hash[i] = b
}
hashes = append(hashes, hash)
}
output, err := method.Outputs.Pack(bn, hashes)
if err != nil {
tc.t.Log("ERROR on packing", err)
return nil, err
}
return output, nil
}
return nil, nil
}
func (tc *TestClient) prepareBalanceHistory(toBlock int) {
var currentBlock, currentBalance, currentNonce int
tc.balanceHistory = map[common.Address]map[uint64]*big.Int{}
tc.nonceHistory = map[common.Address]map[uint64]uint64{}
tc.balanceHistory = map[uint64]*big.Int{}
tc.nonceHistory = map[uint64]uint64{}
for address, balances := range tc.balances {
var currentBlock, currentBalance, currentNonce int
if len(tc.balances) == 0 {
tc.balances = append(tc.balances, []int{toBlock + 1, 0, 0})
} else {
lastBlock := tc.balances[len(tc.balances)-1]
tc.balances = append(tc.balances, []int{toBlock + 1, lastBlock[1], 0})
}
for _, change := range tc.balances {
for blockN := currentBlock; blockN < change[0]; blockN++ {
tc.balanceHistory[uint64(blockN)] = big.NewInt(int64(currentBalance))
tc.nonceHistory[uint64(blockN)] = uint64(currentNonce)
tc.balanceHistory[address] = map[uint64]*big.Int{}
tc.nonceHistory[address] = map[uint64]uint64{}
if len(balances) == 0 {
balances = append(balances, []int{toBlock + 1, 0, 0})
} else {
lastBlock := balances[len(balances)-1]
balances = append(balances, []int{toBlock + 1, lastBlock[1], 0})
}
for _, change := range balances {
for blockN := currentBlock; blockN < change[0]; blockN++ {
tc.balanceHistory[address][uint64(blockN)] = big.NewInt(int64(currentBalance))
tc.nonceHistory[address][uint64(blockN)] = uint64(currentNonce)
}
currentBlock = change[0]
currentBalance = change[1]
currentNonce += change[2]
}
currentBlock = change[0]
currentBalance = change[1]
currentNonce += change[2]
}
if tc.printPreparedData {
@ -395,48 +487,74 @@ func (tc *TestClient) prepareBalanceHistory(toBlock int) {
}
func (tc *TestClient) prepareTokenBalanceHistory(toBlock int) {
transfersPerToken := map[common.Address][]testERC20Transfer{}
for _, transfer := range tc.outgoingERC20Transfers {
transfer.amount = new(big.Int).Neg(transfer.amount)
transfer.eventType = walletcommon.Erc20TransferEventType
transfersPerToken[transfer.address] = append(transfersPerToken[transfer.address], transfer)
}
for _, transfer := range tc.incomingERC20Transfers {
transfer.eventType = walletcommon.Erc20TransferEventType
transfersPerToken[transfer.address] = append(transfersPerToken[transfer.address], transfer)
}
for _, transfer := range tc.outgoingERC1155SingleTransfers {
transfer.amount = new(big.Int).Neg(transfer.amount)
transfer.eventType = walletcommon.Erc1155TransferSingleEventType
transfersPerToken[transfer.address] = append(transfersPerToken[transfer.address], transfer)
}
for _, transfer := range tc.incomingERC1155SingleTransfers {
transfer.eventType = walletcommon.Erc1155TransferSingleEventType
transfersPerToken[transfer.address] = append(transfersPerToken[transfer.address], transfer)
}
tc.tokenBalanceHistory = map[common.Address]map[uint64]*big.Int{}
for token, transfers := range transfersPerToken {
sort.Slice(transfers, func(i, j int) bool {
return transfers[i].block.Cmp(transfers[j].block) < 0
})
currentBlock := uint64(0)
currentBalance := big.NewInt(0)
tc.tokenBalanceHistory[token] = map[uint64]*big.Int{}
transfers = append(transfers, testERC20Transfer{big.NewInt(int64(toBlock + 1)), token, big.NewInt(0), walletcommon.Erc20TransferEventType})
transfersPerAddress := map[common.Address]map[common.Address][]testERC20Transfer{}
for account, transfers := range tc.outgoingERC20Transfers {
if _, ok := transfersPerAddress[account]; !ok {
transfersPerAddress[account] = map[common.Address][]testERC20Transfer{}
}
for _, transfer := range transfers {
for blockN := currentBlock; blockN < transfer.block.Uint64(); blockN++ {
tc.tokenBalanceHistory[token][blockN] = new(big.Int).Set(currentBalance)
transfer.amount = new(big.Int).Neg(transfer.amount)
transfer.eventType = walletcommon.Erc20TransferEventType
transfersPerAddress[account][transfer.address] = append(transfersPerAddress[account][transfer.address], transfer)
}
}
for account, transfers := range tc.incomingERC20Transfers {
if _, ok := transfersPerAddress[account]; !ok {
transfersPerAddress[account] = map[common.Address][]testERC20Transfer{}
}
for _, transfer := range transfers {
transfer.eventType = walletcommon.Erc20TransferEventType
transfersPerAddress[account][transfer.address] = append(transfersPerAddress[account][transfer.address], transfer)
}
}
for account, transfers := range tc.outgoingERC1155SingleTransfers {
if _, ok := transfersPerAddress[account]; !ok {
transfersPerAddress[account] = map[common.Address][]testERC20Transfer{}
}
for _, transfer := range transfers {
transfer.amount = new(big.Int).Neg(transfer.amount)
transfer.eventType = walletcommon.Erc1155TransferSingleEventType
transfersPerAddress[account][transfer.address] = append(transfersPerAddress[account][transfer.address], transfer)
}
}
for account, transfers := range tc.incomingERC1155SingleTransfers {
if _, ok := transfersPerAddress[account]; !ok {
transfersPerAddress[account] = map[common.Address][]testERC20Transfer{}
}
for _, transfer := range transfers {
transfer.eventType = walletcommon.Erc1155TransferSingleEventType
transfersPerAddress[account][transfer.address] = append(transfersPerAddress[account][transfer.address], transfer)
}
}
tc.tokenBalanceHistory = map[common.Address]map[common.Address]map[uint64]*big.Int{}
for account, transfersPerToken := range transfersPerAddress {
tc.tokenBalanceHistory[account] = map[common.Address]map[uint64]*big.Int{}
for token, transfers := range transfersPerToken {
sort.Slice(transfers, func(i, j int) bool {
return transfers[i].block.Cmp(transfers[j].block) < 0
})
currentBlock := uint64(0)
currentBalance := big.NewInt(0)
tc.tokenBalanceHistory[token] = map[common.Address]map[uint64]*big.Int{}
transfers = append(transfers, testERC20Transfer{big.NewInt(int64(toBlock + 1)), token, big.NewInt(0), walletcommon.Erc20TransferEventType})
tc.tokenBalanceHistory[account][token] = map[uint64]*big.Int{}
for _, transfer := range transfers {
for blockN := currentBlock; blockN < transfer.block.Uint64(); blockN++ {
tc.tokenBalanceHistory[account][token][blockN] = new(big.Int).Set(currentBalance)
}
currentBlock = transfer.block.Uint64()
currentBalance = new(big.Int).Add(currentBalance, transfer.amount)
}
currentBlock = transfer.block.Uint64()
currentBalance = new(big.Int).Add(currentBalance, transfer.amount)
}
}
if tc.printPreparedData {
@ -916,14 +1034,15 @@ func TestFindBlocksCommand(t *testing.T) {
mediaServer, err := server.NewMediaServer(appdb, nil, nil, db)
require.NoError(t, err)
accountAddress := common.HexToAddress("0x1234")
wdb := NewDB(db)
tc := &TestClient{
t: t,
balances: testCase.balanceChanges,
outgoingERC20Transfers: testCase.outgoingERC20Transfers,
incomingERC20Transfers: testCase.incomingERC20Transfers,
outgoingERC1155SingleTransfers: testCase.outgoingERC1155SingleTransfers,
incomingERC1155SingleTransfers: testCase.incomingERC1155SingleTransfers,
balances: map[common.Address][][]int{accountAddress: testCase.balanceChanges},
outgoingERC20Transfers: map[common.Address][]testERC20Transfer{accountAddress: testCase.outgoingERC20Transfers},
incomingERC20Transfers: map[common.Address][]testERC20Transfer{accountAddress: testCase.incomingERC20Transfers},
outgoingERC1155SingleTransfers: map[common.Address][]testERC20Transfer{accountAddress: testCase.outgoingERC1155SingleTransfers},
incomingERC1155SingleTransfers: map[common.Address][]testERC20Transfer{accountAddress: testCase.incomingERC1155SingleTransfers},
callsCounter: map[string]int{},
}
// tc.traceAPICalls = true
@ -959,7 +1078,7 @@ func TestFindBlocksCommand(t *testing.T) {
accDB, err := accounts.NewDB(appdb)
require.NoError(t, err)
fbc := &findBlocksCommand{
accounts: []common.Address{common.HexToAddress("0x1234")},
accounts: []common.Address{accountAddress},
db: wdb,
blockRangeDAO: &BlockRangeSequentialDAO{wdb.client},
accountsDB: accDB,
@ -1053,9 +1172,9 @@ func TestFetchTransfersForLoadedBlocks(t *testing.T) {
tc := &TestClient{
t: t,
balances: [][]int{},
outgoingERC20Transfers: []testERC20Transfer{},
incomingERC20Transfers: []testERC20Transfer{},
balances: map[common.Address][][]int{},
outgoingERC20Transfers: map[common.Address][]testERC20Transfer{},
incomingERC20Transfers: map[common.Address][]testERC20Transfer{},
callsCounter: map[string]int{},
currentBlock: 100,
}
@ -1103,6 +1222,7 @@ func TestFetchTransfersForLoadedBlocks(t *testing.T) {
tokenManager: tokenManager,
blocksLoadedCh: blockChannel,
omitHistory: true,
contractMaker: tokenManager.ContractMaker,
}
tc.prepareBalanceHistory(int(tc.currentBlock))
@ -1173,9 +1293,9 @@ func TestFetchNewBlocksCommand_findBlocksWithEthTransfers(t *testing.T) {
t.Log("case #", idx+1)
tc := &TestClient{
t: t,
balances: testCase.balanceChanges,
outgoingERC20Transfers: []testERC20Transfer{},
incomingERC20Transfers: []testERC20Transfer{},
balances: map[common.Address][][]int{address: testCase.balanceChanges},
outgoingERC20Transfers: map[common.Address][]testERC20Transfer{},
incomingERC20Transfers: map[common.Address][]testERC20Transfer{},
callsCounter: map[string]int{},
currentBlock: 100,
}
@ -1250,12 +1370,13 @@ func TestFetchNewBlocksCommand(t *testing.T) {
tc := &TestClient{
t: t,
balances: [][]int{},
outgoingERC20Transfers: []testERC20Transfer{},
incomingERC20Transfers: []testERC20Transfer{},
balances: map[common.Address][][]int{},
outgoingERC20Transfers: map[common.Address][]testERC20Transfer{},
incomingERC20Transfers: map[common.Address][]testERC20Transfer{},
callsCounter: map[string]int{},
currentBlock: 1,
}
//tc.printPreparedData = true
client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
client.SetClient(tc.NetworkID(), tc)
@ -1297,6 +1418,7 @@ func TestFetchNewBlocksCommand(t *testing.T) {
blocksLoadedCh: blockChannel,
defaultNodeBlockChunkSize: DefaultNodeBlockChunkSize,
},
contractMaker: tokenManager.ContractMaker,
}
ctx := context.Background()
@ -1321,11 +1443,13 @@ func TestFetchNewBlocksCommand(t *testing.T) {
// Verify that blocks are found and cmd.fromBlockNumber is incremented
tc.resetCounter()
tc.currentBlock = 3
tc.balances = [][]int{
{3, 1, 0},
tc.balances = map[common.Address][][]int{
address1: {{3, 1, 0}},
address2: {{3, 1, 0}},
}
tc.incomingERC20Transfers = []testERC20Transfer{
{big.NewInt(3), tokenTXXAddress, big.NewInt(1), walletcommon.Erc20TransferEventType},
tc.incomingERC20Transfers = map[common.Address][]testERC20Transfer{
address1: {{big.NewInt(3), tokenTXXAddress, big.NewInt(1), walletcommon.Erc20TransferEventType}},
address2: {{big.NewInt(3), tokenTXYAddress, big.NewInt(1), walletcommon.Erc20TransferEventType}},
}
tc.prepareBalanceHistory(int(tc.currentBlock))
tc.prepareTokenBalanceHistory(int(tc.currentBlock))
@ -1376,9 +1500,15 @@ func TestLoadBlocksAndTransfersCommand_StopOnErrorsOverflow(t *testing.T) {
},
}
db, err := helpers.SetupTestMemorySQLDB(walletdatabase.DbInitializer{})
require.NoError(t, err)
client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
maker, _ := contracts.NewContractMaker(client)
cmd := &loadBlocksAndTransfersCommand{
chainClient: tc,
errorCounter: *newErrorCounter("testLoadBlocksAndTransfersCommand"),
chainClient: tc,
errorCounter: *newErrorCounter("testLoadBlocksAndTransfersCommand"),
contractMaker: maker,
}
ctx := context.Background()
@ -1413,6 +1543,9 @@ func TestLoadBlocksAndTransfersCommand_StopOnErrorsOverflowWhenStarted(t *testin
db, err := helpers.SetupTestMemorySQLDB(walletdatabase.DbInitializer{})
require.NoError(t, err)
client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
maker, _ := contracts.NewContractMaker(client)
wdb := NewDB(db)
tc := &TestClient{
t: t,
@ -1430,7 +1563,8 @@ func TestLoadBlocksAndTransfersCommand_StopOnErrorsOverflowWhenStarted(t *testin
wdb.client,
},
},
accountsDB: accDB,
accountsDB: accDB,
contractMaker: maker,
}
ctx := context.Background()
@ -1470,6 +1604,9 @@ func TestLoadBlocksAndTransfersCommand_FiniteFinishedInfiniteRunning(t *testing.
db, err := helpers.SetupTestMemorySQLDB(walletdatabase.DbInitializer{})
require.NoError(t, err)
client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
maker, _ := contracts.NewContractMaker(client)
wdb := NewDB(db)
tc := &TestClient{
t: t,
@ -1487,7 +1624,8 @@ func TestLoadBlocksAndTransfersCommand_FiniteFinishedInfiniteRunning(t *testing.
wdb.client,
},
},
accountsDB: accDB,
accountsDB: accDB,
contractMaker: maker,
}
ctx, cancel := context.WithCancel(context.Background())

View File

@ -1,34 +1,34 @@
// Code generated by go-bindata. DO NOT EDIT.
// Code generated for package migrations by go-bindata DO NOT EDIT. (@generated)
// sources:
// 1691753758_initial.up.sql (5.738kB)
// 1692701329_add_collectibles_and_collections_data_cache.up.sql (1.808kB)
// 1692701339_add_scope_to_pending.up.sql (576B)
// 1694540071_add_collectibles_ownership_update_timestamp.up.sql (349B)
// 1694692748_add_raw_balance_to_token_balances.up.sql (165B)
// 1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql (275B)
// 1695932536_balance_history_v2.up.sql (653B)
// 1696853635_input_data.up.sql (23.14kB)
// 1698117918_add_community_id_to_tokens.up.sql (61B)
// 1698257443_add_community_metadata_to_wallet_db.up.sql (323B)
// 1699987075_add_timestamp_and_state_to_community_data_cache.up.sql (865B)
// 1700414564_add_wallet_connect_pairings_table.up.sql (439B)
// 1701101493_add_token_blocks_range.up.sql (469B)
// 1702467441_wallet_connect_sessions_instead_of_pairings.up.sql (356B)
// 1702577524_add_community_collections_and_collectibles_images_cache.up.sql (210B)
// 1702867707_add_balance_to_collectibles_ownership_cache.up.sql (289B)
// 1703686612_add_color_to_saved_addresses.up.sql (114B)
// 1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql (894B)
// 1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql (73B)
// doc.go (74B)
// 1691753758_initial.up.sql
// 1692701329_add_collectibles_and_collections_data_cache.up.sql
// 1692701339_add_scope_to_pending.up.sql
// 1694540071_add_collectibles_ownership_update_timestamp.up.sql
// 1694692748_add_raw_balance_to_token_balances.up.sql
// 1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql
// 1695932536_balance_history_v2.up.sql
// 1696853635_input_data.up.sql
// 1698117918_add_community_id_to_tokens.up.sql
// 1698257443_add_community_metadata_to_wallet_db.up.sql
// 1699987075_add_timestamp_and_state_to_community_data_cache.up.sql
// 1700414564_add_wallet_connect_pairings_table.up.sql
// 1701101493_add_token_blocks_range.up.sql
// 1702467441_wallet_connect_sessions_instead_of_pairings.up.sql
// 1702577524_add_community_collections_and_collectibles_images_cache.up.sql
// 1702867707_add_balance_to_collectibles_ownership_cache.up.sql
// 1703686612_add_color_to_saved_addresses.up.sql
// 1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql
// 1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql
// 1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql
// doc.go
package migrations
import (
"bytes"
"compress/gzip"
"crypto/sha256"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
@ -38,7 +38,7 @@ import (
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("read %q: %w", name, err)
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
@ -46,7 +46,7 @@ func bindataRead(data []byte, name string) ([]byte, error) {
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("read %q: %w", name, err)
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
@ -56,9 +56,8 @@ func bindataRead(data []byte, name string) ([]byte, error) {
}
type asset struct {
bytes []byte
info os.FileInfo
digest [sha256.Size]byte
bytes []byte
info os.FileInfo
}
type bindataFileInfo struct {
@ -68,21 +67,32 @@ type bindataFileInfo struct {
modTime time.Time
}
// Name return file name
func (fi bindataFileInfo) Name() string {
return fi.name
}
// Size return file size
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
// Mode return file mode
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
// Mode return file modify time
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
// IsDir return file whether a directory
func (fi bindataFileInfo) IsDir() bool {
return false
return fi.mode&os.ModeDir != 0
}
// Sys return file is sys mode
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
@ -102,8 +112,8 @@ func _1691753758_initialUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1691753758_initial.up.sql", size: 5738, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6b, 0x25, 0x31, 0xc8, 0x27, 0x3, 0x6b, 0x9f, 0x15, 0x42, 0x2f, 0x85, 0xfb, 0xe3, 0x6, 0xea, 0xf7, 0x97, 0x12, 0x56, 0x3c, 0x9a, 0x5b, 0x1a, 0xca, 0xb1, 0x23, 0xfa, 0xcd, 0x57, 0x25, 0x5c}}
info := bindataFileInfo{name: "1691753758_initial.up.sql", size: 5738, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@ -122,8 +132,8 @@ func _1692701329_add_collectibles_and_collections_data_cacheUpSql() (*asset, err
return nil, err
}
info := bindataFileInfo{name: "1692701329_add_collectibles_and_collections_data_cache.up.sql", size: 1808, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1, 0x51, 0xf4, 0x2b, 0x92, 0xde, 0x59, 0x65, 0xd8, 0x9b, 0x57, 0xe0, 0xfd, 0x7b, 0x12, 0xb, 0x29, 0x6e, 0x9d, 0xb5, 0x90, 0xe, 0xfa, 0x12, 0x97, 0xd, 0x61, 0x60, 0x7f, 0x32, 0x1d, 0xc3}}
info := bindataFileInfo{name: "1692701329_add_collectibles_and_collections_data_cache.up.sql", size: 1808, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -142,8 +152,8 @@ func _1692701339_add_scope_to_pendingUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1692701339_add_scope_to_pending.up.sql", size: 576, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x36, 0x8a, 0x5e, 0xe2, 0x63, 0x15, 0x37, 0xba, 0x55, 0x18, 0xf3, 0xcc, 0xe0, 0x5, 0x84, 0xe1, 0x5b, 0xe8, 0x1, 0x32, 0x6b, 0x9f, 0x7d, 0x9f, 0xd9, 0x23, 0x6c, 0xa9, 0xb5, 0xdc, 0xf4, 0x93}}
info := bindataFileInfo{name: "1692701339_add_scope_to_pending.up.sql", size: 576, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -162,8 +172,8 @@ func _1694540071_add_collectibles_ownership_update_timestampUpSql() (*asset, err
return nil, err
}
info := bindataFileInfo{name: "1694540071_add_collectibles_ownership_update_timestamp.up.sql", size: 349, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x7f, 0x45, 0xc7, 0xce, 0x79, 0x63, 0xbc, 0x6f, 0x83, 0x5f, 0xe2, 0x3, 0x56, 0xcc, 0x5, 0x2f, 0x85, 0xda, 0x7e, 0xea, 0xf5, 0xd2, 0xac, 0x19, 0xd4, 0xd8, 0x5e, 0xdd, 0xed, 0xe2, 0xa9, 0x97}}
info := bindataFileInfo{name: "1694540071_add_collectibles_ownership_update_timestamp.up.sql", size: 349, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -182,8 +192,8 @@ func _1694692748_add_raw_balance_to_token_balancesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1694692748_add_raw_balance_to_token_balances.up.sql", size: 165, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd4, 0xe0, 0x5b, 0x42, 0xf0, 0x96, 0xa5, 0xf5, 0xed, 0xc0, 0x97, 0x88, 0xb0, 0x6d, 0xfe, 0x7d, 0x97, 0x2e, 0x17, 0xd2, 0x16, 0xbc, 0x2a, 0xf2, 0xcc, 0x67, 0x9e, 0xc5, 0x47, 0xf6, 0x69, 0x1}}
info := bindataFileInfo{name: "1694692748_add_raw_balance_to_token_balances.up.sql", size: 165, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -202,8 +212,8 @@ func _1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSq
return nil, err
}
info := bindataFileInfo{name: "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql", size: 275, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfa, 0x2, 0xa, 0x7f, 0x4b, 0xd1, 0x3, 0xd0, 0x3, 0x29, 0x84, 0x31, 0xed, 0x49, 0x4f, 0xb1, 0x2d, 0xd7, 0x80, 0x41, 0x5b, 0xfa, 0x6, 0xae, 0xb4, 0xf6, 0x6b, 0x49, 0xee, 0x57, 0x33, 0x76}}
info := bindataFileInfo{name: "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql", size: 275, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -222,8 +232,8 @@ func _1695932536_balance_history_v2UpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1695932536_balance_history_v2.up.sql", size: 653, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x37, 0xf4, 0x14, 0x91, 0xf6, 0x5f, 0xc4, 0x9b, 0xb7, 0x83, 0x32, 0x72, 0xbe, 0x82, 0x42, 0x39, 0xa4, 0x3b, 0xc9, 0x78, 0x3d, 0xca, 0xd4, 0xbf, 0xfc, 0x7a, 0x33, 0x1e, 0xcd, 0x9e, 0xe4, 0x85}}
info := bindataFileInfo{name: "1695932536_balance_history_v2.up.sql", size: 653, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -242,8 +252,8 @@ func _1696853635_input_dataUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1696853635_input_data.up.sql", size: 23140, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x89, 0x30, 0x33, 0x33, 0x55, 0xc5, 0x57, 0x2b, 0xaf, 0xef, 0x3d, 0x8d, 0x2a, 0xaa, 0x5c, 0x32, 0xd1, 0xf4, 0xd, 0x4a, 0xd0, 0x33, 0x4a, 0xe8, 0xf6, 0x8, 0x6b, 0x65, 0xcc, 0xba, 0xed, 0x42}}
info := bindataFileInfo{name: "1696853635_input_data.up.sql", size: 23140, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -262,8 +272,8 @@ func _1698117918_add_community_id_to_tokensUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1698117918_add_community_id_to_tokens.up.sql", size: 61, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb3, 0x82, 0xdb, 0xde, 0x3, 0x3, 0xc, 0x67, 0xf3, 0x54, 0xc4, 0xad, 0xd6, 0xce, 0x56, 0xfb, 0xc1, 0x87, 0xd7, 0xda, 0xab, 0xec, 0x1, 0xe1, 0x7d, 0xb3, 0x63, 0xd6, 0xe5, 0x5d, 0x1c, 0x15}}
info := bindataFileInfo{name: "1698117918_add_community_id_to_tokens.up.sql", size: 61, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -282,8 +292,8 @@ func _1698257443_add_community_metadata_to_wallet_dbUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1698257443_add_community_metadata_to_wallet_db.up.sql", size: 323, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x22, 0xd3, 0x4, 0x25, 0xfa, 0x23, 0x1, 0x48, 0x83, 0x26, 0x20, 0xf2, 0x3d, 0xbc, 0xc1, 0xa7, 0x7c, 0x27, 0x7c, 0x1d, 0x63, 0x3, 0xa, 0xd0, 0xce, 0x47, 0x86, 0xdc, 0xa1, 0x3c, 0x2, 0x1c}}
info := bindataFileInfo{name: "1698257443_add_community_metadata_to_wallet_db.up.sql", size: 323, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -302,8 +312,8 @@ func _1699987075_add_timestamp_and_state_to_community_data_cacheUpSql() (*asset,
return nil, err
}
info := bindataFileInfo{name: "1699987075_add_timestamp_and_state_to_community_data_cache.up.sql", size: 865, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc3, 0xee, 0x37, 0xf9, 0x7f, 0x9e, 0xfe, 0x93, 0x66, 0x2b, 0xd, 0x57, 0xf4, 0x89, 0x6c, 0x51, 0xfd, 0x14, 0xe9, 0xcd, 0xab, 0x65, 0xe7, 0xa7, 0x83, 0x7e, 0xe0, 0x5c, 0x14, 0x49, 0xf3, 0xe5}}
info := bindataFileInfo{name: "1699987075_add_timestamp_and_state_to_community_data_cache.up.sql", size: 865, mode: os.FileMode(420), modTime: time.Unix(1700258852, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -322,8 +332,8 @@ func _1700414564_add_wallet_connect_pairings_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1700414564_add_wallet_connect_pairings_table.up.sql", size: 439, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa9, 0x77, 0x5e, 0x19, 0x62, 0x3c, 0x3a, 0x81, 0x16, 0xa0, 0x95, 0x35, 0x62, 0xab, 0x5e, 0x2b, 0xea, 0x11, 0x71, 0x11, 0xd0, 0x9, 0xab, 0x9c, 0xab, 0xf2, 0xdd, 0x5f, 0x88, 0x83, 0x9a, 0x93}}
info := bindataFileInfo{name: "1700414564_add_wallet_connect_pairings_table.up.sql", size: 439, mode: os.FileMode(420), modTime: time.Unix(1701084281, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -342,8 +352,8 @@ func _1701101493_add_token_blocks_rangeUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1701101493_add_token_blocks_range.up.sql", size: 469, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe0, 0x37, 0xfb, 0x1a, 0x6c, 0x8c, 0xa8, 0x1e, 0xa2, 0xa5, 0x1f, 0x90, 0x73, 0x3e, 0x31, 0x5f, 0x48, 0x1e, 0x9a, 0x37, 0x27, 0x1c, 0xc, 0x67, 0x1, 0xcd, 0xec, 0x85, 0x4c, 0x1c, 0x26, 0x52}}
info := bindataFileInfo{name: "1701101493_add_token_blocks_range.up.sql", size: 469, mode: os.FileMode(420), modTime: time.Unix(1701895190, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -362,8 +372,8 @@ func _1702467441_wallet_connect_sessions_instead_of_pairingsUpSql() (*asset, err
return nil, err
}
info := bindataFileInfo{name: "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql", size: 356, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x73, 0x5f, 0x0, 0x60, 0x6, 0x28, 0x76, 0x61, 0x39, 0xdc, 0xa1, 0x84, 0x80, 0x46, 0x8a, 0xe4, 0x42, 0xb5, 0x1f, 0x18, 0x14, 0x23, 0x46, 0xb9, 0x51, 0xf, 0x62, 0xac, 0xc, 0x7, 0x98, 0xe}}
info := bindataFileInfo{name: "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql", size: 356, mode: os.FileMode(420), modTime: time.Unix(1703074936, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -382,8 +392,8 @@ func _1702577524_add_community_collections_and_collectibles_images_cacheUpSql()
return nil, err
}
info := bindataFileInfo{name: "1702577524_add_community_collections_and_collectibles_images_cache.up.sql", size: 210, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x8e, 0x1b, 0x32, 0x2c, 0xfa, 0x11, 0x5e, 0x5e, 0x5d, 0xef, 0x92, 0xa0, 0x29, 0x52, 0xbf, 0x6e, 0xe3, 0x30, 0xe4, 0xdf, 0xdc, 0x5, 0xbe, 0xd1, 0xf8, 0x3e, 0xd9, 0x9b, 0xd6, 0x9b, 0x95, 0x96}}
info := bindataFileInfo{name: "1702577524_add_community_collections_and_collectibles_images_cache.up.sql", size: 210, mode: os.FileMode(420), modTime: time.Unix(1703074936, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -402,8 +412,8 @@ func _1702867707_add_balance_to_collectibles_ownership_cacheUpSql() (*asset, err
return nil, err
}
info := bindataFileInfo{name: "1702867707_add_balance_to_collectibles_ownership_cache.up.sql", size: 289, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6f, 0x63, 0x30, 0x11, 0x22, 0xb9, 0xee, 0xae, 0xb8, 0xc4, 0xe6, 0xd3, 0x7, 0xc, 0xe6, 0xa3, 0x72, 0x8c, 0x6, 0x9d, 0x6c, 0x97, 0x8f, 0xb2, 0xd0, 0x37, 0x69, 0x69, 0x6, 0x7f, 0x67, 0x94}}
info := bindataFileInfo{name: "1702867707_add_balance_to_collectibles_ownership_cache.up.sql", size: 289, mode: os.FileMode(420), modTime: time.Unix(1703074936, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -422,8 +432,8 @@ func _1703686612_add_color_to_saved_addressesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1703686612_add_color_to_saved_addresses.up.sql", size: 114, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb7, 0x6e, 0x8d, 0xc0, 0x49, 0xc, 0xb, 0x66, 0xa0, 0x77, 0x32, 0x76, 0xa8, 0xd0, 0x40, 0xce, 0x67, 0xa, 0x9e, 0x23, 0x36, 0xe, 0xc3, 0xd3, 0x9d, 0xe2, 0xde, 0x60, 0x19, 0xba, 0x44, 0xf1}}
info := bindataFileInfo{name: "1703686612_add_color_to_saved_addresses.up.sql", size: 114, mode: os.FileMode(420), modTime: time.Unix(1704191044, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -442,8 +452,8 @@ func _1704701942_remove_favourite_and_change_primary_key_for_saved_addressesUpSq
return nil, err
}
info := bindataFileInfo{name: "1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql", size: 894, mode: os.FileMode(0644), modTime: time.Unix(1704913465, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x70, 0xd3, 0xcf, 0x90, 0xb2, 0xa, 0x23, 0x41, 0x8a, 0xa5, 0x90, 0x7b, 0x34, 0xec, 0x3b, 0x3f, 0xa9, 0xb1, 0x95, 0xf3, 0x2a, 0xdf, 0xbb, 0x53, 0x57, 0x27, 0x2b, 0x12, 0x84, 0xf4, 0x83, 0xda}}
info := bindataFileInfo{name: "1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql", size: 894, mode: os.FileMode(420), modTime: time.Unix(1704963397, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -462,8 +472,28 @@ func _1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cacheUpSql(
return nil, err
}
info := bindataFileInfo{name: "1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql", size: 73, mode: os.FileMode(0644), modTime: time.Unix(1704913465, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa2, 0xf0, 0x71, 0xb5, 0xaf, 0x22, 0xac, 0x77, 0xdb, 0x6f, 0x62, 0x27, 0x12, 0x46, 0x60, 0x3, 0x59, 0x43, 0x6f, 0x1, 0xdc, 0xe8, 0x6e, 0x89, 0xa5, 0x77, 0x37, 0x36, 0xd9, 0x4e, 0x6d, 0x9b}}
info := bindataFileInfo{name: "1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql", size: 73, mode: os.FileMode(420), modTime: time.Unix(1705491656, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
var __1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x04\xc0\x41\x0a\x02\x31\x0c\x05\xd0\xbd\xa7\xf8\xcc\x35\x5c\x55\x1b\x57\x51\x41\x32\xe0\x2e\x64\x42\xb0\xd2\x52\xd1\xe8\xfd\x7d\x85\x85\x6e\x90\x72\x60\xc2\x36\x5e\xde\x53\x3f\x36\x1f\x91\x9a\xf1\xfe\xc5\xfc\x3e\x6d\xa0\xd4\x8a\xe3\x95\xd7\xf3\x05\x9b\x0d\x9b\x1e\xea\x2d\xbc\x6b\xb3\x6c\x10\xba\x0b\x2a\x9d\xca\xca\x82\x65\xd9\xef\xfe\x01\x00\x00\xff\xff\xc3\x5b\xd9\x6b\x54\x00\x00\x00")
func _1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSqlBytes() ([]byte, error) {
return bindataRead(
__1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSql,
"1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql",
)
}
func _1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSql() (*asset, error) {
bytes, err := _1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSqlBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{name: "1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql", size: 84, mode: os.FileMode(420), modTime: time.Unix(1705560751, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
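For orientation (illustrative only, not part of the generated output): once embedded, the new migration is retrieved through the Asset and AssetInfo accessors defined later in this file, keyed by its exact file name. The example function below is hypothetical.

// exampleLoadNewMigration sketches how the freshly embedded migration would be
// read back through the generated accessors (same package as this file).
func exampleLoadNewMigration() ([]byte, error) {
	const name = "1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql"
	if fi, err := AssetInfo(name); err == nil {
		_ = fi.Size() // 84 bytes, matching the bindataFileInfo above
	}
	return Asset(name) // gzip-decompressed SQL statement
}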
@@ -482,8 +512,8 @@ func docGo() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1704459396, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xde, 0x7c, 0x28, 0xcd, 0x47, 0xf2, 0xfa, 0x7c, 0x51, 0x2d, 0xd8, 0x38, 0xb, 0xb0, 0x34, 0x9d, 0x4c, 0x62, 0xa, 0x9e, 0x28, 0xc3, 0x31, 0x23, 0xd9, 0xbb, 0x89, 0x9f, 0xa0, 0x89, 0x1f, 0xe8}}
info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(420), modTime: time.Unix(1698751811, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
@@ -491,8 +521,8 @@ func docGo() (*asset, error) {
// It returns an error if the asset could not be found or
// could not be loaded.
func Asset(name string) ([]byte, error) {
canonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[canonicalName]; ok {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
@@ -502,12 +532,6 @@ func Asset(name string) ([]byte, error) {
return nil, fmt.Errorf("Asset %s not found", name)
}
// AssetString returns the asset contents as a string (instead of a []byte).
func AssetString(name string) (string, error) {
data, err := Asset(name)
return string(data), err
}
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
func MustAsset(name string) []byte {
@@ -519,18 +543,12 @@ func MustAsset(name string) []byte {
return a
}
// MustAssetString is like AssetString but panics when Asset would return an
// error. It simplifies safe initialization of global variables.
func MustAssetString(name string) string {
return string(MustAsset(name))
}
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func AssetInfo(name string) (os.FileInfo, error) {
canonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[canonicalName]; ok {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
@@ -540,33 +558,6 @@ func AssetInfo(name string) (os.FileInfo, error) {
return nil, fmt.Errorf("AssetInfo %s not found", name)
}
// AssetDigest returns the digest of the file with the given name. It returns an
// error if the asset could not be found or the digest could not be loaded.
func AssetDigest(name string) ([sha256.Size]byte, error) {
canonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[canonicalName]; ok {
a, err := f()
if err != nil {
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s can't read by error: %v", name, err)
}
return a.digest, nil
}
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s not found", name)
}
// Digests returns a map of all known files and their checksums.
func Digests() (map[string][sha256.Size]byte, error) {
mp := make(map[string][sha256.Size]byte, len(_bindata))
for name := range _bindata {
a, err := _bindata[name]()
if err != nil {
return nil, err
}
mp[name] = a.digest
}
return mp, nil
}
// AssetNames returns the names of the assets.
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
@@ -597,32 +588,28 @@ var _bindata = map[string]func() (*asset, error){
"1703686612_add_color_to_saved_addresses.up.sql": _1703686612_add_color_to_saved_addressesUpSql,
"1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql": _1704701942_remove_favourite_and_change_primary_key_for_saved_addressesUpSql,
"1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql": _1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cacheUpSql,
"doc.go": docGo,
"1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql": _1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSql,
"doc.go": docGo,
}
// AssetDebug is true if the assets were built with the debug flag enabled.
const AssetDebug = false
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
//
// data/
// foo.txt
// img/
// a.png
// b.png
//
// then AssetDir("data") would return []string{"foo.txt", "img"},
// AssetDir("data/img") would return []string{"a.png", "b.png"},
// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
canonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(canonicalName, "/")
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
@@ -646,29 +633,30 @@ type bintree struct {
}
var _bintree = &bintree{nil, map[string]*bintree{
"1691753758_initial.up.sql": {_1691753758_initialUpSql, map[string]*bintree{}},
"1692701329_add_collectibles_and_collections_data_cache.up.sql": {_1692701329_add_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1692701339_add_scope_to_pending.up.sql": {_1692701339_add_scope_to_pendingUpSql, map[string]*bintree{}},
"1694540071_add_collectibles_ownership_update_timestamp.up.sql": {_1694540071_add_collectibles_ownership_update_timestampUpSql, map[string]*bintree{}},
"1694692748_add_raw_balance_to_token_balances.up.sql": {_1694692748_add_raw_balance_to_token_balancesUpSql, map[string]*bintree{}},
"1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": {_1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1695932536_balance_history_v2.up.sql": {_1695932536_balance_history_v2UpSql, map[string]*bintree{}},
"1696853635_input_data.up.sql": {_1696853635_input_dataUpSql, map[string]*bintree{}},
"1698117918_add_community_id_to_tokens.up.sql": {_1698117918_add_community_id_to_tokensUpSql, map[string]*bintree{}},
"1698257443_add_community_metadata_to_wallet_db.up.sql": {_1698257443_add_community_metadata_to_wallet_dbUpSql, map[string]*bintree{}},
"1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": {_1699987075_add_timestamp_and_state_to_community_data_cacheUpSql, map[string]*bintree{}},
"1700414564_add_wallet_connect_pairings_table.up.sql": {_1700414564_add_wallet_connect_pairings_tableUpSql, map[string]*bintree{}},
"1701101493_add_token_blocks_range.up.sql": {_1701101493_add_token_blocks_rangeUpSql, map[string]*bintree{}},
"1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": {_1702467441_wallet_connect_sessions_instead_of_pairingsUpSql, map[string]*bintree{}},
"1702577524_add_community_collections_and_collectibles_images_cache.up.sql": {_1702577524_add_community_collections_and_collectibles_images_cacheUpSql, map[string]*bintree{}},
"1702867707_add_balance_to_collectibles_ownership_cache.up.sql": {_1702867707_add_balance_to_collectibles_ownership_cacheUpSql, map[string]*bintree{}},
"1703686612_add_color_to_saved_addresses.up.sql": {_1703686612_add_color_to_saved_addressesUpSql, map[string]*bintree{}},
"1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql": {_1704701942_remove_favourite_and_change_primary_key_for_saved_addressesUpSql, map[string]*bintree{}},
"1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql": {_1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cacheUpSql, map[string]*bintree{}},
"doc.go": {docGo, map[string]*bintree{}},
"1691753758_initial.up.sql": &bintree{_1691753758_initialUpSql, map[string]*bintree{}},
"1692701329_add_collectibles_and_collections_data_cache.up.sql": &bintree{_1692701329_add_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1692701339_add_scope_to_pending.up.sql": &bintree{_1692701339_add_scope_to_pendingUpSql, map[string]*bintree{}},
"1694540071_add_collectibles_ownership_update_timestamp.up.sql": &bintree{_1694540071_add_collectibles_ownership_update_timestampUpSql, map[string]*bintree{}},
"1694692748_add_raw_balance_to_token_balances.up.sql": &bintree{_1694692748_add_raw_balance_to_token_balancesUpSql, map[string]*bintree{}},
"1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": &bintree{_1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1695932536_balance_history_v2.up.sql": &bintree{_1695932536_balance_history_v2UpSql, map[string]*bintree{}},
"1696853635_input_data.up.sql": &bintree{_1696853635_input_dataUpSql, map[string]*bintree{}},
"1698117918_add_community_id_to_tokens.up.sql": &bintree{_1698117918_add_community_id_to_tokensUpSql, map[string]*bintree{}},
"1698257443_add_community_metadata_to_wallet_db.up.sql": &bintree{_1698257443_add_community_metadata_to_wallet_dbUpSql, map[string]*bintree{}},
"1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": &bintree{_1699987075_add_timestamp_and_state_to_community_data_cacheUpSql, map[string]*bintree{}},
"1700414564_add_wallet_connect_pairings_table.up.sql": &bintree{_1700414564_add_wallet_connect_pairings_tableUpSql, map[string]*bintree{}},
"1701101493_add_token_blocks_range.up.sql": &bintree{_1701101493_add_token_blocks_rangeUpSql, map[string]*bintree{}},
"1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": &bintree{_1702467441_wallet_connect_sessions_instead_of_pairingsUpSql, map[string]*bintree{}},
"1702577524_add_community_collections_and_collectibles_images_cache.up.sql": &bintree{_1702577524_add_community_collections_and_collectibles_images_cacheUpSql, map[string]*bintree{}},
"1702867707_add_balance_to_collectibles_ownership_cache.up.sql": &bintree{_1702867707_add_balance_to_collectibles_ownership_cacheUpSql, map[string]*bintree{}},
"1703686612_add_color_to_saved_addresses.up.sql": &bintree{_1703686612_add_color_to_saved_addressesUpSql, map[string]*bintree{}},
"1704701942_remove_favourite_and_change_primary_key_for_saved_addresses.up.sql": &bintree{_1704701942_remove_favourite_and_change_primary_key_for_saved_addressesUpSql, map[string]*bintree{}},
"1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cache.up.sql": &bintree{_1704913491_add_type_and_tx_timestamp_to_collectibles_ownership_cacheUpSql, map[string]*bintree{}},
"1705664490_add_balance_check_fields_blocks_ranges_sequential.up.sql": &bintree{_1705664490_add_balance_check_fields_blocks_ranges_sequentialUpSql, map[string]*bintree{}},
"doc.go": &bintree{docGo, map[string]*bintree{}},
}}
// RestoreAsset restores an asset under the given directory.
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
@@ -682,14 +670,18 @@ func RestoreAsset(dir, name string) error {
if err != nil {
return err
}
err = os.WriteFile(_filePath(dir, name), data, info.Mode())
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
if err != nil {
return err
}
return nil
}
// RestoreAssets restores an asset under the given directory recursively.
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
@@ -707,6 +699,6 @@ func RestoreAssets(dir, name string) error {
}
func _filePath(dir, name string) string {
canonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(canonicalName, "/")...)...)
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}

View File

@@ -0,0 +1 @@
ALTER TABLE blocks_ranges_sequential ADD COLUMN balance_check_hash TEXT DEFAULT "";
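For context, a minimal sketch of how the new balance_check_hash column could be used: the wallet stores the hash returned by a balance check (e.g. the BalanceChecker balancesHash call) and, on the next poll, skips the full per-token scan when the stored value still matches. The chain_id/address key columns, helper names, and query shapes below are assumptions for illustration, not part of this change.

package walletsketch // hypothetical package, for illustration only

import (
	"database/sql"
	"fmt"
)

// loadBalanceCheckHash reads the previously stored hash for one account on one
// chain; an empty string (the column default) means no check has been recorded
// yet. Keying by (chain_id, address) is an assumption of this sketch.
func loadBalanceCheckHash(db *sql.DB, chainID uint64, address string) (string, error) {
	var hash string
	err := db.QueryRow(
		"SELECT balance_check_hash FROM blocks_ranges_sequential WHERE chain_id = ? AND address = ?",
		chainID, address,
	).Scan(&hash)
	if err == sql.ErrNoRows {
		return "", nil // no row yet for this account/chain
	}
	if err != nil {
		return "", fmt.Errorf("load balance_check_hash: %w", err)
	}
	return hash, nil
}

// storeBalanceCheckHash persists the hash returned by the balance check so the
// next poll can skip a full token scan when the value has not changed.
func storeBalanceCheckHash(db *sql.DB, chainID uint64, address, hash string) error {
	_, err := db.Exec(
		"UPDATE blocks_ranges_sequential SET balance_check_hash = ? WHERE chain_id = ? AND address = ?",
		hash, chainID, address,
	)
	return err
}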