fix: correct transfer type detection

Author: Dario Gabriel Lipicar (committed by dlipicar)
Date:   2023-06-19 23:50:49 -03:00
Parent: 7da1ed38d4
Commit: bf54a57780
12 changed files with 657 additions and 410 deletions

View File

@@ -23,6 +23,7 @@ const nodeCfgMigrationDate = 1640111208
 var customSteps = []sqlite.PostStep{
     {Version: 1674136690, CustomMigration: migrateEnsUsernames},
     {Version: 1686048341, CustomMigration: migrateWalletJSONBlobs, RollBackVersion: 1686041510},
+    {Version: 1687193315, CustomMigration: migrateWalletTransferFromToAddresses, RollBackVersion: 1686825075},
 }
 // InitializeDB creates db file at a given path and applies migrations.
@@ -317,14 +318,14 @@ func migrateWalletJSONBlobs(sqlTx *sql.Tx) error {
         }
         if nullableTx.Valid {
-            correctType, tokenID, value, dbAddress := extractToken(entryType, tx, l, nullableL.Valid)
+            correctType, tokenID, value, tokenAddress := extractToken(entryType, tx, l, nullableL.Valid)
             gasPrice := sqlite.BigIntToClampedInt64(tx.GasPrice())
             gasTipCap := sqlite.BigIntToClampedInt64(tx.GasTipCap())
             gasFeeCap := sqlite.BigIntToClampedInt64(tx.GasFeeCap())
             valueStr := sqlite.BigIntToPadded128BitsStr(value)
-            currentRow = append(currentRow, tx.Type(), tx.Protected(), tx.Gas(), gasPrice, gasTipCap, gasFeeCap, valueStr, tx.Nonce(), int64(tx.Size()), dbAddress, (*bigint.SQLBigIntBytes)(tokenID), correctType)
+            currentRow = append(currentRow, tx.Type(), tx.Protected(), tx.Gas(), gasPrice, gasTipCap, gasFeeCap, valueStr, tx.Nonce(), int64(tx.Size()), tokenAddress, (*bigint.SQLBigIntBytes)(tokenID), correctType)
         } else {
             for i := 0; i < 11; i++ {
                 currentRow = append(currentRow, nil)
@@ -370,17 +371,110 @@ func migrateWalletJSONBlobs(sqlTx *sql.Tx) error {
     return nil
 }
-func extractToken(entryType string, tx *types.Transaction, l *types.Log, logValid bool) (correctType w_common.Type, tokenID *big.Int, value *big.Int, dbAddress *string) {
+func extractToken(entryType string, tx *types.Transaction, l *types.Log, logValid bool) (correctType w_common.Type, tokenID *big.Int, value *big.Int, tokenAddress *common.Address) {
     if logValid {
-        var tokenAddress *common.Address
-        correctType, tokenAddress, tokenID, value = w_common.ExtractTokenIdentity(w_common.Type(entryType), l, tx)
-        if tokenAddress != nil {
-            dbAddress = new(string)
-            *dbAddress = tokenAddress.Hex()
-        }
+        correctType, tokenAddress, tokenID, value, _, _ = w_common.ExtractTokenIdentity(w_common.Type(entryType), l, tx)
     } else {
         correctType = w_common.Type(entryType)
         value = new(big.Int).Set(tx.Value())
     }
     return
 }
+func migrateWalletTransferFromToAddresses(sqlTx *sql.Tx) error {
+    var batchEntries [][]interface{}
+    // Extract transfer from/to addresses and add the information into the new columns
+    // Re-extract token address and insert it as blob instead of string
+    newColumnsAndIndexSetup := `
+        ALTER TABLE transfers ADD COLUMN tx_from_address BLOB;
+        ALTER TABLE transfers ADD COLUMN tx_to_address BLOB;`
+    rowIndex := 0
+    mightHaveRows := true
+    _, err := sqlTx.Exec(newColumnsAndIndexSetup)
+    if err != nil {
+        return err
+    }
+    for mightHaveRows {
+        var chainID uint64
+        var hash common.Hash
+        var address common.Address
+        var sender common.Address
+        var entryType string
+        rows, err := sqlTx.Query(`SELECT hash, address, sender, network_id, tx, log, type FROM transfers WHERE tx IS NOT NULL OR receipt IS NOT NULL LIMIT ? OFFSET ?`, batchSize, rowIndex)
+        if err != nil {
+            return err
+        }
+        curProcessed := 0
+        for rows.Next() {
+            tx := &types.Transaction{}
+            l := &types.Log{}
+            // Scan row data into the transaction and receipt objects
+            nullableTx := sqlite.JSONBlob{Data: tx}
+            nullableL := sqlite.JSONBlob{Data: l}
+            err = rows.Scan(&hash, &address, &sender, &chainID, &nullableTx, &nullableL, &entryType)
+            if err != nil {
+                rows.Close()
+                return err
+            }
+            var currentRow []interface{}
+            var tokenAddress *common.Address
+            var txFrom *common.Address
+            var txTo *common.Address
+            if nullableTx.Valid {
+                if nullableL.Valid {
+                    _, tokenAddress, _, _, txFrom, txTo = w_common.ExtractTokenIdentity(w_common.Type(entryType), l, tx)
+                } else {
+                    txFrom = &sender
+                    txTo = tx.To()
+                }
+            }
+            currentRow = append(currentRow, tokenAddress, txFrom, txTo)
+            currentRow = append(currentRow, hash, address, chainID)
+            batchEntries = append(batchEntries, currentRow)
+            curProcessed++
+        }
+        rowIndex += curProcessed
+        // Check if there was an error in the last rows.Next()
+        rows.Close()
+        if err = rows.Err(); err != nil {
+            return err
+        }
+        mightHaveRows = (curProcessed == batchSize)
+        // insert extracted data into the new columns
+        if len(batchEntries) > 0 {
+            var stmt *sql.Stmt
+            stmt, err = sqlTx.Prepare(`UPDATE transfers SET token_address = ?, tx_from_address = ?, tx_to_address = ?
+                WHERE hash = ? AND address = ? AND network_id = ?`)
+            if err != nil {
+                return err
+            }
+            for _, dataEntry := range batchEntries {
+                _, err = stmt.Exec(dataEntry...)
+                if err != nil {
+                    return err
+                }
+            }
+            // Reset placeHolders and batchEntries for the next batch
+            batchEntries = [][]interface{}{}
+        }
+    }
+    return nil
+}
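Note on the new custom migration above: it backfills the two new columns by paging through existing rows with LIMIT/OFFSET, decoding the stored tx/log blobs, and then applying a prepared UPDATE row by row, batch by batch. A condensed sketch of that backfill pattern, with hypothetical table and column names (not the repository's API):

package migrationsketch

import "database/sql"

// backfillInBatches pages through a table, derives new column values from each
// row, and writes them back with a prepared UPDATE -- the same shape as
// migrateWalletTransferFromToAddresses. "items", "payload" and "derived" are
// placeholder names.
func backfillInBatches(tx *sql.Tx, batchSize int) error {
    offset := 0
    for {
        rows, err := tx.Query(`SELECT id, payload FROM items LIMIT ? OFFSET ?`, batchSize, offset)
        if err != nil {
            return err
        }
        type pending struct {
            id      int64
            derived string
        }
        var updates []pending
        processed := 0
        for rows.Next() {
            var id int64
            var payload string
            if err := rows.Scan(&id, &payload); err != nil {
                rows.Close()
                return err
            }
            // Derive the new column value from the existing row data here.
            updates = append(updates, pending{id: id, derived: payload})
            processed++
        }
        rows.Close()
        if err := rows.Err(); err != nil {
            return err
        }
        stmt, err := tx.Prepare(`UPDATE items SET derived = ? WHERE id = ?`)
        if err != nil {
            return err
        }
        for _, u := range updates {
            if _, err := stmt.Exec(u.derived, u.id); err != nil {
                return err
            }
        }
        _ = stmt.Close()
        if processed < batchSize {
            return nil
        }
        offset += processed
    }
}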

View File

@@ -125,6 +125,8 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
     insertTestTransaction := func(index int, txBlob string, receiptBlob string, logBlob string, ethType bool) error {
         indexStr := strconv.Itoa(index)
+        senderStr := strconv.Itoa(index + 1)
         var txValue *string
         if txBlob != "" {
             txValue = &txBlob
@@ -142,9 +144,9 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
             entryType = "erc20"
         }
         _, err = db.Exec(`INSERT OR IGNORE INTO blocks(network_id, address, blk_number, blk_hash) VALUES (?, ?, ?, ?);
-            INSERT INTO transfers (hash, address, network_id, tx, receipt, log, blk_hash, type, blk_number, timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+            INSERT INTO transfers (hash, address, sender, network_id, tx, receipt, log, blk_hash, type, blk_number, timestamp) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
             index, common.HexToAddress(indexStr), index, common.HexToHash(indexStr),
-            common.HexToHash(indexStr), common.HexToAddress(indexStr), index, txValue, receiptValue, logValue, common.HexToHash(indexStr), entryType, index, index)
+            common.HexToHash(indexStr), common.HexToAddress(indexStr), common.HexToAddress(senderStr), index, txValue, receiptValue, logValue, common.HexToHash(indexStr), entryType, index, index)
         return err
     }
@@ -199,7 +201,7 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
     require.False(t, exists)
     // Run test migration 1686048341_transfers_receipt_json_blob_out.<up/down>.sql
-    err = migrations.MigrateTo(db, customSteps, customSteps[1].Version)
+    err = migrations.MigrateTo(db, customSteps, customSteps[2].Version)
     require.NoError(t, err)
     // Validate that the migration was run and transfers table has now status column
@@ -207,22 +209,33 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
     require.NoError(t, err)
     require.True(t, exists)
+    // Run test migration 1687193315.<up/down>.sql
+    err = migrations.MigrateTo(db, customSteps, customSteps[1].Version)
+    require.NoError(t, err)
+    // Validate that the migration was run and transfers table has now txFrom column
+    exists, err = ColumnExists(db, "transfers", "tx_from_address")
+    require.NoError(t, err)
+    require.True(t, exists)
     var (
         status, receiptType, cumulativeGasUsed, gasUsed, txIndex sql.NullInt64
         gasLimit, gasPriceClamped64, gasTipCapClamped64          sql.NullInt64
         gasFeeCapClamped64, accountNonce, size, logIndex, txType sql.NullInt64
         protected                                                sql.NullBool
-        dbContractAddress, amount128Hex                          sql.NullString
-        contractAddress, tokenAddress                            common.Address
-        txHash, blockHash                                        []byte
-        entryType                                                string
-        isTokenIDNull                                            bool
+        amount128Hex                                             sql.NullString
+        contractAddress, tokenAddress                            *common.Address
+        txFrom, txTo                                             *common.Address
+        txHash, blockHash                                        []byte
+        entryType                                                string
+        isTokenIDNull                                            bool
     )
-    var dbTokenAddress sql.NullString
     tokenID := new(big.Int)
     rows, err := db.Query(`SELECT status, receipt_type, tx_hash, log_index, block_hash, cumulative_gas_used, contract_address, gas_used, tx_index,
         tx_type, protected, gas_limit, gas_price_clamped64, gas_tip_cap_clamped64, gas_fee_cap_clamped64, amount_padded128hex, account_nonce, size, token_address, token_id, type,
+        tx_from_address, tx_to_address,
         CASE
             WHEN token_id IS NULL THEN 1
@@ -237,17 +250,11 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
         if rows.Err() != nil {
             return rows.Err()
         }
-        err := rows.Scan(&status, &receiptType, &txHash, &logIndex, &blockHash, &cumulativeGasUsed, &dbContractAddress, &gasUsed, &txIndex,
-            &txType, &protected, &gasLimit, &gasPriceClamped64, &gasTipCapClamped64, &gasFeeCapClamped64, &amount128Hex, &accountNonce, &size, &dbTokenAddress, (*bigint.SQLBigIntBytes)(tokenID), &entryType, &isTokenIDNull)
+        err := rows.Scan(&status, &receiptType, &txHash, &logIndex, &blockHash, &cumulativeGasUsed, &contractAddress, &gasUsed, &txIndex,
+            &txType, &protected, &gasLimit, &gasPriceClamped64, &gasTipCapClamped64, &gasFeeCapClamped64, &amount128Hex, &accountNonce, &size, &tokenAddress, (*bigint.SQLBigIntBytes)(tokenID), &entryType, &txFrom, &txTo, &isTokenIDNull)
         if err != nil {
             return err
         }
-        if dbTokenAddress.Valid {
-            tokenAddress = common.HexToAddress(dbTokenAddress.String)
-        }
-        if dbContractAddress.Valid {
-            contractAddress = common.HexToAddress(dbContractAddress.String)
-        }
         return nil
     }
@@ -262,7 +269,7 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
         require.False(t, amount128Hex.Valid)
         require.False(t, accountNonce.Valid)
         require.False(t, size.Valid)
-        require.Equal(t, common.Address{}, tokenAddress)
+        require.Empty(t, tokenAddress)
         require.True(t, isTokenIDNull)
         require.Equal(t, string(w_common.EthTransfer), entryType)
     } else {
@@ -286,26 +293,29 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
         if expectedEntryType == w_common.EthTransfer {
             require.True(t, amount128Hex.Valid)
             require.Equal(t, *sqlite.BigIntToPadded128BitsStr(tt.Value()), amount128Hex.String)
-            require.False(t, dbTokenAddress.Valid)
             require.True(t, isTokenIDNull)
         } else {
-            actualEntryType, expectedTokenAddress, expectedTokenID, expectedValue := w_common.ExtractTokenIdentity(expectedEntryType, tl, tt)
+            actualEntryType, expectedTokenAddress, expectedTokenID, expectedValue, expectedFrom, expectedTo := w_common.ExtractTokenIdentity(expectedEntryType, tl, tt)
             if actualEntryType == w_common.Erc20Transfer {
                 require.True(t, amount128Hex.Valid)
                 require.Equal(t, *sqlite.BigIntToPadded128BitsStr(expectedValue), amount128Hex.String)
                 require.True(t, isTokenIDNull)
-                require.True(t, dbTokenAddress.Valid)
-                require.Equal(t, *expectedTokenAddress, tokenAddress)
+                require.Equal(t, *expectedTokenAddress, *tokenAddress)
+                require.Equal(t, *expectedFrom, *txFrom)
+                require.Equal(t, *expectedTo, *txTo)
             } else if actualEntryType == w_common.Erc721Transfer {
                 require.False(t, amount128Hex.Valid)
                 require.False(t, isTokenIDNull)
                 require.Equal(t, expectedTokenID, expectedTokenID)
-                require.True(t, dbTokenAddress.Valid)
-                require.Equal(t, *expectedTokenAddress, tokenAddress)
+                require.Equal(t, *expectedTokenAddress, *tokenAddress)
+                require.Equal(t, *expectedFrom, *txFrom)
+                require.Equal(t, *expectedTo, *txTo)
             } else {
                 require.False(t, amount128Hex.Valid)
                 require.True(t, isTokenIDNull)
-                require.False(t, dbTokenAddress.Valid)
+                require.Empty(t, tokenAddress)
+                require.Empty(t, txFrom)
+                require.Empty(t, txTo)
             }
             require.Equal(t, expectedEntryType, actualEntryType)
@@ -320,7 +330,7 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
             require.Equal(t, []byte(nil), txHash)
             require.Equal(t, []byte(nil), blockHash)
             require.False(t, cumulativeGasUsed.Valid)
-            require.Equal(t, common.Address{}, contractAddress)
+            require.Empty(t, contractAddress)
             require.False(t, gasUsed.Valid)
             require.False(t, txIndex.Valid)
         } else {
@@ -332,7 +342,7 @@ func TestMigrateWalletJsonBlobs(t *testing.T) {
             require.Equal(t, tr.BlockHash, common.BytesToHash(blockHash))
             require.True(t, cumulativeGasUsed.Valid)
             require.Equal(t, int64(tr.CumulativeGasUsed), cumulativeGasUsed.Int64)
-            require.Equal(t, tr.ContractAddress, contractAddress)
+            require.Equal(t, tr.ContractAddress, *contractAddress)
             require.True(t, gasUsed.Valid)
             require.Equal(t, int64(tr.GasUsed), gasUsed.Int64)
             require.True(t, txIndex.Valid)

File diff suppressed because it is too large.

View File

@@ -0,0 +1 @@
+-- This migration is in GO code. If GO migration fails this entry serves as rollback to the previous one

View File

@@ -0,0 +1,11 @@
+-- This migration is done in GO code as a custom step.
+-- This file serves as an anchor for the migration system
+-- Check migrateWalletTransferFromToAddresses from appdatabase/database.go
+-- The following steps are done in GO code:
+-- ALTER TABLE transfers ADD COLUMN tx_from_address BLOB;
+-- ALTER TABLE transfers ADD COLUMN tx_to_address BLOB;
+-- Extract transfer from/to addresses and add the information into the new columns
+-- Re-extract token address and insert it as blob instead of string

View File

@@ -30,6 +30,10 @@ const (
     PendingTransactionPT
 )
+var (
+    ZeroAddress = eth.Address{}
+)
 type Entry struct {
     payloadType PayloadType
     transaction *transfer.TransactionIdentity
@@ -326,8 +330,8 @@ const (
             ELSE NULL
         END as tr_type,
-        transfers.sender AS from_address,
-        transfers.address AS to_address,
+        transfers.tx_from_address AS from_address,
+        transfers.tx_to_address AS to_address,
         transfers.amount_padded128hex AS tr_amount,
         NULL AS mt_from_amount,
         NULL AS mt_to_amount,
@@ -345,9 +349,9 @@ const (
         NULL AS to_token_code
     FROM transfers, filter_conditions
     LEFT JOIN
-        filter_addresses from_join ON HEX(transfers.sender) = from_join.address
+        filter_addresses from_join ON HEX(transfers.tx_from_address) = from_join.address
     LEFT JOIN
-        filter_addresses to_join ON HEX(transfers.address) = to_join.address
+        filter_addresses to_join ON HEX(transfers.tx_to_address) = to_join.address
     WHERE transfers.multi_transaction_id = 0
         AND ((startFilterDisabled OR timestamp >= startTimestamp)
         AND (endFilterDisabled OR timestamp <= endTimestamp)
@@ -355,22 +359,23 @@ const (
         AND (filterActivityTypeAll
             OR (filterActivityTypeSend
                 AND (filterAllAddresses
-                    OR (HEX(transfers.sender) IN filter_addresses)
+                    OR (HEX(transfers.tx_from_address) IN filter_addresses)
                 )
             )
             OR (filterActivityTypeReceive
-                AND (filterAllAddresses OR (HEX(transfers.address) IN filter_addresses))
+                AND (filterAllAddresses
+                    OR (HEX(transfers.tx_to_address) IN filter_addresses))
             )
         )
         AND (filterAllAddresses
-            OR (HEX(transfers.sender) IN filter_addresses)
-            OR (HEX(transfers.address) IN filter_addresses)
+            OR (HEX(transfers.tx_from_address) IN filter_addresses)
+            OR (HEX(transfers.tx_to_address) IN filter_addresses)
         )
         AND (filterAllToAddresses
-            OR (HEX(transfers.address) IN filter_to_addresses)
+            OR (HEX(transfers.tx_to_address) IN filter_to_addresses)
         )
         AND (includeAllTokenTypeAssets OR (transfers.type = "eth" AND ("ETH" IN assets_token_codes))
-            OR (transfers.type = "erc20" AND ((transfers.network_id, transfers.token_address) IN assets_erc20)))
+            OR (transfers.type = "erc20" AND ((transfers.network_id, HEX(transfers.token_address)) IN assets_erc20)))
         AND (includeAllNetworks OR (transfers.network_id IN filter_networks))
         AND (filterAllActivityStatus OR ((filterStatusCompleted OR filterStatusFinalized) AND transfers.status = 1)
             OR (filterStatusFailed AND transfers.status = 0)
@@ -523,7 +528,8 @@ func getActivityEntries(ctx context.Context, deps FilterDependencies, addresses
     if sliceChecksCondition(filter.Assets, func(item *Token) bool { return item.TokenType == Erc20 }) {
         assetsERC20 = joinItems(filter.Assets, func(item Token) string {
             if item.TokenType == Erc20 {
-                return fmt.Sprintf("%d, '%s'", item.ChainID, item.Address.Hex())
+                // SQL HEX() (Blob->Hex) conversion returns uppercase digits with no 0x prefix
+                return fmt.Sprintf("%d, '%s'", item.ChainID, strings.ToUpper(item.Address.Hex()[2:]))
             }
             return ""
         })
@@ -594,10 +600,11 @@ func getActivityEntries(ctx context.Context, deps FilterDependencies, addresses
         var timestamp int64
         var dbMtType, dbTrType sql.NullByte
         var toAddress, fromAddress eth.Address
+        var tokenAddress *eth.Address
         var aggregatedStatus int
         var dbTrAmount sql.NullString
         var dbMtFromAmount, dbMtToAmount sql.NullString
-        var tokenAddress, tokenCode, fromTokenCode, toTokenCode sql.NullString
+        var tokenCode, fromTokenCode, toTokenCode sql.NullString
         err := rows.Scan(&transferHash, &pendingHash, &chainID, &multiTxID, &timestamp, &dbMtType, &dbTrType, &fromAddress,
             &toAddress, &dbTrAmount, &dbMtFromAmount, &dbMtToAmount, &aggregatedStatus, &aggregatedCount,
             &tokenAddress, &tokenCode, &fromTokenCode, &toTokenCode)
@@ -630,8 +637,8 @@ func getActivityEntries(ctx context.Context, deps FilterDependencies, addresses
         // Extract tokens
         var involvedToken *Token
-        if tokenAddress.Valid && eth.HexToAddress(tokenAddress.String) != eth.HexToAddress("0x") {
-            involvedToken = &Token{TokenType: Erc20, ChainID: common.ChainID(chainID.Int64), Address: eth.HexToAddress(tokenAddress.String)}
+        if tokenAddress != nil && *tokenAddress != ZeroAddress {
+            involvedToken = &Token{TokenType: Erc20, ChainID: common.ChainID(chainID.Int64), Address: *tokenAddress}
         } else {
             involvedToken = &Token{TokenType: Native, ChainID: common.ChainID(chainID.Int64)}
         }
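The "SQL HEX() (Blob->Hex)" comment added above is what makes the string manipulation necessary: SQLite's HEX() renders a BLOB as uppercase hex digits with no 0x prefix, while go-ethereum's Address.Hex() returns a 0x-prefixed, EIP-55 mixed-case string, so the Go side has to be normalized before it can be compared against HEX(transfers.token_address). A small illustrative sketch of that normalization:

package main

import (
    "encoding/hex"
    "fmt"
    "strings"

    "github.com/ethereum/go-ethereum/common"
)

func main() {
    addr := common.HexToAddress("0xdAC17F958D2ee523a2206206994597C13D831ec7")

    // Address.Hex() -> "0x" + EIP-55 checksummed (mixed-case) hex.
    fmt.Println(addr.Hex())

    // SQLite's HEX(blob) -> uppercase hex, no prefix; normalize the Go value the same way.
    fmt.Println(strings.ToUpper(addr.Hex()[2:]))

    // Equivalent, going through the raw 20 bytes instead of the string form.
    fmt.Println(strings.ToUpper(hex.EncodeToString(addr.Bytes())))
}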

View File

@@ -106,7 +106,7 @@ func fillTestData(t *testing.T, db *sql.DB) (td testData, fromAddresses, toAddre
     // Plain transfer
     td.tr1 = trs[0]
-    transfer.InsertTestTransfer(t, db, &td.tr1)
+    transfer.InsertTestTransfer(t, db, td.tr1.To, &td.tr1)
     // Pending transfer
     td.pendingTr = trs[1]
@@ -124,10 +124,10 @@ func fillTestData(t *testing.T, db *sql.DB) (td testData, fromAddresses, toAddre
     td.multiTx1ID = transfer.InsertTestMultiTransaction(t, db, &td.multiTx1)
     td.multiTx1Tr1.MultiTransactionID = td.multiTx1ID
-    transfer.InsertTestTransfer(t, db, &td.multiTx1Tr1)
+    transfer.InsertTestTransfer(t, db, td.multiTx1Tr1.To, &td.multiTx1Tr1)
     td.multiTx1Tr2.MultiTransactionID = td.multiTx1ID
-    transfer.InsertTestTransfer(t, db, &td.multiTx1Tr2)
+    transfer.InsertTestTransfer(t, db, td.multiTx1Tr2.To, &td.multiTx1Tr2)
     // Send Multitransaction containing 2 x Plain transfers + 1 x Pending transfer
     td.multiTx2Tr1 = trs[3]
@@ -139,10 +139,10 @@ func fillTestData(t *testing.T, db *sql.DB) (td testData, fromAddresses, toAddre
     td.multiTx2ID = transfer.InsertTestMultiTransaction(t, db, &td.multiTx2)
     td.multiTx2Tr1.MultiTransactionID = td.multiTx2ID
-    transfer.InsertTestTransfer(t, db, &td.multiTx2Tr1)
+    transfer.InsertTestTransfer(t, db, td.multiTx2Tr1.To, &td.multiTx2Tr1)
     td.multiTx2Tr2.MultiTransactionID = td.multiTx2ID
-    transfer.InsertTestTransfer(t, db, &td.multiTx2Tr2)
+    transfer.InsertTestTransfer(t, db, td.multiTx2Tr2.To, &td.multiTx2Tr2)
     td.multiTx2PendingTr.MultiTransactionID = td.multiTx2ID
     transfer.InsertTestPendingTransaction(t, db, &td.multiTx2PendingTr)
@@ -253,7 +253,7 @@ func TestGetActivityEntriesWithSameTransactionForSenderAndReceiverInDB(t *testin
     // Ensure they are the oldest transactions (last in the list) and we have a consistent order
     receiverTr.Timestamp--
-    transfer.InsertTestTransfer(t, deps.db, &receiverTr)
+    transfer.InsertTestTransfer(t, deps.db, receiverTr.To, &receiverTr)
     var filter Filter
     entries, err := getActivityEntries(context.Background(), deps, []eth.Address{td.tr1.From, receiverTr.From}, []common.ChainID{}, filter, 0, 10)
@@ -287,7 +287,7 @@ func TestGetActivityEntriesFilterByTime(t *testing.T) {
     // Add 6 extractable transactions with timestamps 6-12
     trs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, deps.db, td.nextIndex, 6)
     for i := range trs {
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     mockTestAccountsWithAddresses(t, deps.db, append(append(append(fromTds, toTds...), fromTrs...), toTrs...))
@@ -395,7 +395,7 @@ func TestGetActivityEntriesCheckOffsetAndLimit(t *testing.T) {
     // Add 10 extractable transactions with timestamps 1-10
     trs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, deps.db, 1, 10)
     for i := range trs {
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     mockTestAccountsWithAddresses(t, deps.db, append(fromTrs, toTrs...))
@@ -526,7 +526,7 @@ func TestGetActivityEntriesFilterByType(t *testing.T) {
             lastMT = transfer.InsertTestMultiTransaction(t, deps.db, &multiTxs[i/2])
         }
         trs[i].MultiTransactionID = lastMT
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     // Test filtering out without address involved
@@ -572,7 +572,7 @@ func TestGetActivityEntriesFilterByAddresses(t *testing.T) {
     td, fromTds, toTds := fillTestData(t, deps.db)
     trs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, deps.db, td.nextIndex, 6)
     for i := range trs {
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     mockTestAccountsWithAddresses(t, deps.db, append(append(append(fromTds, toTds...), fromTrs...), toTrs...))
@@ -642,7 +642,7 @@ func TestGetActivityEntriesFilterByStatus(t *testing.T) {
             transfer.InsertTestPendingTransaction(t, deps.db, &trs[i])
         } else {
             trs[i].Success = i != 3 && i != 6
-            transfer.InsertTestTransfer(t, deps.db, &trs[i])
+            transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
         }
     }
@@ -696,7 +696,7 @@ func TestGetActivityEntriesFilterByTokenType(t *testing.T) {
     for i := range trs {
         tokenAddr := transfer.TestTokens[i].Address
         trs[i].ChainID = common.ChainID(transfer.TestTokens[i].ChainID)
-        transfer.InsertTestTransferWithToken(t, deps.db, &trs[i], tokenAddr)
+        transfer.InsertTestTransferWithToken(t, deps.db, trs[i].To, &trs[i], tokenAddr)
     }
     mockTestAccountsWithAddresses(t, deps.db, append(append(append(fromTds, toTds...), fromTrs...), toTrs...))
@@ -774,7 +774,7 @@ func TestGetActivityEntriesFilterByToAddresses(t *testing.T) {
     // Add 6 extractable transactions
     trs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, deps.db, td.nextIndex, 6)
     for i := range trs {
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     mockTestAccountsWithAddresses(t, deps.db, append(append(append(fromTds, toTds...), fromTrs...), toTrs...))
@@ -809,7 +809,7 @@ func TestGetActivityEntriesFilterByNetworks(t *testing.T) {
     // Add 6 extractable transactions
     trs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, deps.db, td.nextIndex, 6)
     for i := range trs {
-        transfer.InsertTestTransfer(t, deps.db, &trs[i])
+        transfer.InsertTestTransfer(t, deps.db, trs[i].To, &trs[i])
     }
     mockTestAccountsWithAddresses(t, deps.db, append(append(append(fromTds, toTds...), fromTrs...), toTrs...))
@@ -841,7 +841,7 @@ func TestGetActivityEntriesCheckToAndFrom(t *testing.T) {
     // Add extra transactions to test To address
     trs, _, _ := transfer.GenerateTestTransfers(t, deps.db, td.nextIndex, 2)
-    transfer.InsertTestTransfer(t, deps.db, &trs[0])
+    transfer.InsertTestTransfer(t, deps.db, trs[0].To, &trs[0])
     transfer.InsertTestPendingTransaction(t, deps.db, &trs[1])
     addresses := []eth_common.Address{td.tr1.From, td.pendingTr.From,

View File

@@ -38,7 +38,7 @@ func TestGetRecipients(t *testing.T) {
     // Add 6 extractable transactions
     trs, _, toTrs := transfer.GenerateTestTransfers(t, db, 0, 6)
    for i := range trs {
-        transfer.InsertTestTransfer(t, db, &trs[i])
+        transfer.InsertTestTransfer(t, db, trs[i].To, &trs[i])
     }
     entries, hasMore, err := GetRecipients(context.Background(), db, 0, 15)

View File

@@ -237,7 +237,7 @@ func GetEventSignatureHash(signature string) common.Hash {
     return crypto.Keccak256Hash([]byte(signature))
 }
-func ExtractTokenIdentity(dbEntryType Type, log *types.Log, tx *types.Transaction) (correctType Type, tokenAddress *common.Address, tokenID *big.Int, value *big.Int) {
+func ExtractTokenIdentity(dbEntryType Type, log *types.Log, tx *types.Transaction) (correctType Type, tokenAddress *common.Address, txTokenID *big.Int, txValue *big.Int, txFrom *common.Address, txTo *common.Address) {
     // erc721 transfers share signature with erc20 ones, so they both used to be categorized as erc20
     // by the Downloader. We fix this here since they might be mis-categorized in the db.
     if dbEntryType == Erc20Transfer {
@@ -250,16 +250,22 @@ func ExtractTokenIdentity(dbEntryType Type, log *types.Log, tx *types.Transactio
     switch correctType {
     case EthTransfer:
         if tx != nil {
-            value = new(big.Int).Set(tx.Value())
+            txValue = new(big.Int).Set(tx.Value())
         }
     case Erc20Transfer:
         tokenAddress = new(common.Address)
         *tokenAddress = log.Address
-        _, _, value = ParseErc20TransferLog(log)
+        from, to, value := ParseErc20TransferLog(log)
+        txValue = value
+        txFrom = &from
+        txTo = &to
     case Erc721Transfer:
         tokenAddress = new(common.Address)
         *tokenAddress = log.Address
-        _, _, tokenID = ParseErc721TransferLog(log)
+        from, to, tokenID := ParseErc721TransferLog(log)
+        txTokenID = tokenID
+        txFrom = &from
+        txTo = &to
     }
     return
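The comment above ExtractTokenIdentity notes that ERC-721 transfers share the Transfer event signature with ERC-20 ones, which is why rows could be mis-categorized in the db. The usual way to tell the two apart is the log's topic count: ERC-20's Transfer(address,address,uint256) indexes only from/to (3 topics, value in the data field), while ERC-721 also indexes tokenId (4 topics, empty data). A hedged sketch of that check, independent of the repository's ParseErc20TransferLog/ParseErc721TransferLog helpers:

package transfersketch

import (
    "math/big"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/core/types"
)

// classifyTransferLog distinguishes ERC-20 from ERC-721 Transfer logs, which share
// the same topics[0] signature hash but index a different number of parameters.
func classifyTransferLog(l *types.Log) (kind string, from, to common.Address, amountOrTokenID *big.Int) {
    switch len(l.Topics) {
    case 3: // ERC-20: from and to indexed, value carried in Data
        from = common.BytesToAddress(l.Topics[1].Bytes())
        to = common.BytesToAddress(l.Topics[2].Bytes())
        amountOrTokenID = new(big.Int).SetBytes(l.Data)
        return "erc20", from, to, amountOrTokenID
    case 4: // ERC-721: from, to and tokenId all indexed, empty Data
        from = common.BytesToAddress(l.Topics[1].Bytes())
        to = common.BytesToAddress(l.Topics[2].Bytes())
        amountOrTokenID = new(big.Int).SetBytes(l.Topics[3].Bytes())
        return "erc721", from, to, amountOrTokenID
    default:
        return "unknown", from, to, nil
    }
}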

View File

@@ -351,12 +351,29 @@ func (db *Database) InsertBlock(chainID uint64, account common.Address, blockNum
         _ = tx.Rollback()
     }()
-    insert, err := tx.Prepare("INSERT OR IGNORE INTO blocks(network_id, address, blk_number, blk_hash, loaded) VALUES (?, ?, ?, ?, ?)")
+    blockDB := blockDBFields{
+        chainID:     chainID,
+        account:     account,
+        blockNumber: blockNumber,
+        blockHash:   blockHash,
+    }
+    return insertBlockDBFields(tx, blockDB)
+}
+type blockDBFields struct {
+    chainID     uint64
+    account     common.Address
+    blockNumber *big.Int
+    blockHash   common.Hash
+}
+func insertBlockDBFields(creator statementCreator, block blockDBFields) error {
+    insert, err := creator.Prepare("INSERT OR IGNORE INTO blocks(network_id, address, blk_number, blk_hash, loaded) VALUES (?, ?, ?, ?, ?)")
     if err != nil {
         return err
     }
-    _, err = insert.Exec(chainID, account, (*bigint.SQLBigInt)(blockNumber), blockHash, true)
+    _, err = insert.Exec(block.chainID, block.account, (*bigint.SQLBigInt)(block.blockNumber), block.blockHash, true)
     return err
 }
@@ -413,15 +430,7 @@ func insertBlocksWithTransactions(chainID uint64, creator statementCreator, acco
 }
 func updateOrInsertTransfers(chainID uint64, creator statementCreator, transfers []Transfer) error {
-    insert, err := creator.Prepare(`INSERT OR REPLACE INTO transfers
-        (network_id, hash, blk_hash, blk_number, timestamp, address, tx, sender, receipt, log, type, loaded, base_gas_fee, multi_transaction_id,
-        status, receipt_type, tx_hash, log_index, block_hash, cumulative_gas_used, contract_address, gas_used, tx_index,
-        tx_type, protected, gas_limit, gas_price_clamped64, gas_tip_cap_clamped64, gas_fee_cap_clamped64, amount_padded128hex, account_nonce, size, token_address, token_id)
-        VALUES
-        (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`)
-    if err != nil {
-        return err
-    }
+    txsDBFields := make([]transferDBFields, 0, len(transfers))
     for _, t := range transfers {
         var receiptType *uint8
         var txHash, blockHash *common.Hash
@@ -446,22 +455,20 @@ func updateOrInsertTransfers(chainID uint64, creator statementCreator, transfers
         var txProtected *bool
         var txGas, txNonce, txSize *uint64
-        var txGasPrice, txGasTipCap, txGasFeeCap *int64
+        var txGasPrice, txGasTipCap, txGasFeeCap *big.Int
         var txType *uint8
-        var txValue *string
-        var dbAddress *string
+        var txValue *big.Int
+        var tokenAddress *common.Address
         var tokenID *big.Int
+        var txFrom *common.Address
+        var txTo *common.Address
         if t.Transaction != nil {
-            var value *big.Int
             if t.Log != nil {
-                var tokenAddress *common.Address
-                _, tokenAddress, tokenID, value = w_common.ExtractTokenIdentity(t.Type, t.Log, t.Transaction)
-                if tokenAddress != nil {
-                    dbAddress = new(string)
-                    *dbAddress = tokenAddress.Hex()
-                }
+                _, tokenAddress, tokenID, txValue, txFrom, txTo = w_common.ExtractTokenIdentity(t.Type, t.Log, t.Transaction)
             } else {
-                value = new(big.Int).Set(t.Transaction.Value())
+                txValue = new(big.Int).Set(t.Transaction.Value())
+                txFrom = &t.From
+                txTo = t.Transaction.To()
             }
             txType = new(uint8)
@@ -470,21 +477,117 @@ func updateOrInsertTransfers(chainID uint64, creator statementCreator, transfers
             *txProtected = t.Transaction.Protected()
             txGas = new(uint64)
             *txGas = t.Transaction.Gas()
-            txGasPrice = sqlite.BigIntToClampedInt64(t.Transaction.GasPrice())
-            txGasTipCap = sqlite.BigIntToClampedInt64(t.Transaction.GasTipCap())
-            txGasFeeCap = sqlite.BigIntToClampedInt64(t.Transaction.GasFeeCap())
-            txValue = sqlite.BigIntToPadded128BitsStr(value)
+            txGasPrice = t.Transaction.GasPrice()
+            txGasTipCap = t.Transaction.GasTipCap()
+            txGasFeeCap = t.Transaction.GasFeeCap()
             txNonce = new(uint64)
             *txNonce = t.Transaction.Nonce()
             txSize = new(uint64)
             *txSize = uint64(t.Transaction.Size())
         }
-        _, err = insert.Exec(chainID, t.ID, t.BlockHash, (*bigint.SQLBigInt)(t.BlockNumber), t.Timestamp, t.Address, &JSONBlob{t.Transaction}, t.From, &JSONBlob{t.Receipt}, &JSONBlob{t.Log}, t.Type, t.BaseGasFees, t.MultiTransactionID,
-            receiptStatus, receiptType, txHash, logIndex, blockHash, cumulativeGasUsed, contractAddress, gasUsed, transactionIndex,
-            txType, txProtected, txGas, txGasPrice, txGasTipCap, txGasFeeCap, txValue, txNonce, txSize, dbAddress, (*bigint.SQLBigIntBytes)(tokenID))
+        dbFields := transferDBFields{
+            chainID:            chainID,
+            id:                 t.ID,
+            blockHash:          t.BlockHash,
+            blockNumber:        t.BlockNumber,
+            timestamp:          t.Timestamp,
+            address:            t.Address,
+            transaction:        t.Transaction,
+            sender:             t.From,
+            receipt:            t.Receipt,
+            log:                t.Log,
+            transferType:       t.Type,
+            baseGasFees:        t.BaseGasFees,
+            multiTransactionID: t.MultiTransactionID,
+            receiptStatus:      receiptStatus,
+            receiptType:        receiptType,
+            txHash:             txHash,
+            logIndex:           logIndex,
+            receiptBlockHash:   blockHash,
+            cumulativeGasUsed:  cumulativeGasUsed,
+            contractAddress:    contractAddress,
+            gasUsed:            gasUsed,
+            transactionIndex:   transactionIndex,
+            txType:             txType,
+            txProtected:        txProtected,
+            txGas:              txGas,
+            txGasPrice:         txGasPrice,
+            txGasTipCap:        txGasTipCap,
+            txGasFeeCap:        txGasFeeCap,
+            txValue:            txValue,
+            txNonce:            txNonce,
+            txSize:             txSize,
+            tokenAddress:       tokenAddress,
+            tokenID:            tokenID,
+            txFrom:             txFrom,
+            txTo:               txTo,
+        }
+        txsDBFields = append(txsDBFields, dbFields)
+    }
+    return updateOrInsertTransfersDBFields(creator, txsDBFields)
+}
+type transferDBFields struct {
+    chainID            uint64
+    id                 common.Hash
+    blockHash          common.Hash
+    blockNumber        *big.Int
+    timestamp          uint64
+    address            common.Address
+    transaction        *types.Transaction
+    sender             common.Address
+    receipt            *types.Receipt
+    log                *types.Log
+    transferType       w_common.Type
+    baseGasFees        string
+    multiTransactionID MultiTransactionIDType
+    receiptStatus      *uint64
+    receiptType        *uint8
+    txHash             *common.Hash
+    logIndex           *uint
+    receiptBlockHash   *common.Hash
+    cumulativeGasUsed  *uint64
+    contractAddress    *common.Address
+    gasUsed            *uint64
+    transactionIndex   *uint
+    txType             *uint8
+    txProtected        *bool
+    txGas              *uint64
+    txGasPrice         *big.Int
+    txGasTipCap        *big.Int
+    txGasFeeCap        *big.Int
+    txValue            *big.Int
+    txNonce             *uint64
+    txSize             *uint64
+    tokenAddress       *common.Address
+    tokenID            *big.Int
+    txFrom             *common.Address
+    txTo               *common.Address
+}
+func updateOrInsertTransfersDBFields(creator statementCreator, transfers []transferDBFields) error {
+    insert, err := creator.Prepare(`INSERT OR REPLACE INTO transfers
+        (network_id, hash, blk_hash, blk_number, timestamp, address, tx, sender, receipt, log, type, loaded, base_gas_fee, multi_transaction_id,
+        status, receipt_type, tx_hash, log_index, block_hash, cumulative_gas_used, contract_address, gas_used, tx_index,
+        tx_type, protected, gas_limit, gas_price_clamped64, gas_tip_cap_clamped64, gas_fee_cap_clamped64, amount_padded128hex, account_nonce, size, token_address, token_id, tx_from_address, tx_to_address)
+        VALUES
+        (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`)
+    if err != nil {
+        return err
+    }
+    for _, t := range transfers {
+        txGasPrice := sqlite.BigIntToClampedInt64(t.txGasPrice)
+        txGasTipCap := sqlite.BigIntToClampedInt64(t.txGasTipCap)
+        txGasFeeCap := sqlite.BigIntToClampedInt64(t.txGasFeeCap)
+        txValue := sqlite.BigIntToPadded128BitsStr(t.txValue)
+        _, err = insert.Exec(t.chainID, t.id, t.blockHash, (*bigint.SQLBigInt)(t.blockNumber), t.timestamp, t.address, &JSONBlob{t.transaction}, t.sender, &JSONBlob{t.receipt}, &JSONBlob{t.log}, t.transferType, t.baseGasFees, t.multiTransactionID,
+            t.receiptStatus, t.receiptType, t.txHash, t.logIndex, t.receiptBlockHash, t.cumulativeGasUsed, t.contractAddress, t.gasUsed, t.transactionIndex,
+            t.txType, t.txProtected, t.txGas, txGasPrice, txGasTipCap, txGasFeeCap, txValue, t.txNonce, t.txSize, t.tokenAddress, (*bigint.SQLBigIntBytes)(t.tokenID), t.txFrom, t.txTo)
         if err != nil {
-            log.Error("can't save transfer", "b-hash", t.BlockHash, "b-n", t.BlockNumber, "a", t.Address, "h", t.ID)
+            log.Error("can't save transfer", "b-hash", t.blockHash, "b-n", t.blockNumber, "a", t.address, "h", t.id)
             return err
         }
     }

View File

@@ -184,7 +184,7 @@ func TestGetTransfersForIdentities(t *testing.T) {
     trs, _, _ := GenerateTestTransfers(t, db.client, 1, 4)
     for i := range trs {
-        InsertTestTransfer(t, db.client, &trs[i])
+        InsertTestTransfer(t, db.client, trs[i].To, &trs[i])
     }
     entries, err := db.GetTransfersForIdentities(context.Background(), []TransactionIdentity{

View File

@@ -9,9 +9,9 @@ import (
     eth_common "github.com/ethereum/go-ethereum/common"
     "github.com/ethereum/go-ethereum/common/hexutil"
     "github.com/status-im/status-go/services/wallet/common"
+    w_common "github.com/status-im/status-go/services/wallet/common"
     "github.com/status-im/status-go/services/wallet/testutils"
     "github.com/status-im/status-go/services/wallet/token"
-    "github.com/status-im/status-go/sqlite"
     "github.com/stretchr/testify/require"
 )
@@ -205,31 +205,66 @@ var TestTokens = []*token.Token{
 var NativeTokenIndices = []int{0, 1, 2}
-func InsertTestTransfer(t *testing.T, db *sql.DB, tr *TestTransfer) {
+func InsertTestTransfer(t *testing.T, db *sql.DB, address eth_common.Address, tr *TestTransfer) {
     token := TestTokens[int(tr.Timestamp)%len(TestTokens)]
-    InsertTestTransferWithToken(t, db, tr, token.Address)
+    InsertTestTransferWithToken(t, db, address, tr, token.Address)
 }
-func InsertTestTransferWithToken(t *testing.T, db *sql.DB, tr *TestTransfer, tokenAddress eth_common.Address) {
+func InsertTestTransferWithToken(t *testing.T, db *sql.DB, address eth_common.Address, tr *TestTransfer, tokenAddress eth_common.Address) {
+    var (
+        tx *sql.Tx
+    )
+    tx, err := db.Begin()
+    require.NoError(t, err)
+    defer func() {
+        if err == nil {
+            err = tx.Commit()
+            return
+        }
+        _ = tx.Rollback()
+    }()
+    blkHash := eth_common.HexToHash("4")
+    block := blockDBFields{
+        chainID:     uint64(tr.ChainID),
+        account:     tr.To,
+        blockNumber: big.NewInt(tr.BlkNumber),
+        blockHash:   blkHash,
+    }
+    // Respect `FOREIGN KEY(network_id,address,blk_hash)` of `transfers` table
+    err = insertBlockDBFields(tx, block)
+    require.NoError(t, err)
+    receiptStatus := uint64(0)
+    if tr.Success {
+        receiptStatus = 1
+    }
     tokenType := "eth"
     if (tokenAddress != eth_common.Address{}) {
         tokenType = "erc20"
     }
-    // Respect `FOREIGN KEY(network_id,address,blk_hash)` of `transfers` table
-    blkHash := eth_common.HexToHash("4")
-    value := sqlite.Int64ToPadded128BitsStr(tr.Value)
-    _, err := db.Exec(`
-        INSERT OR IGNORE INTO blocks(
-            network_id, address, blk_number, blk_hash
-        ) VALUES (?, ?, ?, ?);
-        INSERT INTO transfers (network_id, hash, address, blk_hash, tx,
-            sender, receipt, log, type, blk_number, timestamp, loaded,
-            multi_transaction_id, base_gas_fee, status, amount_padded128hex, token_address
-        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, 0, ?, ?, ?)`,
-        tr.ChainID, tr.To, tr.BlkNumber, blkHash,
-        tr.ChainID, tr.Hash, tr.To, blkHash, &JSONBlob{}, tr.From, &JSONBlob{}, &JSONBlob{}, tokenType, tr.BlkNumber, tr.Timestamp, tr.MultiTransactionID, tr.Success, value, tokenAddress.Hex())
+    transfer := transferDBFields{
+        chainID:            uint64(tr.ChainID),
+        id:                 tr.Hash,
+        address:            address,
+        blockHash:          blkHash,
+        blockNumber:        big.NewInt(tr.BlkNumber),
+        sender:             tr.From,
+        transferType:       w_common.Type(tokenType),
+        timestamp:          uint64(tr.Timestamp),
+        multiTransactionID: tr.MultiTransactionID,
+        baseGasFees:        "0x0",
+        receiptStatus:      &receiptStatus,
+        txValue:            big.NewInt(tr.Value),
+        txFrom:             &tr.From,
+        txTo:               &tr.To,
+        tokenAddress:       &tokenAddress,
+    }
+    err = updateOrInsertTransfersDBFields(tx, []transferDBFields{transfer})
     require.NoError(t, err)
 }