feat(Wallet) detect uniswapV2 swap events

Part of #10251
Dario Gabriel Lipicar 2023-06-02 17:08:45 -03:00 committed by dlipicar
parent 7557f0c799
commit 2fc79fb9b5
19 changed files with 934 additions and 810 deletions


@ -64,6 +64,7 @@
// 1683627613_accounts_and_keycards_improvements.up.sql (3.64kB)
// 1685041348_settings_table_add_latest_derived_path_column.up.sql (115B)
// 1685440989_update_color_id_accounts.up.sql (918B)
// 1685463947_add_to_asset_to_multitransaction.up.sql (61B)
// doc.go (74B)
package migrations
@ -74,7 +75,6 @@ import (
"crypto/sha256"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
@ -84,7 +84,7 @@ import (
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("read %q: %v", name, err)
return nil, fmt.Errorf("read %q: %w", name, err)
}
var buf bytes.Buffer
@ -92,7 +92,7 @@ func bindataRead(data []byte, name string) ([]byte, error) {
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("read %q: %v", name, err)
return nil, fmt.Errorf("read %q: %w", name, err)
}
if clErr != nil {
return nil, err
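
The only functional change to bindataRead in the hunks above is switching the error verb from %v to %w, so the gzip error is wrapped instead of flattened into a string. A minimal caller-side sketch of why that matters (readAsset and the sentinel below are hypothetical, not part of this diff):

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
)

// readAsset is a stand-in for bindataRead failing on a missing asset.
// Because the error is wrapped with %w, the sentinel survives.
func readAsset(name string) ([]byte, error) {
	return nil, fmt.Errorf("read %q: %w", name, fs.ErrNotExist)
}

func main() {
	_, err := readAsset("doc.go")
	// With %v this would print false; %w keeps the chain intact.
	fmt.Println(errors.Is(err, fs.ErrNotExist)) // true
}
```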
@ -148,7 +148,7 @@ func _1640111208_dummyUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1640111208_dummy.up.sql", size: 258, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1640111208_dummy.up.sql", size: 258, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x3e, 0xf0, 0xae, 0x20, 0x6e, 0x75, 0xd1, 0x36, 0x14, 0xf2, 0x40, 0xe5, 0xd6, 0x7a, 0xc4, 0xa5, 0x72, 0xaa, 0xb5, 0x4d, 0x71, 0x97, 0xb8, 0xe8, 0x95, 0x22, 0x95, 0xa2, 0xac, 0xaf, 0x48, 0x58}}
return a, nil
}
@ -168,7 +168,7 @@ func _1642666031_add_removed_clock_to_bookmarksUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1642666031_add_removed_clock_to_bookmarks.up.sql", size: 117, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1642666031_add_removed_clock_to_bookmarks.up.sql", size: 117, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x84, 0x4e, 0x38, 0x99, 0x7a, 0xc, 0x90, 0x13, 0xec, 0xfe, 0x2f, 0x55, 0xff, 0xb7, 0xb6, 0xaa, 0x96, 0xc6, 0x92, 0x79, 0xcc, 0xee, 0x4e, 0x99, 0x53, 0xfe, 0x1c, 0xbb, 0x32, 0x2, 0xa4, 0x27}}
return a, nil
}
@ -188,7 +188,7 @@ func _1643644541_gif_api_key_settingUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1643644541_gif_api_key_setting.up.sql", size: 108, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1643644541_gif_api_key_setting.up.sql", size: 108, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1b, 0x94, 0x28, 0xfb, 0x66, 0xd1, 0x7c, 0xb8, 0x89, 0xe2, 0xb4, 0x71, 0x65, 0x24, 0x57, 0x22, 0x95, 0x38, 0x97, 0x3, 0x9b, 0xc6, 0xa4, 0x41, 0x7b, 0xba, 0xf7, 0xdb, 0x70, 0xf7, 0x20, 0x3a}}
return a, nil
}
@ -208,7 +208,7 @@ func _1644188994_recent_stickersUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1644188994_recent_stickers.up.sql", size: 79, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1644188994_recent_stickers.up.sql", size: 79, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1e, 0xad, 0xaa, 0x30, 0xbf, 0x4, 0x7, 0xf8, 0xc3, 0x3, 0xb8, 0x97, 0x23, 0x2b, 0xbd, 0x1c, 0x60, 0x69, 0xb0, 0x42, 0x5e, 0x6b, 0xd, 0xa7, 0xa3, 0x6b, 0x2e, 0xdc, 0x70, 0x13, 0x72, 0x7}}
return a, nil
}
@ -228,7 +228,7 @@ func _1646659233_add_address_to_dapp_permisssionUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1646659233_add_address_to_dapp_permisssion.up.sql", size: 700, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1646659233_add_address_to_dapp_permisssion.up.sql", size: 700, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xed, 0xb0, 0x35, 0xcc, 0x2e, 0x16, 0xe6, 0x15, 0x86, 0x2c, 0x37, 0x80, 0xae, 0xa3, 0xc5, 0x31, 0x78, 0x5, 0x9d, 0xcd, 0x7b, 0xeb, 0x5f, 0xf2, 0xb3, 0x74, 0x72, 0xdf, 0xcf, 0x88, 0xb, 0x40}}
return a, nil
}
@ -248,7 +248,7 @@ func _1646841105_add_emoji_accountUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1646841105_add_emoji_account.up.sql", size: 96, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1646841105_add_emoji_account.up.sql", size: 96, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe6, 0x77, 0x29, 0x95, 0x18, 0x64, 0x82, 0x63, 0xe7, 0xaf, 0x6c, 0xa9, 0x15, 0x7d, 0x46, 0xa6, 0xbc, 0xdf, 0xa7, 0xd, 0x2b, 0xd2, 0x2d, 0x97, 0x4d, 0xa, 0x6b, 0xd, 0x6e, 0x90, 0x42, 0x5c}}
return a, nil
}
@ -268,7 +268,7 @@ func _1647278782_display_nameUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647278782_display_name.up.sql", size: 110, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647278782_display_name.up.sql", size: 110, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf4, 0xa1, 0x1f, 0x3e, 0x61, 0x65, 0x8d, 0xff, 0xee, 0xde, 0xc5, 0x91, 0xd9, 0x5c, 0xb5, 0xe2, 0xf0, 0xb7, 0xe7, 0x5c, 0x5c, 0x16, 0x25, 0x89, 0xee, 0x78, 0x12, 0xea, 0x3e, 0x48, 0x41, 0xa6}}
return a, nil
}
@ -288,7 +288,7 @@ func _1647862838_reset_last_backupUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647862838_reset_last_backup.up.sql", size: 37, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647862838_reset_last_backup.up.sql", size: 37, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x21, 0xe3, 0xd5, 0xf6, 0x5f, 0xfe, 0x65, 0xfa, 0x1d, 0x88, 0xf8, 0x5f, 0x24, 0x71, 0x34, 0x68, 0x96, 0x2a, 0x60, 0x87, 0x15, 0x82, 0x4d, 0x8a, 0x59, 0x3d, 0x1f, 0xd8, 0x56, 0xd4, 0xfb, 0xda}}
return a, nil
}
@ -308,7 +308,7 @@ func _1647871652_add_settings_sync_clock_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647871652_add_settings_sync_clock_table.up.sql", size: 1044, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647871652_add_settings_sync_clock_table.up.sql", size: 1044, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd8, 0x58, 0xec, 0x85, 0x90, 0xfa, 0x30, 0x98, 0x98, 0x9a, 0xa6, 0xa8, 0x96, 0x2b, 0x38, 0x93, 0xf3, 0xae, 0x46, 0x74, 0xa4, 0x41, 0x62, 0x9b, 0x2, 0x86, 0xbf, 0xe5, 0x2a, 0xce, 0xe2, 0xc0}}
return a, nil
}
@ -328,7 +328,7 @@ func _1647880168_add_torrent_configUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647880168_add_torrent_config.up.sql", size: 211, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647880168_add_torrent_config.up.sql", size: 211, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1, 0x92, 0x22, 0x37, 0x96, 0xf3, 0xb5, 0x5b, 0x27, 0xd0, 0x7d, 0x43, 0x5, 0x4e, 0x9d, 0xe2, 0x49, 0xbe, 0x86, 0x31, 0xa1, 0x89, 0xff, 0xd6, 0x51, 0xe0, 0x9c, 0xb, 0xda, 0xfc, 0xf2, 0x93}}
return a, nil
}
@ -348,7 +348,7 @@ func _1647882837_add_communities_settings_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647882837_add_communities_settings_table.up.sql", size: 206, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647882837_add_communities_settings_table.up.sql", size: 206, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xbd, 0x87, 0x78, 0x99, 0xd9, 0x5d, 0xbd, 0xf7, 0x57, 0x9c, 0xca, 0x97, 0xbd, 0xb3, 0xe9, 0xb5, 0x89, 0x31, 0x3f, 0xf6, 0x5c, 0x13, 0xb, 0xc3, 0x54, 0x93, 0x18, 0x40, 0x7, 0x82, 0xfe, 0x7e}}
return a, nil
}
@ -368,7 +368,7 @@ func _1647956635_add_waku_messages_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1647956635_add_waku_messages_table.up.sql", size: 266, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1647956635_add_waku_messages_table.up.sql", size: 266, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd1, 0xe, 0xe1, 0xdc, 0xda, 0x2e, 0x89, 0x8d, 0xdc, 0x2a, 0x1c, 0x13, 0xa1, 0xfc, 0xfe, 0xf, 0xb2, 0xb9, 0x85, 0xc8, 0x45, 0xd6, 0xd1, 0x7, 0x5c, 0xa3, 0x8, 0x47, 0x44, 0x6d, 0x96, 0xe0}}
return a, nil
}
@ -388,7 +388,7 @@ func _1648554928_network_testUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1648554928_network_test.up.sql", size: 132, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1648554928_network_test.up.sql", size: 132, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x9a, 0xc5, 0x7f, 0x87, 0xf3, 0x2c, 0xf7, 0xbb, 0xd3, 0x3a, 0x4e, 0x76, 0x88, 0xca, 0xaf, 0x73, 0xce, 0x8f, 0xa1, 0xf6, 0x3d, 0x4d, 0xed, 0x6f, 0x49, 0xf2, 0xfe, 0x56, 0x2a, 0x60, 0x68, 0xca}}
return a, nil
}
@ -408,7 +408,7 @@ func _1649174829_add_visitble_tokenUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1649174829_add_visitble_token.up.sql", size: 84, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1649174829_add_visitble_token.up.sql", size: 84, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa3, 0x22, 0xc0, 0x2b, 0x3f, 0x4f, 0x3d, 0x5e, 0x4c, 0x68, 0x7c, 0xd0, 0x15, 0x36, 0x9f, 0xec, 0xa1, 0x2a, 0x7b, 0xb4, 0xe3, 0xc6, 0xc9, 0xb4, 0x81, 0x50, 0x4a, 0x11, 0x3b, 0x35, 0x7, 0xcf}}
return a, nil
}
@ -428,7 +428,7 @@ func _1649882262_add_derived_from_accountsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1649882262_add_derived_from_accounts.up.sql", size: 110, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1649882262_add_derived_from_accounts.up.sql", size: 110, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x11, 0xb9, 0x44, 0x4d, 0x85, 0x8d, 0x7f, 0xb4, 0xae, 0x4f, 0x5c, 0x66, 0x64, 0xb6, 0xe2, 0xe, 0x3d, 0xad, 0x9d, 0x8, 0x4f, 0xab, 0x6e, 0xa8, 0x7d, 0x76, 0x3, 0xad, 0x96, 0x1, 0xee, 0x5c}}
return a, nil
}
@ -448,7 +448,7 @@ func _1650612625_add_community_message_archive_hashes_tableUpSql() (*asset, erro
return nil, err
}
info := bindataFileInfo{name: "1650612625_add_community_message_archive_hashes_table.up.sql", size: 130, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1650612625_add_community_message_archive_hashes_table.up.sql", size: 130, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x48, 0x31, 0xb3, 0x75, 0x23, 0xe2, 0x45, 0xe, 0x47, 0x1b, 0x35, 0xa5, 0x6e, 0x83, 0x4e, 0x64, 0x7d, 0xd7, 0xa2, 0xda, 0xe9, 0x53, 0xf1, 0x16, 0x86, 0x2c, 0x57, 0xad, 0xfa, 0xca, 0x39, 0xde}}
return a, nil
}
@ -468,7 +468,7 @@ func _1650616788_add_communities_archives_info_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1650616788_add_communities_archives_info_table.up.sql", size: 208, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1650616788_add_communities_archives_info_table.up.sql", size: 208, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd1, 0x4f, 0x80, 0x45, 0xb9, 0xd9, 0x15, 0xe2, 0x78, 0xd0, 0xcb, 0x71, 0xc1, 0x1b, 0xb7, 0x1b, 0x1b, 0x97, 0xfe, 0x47, 0x53, 0x3c, 0x62, 0xbc, 0xdd, 0x3a, 0x94, 0x1a, 0xc, 0x48, 0x76, 0xe}}
return a, nil
}
@ -488,7 +488,7 @@ func _1652715604_add_clock_accountsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1652715604_add_clock_accounts.up.sql", size: 62, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1652715604_add_clock_accounts.up.sql", size: 62, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb6, 0xd9, 0x8d, 0x73, 0xc9, 0xef, 0xfa, 0xb1, 0x4b, 0xa5, 0xf3, 0x5, 0x19, 0x26, 0x46, 0xf8, 0x47, 0x93, 0xdb, 0xac, 0x2, 0xef, 0xf9, 0x71, 0x56, 0x83, 0xe6, 0x2d, 0xb0, 0xd7, 0x83, 0x5c}}
return a, nil
}
@ -508,7 +508,7 @@ func _1653037334_add_notifications_settings_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1653037334_add_notifications_settings_table.up.sql", size: 1276, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1653037334_add_notifications_settings_table.up.sql", size: 1276, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x4b, 0xc4, 0x65, 0xac, 0xa, 0xf2, 0xef, 0xb6, 0x39, 0x3c, 0xc5, 0xb1, 0xb2, 0x9c, 0x86, 0x58, 0xe0, 0x38, 0xcb, 0x57, 0x3c, 0x76, 0x73, 0x87, 0x79, 0x4e, 0xf6, 0xed, 0xb0, 0x8e, 0x9e, 0xa}}
return a, nil
}
@ -528,7 +528,7 @@ func _1654702119_add_mutual_contact_settingsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1654702119_add_mutual_contact_settings.up.sql", size: 78, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1654702119_add_mutual_contact_settings.up.sql", size: 78, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x26, 0x66, 0x67, 0x50, 0xfe, 0xd7, 0xe3, 0x29, 0x8b, 0xff, 0x9d, 0x5a, 0x87, 0xa7, 0x99, 0x6e, 0xd6, 0xcd, 0x2e, 0xbb, 0x17, 0xdf, 0x7f, 0xf7, 0xa3, 0xfa, 0x32, 0x7c, 0x2d, 0x92, 0xc8, 0x74}}
return a, nil
}
@ -548,7 +548,7 @@ func _1655375270_add_clock_field_to_communities_settings_tableUpSql() (*asset, e
return nil, err
}
info := bindataFileInfo{name: "1655375270_add_clock_field_to_communities_settings_table.up.sql", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1655375270_add_clock_field_to_communities_settings_table.up.sql", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x19, 0xc5, 0xc0, 0xf9, 0x84, 0x53, 0xdf, 0x83, 0xcf, 0xb6, 0x40, 0x6d, 0xf5, 0xdc, 0x77, 0x37, 0xb7, 0xe3, 0xa, 0x75, 0xe7, 0x6, 0x11, 0xca, 0x2b, 0x51, 0x92, 0xdd, 0x7d, 0xdb, 0xc3, 0xf5}}
return a, nil
}
@ -568,7 +568,7 @@ func _1655385721_drop_networks_configUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1655385721_drop_networks_config.up.sql", size: 27, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1655385721_drop_networks_config.up.sql", size: 27, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfc, 0xa7, 0x20, 0xbb, 0x67, 0x21, 0xe, 0xc6, 0xc8, 0x21, 0x74, 0xe0, 0xce, 0xc8, 0xe2, 0x2, 0xb4, 0xea, 0xf0, 0xe5, 0xc4, 0x4d, 0xdd, 0xd4, 0x52, 0x31, 0xa9, 0x3d, 0xcd, 0xd8, 0x9b, 0xab}}
return a, nil
}
@ -588,7 +588,7 @@ func _1655385724_networks_chaincolor_shortnameUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1655385724_networks_chainColor_shortName.up.sql", size: 220, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1655385724_networks_chainColor_shortName.up.sql", size: 220, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd9, 0xe7, 0x84, 0xbb, 0x5f, 0xd2, 0x2c, 0x42, 0x88, 0x62, 0x52, 0xb6, 0x58, 0x31, 0xac, 0xc, 0x96, 0x2b, 0x1b, 0xe5, 0x4e, 0x9a, 0x3a, 0xf6, 0xf6, 0xfc, 0xa9, 0x1a, 0x35, 0x62, 0x28, 0x88}}
return a, nil
}
@ -608,7 +608,7 @@ func _1655456688_add_deleted_at_field_to_bookmarks_tableUpSql() (*asset, error)
return nil, err
}
info := bindataFileInfo{name: "1655456688_add_deleted_at_field_to_bookmarks_table.up.sql", size: 69, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1655456688_add_deleted_at_field_to_bookmarks_table.up.sql", size: 69, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe7, 0x9a, 0xbd, 0x9a, 0xc9, 0xf, 0xdf, 0x90, 0x0, 0x5d, 0xea, 0x6e, 0x7d, 0x51, 0x95, 0xcd, 0x90, 0xd3, 0x1a, 0x36, 0x6c, 0xf4, 0xbd, 0xa7, 0x6b, 0xbf, 0xe5, 0xdb, 0xa3, 0x88, 0xe3, 0x50}}
return a, nil
}
@ -628,7 +628,7 @@ func _1655462032_create_bookmarks_deleted_at_indexUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1655462032_create_bookmarks_deleted_at_index.up.sql", size: 81, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1655462032_create_bookmarks_deleted_at_index.up.sql", size: 81, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf, 0x8e, 0x20, 0x6b, 0x14, 0x9e, 0xcd, 0x97, 0xd3, 0xfe, 0x62, 0x3, 0x26, 0x59, 0x1, 0x6c, 0x99, 0xef, 0x6d, 0x21, 0xd4, 0xb5, 0xa3, 0xf4, 0x39, 0x40, 0x54, 0x6, 0xd, 0x60, 0x13, 0x38}}
return a, nil
}
@ -648,7 +648,7 @@ func _1657617291_add_multi_transactions_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1657617291_add_multi_transactions_table.up.sql", size: 412, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1657617291_add_multi_transactions_table.up.sql", size: 412, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x86, 0xb0, 0x4e, 0x8c, 0x4, 0x82, 0xb4, 0x43, 0xaa, 0xd0, 0x16, 0xdd, 0xcb, 0x88, 0x81, 0xac, 0x4, 0x34, 0x1a, 0x8f, 0x2e, 0xc5, 0x69, 0xb, 0xf0, 0x17, 0xf7, 0xe3, 0x9, 0xe, 0x54, 0xe0}}
return a, nil
}
@ -668,7 +668,7 @@ func _1660134042_add_social_links_settings_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1660134042_add_social_links_settings_table.up.sql", size: 334, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1660134042_add_social_links_settings_table.up.sql", size: 334, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x84, 0x73, 0xb6, 0xe7, 0x3f, 0xaa, 0x39, 0x9a, 0x56, 0x56, 0x31, 0xf1, 0x8e, 0x26, 0x23, 0x1, 0xe4, 0xfa, 0x98, 0xfe, 0x78, 0x87, 0x20, 0xcb, 0x52, 0xf4, 0x38, 0x7f, 0xc4, 0x1c, 0x4, 0x22}}
return a, nil
}
@ -688,7 +688,7 @@ func _1660134060_settings_bioUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1660134060_settings_bio.up.sql", size: 91, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1660134060_settings_bio.up.sql", size: 91, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x46, 0x25, 0xa0, 0xa6, 0x47, 0xff, 0xbc, 0x2a, 0x0, 0xff, 0x59, 0x4b, 0xb0, 0xc9, 0x4e, 0x15, 0xe4, 0xd9, 0xda, 0xeb, 0xfe, 0x55, 0x98, 0xc3, 0x9d, 0x96, 0xe7, 0xf, 0xd1, 0x5c, 0x93, 0x73}}
return a, nil
}
@ -708,7 +708,7 @@ func _1660134070_add_wakuv2_storeUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1660134070_add_wakuv2_store.up.sql", size: 269, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1660134070_add_wakuv2_store.up.sql", size: 269, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1d, 0xe6, 0xc3, 0x9, 0xef, 0xdc, 0xae, 0x49, 0x30, 0x78, 0x54, 0xd6, 0xdb, 0xbf, 0xc0, 0x8e, 0x25, 0x8f, 0xfc, 0x67, 0x80, 0x39, 0x37, 0xd4, 0x86, 0xc1, 0x85, 0xc8, 0x99, 0xc4, 0x59, 0xd4}}
return a, nil
}
@ -728,7 +728,7 @@ func _1660134072_waku2_store_messagesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1660134072_waku2_store_messages.up.sql", size: 497, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1660134072_waku2_store_messages.up.sql", size: 497, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x3e, 0xeb, 0xb4, 0xa0, 0xa1, 0x2b, 0xcb, 0x4c, 0x3c, 0xc6, 0xd0, 0xe8, 0x96, 0xe3, 0x96, 0xf1, 0x4f, 0x1f, 0xe0, 0xe7, 0x1f, 0x85, 0xa3, 0xe, 0xf7, 0x52, 0x56, 0x63, 0x2b, 0xb0, 0x87, 0x7b}}
return a, nil
}
@ -748,7 +748,7 @@ func _1662365868_add_key_uid_accountsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1662365868_add_key_uid_accounts.up.sql", size: 68, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1662365868_add_key_uid_accounts.up.sql", size: 68, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc6, 0xd8, 0x2f, 0x2f, 0x3b, 0xa8, 0xbd, 0x6d, 0xf6, 0x87, 0x7e, 0xd2, 0xf1, 0xa2, 0xf7, 0x81, 0x6a, 0x23, 0x10, 0xbc, 0xbf, 0x5b, 0xe7, 0x2b, 0x9c, 0xa9, 0x8a, 0x18, 0xbb, 0xd0, 0x86, 0x91}}
return a, nil
}
@ -768,7 +768,7 @@ func _1662447680_add_keypairs_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1662447680_add_keypairs_table.up.sql", size: 218, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1662447680_add_keypairs_table.up.sql", size: 218, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xdc, 0x25, 0xa9, 0xc7, 0x63, 0x27, 0x97, 0x35, 0x5f, 0x6b, 0xab, 0x26, 0xcb, 0xf9, 0xbd, 0x5e, 0xac, 0x3, 0xa0, 0x5e, 0xb9, 0x71, 0xa3, 0x1f, 0xb3, 0x4f, 0x7f, 0x79, 0x28, 0x48, 0xbe, 0xc}}
return a, nil
}
@ -788,7 +788,7 @@ func _1662460056_move_favourites_to_saved_addressesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1662460056_move_favourites_to_saved_addresses.up.sql", size: 233, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1662460056_move_favourites_to_saved_addresses.up.sql", size: 233, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x10, 0xa2, 0x8c, 0xa3, 0xec, 0xad, 0xdf, 0xc3, 0x48, 0x5, 0x9b, 0x50, 0x25, 0x59, 0xae, 0x7d, 0xee, 0x58, 0xd2, 0x41, 0x27, 0xf2, 0x22, 0x2e, 0x9a, 0xb9, 0x4a, 0xcc, 0x38, 0x6e, 0x3a, 0xb2}}
return a, nil
}
@ -808,7 +808,7 @@ func _1662738097_add_base_fee_transactionUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1662738097_add_base_fee_transaction.up.sql", size: 112, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1662738097_add_base_fee_transaction.up.sql", size: 112, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6b, 0xfb, 0x10, 0xae, 0xfc, 0x77, 0x70, 0x98, 0x6f, 0xec, 0xaa, 0xcd, 0x7, 0xc7, 0x74, 0x23, 0xc, 0xd5, 0x1e, 0x82, 0xdd, 0xfe, 0xff, 0x3b, 0xd2, 0x49, 0x10, 0x5b, 0x30, 0xc, 0x2d, 0xb0}}
return a, nil
}
@ -828,7 +828,7 @@ func _1662972194_add_keypairs_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1662972194_add_keypairs_table.up.sql", size: 345, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1662972194_add_keypairs_table.up.sql", size: 345, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xab, 0x76, 0xf2, 0x86, 0xe1, 0x7e, 0xe9, 0x47, 0x32, 0x48, 0xd5, 0x6b, 0xe5, 0xd, 0xab, 0xb7, 0xf1, 0xd4, 0xf1, 0xad, 0x38, 0xa6, 0x11, 0xe7, 0xce, 0x5c, 0x11, 0x11, 0xf, 0x47, 0xb2, 0x4}}
return a, nil
}
@ -848,7 +848,7 @@ func _1664392661_add_third_party_id_to_waku_messagesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1664392661_add_third_party_id_to_waku_messages.up.sql", size: 70, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1664392661_add_third_party_id_to_waku_messages.up.sql", size: 70, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfd, 0x67, 0x66, 0x9e, 0x66, 0x74, 0xce, 0x1c, 0xb, 0x1b, 0x9d, 0xd5, 0xfc, 0x65, 0xe, 0x83, 0x90, 0x4c, 0x61, 0x4e, 0x6b, 0xe7, 0x86, 0xbe, 0x36, 0x4f, 0x91, 0x36, 0x4, 0x47, 0x7b, 0x82}}
return a, nil
}
@ -868,7 +868,7 @@ func _1664783660_add_sync_info_to_saved_addressesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1664783660_add_sync_info_to_saved_addresses.up.sql", size: 388, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1664783660_add_sync_info_to_saved_addresses.up.sql", size: 388, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x67, 0x7c, 0x3a, 0x95, 0x4e, 0x55, 0xb2, 0xbd, 0xb4, 0x18, 0x93, 0xc1, 0xcf, 0x9f, 0x12, 0xbb, 0x49, 0x8a, 0x2a, 0x6a, 0x2a, 0x7f, 0xad, 0x44, 0xc3, 0xf, 0x3a, 0x79, 0x18, 0xb9, 0x4c, 0x64}}
return a, nil
}
@ -888,7 +888,7 @@ func _1668109917_wakunodesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1668109917_wakunodes.up.sql", size: 99, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1668109917_wakunodes.up.sql", size: 99, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x29, 0xaa, 0x9e, 0x2, 0x66, 0x85, 0x69, 0xa8, 0xd9, 0xe2, 0x4b, 0x8d, 0x2a, 0x9c, 0xdf, 0xd2, 0xef, 0x64, 0x58, 0xe3, 0xa6, 0xe7, 0xc1, 0xd1, 0xc8, 0x9c, 0xc0, 0x2c, 0x1, 0xa8, 0x7b, 0x81}}
return a, nil
}
@ -908,7 +908,7 @@ func _1670249678_display_name_to_settings_sync_clock_tableUpSql() (*asset, error
return nil, err
}
info := bindataFileInfo{name: "1670249678_display_name_to_settings_sync_clock_table.up.sql", size: 83, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1670249678_display_name_to_settings_sync_clock_table.up.sql", size: 83, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x39, 0x18, 0xdc, 0xc4, 0x1f, 0x79, 0x22, 0x16, 0x4d, 0xdf, 0x6c, 0x66, 0xd5, 0xa4, 0x88, 0x5d, 0x5, 0x37, 0xa7, 0x41, 0x5, 0x50, 0xae, 0x12, 0xfa, 0x7e, 0x89, 0x24, 0x5c, 0xae, 0x30, 0xfc}}
return a, nil
}
@ -928,7 +928,7 @@ func _1670836810_add_imported_flag_to_community_archive_hashesUpSql() (*asset, e
return nil, err
}
info := bindataFileInfo{name: "1670836810_add_imported_flag_to_community_archive_hashes.up.sql", size: 144, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1670836810_add_imported_flag_to_community_archive_hashes.up.sql", size: 144, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6f, 0xf, 0xf0, 0xbd, 0xfe, 0x63, 0x25, 0x8f, 0x5e, 0x46, 0x4b, 0x45, 0x31, 0x8b, 0x3e, 0xd8, 0x6b, 0x5d, 0x9d, 0x6d, 0x10, 0x9a, 0x87, 0x4b, 0x18, 0xc6, 0x39, 0x81, 0x6e, 0xe4, 0x75, 0xfb}}
return a, nil
}
@ -948,7 +948,7 @@ func _1671438731_add_magnetlink_uri_to_communities_archive_infoUpSql() (*asset,
return nil, err
}
info := bindataFileInfo{name: "1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql", size: 86, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql", size: 86, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xda, 0x8b, 0x4b, 0xd6, 0xd8, 0xe2, 0x3d, 0xf7, 0x6b, 0xcd, 0x1e, 0x70, 0x9, 0x2e, 0x35, 0x4, 0x61, 0xc3, 0xb5, 0x9d, 0xc5, 0x27, 0x21, 0xa, 0x5a, 0xd6, 0x3e, 0xa6, 0x24, 0xa2, 0x12, 0xdf}}
return a, nil
}
@ -968,7 +968,7 @@ func _1672933930_switcher_cardUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1672933930_switcher_card.up.sql", size: 162, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1672933930_switcher_card.up.sql", size: 162, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x39, 0xba, 0xdc, 0xbb, 0x40, 0x4, 0xf2, 0x10, 0xdf, 0xb4, 0xd2, 0x80, 0x8a, 0x74, 0x4d, 0xf6, 0xbc, 0x50, 0x7, 0xd, 0x22, 0x7f, 0xc4, 0xaf, 0xaa, 0xde, 0xdc, 0x71, 0xe9, 0x42, 0x98, 0x36}}
return a, nil
}
@ -988,7 +988,7 @@ func _1674056187_add_price_cacheUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1674056187_add_price_cache.up.sql", size: 255, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1674056187_add_price_cache.up.sql", size: 255, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb7, 0x79, 0x6a, 0x9b, 0x28, 0xd1, 0x22, 0xf0, 0x84, 0x76, 0x40, 0x39, 0x49, 0x15, 0x5d, 0xaa, 0xfd, 0x11, 0xff, 0x13, 0x27, 0x42, 0x12, 0xfa, 0x82, 0xe6, 0x7a, 0xf0, 0x5e, 0x1f, 0xe3, 0xba}}
return a, nil
}
@ -1008,7 +1008,7 @@ func _1674136690_ens_usernamesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1674136690_ens_usernames.up.sql", size: 98, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1674136690_ens_usernames.up.sql", size: 98, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x81, 0x7a, 0xf3, 0xa8, 0x88, 0x99, 0xd6, 0x9c, 0x69, 0x48, 0x3c, 0x10, 0xda, 0x72, 0xdc, 0x14, 0xd, 0x6e, 0x8c, 0x82, 0x92, 0x2d, 0x2c, 0xee, 0x4c, 0x70, 0xa4, 0xdc, 0x5c, 0x5, 0x2, 0xc3}}
return a, nil
}
@ -1028,7 +1028,7 @@ func _1674232431_add_balance_historyUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1674232431_add_balance_history.up.sql", size: 698, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1674232431_add_balance_history.up.sql", size: 698, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf7, 0xb5, 0x18, 0xca, 0x4a, 0x93, 0xbb, 0x6f, 0xa4, 0xee, 0xe4, 0x3e, 0xff, 0x6a, 0x4b, 0xe2, 0xe1, 0x61, 0x28, 0xee, 0xc5, 0x26, 0x57, 0x61, 0x5e, 0x6d, 0x44, 0x1e, 0x85, 0x43, 0x70, 0xa2}}
return a, nil
}
@ -1048,7 +1048,7 @@ func _1676368933_keypairs_to_keycardsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1676368933_keypairs_to_keycards.up.sql", size: 639, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1676368933_keypairs_to_keycards.up.sql", size: 639, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x81, 0x93, 0x27, 0x2, 0xf0, 0x37, 0x81, 0x65, 0xa4, 0xb3, 0x5b, 0x60, 0x36, 0x95, 0xfc, 0x81, 0xf0, 0x3b, 0x7c, 0xc3, 0x2c, 0x85, 0xbd, 0x38, 0x46, 0xa4, 0x95, 0x4a, 0x6, 0x3e, 0x74, 0xd5}}
return a, nil
}
@ -1068,7 +1068,7 @@ func _1676951398_add_currency_format_cacheUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1676951398_add_currency_format_cache.up.sql", size: 291, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1676951398_add_currency_format_cache.up.sql", size: 291, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf9, 0xa3, 0x76, 0x35, 0xca, 0xf, 0xe8, 0xdf, 0xd9, 0x61, 0xf9, 0xed, 0xfc, 0x6d, 0xf5, 0xe, 0x11, 0x88, 0xbd, 0x14, 0x92, 0xc6, 0x57, 0x53, 0xe, 0xcd, 0x52, 0xf4, 0xa9, 0xb1, 0xdd, 0xfd}}
return a, nil
}
@ -1088,7 +1088,7 @@ func _1676968196_keycards_add_clock_columnUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1676968196_keycards_add_clock_column.up.sql", size: 73, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1676968196_keycards_add_clock_column.up.sql", size: 73, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x4c, 0xf, 0x1c, 0x28, 0x41, 0x57, 0x57, 0x6c, 0xe, 0x75, 0x6b, 0x75, 0x12, 0x0, 0x18, 0x1e, 0x88, 0x1e, 0x45, 0xe0, 0x32, 0xb9, 0xd4, 0xd9, 0x2e, 0xc8, 0xb, 0x80, 0x6, 0x51, 0x3d, 0x28}}
return a, nil
}
@ -1108,7 +1108,7 @@ func _1676968197_add_fallback_rpc_to_networksUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1676968197_add_fallback_rpc_to_networks.up.sql", size: 112, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1676968197_add_fallback_rpc_to_networks.up.sql", size: 112, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x77, 0x6a, 0xc6, 0x45, 0xfa, 0x62, 0x84, 0x74, 0x6d, 0x7c, 0xd7, 0x1d, 0x79, 0xb6, 0x38, 0x43, 0xa8, 0x8, 0x6b, 0x75, 0x3d, 0x9, 0x2, 0xc5, 0x9f, 0xbb, 0x45, 0x56, 0x4c, 0x4e, 0x17, 0x89}}
return a, nil
}
@ -1128,7 +1128,7 @@ func _1677674090_add_chains_ens_istest_to_saved_addressesUpSql() (*asset, error)
return nil, err
}
info := bindataFileInfo{name: "1677674090_add_chains_ens_istest_to_saved_addresses.up.sql", size: 638, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1677674090_add_chains_ens_istest_to_saved_addresses.up.sql", size: 638, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa8, 0x2d, 0xa4, 0x1b, 0xf6, 0x6a, 0x13, 0x7b, 0xe, 0x59, 0xcd, 0xe2, 0x4e, 0x81, 0x99, 0xc4, 0x33, 0x84, 0xde, 0x66, 0xca, 0xac, 0x2f, 0x5, 0x90, 0xac, 0xfd, 0x4e, 0xfc, 0x55, 0x44, 0xe5}}
return a, nil
}
@ -1148,7 +1148,7 @@ func _1677681143_accounts_table_type_column_updateUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1677681143_accounts_table_type_column_update.up.sql", size: 135, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1677681143_accounts_table_type_column_update.up.sql", size: 135, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd2, 0xc4, 0x6, 0x42, 0x50, 0x1d, 0xf4, 0x48, 0x55, 0xbc, 0xa2, 0x19, 0xdd, 0xad, 0xc8, 0xc, 0xa7, 0x30, 0xb6, 0xaf, 0xe, 0x2b, 0xaa, 0x2a, 0xa4, 0xe1, 0xb9, 0x41, 0x23, 0x66, 0xd3, 0x3}}
return a, nil
}
@ -1168,7 +1168,7 @@ func _1678264207_accounts_table_new_columns_addedUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1678264207_accounts_table_new_columns_added.up.sql", size: 130, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1678264207_accounts_table_new_columns_added.up.sql", size: 130, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf4, 0xd4, 0xf3, 0x35, 0xef, 0x5c, 0x19, 0x3c, 0x15, 0x90, 0x60, 0xbd, 0x1f, 0x81, 0xf0, 0x86, 0x73, 0x89, 0xa0, 0x70, 0xf2, 0x46, 0xae, 0xea, 0xd0, 0xc6, 0x9e, 0x55, 0x4a, 0x54, 0x62, 0xbb}}
return a, nil
}
@ -1188,7 +1188,7 @@ func _1680770368_add_bio_to_settings_sync_clock_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1680770368_add_bio_to_settings_sync_clock_table.up.sql", size: 75, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1680770368_add_bio_to_settings_sync_clock_table.up.sql", size: 75, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x4a, 0x52, 0xf6, 0x3f, 0xaa, 0xd, 0xa0, 0xee, 0xe8, 0xe6, 0x16, 0x21, 0x80, 0x61, 0xe4, 0x7a, 0x4e, 0x37, 0x8d, 0x30, 0x51, 0x20, 0x4d, 0x15, 0x47, 0xfb, 0x6, 0xa1, 0xce, 0xc8, 0x27, 0x5a}}
return a, nil
}
@ -1208,7 +1208,7 @@ func _1681110436_add_mnemonic_to_settings_sync_clock_tableUpSql() (*asset, error
return nil, err
}
info := bindataFileInfo{name: "1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql", size: 311, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql", size: 311, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x3d, 0x74, 0x81, 0x7d, 0x9e, 0x77, 0xb6, 0xfe, 0xe3, 0xcb, 0x48, 0xe5, 0x5f, 0x39, 0x23, 0xa1, 0x7d, 0x53, 0x22, 0xe8, 0x96, 0x15, 0x8a, 0x1e, 0x8e, 0xbc, 0xe2, 0x1d, 0xc4, 0xc2, 0x56, 0x34}}
return a, nil
}
@ -1228,7 +1228,7 @@ func _1681392602_9d_sync_periodUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1681392602_9d_sync_period.up.sql", size: 60, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1681392602_9d_sync_period.up.sql", size: 60, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc9, 0xa, 0x90, 0x29, 0x7f, 0x76, 0x98, 0xa7, 0x71, 0x80, 0x5a, 0x2f, 0xbe, 0x23, 0x9a, 0xd4, 0xf4, 0x39, 0x19, 0xd3, 0xa5, 0x34, 0x6e, 0x67, 0x6a, 0xbe, 0x8a, 0xad, 0x21, 0xc7, 0xba, 0x88}}
return a, nil
}
@ -1248,7 +1248,7 @@ func _1681762078_default_sync_period_9dUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1681762078_default_sync_period_9d.up.sql", size: 3002, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1681762078_default_sync_period_9d.up.sql", size: 3002, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x3e, 0xd9, 0x26, 0xfc, 0xa9, 0x45, 0xc1, 0x81, 0xa8, 0xe2, 0x2c, 0xe9, 0x3c, 0xea, 0x1d, 0x37, 0x11, 0x45, 0x8c, 0x6c, 0xbc, 0xc2, 0x6, 0x69, 0x2, 0x75, 0x29, 0x40, 0x9f, 0xc5, 0xbb, 0x36}}
return a, nil
}
@ -1268,7 +1268,7 @@ func _1681780680_add_clock_to_social_links_settingsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1681780680_add_clock_to_social_links_settings.up.sql", size: 137, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1681780680_add_clock_to_social_links_settings.up.sql", size: 137, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x63, 0x11, 0xf5, 0x41, 0xe5, 0x5a, 0xf4, 0xe3, 0xf3, 0x14, 0x87, 0x28, 0xd8, 0xf0, 0x52, 0x31, 0x8, 0xd5, 0xbb, 0xf4, 0xff, 0x55, 0x5f, 0x42, 0x90, 0xcb, 0xf7, 0x46, 0x2, 0x6, 0xbe, 0x42}}
return a, nil
}
@ -1288,7 +1288,7 @@ func _1682073779_settings_table_remove_latest_derived_path_columnUpSql() (*asset
return nil, err
}
info := bindataFileInfo{name: "1682073779_settings_table_remove_latest_derived_path_column.up.sql", size: 4470, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1682073779_settings_table_remove_latest_derived_path_column.up.sql", size: 4470, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x7a, 0x36, 0x2, 0x41, 0xd, 0x5c, 0xd1, 0x92, 0x85, 0x6d, 0x84, 0xff, 0x67, 0xa7, 0x4c, 0x67, 0xa4, 0xef, 0x52, 0x69, 0x1f, 0x22, 0x25, 0x92, 0xc, 0xb3, 0x89, 0x50, 0x91, 0xc, 0x49, 0xf9}}
return a, nil
}
@ -1308,7 +1308,7 @@ func _1682146075_add_created_at_to_saved_addressesUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1682146075_add_created_at_to_saved_addresses.up.sql", size: 107, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1682146075_add_created_at_to_saved_addresses.up.sql", size: 107, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x88, 0xfe, 0x35, 0x9c, 0x6b, 0xdf, 0x67, 0x18, 0x16, 0xe4, 0xc9, 0xd4, 0x77, 0x7c, 0x4, 0xe2, 0x6c, 0x41, 0xd9, 0x53, 0x97, 0xfe, 0x5, 0xa3, 0x23, 0xce, 0x82, 0xad, 0x92, 0x5e, 0xd7, 0x7d}}
return a, nil
}
@ -1328,7 +1328,7 @@ func _1682393575_sync_ens_nameUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1682393575_sync_ens_name.up.sql", size: 713, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1682393575_sync_ens_name.up.sql", size: 713, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfb, 0xea, 0xcb, 0x4d, 0x71, 0x5a, 0x49, 0x19, 0x8b, 0xef, 0x66, 0x27, 0x33, 0x89, 0xb0, 0xe, 0x37, 0x1b, 0x41, 0x8, 0x12, 0xcc, 0x56, 0xd8, 0x1b, 0xf, 0xf8, 0x50, 0x4b, 0x93, 0xf1, 0x29}}
return a, nil
}
@ -1348,7 +1348,7 @@ func _1683457503_add_blocks_ranges_sequential_tableUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1683457503_add_blocks_ranges_sequential_table.up.sql", size: 263, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "1683457503_add_blocks_ranges_sequential_table.up.sql", size: 263, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfe, 0x57, 0x2e, 0x0, 0x6a, 0x6e, 0xd7, 0xeb, 0xe6, 0x66, 0x79, 0x32, 0x22, 0x82, 0x92, 0xf4, 0xc9, 0xf1, 0x58, 0x1a, 0x45, 0x60, 0x77, 0x50, 0xe7, 0x54, 0x4a, 0xc0, 0x42, 0x3a, 0x4f, 0x35}}
return a, nil
}
@ -1368,7 +1368,7 @@ func _1683627613_accounts_and_keycards_improvementsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1683627613_accounts_and_keycards_improvements.up.sql", size: 3640, mode: os.FileMode(0644), modTime: time.Unix(1685081963, 0)}
info := bindataFileInfo{name: "1683627613_accounts_and_keycards_improvements.up.sql", size: 3640, mode: os.FileMode(0644), modTime: time.Unix(1685358731, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x8e, 0xbe, 0x62, 0xf5, 0x9, 0x42, 0x8c, 0x8f, 0xa8, 0x45, 0xe7, 0x36, 0xc9, 0xde, 0xf4, 0xe2, 0xfd, 0xc4, 0x8, 0xd0, 0xa3, 0x8, 0x64, 0xe2, 0x56, 0xcc, 0xa7, 0x6d, 0xc5, 0xcc, 0x82, 0x2c}}
return a, nil
}
@ -1388,7 +1388,7 @@ func _1685041348_settings_table_add_latest_derived_path_columnUpSql() (*asset, e
return nil, err
}
info := bindataFileInfo{name: "1685041348_settings_table_add_latest_derived_path_column.up.sql", size: 115, mode: os.FileMode(0644), modTime: time.Unix(1685091534, 0)}
info := bindataFileInfo{name: "1685041348_settings_table_add_latest_derived_path_column.up.sql", size: 115, mode: os.FileMode(0644), modTime: time.Unix(1685358731, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x21, 0xd4, 0x1b, 0xbf, 0x8, 0xf9, 0xd4, 0xb0, 0xa0, 0x6, 0x5b, 0xfb, 0x7e, 0xff, 0xfa, 0xbf, 0xcc, 0x64, 0x47, 0x81, 0x8b, 0x5e, 0x17, 0x6a, 0xa7, 0xa4, 0x35, 0x8f, 0x30, 0x4f, 0xd9, 0xd}}
return a, nil
}
@ -1408,11 +1408,31 @@ func _1685440989_update_color_id_accountsUpSql() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "1685440989_update_color_id_accounts.up.sql", size: 918, mode: os.FileMode(0644), modTime: time.Unix(1685948046, 0)}
info := bindataFileInfo{name: "1685440989_update_color_id_accounts.up.sql", size: 918, mode: os.FileMode(0644), modTime: time.Unix(1685733628, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x10, 0x2e, 0x51, 0x1d, 0x2d, 0x16, 0x84, 0xd6, 0xe8, 0xbc, 0x20, 0x53, 0x47, 0xb8, 0x40, 0x21, 0x52, 0x5c, 0xd9, 0xbb, 0xea, 0xe2, 0xa5, 0x77, 0xc8, 0x35, 0x4c, 0xe0, 0x9d, 0x42, 0x44, 0x50}}
return a, nil
}
var __1685463947_add_to_asset_to_multitransactionUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\xf4\x09\x71\x0d\x52\x08\x71\x74\xf2\x71\x55\xc8\x2d\xcd\x29\xc9\x8c\x2f\x29\x4a\xcc\x2b\x4e\x4c\x2e\xc9\xcc\xcf\x2b\x56\x70\x74\x71\x51\x70\xf6\xf7\x09\xf5\xf5\x53\x28\xc9\x8f\x4f\xcc\xcd\x2f\xcd\x2b\x51\x08\x73\x0c\x72\xf6\x70\x0c\xb2\xe6\x02\x04\x00\x00\xff\xff\x40\x38\x81\x4e\x3d\x00\x00\x00")
func _1685463947_add_to_asset_to_multitransactionUpSqlBytes() ([]byte, error) {
return bindataRead(
__1685463947_add_to_asset_to_multitransactionUpSql,
"1685463947_add_to_asset_to_multitransaction.up.sql",
)
}
func _1685463947_add_to_asset_to_multitransactionUpSql() (*asset, error) {
bytes, err := _1685463947_add_to_asset_to_multitransactionUpSqlBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{name: "1685463947_add_to_asset_to_multitransaction.up.sql", size: 61, mode: os.FileMode(0644), modTime: time.Unix(1685964120, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd3, 0x66, 0x15, 0x10, 0xfa, 0x66, 0x81, 0x68, 0xd9, 0xb4, 0x93, 0x9e, 0x11, 0xed, 0x1d, 0x16, 0x9d, 0x5a, 0xf8, 0xd7, 0x8, 0xea, 0x7a, 0xaf, 0xe4, 0xb3, 0x22, 0x19, 0xca, 0xff, 0x75, 0x7c}}
return a, nil
}
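
The newly added 1685463947_add_to_asset_to_multitransaction.up.sql asset above is a 61-byte gzip payload; judging by its name and size it holds a single short ALTER TABLE statement against multi_transactions. A sketch of how to inspect it through the generated accessor (the import path is an assumption; decode the payload yourself to confirm the exact SQL):

```go
package main

import (
	"fmt"
	"log"

	// assumed import path for this generated package
	migrations "github.com/status-im/status-go/appdatabase/migrations"
)

func main() {
	// Asset gunzips the embedded payload via bindataRead and returns the raw SQL bytes.
	sql, err := migrations.Asset("1685463947_add_to_asset_to_multitransaction.up.sql")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s\n", sql)
}
```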
var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\xc9\xb1\x0d\xc4\x20\x0c\x05\xd0\x9e\x29\xfe\x02\xd8\xfd\x6d\xe3\x4b\xac\x2f\x44\x82\x09\x78\x7f\xa5\x49\xfd\xa6\x1d\xdd\xe8\xd8\xcf\x55\x8a\x2a\xe3\x47\x1f\xbe\x2c\x1d\x8c\xfa\x6f\xe3\xb4\x34\xd4\xd9\x89\xbb\x71\x59\xb6\x18\x1b\x35\x20\xa2\x9f\x0a\x03\xa2\xe5\x0d\x00\x00\xff\xff\x60\xcd\x06\xbe\x4a\x00\x00\x00")
func docGoBytes() ([]byte, error) {
@ -1428,7 +1448,7 @@ func docGo() (*asset, error) {
return nil, err
}
info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1685003021, 0)}
info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1684852796, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xde, 0x7c, 0x28, 0xcd, 0x47, 0xf2, 0xfa, 0x7c, 0x51, 0x2d, 0xd8, 0x38, 0xb, 0xb0, 0x34, 0x9d, 0x4c, 0x62, 0xa, 0x9e, 0x28, 0xc3, 0x31, 0x23, 0xd9, 0xbb, 0x89, 0x9f, 0xa0, 0x89, 0x1f, 0xe8}}
return a, nil
}
@ -1524,146 +1544,88 @@ func AssetNames() []string {
// _bindata is a table, holding each asset generator, mapped to its name.
var _bindata = map[string]func() (*asset, error){
"1640111208_dummy.up.sql": _1640111208_dummyUpSql,
"1642666031_add_removed_clock_to_bookmarks.up.sql": _1642666031_add_removed_clock_to_bookmarksUpSql,
"1643644541_gif_api_key_setting.up.sql": _1643644541_gif_api_key_settingUpSql,
"1644188994_recent_stickers.up.sql": _1644188994_recent_stickersUpSql,
"1646659233_add_address_to_dapp_permisssion.up.sql": _1646659233_add_address_to_dapp_permisssionUpSql,
"1646841105_add_emoji_account.up.sql": _1646841105_add_emoji_accountUpSql,
"1647278782_display_name.up.sql": _1647278782_display_nameUpSql,
"1647862838_reset_last_backup.up.sql": _1647862838_reset_last_backupUpSql,
"1647871652_add_settings_sync_clock_table.up.sql": _1647871652_add_settings_sync_clock_tableUpSql,
"1647880168_add_torrent_config.up.sql": _1647880168_add_torrent_configUpSql,
"1647882837_add_communities_settings_table.up.sql": _1647882837_add_communities_settings_tableUpSql,
"1647956635_add_waku_messages_table.up.sql": _1647956635_add_waku_messages_tableUpSql,
"1648554928_network_test.up.sql": _1648554928_network_testUpSql,
"1649174829_add_visitble_token.up.sql": _1649174829_add_visitble_tokenUpSql,
"1649882262_add_derived_from_accounts.up.sql": _1649882262_add_derived_from_accountsUpSql,
"1650612625_add_community_message_archive_hashes_table.up.sql": _1650612625_add_community_message_archive_hashes_tableUpSql,
"1650616788_add_communities_archives_info_table.up.sql": _1650616788_add_communities_archives_info_tableUpSql,
"1652715604_add_clock_accounts.up.sql": _1652715604_add_clock_accountsUpSql,
"1653037334_add_notifications_settings_table.up.sql": _1653037334_add_notifications_settings_tableUpSql,
"1654702119_add_mutual_contact_settings.up.sql": _1654702119_add_mutual_contact_settingsUpSql,
"1655375270_add_clock_field_to_communities_settings_table.up.sql": _1655375270_add_clock_field_to_communities_settings_tableUpSql,
"1655385721_drop_networks_config.up.sql": _1655385721_drop_networks_configUpSql,
"1655385724_networks_chainColor_shortName.up.sql": _1655385724_networks_chaincolor_shortnameUpSql,
"1655456688_add_deleted_at_field_to_bookmarks_table.up.sql": _1655456688_add_deleted_at_field_to_bookmarks_tableUpSql,
"1655462032_create_bookmarks_deleted_at_index.up.sql": _1655462032_create_bookmarks_deleted_at_indexUpSql,
"1657617291_add_multi_transactions_table.up.sql": _1657617291_add_multi_transactions_tableUpSql,
"1660134042_add_social_links_settings_table.up.sql": _1660134042_add_social_links_settings_tableUpSql,
"1660134060_settings_bio.up.sql": _1660134060_settings_bioUpSql,
"1660134070_add_wakuv2_store.up.sql": _1660134070_add_wakuv2_storeUpSql,
"1660134072_waku2_store_messages.up.sql": _1660134072_waku2_store_messagesUpSql,
"1662365868_add_key_uid_accounts.up.sql": _1662365868_add_key_uid_accountsUpSql,
"1662447680_add_keypairs_table.up.sql": _1662447680_add_keypairs_tableUpSql,
"1662460056_move_favourites_to_saved_addresses.up.sql": _1662460056_move_favourites_to_saved_addressesUpSql,
"1662738097_add_base_fee_transaction.up.sql": _1662738097_add_base_fee_transactionUpSql,
"1662972194_add_keypairs_table.up.sql": _1662972194_add_keypairs_tableUpSql,
"1664392661_add_third_party_id_to_waku_messages.up.sql": _1664392661_add_third_party_id_to_waku_messagesUpSql,
"1664783660_add_sync_info_to_saved_addresses.up.sql": _1664783660_add_sync_info_to_saved_addressesUpSql,
"1668109917_wakunodes.up.sql": _1668109917_wakunodesUpSql,
"1670249678_display_name_to_settings_sync_clock_table.up.sql": _1670249678_display_name_to_settings_sync_clock_tableUpSql,
"1670836810_add_imported_flag_to_community_archive_hashes.up.sql": _1670836810_add_imported_flag_to_community_archive_hashesUpSql,
"1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql": _1671438731_add_magnetlink_uri_to_communities_archive_infoUpSql,
"1672933930_switcher_card.up.sql": _1672933930_switcher_cardUpSql,
"1674056187_add_price_cache.up.sql": _1674056187_add_price_cacheUpSql,
"1674136690_ens_usernames.up.sql": _1674136690_ens_usernamesUpSql,
"1674232431_add_balance_history.up.sql": _1674232431_add_balance_historyUpSql,
"1676368933_keypairs_to_keycards.up.sql": _1676368933_keypairs_to_keycardsUpSql,
"1676951398_add_currency_format_cache.up.sql": _1676951398_add_currency_format_cacheUpSql,
"1676968196_keycards_add_clock_column.up.sql": _1676968196_keycards_add_clock_columnUpSql,
"1676968197_add_fallback_rpc_to_networks.up.sql": _1676968197_add_fallback_rpc_to_networksUpSql,
"1677674090_add_chains_ens_istest_to_saved_addresses.up.sql": _1677674090_add_chains_ens_istest_to_saved_addressesUpSql,
"1677681143_accounts_table_type_column_update.up.sql": _1677681143_accounts_table_type_column_updateUpSql,
"1678264207_accounts_table_new_columns_added.up.sql": _1678264207_accounts_table_new_columns_addedUpSql,
"1680770368_add_bio_to_settings_sync_clock_table.up.sql": _1680770368_add_bio_to_settings_sync_clock_tableUpSql,
"1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql": _1681110436_add_mnemonic_to_settings_sync_clock_tableUpSql,
"1681392602_9d_sync_period.up.sql": _1681392602_9d_sync_periodUpSql,
"1681762078_default_sync_period_9d.up.sql": _1681762078_default_sync_period_9dUpSql,
"1681780680_add_clock_to_social_links_settings.up.sql": _1681780680_add_clock_to_social_links_settingsUpSql,
"1640111208_dummy.up.sql": _1640111208_dummyUpSql,
"1642666031_add_removed_clock_to_bookmarks.up.sql": _1642666031_add_removed_clock_to_bookmarksUpSql,
"1643644541_gif_api_key_setting.up.sql": _1643644541_gif_api_key_settingUpSql,
"1644188994_recent_stickers.up.sql": _1644188994_recent_stickersUpSql,
"1646659233_add_address_to_dapp_permisssion.up.sql": _1646659233_add_address_to_dapp_permisssionUpSql,
"1646841105_add_emoji_account.up.sql": _1646841105_add_emoji_accountUpSql,
"1647278782_display_name.up.sql": _1647278782_display_nameUpSql,
"1647862838_reset_last_backup.up.sql": _1647862838_reset_last_backupUpSql,
"1647871652_add_settings_sync_clock_table.up.sql": _1647871652_add_settings_sync_clock_tableUpSql,
"1647880168_add_torrent_config.up.sql": _1647880168_add_torrent_configUpSql,
"1647882837_add_communities_settings_table.up.sql": _1647882837_add_communities_settings_tableUpSql,
"1647956635_add_waku_messages_table.up.sql": _1647956635_add_waku_messages_tableUpSql,
"1648554928_network_test.up.sql": _1648554928_network_testUpSql,
"1649174829_add_visitble_token.up.sql": _1649174829_add_visitble_tokenUpSql,
"1649882262_add_derived_from_accounts.up.sql": _1649882262_add_derived_from_accountsUpSql,
"1650612625_add_community_message_archive_hashes_table.up.sql": _1650612625_add_community_message_archive_hashes_tableUpSql,
"1650616788_add_communities_archives_info_table.up.sql": _1650616788_add_communities_archives_info_tableUpSql,
"1652715604_add_clock_accounts.up.sql": _1652715604_add_clock_accountsUpSql,
"1653037334_add_notifications_settings_table.up.sql": _1653037334_add_notifications_settings_tableUpSql,
"1654702119_add_mutual_contact_settings.up.sql": _1654702119_add_mutual_contact_settingsUpSql,
"1655375270_add_clock_field_to_communities_settings_table.up.sql": _1655375270_add_clock_field_to_communities_settings_tableUpSql,
"1655385721_drop_networks_config.up.sql": _1655385721_drop_networks_configUpSql,
"1655385724_networks_chainColor_shortName.up.sql": _1655385724_networks_chaincolor_shortnameUpSql,
"1655456688_add_deleted_at_field_to_bookmarks_table.up.sql": _1655456688_add_deleted_at_field_to_bookmarks_tableUpSql,
"1655462032_create_bookmarks_deleted_at_index.up.sql": _1655462032_create_bookmarks_deleted_at_indexUpSql,
"1657617291_add_multi_transactions_table.up.sql": _1657617291_add_multi_transactions_tableUpSql,
"1660134042_add_social_links_settings_table.up.sql": _1660134042_add_social_links_settings_tableUpSql,
"1660134060_settings_bio.up.sql": _1660134060_settings_bioUpSql,
"1660134070_add_wakuv2_store.up.sql": _1660134070_add_wakuv2_storeUpSql,
"1660134072_waku2_store_messages.up.sql": _1660134072_waku2_store_messagesUpSql,
"1662365868_add_key_uid_accounts.up.sql": _1662365868_add_key_uid_accountsUpSql,
"1662447680_add_keypairs_table.up.sql": _1662447680_add_keypairs_tableUpSql,
"1662460056_move_favourites_to_saved_addresses.up.sql": _1662460056_move_favourites_to_saved_addressesUpSql,
"1662738097_add_base_fee_transaction.up.sql": _1662738097_add_base_fee_transactionUpSql,
"1662972194_add_keypairs_table.up.sql": _1662972194_add_keypairs_tableUpSql,
"1664392661_add_third_party_id_to_waku_messages.up.sql": _1664392661_add_third_party_id_to_waku_messagesUpSql,
"1664783660_add_sync_info_to_saved_addresses.up.sql": _1664783660_add_sync_info_to_saved_addressesUpSql,
"1668109917_wakunodes.up.sql": _1668109917_wakunodesUpSql,
"1670249678_display_name_to_settings_sync_clock_table.up.sql": _1670249678_display_name_to_settings_sync_clock_tableUpSql,
"1670836810_add_imported_flag_to_community_archive_hashes.up.sql": _1670836810_add_imported_flag_to_community_archive_hashesUpSql,
"1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql": _1671438731_add_magnetlink_uri_to_communities_archive_infoUpSql,
"1672933930_switcher_card.up.sql": _1672933930_switcher_cardUpSql,
"1674056187_add_price_cache.up.sql": _1674056187_add_price_cacheUpSql,
"1674136690_ens_usernames.up.sql": _1674136690_ens_usernamesUpSql,
"1674232431_add_balance_history.up.sql": _1674232431_add_balance_historyUpSql,
"1676368933_keypairs_to_keycards.up.sql": _1676368933_keypairs_to_keycardsUpSql,
"1676951398_add_currency_format_cache.up.sql": _1676951398_add_currency_format_cacheUpSql,
"1676968196_keycards_add_clock_column.up.sql": _1676968196_keycards_add_clock_columnUpSql,
"1676968197_add_fallback_rpc_to_networks.up.sql": _1676968197_add_fallback_rpc_to_networksUpSql,
"1677674090_add_chains_ens_istest_to_saved_addresses.up.sql": _1677674090_add_chains_ens_istest_to_saved_addressesUpSql,
"1677681143_accounts_table_type_column_update.up.sql": _1677681143_accounts_table_type_column_updateUpSql,
"1678264207_accounts_table_new_columns_added.up.sql": _1678264207_accounts_table_new_columns_addedUpSql,
"1680770368_add_bio_to_settings_sync_clock_table.up.sql": _1680770368_add_bio_to_settings_sync_clock_tableUpSql,
"1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql": _1681110436_add_mnemonic_to_settings_sync_clock_tableUpSql,
"1681392602_9d_sync_period.up.sql": _1681392602_9d_sync_periodUpSql,
"1681762078_default_sync_period_9d.up.sql": _1681762078_default_sync_period_9dUpSql,
"1681780680_add_clock_to_social_links_settings.up.sql": _1681780680_add_clock_to_social_links_settingsUpSql,
"1682073779_settings_table_remove_latest_derived_path_column.up.sql": _1682073779_settings_table_remove_latest_derived_path_columnUpSql,
"1682146075_add_created_at_to_saved_addresses.up.sql": _1682146075_add_created_at_to_saved_addressesUpSql,
"1682393575_sync_ens_name.up.sql": _1682393575_sync_ens_nameUpSql,
"1683457503_add_blocks_ranges_sequential_table.up.sql": _1683457503_add_blocks_ranges_sequential_tableUpSql,
"1683627613_accounts_and_keycards_improvements.up.sql": _1683627613_accounts_and_keycards_improvementsUpSql,
"1685041348_settings_table_add_latest_derived_path_column.up.sql": _1685041348_settings_table_add_latest_derived_path_columnUpSql,
"1685440989_update_color_id_accounts.up.sql": _1685440989_update_color_id_accountsUpSql,
"1682146075_add_created_at_to_saved_addresses.up.sql": _1682146075_add_created_at_to_saved_addressesUpSql,
"1682393575_sync_ens_name.up.sql": _1682393575_sync_ens_nameUpSql,
"1683457503_add_blocks_ranges_sequential_table.up.sql": _1683457503_add_blocks_ranges_sequential_tableUpSql,
"1683627613_accounts_and_keycards_improvements.up.sql": _1683627613_accounts_and_keycards_improvementsUpSql,
"1685041348_settings_table_add_latest_derived_path_column.up.sql": _1685041348_settings_table_add_latest_derived_path_columnUpSql,
"1685440989_update_color_id_accounts.up.sql": _1685440989_update_color_id_accountsUpSql,
"1685463947_add_to_asset_to_multitransaction.up.sql": _1685463947_add_to_asset_to_multitransactionUpSql,
"doc.go": docGo,
}
// AssetDebug is true if the assets were built with the debug flag enabled.
const AssetDebug = false
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
//
// then AssetDir("data") would return []string{"foo.txt", "img"},
// AssetDir("data/img") would return []string{"a.png", "b.png"},
// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
@ -1696,71 +1658,72 @@ type bintree struct {
}
var _bintree = &bintree{nil, map[string]*bintree{
"1640111208_dummy.up.sql": &bintree{_1640111208_dummyUpSql, map[string]*bintree{}},
"1642666031_add_removed_clock_to_bookmarks.up.sql": &bintree{_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}},
"1643644541_gif_api_key_setting.up.sql": &bintree{_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}},
"1644188994_recent_stickers.up.sql": &bintree{_1644188994_recent_stickersUpSql, map[string]*bintree{}},
"1646659233_add_address_to_dapp_permisssion.up.sql": &bintree{_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}},
"1646841105_add_emoji_account.up.sql": &bintree{_1646841105_add_emoji_accountUpSql, map[string]*bintree{}},
"1647278782_display_name.up.sql": &bintree{_1647278782_display_nameUpSql, map[string]*bintree{}},
"1647862838_reset_last_backup.up.sql": &bintree{_1647862838_reset_last_backupUpSql, map[string]*bintree{}},
"1647871652_add_settings_sync_clock_table.up.sql": &bintree{_1647871652_add_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1647880168_add_torrent_config.up.sql": &bintree{_1647880168_add_torrent_configUpSql, map[string]*bintree{}},
"1647882837_add_communities_settings_table.up.sql": &bintree{_1647882837_add_communities_settings_tableUpSql, map[string]*bintree{}},
"1647956635_add_waku_messages_table.up.sql": &bintree{_1647956635_add_waku_messages_tableUpSql, map[string]*bintree{}},
"1648554928_network_test.up.sql": &bintree{_1648554928_network_testUpSql, map[string]*bintree{}},
"1649174829_add_visitble_token.up.sql": &bintree{_1649174829_add_visitble_tokenUpSql, map[string]*bintree{}},
"1649882262_add_derived_from_accounts.up.sql": &bintree{_1649882262_add_derived_from_accountsUpSql, map[string]*bintree{}},
"1650612625_add_community_message_archive_hashes_table.up.sql": &bintree{_1650612625_add_community_message_archive_hashes_tableUpSql, map[string]*bintree{}},
"1650616788_add_communities_archives_info_table.up.sql": &bintree{_1650616788_add_communities_archives_info_tableUpSql, map[string]*bintree{}},
"1652715604_add_clock_accounts.up.sql": &bintree{_1652715604_add_clock_accountsUpSql, map[string]*bintree{}},
"1653037334_add_notifications_settings_table.up.sql": &bintree{_1653037334_add_notifications_settings_tableUpSql, map[string]*bintree{}},
"1654702119_add_mutual_contact_settings.up.sql": &bintree{_1654702119_add_mutual_contact_settingsUpSql, map[string]*bintree{}},
"1655375270_add_clock_field_to_communities_settings_table.up.sql": &bintree{_1655375270_add_clock_field_to_communities_settings_tableUpSql, map[string]*bintree{}},
"1655385721_drop_networks_config.up.sql": &bintree{_1655385721_drop_networks_configUpSql, map[string]*bintree{}},
"1655385724_networks_chainColor_shortName.up.sql": &bintree{_1655385724_networks_chaincolor_shortnameUpSql, map[string]*bintree{}},
"1655456688_add_deleted_at_field_to_bookmarks_table.up.sql": &bintree{_1655456688_add_deleted_at_field_to_bookmarks_tableUpSql, map[string]*bintree{}},
"1655462032_create_bookmarks_deleted_at_index.up.sql": &bintree{_1655462032_create_bookmarks_deleted_at_indexUpSql, map[string]*bintree{}},
"1657617291_add_multi_transactions_table.up.sql": &bintree{_1657617291_add_multi_transactions_tableUpSql, map[string]*bintree{}},
"1660134042_add_social_links_settings_table.up.sql": &bintree{_1660134042_add_social_links_settings_tableUpSql, map[string]*bintree{}},
"1660134060_settings_bio.up.sql": &bintree{_1660134060_settings_bioUpSql, map[string]*bintree{}},
"1660134070_add_wakuv2_store.up.sql": &bintree{_1660134070_add_wakuv2_storeUpSql, map[string]*bintree{}},
"1660134072_waku2_store_messages.up.sql": &bintree{_1660134072_waku2_store_messagesUpSql, map[string]*bintree{}},
"1662365868_add_key_uid_accounts.up.sql": &bintree{_1662365868_add_key_uid_accountsUpSql, map[string]*bintree{}},
"1662447680_add_keypairs_table.up.sql": &bintree{_1662447680_add_keypairs_tableUpSql, map[string]*bintree{}},
"1662460056_move_favourites_to_saved_addresses.up.sql": &bintree{_1662460056_move_favourites_to_saved_addressesUpSql, map[string]*bintree{}},
"1662738097_add_base_fee_transaction.up.sql": &bintree{_1662738097_add_base_fee_transactionUpSql, map[string]*bintree{}},
"1662972194_add_keypairs_table.up.sql": &bintree{_1662972194_add_keypairs_tableUpSql, map[string]*bintree{}},
"1664392661_add_third_party_id_to_waku_messages.up.sql": &bintree{_1664392661_add_third_party_id_to_waku_messagesUpSql, map[string]*bintree{}},
"1664783660_add_sync_info_to_saved_addresses.up.sql": &bintree{_1664783660_add_sync_info_to_saved_addressesUpSql, map[string]*bintree{}},
"1668109917_wakunodes.up.sql": &bintree{_1668109917_wakunodesUpSql, map[string]*bintree{}},
"1670249678_display_name_to_settings_sync_clock_table.up.sql": &bintree{_1670249678_display_name_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1670836810_add_imported_flag_to_community_archive_hashes.up.sql": &bintree{_1670836810_add_imported_flag_to_community_archive_hashesUpSql, map[string]*bintree{}},
"1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql": &bintree{_1671438731_add_magnetlink_uri_to_communities_archive_infoUpSql, map[string]*bintree{}},
"1672933930_switcher_card.up.sql": &bintree{_1672933930_switcher_cardUpSql, map[string]*bintree{}},
"1674056187_add_price_cache.up.sql": &bintree{_1674056187_add_price_cacheUpSql, map[string]*bintree{}},
"1674136690_ens_usernames.up.sql": &bintree{_1674136690_ens_usernamesUpSql, map[string]*bintree{}},
"1674232431_add_balance_history.up.sql": &bintree{_1674232431_add_balance_historyUpSql, map[string]*bintree{}},
"1676368933_keypairs_to_keycards.up.sql": &bintree{_1676368933_keypairs_to_keycardsUpSql, map[string]*bintree{}},
"1676951398_add_currency_format_cache.up.sql": &bintree{_1676951398_add_currency_format_cacheUpSql, map[string]*bintree{}},
"1676968196_keycards_add_clock_column.up.sql": &bintree{_1676968196_keycards_add_clock_columnUpSql, map[string]*bintree{}},
"1676968197_add_fallback_rpc_to_networks.up.sql": &bintree{_1676968197_add_fallback_rpc_to_networksUpSql, map[string]*bintree{}},
"1677674090_add_chains_ens_istest_to_saved_addresses.up.sql": &bintree{_1677674090_add_chains_ens_istest_to_saved_addressesUpSql, map[string]*bintree{}},
"1677681143_accounts_table_type_column_update.up.sql": &bintree{_1677681143_accounts_table_type_column_updateUpSql, map[string]*bintree{}},
"1678264207_accounts_table_new_columns_added.up.sql": &bintree{_1678264207_accounts_table_new_columns_addedUpSql, map[string]*bintree{}},
"1680770368_add_bio_to_settings_sync_clock_table.up.sql": &bintree{_1680770368_add_bio_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql": &bintree{_1681110436_add_mnemonic_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1681392602_9d_sync_period.up.sql": &bintree{_1681392602_9d_sync_periodUpSql, map[string]*bintree{}},
"1681762078_default_sync_period_9d.up.sql": &bintree{_1681762078_default_sync_period_9dUpSql, map[string]*bintree{}},
"1681780680_add_clock_to_social_links_settings.up.sql": &bintree{_1681780680_add_clock_to_social_links_settingsUpSql, map[string]*bintree{}},
"1682073779_settings_table_remove_latest_derived_path_column.up.sql": &bintree{_1682073779_settings_table_remove_latest_derived_path_columnUpSql, map[string]*bintree{}},
"1682146075_add_created_at_to_saved_addresses.up.sql": &bintree{_1682146075_add_created_at_to_saved_addressesUpSql, map[string]*bintree{}},
"1682393575_sync_ens_name.up.sql": &bintree{_1682393575_sync_ens_nameUpSql, map[string]*bintree{}},
"1683457503_add_blocks_ranges_sequential_table.up.sql": &bintree{_1683457503_add_blocks_ranges_sequential_tableUpSql, map[string]*bintree{}},
"1683627613_accounts_and_keycards_improvements.up.sql": &bintree{_1683627613_accounts_and_keycards_improvementsUpSql, map[string]*bintree{}},
"1685041348_settings_table_add_latest_derived_path_column.up.sql": &bintree{_1685041348_settings_table_add_latest_derived_path_columnUpSql, map[string]*bintree{}},
"1685440989_update_color_id_accounts.up.sql": &bintree{_1685440989_update_color_id_accountsUpSql, map[string]*bintree{}},
"doc.go": &bintree{docGo, map[string]*bintree{}},
"1640111208_dummy.up.sql": {_1640111208_dummyUpSql, map[string]*bintree{}},
"1642666031_add_removed_clock_to_bookmarks.up.sql": {_1642666031_add_removed_clock_to_bookmarksUpSql, map[string]*bintree{}},
"1643644541_gif_api_key_setting.up.sql": {_1643644541_gif_api_key_settingUpSql, map[string]*bintree{}},
"1644188994_recent_stickers.up.sql": {_1644188994_recent_stickersUpSql, map[string]*bintree{}},
"1646659233_add_address_to_dapp_permisssion.up.sql": {_1646659233_add_address_to_dapp_permisssionUpSql, map[string]*bintree{}},
"1646841105_add_emoji_account.up.sql": {_1646841105_add_emoji_accountUpSql, map[string]*bintree{}},
"1647278782_display_name.up.sql": {_1647278782_display_nameUpSql, map[string]*bintree{}},
"1647862838_reset_last_backup.up.sql": {_1647862838_reset_last_backupUpSql, map[string]*bintree{}},
"1647871652_add_settings_sync_clock_table.up.sql": {_1647871652_add_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1647880168_add_torrent_config.up.sql": {_1647880168_add_torrent_configUpSql, map[string]*bintree{}},
"1647882837_add_communities_settings_table.up.sql": {_1647882837_add_communities_settings_tableUpSql, map[string]*bintree{}},
"1647956635_add_waku_messages_table.up.sql": {_1647956635_add_waku_messages_tableUpSql, map[string]*bintree{}},
"1648554928_network_test.up.sql": {_1648554928_network_testUpSql, map[string]*bintree{}},
"1649174829_add_visitble_token.up.sql": {_1649174829_add_visitble_tokenUpSql, map[string]*bintree{}},
"1649882262_add_derived_from_accounts.up.sql": {_1649882262_add_derived_from_accountsUpSql, map[string]*bintree{}},
"1650612625_add_community_message_archive_hashes_table.up.sql": {_1650612625_add_community_message_archive_hashes_tableUpSql, map[string]*bintree{}},
"1650616788_add_communities_archives_info_table.up.sql": {_1650616788_add_communities_archives_info_tableUpSql, map[string]*bintree{}},
"1652715604_add_clock_accounts.up.sql": {_1652715604_add_clock_accountsUpSql, map[string]*bintree{}},
"1653037334_add_notifications_settings_table.up.sql": {_1653037334_add_notifications_settings_tableUpSql, map[string]*bintree{}},
"1654702119_add_mutual_contact_settings.up.sql": {_1654702119_add_mutual_contact_settingsUpSql, map[string]*bintree{}},
"1655375270_add_clock_field_to_communities_settings_table.up.sql": {_1655375270_add_clock_field_to_communities_settings_tableUpSql, map[string]*bintree{}},
"1655385721_drop_networks_config.up.sql": {_1655385721_drop_networks_configUpSql, map[string]*bintree{}},
"1655385724_networks_chainColor_shortName.up.sql": {_1655385724_networks_chaincolor_shortnameUpSql, map[string]*bintree{}},
"1655456688_add_deleted_at_field_to_bookmarks_table.up.sql": {_1655456688_add_deleted_at_field_to_bookmarks_tableUpSql, map[string]*bintree{}},
"1655462032_create_bookmarks_deleted_at_index.up.sql": {_1655462032_create_bookmarks_deleted_at_indexUpSql, map[string]*bintree{}},
"1657617291_add_multi_transactions_table.up.sql": {_1657617291_add_multi_transactions_tableUpSql, map[string]*bintree{}},
"1660134042_add_social_links_settings_table.up.sql": {_1660134042_add_social_links_settings_tableUpSql, map[string]*bintree{}},
"1660134060_settings_bio.up.sql": {_1660134060_settings_bioUpSql, map[string]*bintree{}},
"1660134070_add_wakuv2_store.up.sql": {_1660134070_add_wakuv2_storeUpSql, map[string]*bintree{}},
"1660134072_waku2_store_messages.up.sql": {_1660134072_waku2_store_messagesUpSql, map[string]*bintree{}},
"1662365868_add_key_uid_accounts.up.sql": {_1662365868_add_key_uid_accountsUpSql, map[string]*bintree{}},
"1662447680_add_keypairs_table.up.sql": {_1662447680_add_keypairs_tableUpSql, map[string]*bintree{}},
"1662460056_move_favourites_to_saved_addresses.up.sql": {_1662460056_move_favourites_to_saved_addressesUpSql, map[string]*bintree{}},
"1662738097_add_base_fee_transaction.up.sql": {_1662738097_add_base_fee_transactionUpSql, map[string]*bintree{}},
"1662972194_add_keypairs_table.up.sql": {_1662972194_add_keypairs_tableUpSql, map[string]*bintree{}},
"1664392661_add_third_party_id_to_waku_messages.up.sql": {_1664392661_add_third_party_id_to_waku_messagesUpSql, map[string]*bintree{}},
"1664783660_add_sync_info_to_saved_addresses.up.sql": {_1664783660_add_sync_info_to_saved_addressesUpSql, map[string]*bintree{}},
"1668109917_wakunodes.up.sql": {_1668109917_wakunodesUpSql, map[string]*bintree{}},
"1670249678_display_name_to_settings_sync_clock_table.up.sql": {_1670249678_display_name_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1670836810_add_imported_flag_to_community_archive_hashes.up.sql": {_1670836810_add_imported_flag_to_community_archive_hashesUpSql, map[string]*bintree{}},
"1671438731_add_magnetlink_uri_to_communities_archive_info.up.sql": {_1671438731_add_magnetlink_uri_to_communities_archive_infoUpSql, map[string]*bintree{}},
"1672933930_switcher_card.up.sql": {_1672933930_switcher_cardUpSql, map[string]*bintree{}},
"1674056187_add_price_cache.up.sql": {_1674056187_add_price_cacheUpSql, map[string]*bintree{}},
"1674136690_ens_usernames.up.sql": {_1674136690_ens_usernamesUpSql, map[string]*bintree{}},
"1674232431_add_balance_history.up.sql": {_1674232431_add_balance_historyUpSql, map[string]*bintree{}},
"1676368933_keypairs_to_keycards.up.sql": {_1676368933_keypairs_to_keycardsUpSql, map[string]*bintree{}},
"1676951398_add_currency_format_cache.up.sql": {_1676951398_add_currency_format_cacheUpSql, map[string]*bintree{}},
"1676968196_keycards_add_clock_column.up.sql": {_1676968196_keycards_add_clock_columnUpSql, map[string]*bintree{}},
"1676968197_add_fallback_rpc_to_networks.up.sql": {_1676968197_add_fallback_rpc_to_networksUpSql, map[string]*bintree{}},
"1677674090_add_chains_ens_istest_to_saved_addresses.up.sql": {_1677674090_add_chains_ens_istest_to_saved_addressesUpSql, map[string]*bintree{}},
"1677681143_accounts_table_type_column_update.up.sql": {_1677681143_accounts_table_type_column_updateUpSql, map[string]*bintree{}},
"1678264207_accounts_table_new_columns_added.up.sql": {_1678264207_accounts_table_new_columns_addedUpSql, map[string]*bintree{}},
"1680770368_add_bio_to_settings_sync_clock_table.up.sql": {_1680770368_add_bio_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1681110436_add_mnemonic_to_settings_sync_clock_table.up.sql": {_1681110436_add_mnemonic_to_settings_sync_clock_tableUpSql, map[string]*bintree{}},
"1681392602_9d_sync_period.up.sql": {_1681392602_9d_sync_periodUpSql, map[string]*bintree{}},
"1681762078_default_sync_period_9d.up.sql": {_1681762078_default_sync_period_9dUpSql, map[string]*bintree{}},
"1681780680_add_clock_to_social_links_settings.up.sql": {_1681780680_add_clock_to_social_links_settingsUpSql, map[string]*bintree{}},
"1682073779_settings_table_remove_latest_derived_path_column.up.sql": {_1682073779_settings_table_remove_latest_derived_path_columnUpSql, map[string]*bintree{}},
"1682146075_add_created_at_to_saved_addresses.up.sql": {_1682146075_add_created_at_to_saved_addressesUpSql, map[string]*bintree{}},
"1682393575_sync_ens_name.up.sql": {_1682393575_sync_ens_nameUpSql, map[string]*bintree{}},
"1683457503_add_blocks_ranges_sequential_table.up.sql": {_1683457503_add_blocks_ranges_sequential_tableUpSql, map[string]*bintree{}},
"1683627613_accounts_and_keycards_improvements.up.sql": {_1683627613_accounts_and_keycards_improvementsUpSql, map[string]*bintree{}},
"1685041348_settings_table_add_latest_derived_path_column.up.sql": {_1685041348_settings_table_add_latest_derived_path_columnUpSql, map[string]*bintree{}},
"1685440989_update_color_id_accounts.up.sql": {_1685440989_update_color_id_accountsUpSql, map[string]*bintree{}},
"1685463947_add_to_asset_to_multitransaction.up.sql": {_1685463947_add_to_asset_to_multitransactionUpSql, map[string]*bintree{}},
"doc.go": {docGo, map[string]*bintree{}},
}}
// RestoreAsset restores an asset under the given directory.
@ -1777,7 +1740,7 @@ func RestoreAsset(dir, name string) error {
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
err = os.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}

View File

@ -0,0 +1 @@
ALTER TABLE multi_transactions ADD COLUMN to_amount VARCHAR;
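For context, the new to_amount column appears to hold the destination-asset amount of a swap as text. Below is a minimal, hypothetical sketch (not part of this commit) of how such a value could be written with database/sql; the "id" column in the WHERE clause is an assumption about the multi_transactions schema, not something shown in this diff.
package walletexample
import (
	"database/sql"
	"math/big"
)
// setMultiTransactionToAmount is an illustrative helper only: it stores the
// received amount of a swap's destination asset as a string in the new
// to_amount column. The integer "id" column is hypothetical; adjust to the
// real multi_transactions schema.
func setMultiTransactionToAmount(db *sql.DB, id int64, toAmount *big.Int) error {
	_, err := db.Exec(
		`UPDATE multi_transactions SET to_amount = ? WHERE id = ?`,
		toAmount.String(), id,
	)
	return err
}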

View File

@ -95,6 +95,7 @@ func (api *API) GetTransfersByAddress(ctx context.Context, address common.Addres
}
// LoadTransferByHash loads transfer to the database
// Only used by status-mobile
func (api *API) LoadTransferByHash(ctx context.Context, address common.Address, hash common.Hash) error {
log.Debug("[WalletAPI:: LoadTransferByHash] get transfer by hash", "address", address, "hash", hash)
return api.s.transferController.LoadTransferByHash(ctx, api.s.rpcClient, address, hash)
@ -509,7 +510,7 @@ func (api *API) GetAddressDetails(ctx context.Context, chainID uint64, address s
func (api *API) CreateMultiTransaction(ctx context.Context, multiTransaction *transfer.MultiTransaction, data []*bridge.TransactionBridge, password string) (*transfer.MultiTransactionResult, error) {
log.Debug("[WalletAPI:: CreateMultiTransaction] create multi transaction")
return api.s.transactionManager.CreateMultiTransaction(ctx, multiTransaction, data, api.router.bridges, password)
return api.s.transactionManager.CreateBridgeMultiTransaction(ctx, multiTransaction, data, api.router.bridges, password)
}
func (api *API) GetMultiTransactions(ctx context.Context, transactionIDs []transfer.MultiTransactionIDType) ([]*transfer.MultiTransaction, error) {

View File

@ -86,7 +86,7 @@ func NewService(
tokenManager := token.NewTokenManager(db, rpcClient, rpcClient.NetworkManager)
savedAddressesManager := &SavedAddressesManager{db: db}
transactionManager := transfer.NewTransactionManager(db, gethManager, transactor, config, accountsDB)
transferController := transfer.NewTransferController(db, rpcClient, accountFeed, walletFeed, transactionManager, transfer.OnDemandFetchStrategyType)
transferController := transfer.NewTransferController(db, rpcClient, accountFeed, walletFeed, transactionManager, tokenManager, transfer.OnDemandFetchStrategyType)
cryptoCompare := cryptocompare.NewClient()
coingecko := coingecko.NewClient()
marketManager := market.NewManager(cryptoCompare, coingecko, walletFeed)

View File

@ -164,8 +164,8 @@ func (b *BlockDAO) getOldRanges(chainID uint64, account common.Address) ([]*Bloc
return ranges, nil
}
// GetBlocksByAddress loads blocks for a given address.
func (b *BlockDAO) GetBlocksByAddress(chainID uint64, address common.Address, limit int) (rst []*big.Int, err error) {
// GetBlocksToLoadByAddress gets unloaded blocks for a given address.
func (b *BlockDAO) GetBlocksToLoadByAddress(chainID uint64, address common.Address, limit int) (rst []*big.Int, err error) {
query := `SELECT blk_number FROM blocks
WHERE address = ? AND network_id = ? AND loaded = 0
ORDER BY blk_number DESC
@ -186,21 +186,6 @@ func (b *BlockDAO) GetBlocksByAddress(chainID uint64, address common.Address, li
return rst, nil
}
func (b *BlockDAO) RemoveBlockWithTransfer(chainID uint64, address common.Address, block *big.Int) error {
query := `DELETE FROM blocks
WHERE address = ?
AND blk_number = ?
AND network_id = ?`
_, err := b.db.Exec(query, address, (*bigint.SQLBigInt)(block), chainID)
if err != nil {
return err
}
return nil
}
func (b *BlockDAO) GetLastBlockByAddress(chainID uint64, address common.Address, limit int) (rst *big.Int, err error) {
query := `SELECT * FROM
(SELECT blk_number FROM blocks WHERE address = ? AND network_id = ? ORDER BY blk_number DESC LIMIT ?)
@ -224,31 +209,6 @@ func (b *BlockDAO) GetLastBlockByAddress(chainID uint64, address common.Address,
return nil, nil
}
// TODO remove as not used
func (b *BlockDAO) GetLastSavedBlock(chainID uint64) (rst *DBHeader, err error) {
query := `SELECT blk_number, blk_hash
FROM blocks
WHERE network_id = ?
ORDER BY blk_number DESC LIMIT 1`
rows, err := b.db.Query(query, chainID)
if err != nil {
return
}
defer rows.Close()
if rows.Next() {
header := &DBHeader{Hash: common.Hash{}, Number: new(big.Int)}
err = rows.Scan((*bigint.SQLBigInt)(header.Number), &header.Hash)
if err != nil {
return nil, err
}
return header, nil
}
return nil, nil
}
func (b *BlockDAO) GetFirstSavedBlock(chainID uint64, address common.Address) (rst *DBHeader, err error) {
query := `SELECT blk_number, blk_hash, loaded
FROM blocks
@ -273,54 +233,6 @@ func (b *BlockDAO) GetFirstSavedBlock(chainID uint64, address common.Address) (r
return nil, nil
}
// TODO remove as not used
func (b *BlockDAO) GetBlocks(chainID uint64) (rst []*DBHeader, err error) {
query := `SELECT blk_number, blk_hash, address FROM blocks`
rows, err := b.db.Query(query, chainID)
if err != nil {
return
}
defer rows.Close()
rst = []*DBHeader{}
for rows.Next() {
header := &DBHeader{Hash: common.Hash{}, Number: new(big.Int)}
err = rows.Scan((*bigint.SQLBigInt)(header.Number), &header.Hash, &header.Address)
if err != nil {
return nil, err
}
rst = append(rst, header)
}
return rst, nil
}
// TODO remove as not used
func (b *BlockDAO) GetLastSavedBlockBefore(chainID uint64, block *big.Int) (rst *DBHeader, err error) {
query := `SELECT blk_number, blk_hash
FROM blocks
WHERE network_id = ? AND blk_number < ?
ORDER BY blk_number DESC LIMIT 1`
rows, err := b.db.Query(query, chainID, (*bigint.SQLBigInt)(block))
if err != nil {
return
}
defer rows.Close()
if rows.Next() {
header := &DBHeader{Hash: common.Hash{}, Number: new(big.Int)}
err = rows.Scan((*bigint.SQLBigInt)(header.Number), &header.Hash)
if err != nil {
return nil, err
}
return header, nil
}
return nil, nil
}
func (b *BlockDAO) GetFirstKnownBlock(chainID uint64, address common.Address) (rst *big.Int, err error) {
query := `SELECT blk_from FROM blocks_ranges
WHERE address = ?

View File

@ -13,6 +13,7 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/wallet/async"
"github.com/status-im/status-go/services/wallet/token"
"github.com/status-im/status-go/services/wallet/walletevent"
)
@ -175,10 +176,11 @@ type controlCommand struct {
errorsCount int
nonArchivalRPCNode bool
transactionManager *TransactionManager
tokenManager *token.Manager
}
func (c *controlCommand) LoadTransfers(ctx context.Context, limit int) (map[common.Address][]Transfer, error) {
return loadTransfers(ctx, c.accounts, c.blockDAO, c.db, c.chainClient, limit, make(map[common.Address][]*big.Int), c.transactionManager)
return loadTransfers(ctx, c.accounts, c.blockDAO, c.db, c.chainClient, limit, make(map[common.Address][]*big.Int), c.transactionManager, c.tokenManager)
}
func (c *controlCommand) Run(parent context.Context) error {
@ -351,6 +353,7 @@ type transfersCommand struct {
chainClient *chain.ClientWithFallback
blocksLimit int
transactionManager *TransactionManager
tokenManager *token.Manager
// result
fetchedTransfers []Transfer
@ -370,7 +373,7 @@ func (c *transfersCommand) Run(ctx context.Context) (err error) {
for {
blocks := c.blockNums
if blocks == nil {
blocks, _ = c.blockDAO.GetBlocksByAddress(c.chainClient.ChainID, c.address, numberOfBlocksCheckedPerIteration)
blocks, _ = c.blockDAO.GetBlocksToLoadByAddress(c.chainClient.ChainID, c.address, numberOfBlocksCheckedPerIteration)
}
for _, blockNum := range blocks {
@ -384,7 +387,7 @@ func (c *transfersCommand) Run(ctx context.Context) (err error) {
return err
}
err = c.updateMultiTxFromPendingEntry(allTransfers)
err = c.processMultiTransactions(ctx, allTransfers)
if err != nil {
return err
}
@ -423,17 +426,77 @@ func (c *transfersCommand) Run(ctx context.Context) (err error) {
return nil
}
func (c *transfersCommand) updateMultiTxFromPendingEntry(allTransfers []Transfer) error {
func (c *transfersCommand) checkAndProcessPendingMultiTx(subTx *Transfer) (MultiTransactionIDType, error) {
// Update MultiTransactionID from pending entry
for index := range allTransfers {
transfer := &allTransfers[index]
entry, err := c.transactionManager.GetPendingEntry(c.chainClient.ChainID, transfer.ID)
if err == nil {
// Propagate the MultiTransactionID, in case the pending entry was a multi-transaction
transfer.MultiTransactionID = entry.MultiTransactionID
} else if err != sql.ErrNoRows {
log.Error("GetPendingEntry error", "error", err)
return err
entry, err := c.transactionManager.GetPendingEntry(c.chainClient.ChainID, subTx.ID)
if err == nil {
// Propagate the MultiTransactionID, in case the pending entry was a multi-transaction
return entry.MultiTransactionID, nil
} else if err != sql.ErrNoRows {
log.Error("GetPendingEntry error", "error", err)
return NoMultiTransactionID, err
}
return NoMultiTransactionID, nil
}
func (c *transfersCommand) checkAndProcessSwapMultiTx(ctx context.Context, subTx *Transfer) (MultiTransactionIDType, error) {
switch subTx.Type {
// If the Tx contains any uniswapV2Swap subTx, generate a Swap multiTx
case uniswapV2Swap:
multiTransaction, err := buildUniswapSwapMultitransaction(ctx, c.chainClient, c.tokenManager, subTx)
if err != nil {
return NoMultiTransactionID, err
}
if multiTransaction != nil {
id, err := c.transactionManager.InsertMultiTransaction(multiTransaction)
if err != nil {
return NoMultiTransactionID, err
}
return id, nil
}
}
return NoMultiTransactionID, nil
}
func (c *transfersCommand) processMultiTransactions(ctx context.Context, allTransfers []Transfer) error {
subTxsByTxHash := subTransactionsByTxHash(allTransfers)
// Detect / Generate multitransactions
// Iterate over all detected transactions
for _, subTxs := range subTxsByTxHash {
multiTxID := NoMultiTransactionID
var err error
// Iterate over transaction's subtransactions
for _, subTx := range subTxs {
if subTx.MultiTransactionID == NoMultiTransactionID {
// First check every subTx for a pending transaction
multiTxID, err = c.checkAndProcessPendingMultiTx(subTx)
if err != nil {
return err
}
if multiTxID != NoMultiTransactionID {
break
}
// Then check for a Swap transaction
multiTxID, err = c.checkAndProcessSwapMultiTx(ctx, subTx)
if err != nil {
return err
}
if multiTxID != NoMultiTransactionID {
break
}
}
}
// Mark all subTxs of a given Tx with the same multiTxID
if multiTxID != NoMultiTransactionID {
for _, subTx := range subTxs {
subTx.MultiTransactionID = multiTxID
}
}
}
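buildUniswapSwapMultitransaction itself is defined elsewhere in this commit. For orientation, the sketch below (illustrative, not the commit's code) shows how the four amounts of a UniswapV2 pair Swap event can be decoded from a log's data field, which is ABI-encoded as four consecutive uint256 words: amount0In, amount1In, amount0Out, amount1Out.
package walletexample
import (
	"errors"
	"math/big"
	"github.com/ethereum/go-ethereum/core/types"
)
// parseUniswapV2SwapAmounts decodes the unindexed parameters of a UniswapV2
// pair Swap event. The indexed sender/to addresses live in the log topics and
// are not part of the data payload.
func parseUniswapV2SwapAmounts(l *types.Log) (amount0In, amount1In, amount0Out, amount1Out *big.Int, err error) {
	if len(l.Data) != 4*32 {
		return nil, nil, nil, nil, errors.New("unexpected Swap log data length")
	}
	word := func(i int) *big.Int { return new(big.Int).SetBytes(l.Data[i*32 : (i+1)*32]) }
	return word(0), word(1), word(2), word(3), nil
}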
@ -449,6 +512,7 @@ type loadTransfersCommand struct {
foundTransfersByAddress map[common.Address][]Transfer
transactionManager *TransactionManager
blocksLimit int
tokenManager *token.Manager
}
func (c *loadTransfersCommand) Command() async.Command {
@ -458,12 +522,12 @@ func (c *loadTransfersCommand) Command() async.Command {
}.Run
}
func (c *loadTransfersCommand) LoadTransfers(ctx context.Context, limit int, blocksByAddress map[common.Address][]*big.Int, transactionManager *TransactionManager) (map[common.Address][]Transfer, error) {
return loadTransfers(ctx, c.accounts, c.blockDAO, c.db, c.chainClient, limit, blocksByAddress, c.transactionManager)
func (c *loadTransfersCommand) LoadTransfers(ctx context.Context, limit int, blocksByAddress map[common.Address][]*big.Int) (map[common.Address][]Transfer, error) {
return loadTransfers(ctx, c.accounts, c.blockDAO, c.db, c.chainClient, limit, blocksByAddress, c.transactionManager, c.tokenManager)
}
func (c *loadTransfersCommand) Run(parent context.Context) (err error) {
transfersByAddress, err := c.LoadTransfers(parent, c.blocksLimit, c.blocksByAddress, c.transactionManager)
transfersByAddress, err := c.LoadTransfers(parent, c.blocksLimit, c.blocksByAddress)
if err != nil {
return err
}
@ -493,7 +557,7 @@ func (c *findAndCheckBlockRangeCommand) Command() async.Command {
}.Run
}
func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error) {
func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) error {
log.Debug("start findAndCHeckBlockRangeCommand")
newFromByAddress, ethHeadersByAddress, err := c.fastIndex(parent, c.balanceCache, c.fromByAddress, c.toByAddress)
@ -519,9 +583,17 @@ func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error)
erc20Headers := erc20HeadersByAddress[address]
allHeaders := append(ethHeaders, erc20Headers...)
// Ensure only 1 DBHeader per block hash.
uniqHeaders := []*DBHeader{}
if len(allHeaders) > 0 {
uniqHeaders = uniqueHeaders(allHeaders)
uniqHeaders = uniqueHeaderPerBlockHash(allHeaders)
}
// Ensure only 1 PreloadedTransaction per transaction hash during block discovery.
// Full list of SubTransactions will be obtained from the receipt logs
// at a later stage.
for _, header := range uniqHeaders {
header.PreloadedTransactions = uniquePreloadedTransactionPerTxHash(header.PreloadedTransactions)
}
foundHeaders[address] = uniqHeaders
@ -529,6 +601,7 @@ func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error)
lastBlockNumber := c.toByAddress[address]
log.Debug("saving headers", "len", len(uniqHeaders), "lastBlockNumber", lastBlockNumber,
"balance", c.balanceCache.ReadCachedBalance(address, lastBlockNumber), "nonce", c.balanceCache.ReadCachedNonce(address, lastBlockNumber))
to := &Block{
Number: lastBlockNumber,
Balance: c.balanceCache.ReadCachedBalance(address, lastBlockNumber),
@ -543,7 +616,7 @@ func (c *findAndCheckBlockRangeCommand) Run(parent context.Context) (err error)
c.foundHeaders = foundHeaders
log.Debug("end findAndCheckBlockRangeCommand")
return
return nil
}
// run fast indexing for every account up to the canonical chain head minus the safety depth.
@ -616,19 +689,18 @@ func (c *findAndCheckBlockRangeCommand) fastIndexErc20(ctx context.Context, from
case <-ctx.Done():
return nil, ctx.Err()
case <-group.WaitAsync():
headres := map[common.Address][]*DBHeader{}
headers := map[common.Address][]*DBHeader{}
for _, command := range commands {
headres[command.address] = command.foundHeaders
headers[command.address] = command.foundHeaders
}
log.Info("fast indexer Erc20 finished", "in", time.Since(start))
return headres, nil
return headers, nil
}
}
func loadTransfers(ctx context.Context, accounts []common.Address, blockDAO *BlockDAO, db *Database,
chainClient *chain.ClientWithFallback, blocksLimitPerAccount int, blocksByAddress map[common.Address][]*big.Int,
transactionManager *TransactionManager) (map[common.Address][]Transfer, error) {
transactionManager *TransactionManager, tokenManager *token.Manager) (map[common.Address][]Transfer, error) {
log.Info("loadTransfers start", "accounts", accounts, "chain", chainClient.ChainID, "limit", blocksLimitPerAccount)
start := time.Now()
@ -649,6 +721,7 @@ func loadTransfers(ctx context.Context, accounts []common.Address, blockDAO *Blo
},
blockNums: blocksByAddress[address],
transactionManager: transactionManager,
tokenManager: tokenManager,
}
commands = append(commands, transfers)
group.Add(transfers.Command())
@ -664,14 +737,7 @@ func loadTransfers(ctx context.Context, accounts []common.Address, blockDAO *Blo
continue
}
transfers, ok := transfersByAddress[command.address]
if !ok {
transfers = []Transfer{}
}
for _, transfer := range command.fetchedTransfers {
transfersByAddress[command.address] = append(transfers, transfer)
}
transfersByAddress[command.address] = append(transfersByAddress[command.address], command.fetchedTransfers...)
}
log.Info("loadTransfers finished for account", "in", time.Since(start), "chain", chainClient.ChainID)
return transfersByAddress, nil
@ -766,13 +832,14 @@ func findFirstRanges(c context.Context, accounts []common.Address, initialTo *bi
return res, nil
}
func uniqueHeaders(allHeaders []*DBHeader) []*DBHeader {
// Ensure 1 DBHeader per Block Hash
func uniqueHeaderPerBlockHash(allHeaders []*DBHeader) []*DBHeader {
uniqHeadersByHash := map[common.Hash]*DBHeader{}
for _, header := range allHeaders {
uniqHeader, ok := uniqHeadersByHash[header.Hash]
if ok {
if len(header.Erc20Transfers) > 0 {
uniqHeader.Erc20Transfers = append(uniqHeader.Erc20Transfers, header.Erc20Transfers...)
if len(header.PreloadedTransactions) > 0 {
uniqHeader.PreloadedTransactions = append(uniqHeader.PreloadedTransactions, header.PreloadedTransactions...)
}
uniqHeadersByHash[header.Hash] = uniqHeader
} else {
@ -787,3 +854,34 @@ func uniqueHeaders(allHeaders []*DBHeader) []*DBHeader {
return uniqHeaders
}
// Ensure 1 PreloadedTransaction per Transaction Hash
func uniquePreloadedTransactionPerTxHash(allTransactions []*PreloadedTransaction) []*PreloadedTransaction {
uniqTransactionsByTransactionHash := map[common.Hash]*PreloadedTransaction{}
for _, transaction := range allTransactions {
uniqTransactionsByTransactionHash[transaction.Log.TxHash] = transaction
}
uniqTransactions := []*PreloadedTransaction{}
for _, transaction := range uniqTransactionsByTransactionHash {
uniqTransactions = append(uniqTransactions, transaction)
}
return uniqTransactions
}
// Organize subTransactions by Transaction Hash
func subTransactionsByTxHash(subTransactions []Transfer) map[common.Hash][]*Transfer {
rst := map[common.Hash][]*Transfer{}
for index := range subTransactions {
subTx := &subTransactions[index]
txHash := subTx.Transaction.Hash()
if _, ok := rst[txHash]; !ok {
rst[txHash] = make([]*Transfer, 0)
}
rst[txHash] = append(rst[txHash], subTx)
}
return rst
}
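A standalone illustration of the grouping idea behind subTransactionsByTxHash (with a simplified stand-in type, since the package's Transfer is not shown in full here): every sub-transfer emitted by the same on-chain transaction lands in one bucket, which is what lets processMultiTransactions stamp a single multi-transaction ID onto all of them.
package main
import (
	"fmt"
	"github.com/ethereum/go-ethereum/common"
)
// subTransfer is a simplified, hypothetical stand-in for the wallet's
// Transfer type; only the parent transaction hash matters for grouping.
type subTransfer struct {
	TxHash common.Hash
	ID     common.Hash
}
// groupByTxHash buckets sub-transfers by the hash of their parent transaction.
func groupByTxHash(subs []subTransfer) map[common.Hash][]*subTransfer {
	rst := map[common.Hash][]*subTransfer{}
	for i := range subs {
		s := &subs[i]
		rst[s.TxHash] = append(rst[s.TxHash], s)
	}
	return rst
}
func main() {
	tx := common.HexToHash("0x01")
	groups := groupByTxHash([]subTransfer{{TxHash: tx}, {TxHash: tx}})
	fmt.Println(len(groups[tx])) // prints 2
}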

View File

@ -11,6 +11,7 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/wallet/async"
"github.com/status-im/status-go/services/wallet/token"
"github.com/status-im/status-go/services/wallet/walletevent"
)
@ -134,7 +135,7 @@ func (c *findBlocksCommand) checkRange(parent context.Context, from *big.Int, to
allHeaders := append(ethHeaders, erc20Headers...)
if len(allHeaders) > 0 {
foundHeaders = uniqueHeaders(allHeaders)
foundHeaders = uniqueHeaderPerBlockHash(allHeaders)
}
c.resFromBlock = newFromBlock
@ -267,6 +268,7 @@ type loadAllTransfersCommand struct {
chainClient *chain.ClientWithFallback
blocksByAddress map[common.Address][]*big.Int
transactionManager *TransactionManager
tokenManager *token.Manager
blocksLimit int
feed *event.Feed
}
@ -298,6 +300,7 @@ func (c *loadAllTransfersCommand) Run(parent context.Context) error {
blockNums: c.blocksByAddress[address],
blocksLimit: c.blocksLimit,
transactionManager: c.transactionManager,
tokenManager: c.tokenManager,
}
commands = append(commands, transfers)
group.Add(transfers.Command())
@ -331,7 +334,7 @@ func (c *loadAllTransfersCommand) notifyOfNewTransfers(commands []*transfersComm
func newLoadBlocksAndTransfersCommand(accounts []common.Address, db *Database,
blockDAO *BlockDAO, chainClient *chain.ClientWithFallback, feed *event.Feed,
transactionManager *TransactionManager) *loadBlocksAndTransfersCommand {
transactionManager *TransactionManager, tokenManager *token.Manager) *loadBlocksAndTransfersCommand {
return &loadBlocksAndTransfersCommand{
accounts: accounts,
@ -342,6 +345,7 @@ func newLoadBlocksAndTransfersCommand(accounts []common.Address, db *Database,
feed: feed,
errorsCount: 0,
transactionManager: transactionManager,
tokenManager: tokenManager,
transfersLoaded: make(map[common.Address]bool),
}
}
@ -357,6 +361,7 @@ type loadBlocksAndTransfersCommand struct {
errorsCount int
// nonArchivalRPCNode bool // TODO Make use of it
transactionManager *TransactionManager
tokenManager *token.Manager
// Not to be set by the caller
transfersLoaded map[common.Address]bool // For event RecentHistoryReady to be sent only once per account during app lifetime

View File

@ -14,6 +14,7 @@ import (
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/accounts/accountsevent"
"github.com/status-im/status-go/services/wallet/async"
"github.com/status-im/status-go/services/wallet/token"
)
type Controller struct {
@ -25,11 +26,12 @@ type Controller struct {
TransferFeed *event.Feed
group *async.Group
transactionManager *TransactionManager
tokenManager *token.Manager
fetchStrategyType FetchStrategyType
}
func NewTransferController(db *sql.DB, rpcClient *rpc.Client, accountFeed *event.Feed, transferFeed *event.Feed,
transactionManager *TransactionManager, fetchStrategyType FetchStrategyType) *Controller {
transactionManager *TransactionManager, tokenManager *token.Manager, fetchStrategyType FetchStrategyType) *Controller {
blockDAO := &BlockDAO{db}
return &Controller{
@ -39,6 +41,7 @@ func NewTransferController(db *sql.DB, rpcClient *rpc.Client, accountFeed *event
accountFeed: accountFeed,
TransferFeed: transferFeed,
transactionManager: transactionManager,
tokenManager: tokenManager,
fetchStrategyType: fetchStrategyType,
}
}
@ -112,7 +115,7 @@ func (c *Controller) CheckRecentHistory(chainIDs []uint64, accounts []common.Add
return err
}
} else {
c.reactor = NewReactor(c.db, c.blockDAO, c.TransferFeed, c.transactionManager)
c.reactor = NewReactor(c.db, c.blockDAO, c.TransferFeed, c.transactionManager, c.tokenManager)
err = c.reactor.start(chainClients, accounts, c.fetchStrategyType)
if err != nil {
@ -183,6 +186,7 @@ func mapToList(m map[common.Address]struct{}) []common.Address {
return rst
}
// Only used by status-mobile
func (c *Controller) LoadTransferByHash(ctx context.Context, rpcClient *rpc.Client, address common.Address, hash common.Hash) error {
chainClient, err := rpcClient.EthClient(rpcClient.UpstreamChainID)
if err != nil {

View File

@ -17,12 +17,12 @@ import (
// DBHeader fields from header that are stored in database.
type DBHeader struct {
Number *big.Int
Hash common.Hash
Timestamp uint64
Erc20Transfers []*Transfer
Network uint64
Address common.Address
Number *big.Int
Hash common.Hash
Timestamp uint64
PreloadedTransactions []*PreloadedTransaction
Network uint64
Address common.Address
// Head is true if the block was a head at the time it was pulled from chain.
Head bool
// Loaded is true if transfers from this block have already been fetched
@ -114,7 +114,6 @@ func (db *Database) ProcessBlocks(chainID uint64, account common.Address, from *
return
}
// TODO remove as not used
func (db *Database) SaveBlocks(chainID uint64, account common.Address, headers []*DBHeader) (err error) {
var (
tx *sql.Tx
@ -226,7 +225,7 @@ func (db *Database) GetTransfersInRange(chainID uint64, address common.Address,
return
}
defer rows.Close()
return query.Scan(rows)
return query.TransferScan(rows)
}
// GetTransfersByAddress loads transfers for a given address between two blocks.
@ -243,7 +242,7 @@ func (db *Database) GetTransfersByAddress(chainID uint64, address common.Address
return
}
defer rows.Close()
return query.Scan(rows)
return query.TransferScan(rows)
}
// GetTransfersByAddressAndBlock loads transfers for a given address and block.
@ -260,7 +259,7 @@ func (db *Database) GetTransfersByAddressAndBlock(chainID uint64, address common
return
}
defer rows.Close()
return query.Scan(rows)
return query.TransferScan(rows)
}
// GetTransfers loads transfers between two blocks.
@ -271,7 +270,7 @@ func (db *Database) GetTransfers(chainID uint64, start, end *big.Int) (rst []Tra
return
}
defer rows.Close()
return query.Scan(rows)
return query.TransferScan(rows)
}
func (db *Database) GetTransfersForIdentities(ctx context.Context, identities []TransactionIdentity) (rst []Transfer, err error) {
@ -287,10 +286,10 @@ func (db *Database) GetTransfersForIdentities(ctx context.Context, identities []
return
}
defer rows.Close()
return query.Scan(rows)
return query.TransferScan(rows)
}
func (db *Database) GetPreloadedTransactions(chainID uint64, address common.Address, blockNumber *big.Int) (rst []Transfer, err error) {
func (db *Database) GetTransactionsToLoad(chainID uint64, address common.Address, blockNumber *big.Int) (rst []PreloadedTransaction, err error) {
query := newTransfersQuery().
FilterNetwork(chainID).
FilterAddress(address).
@ -302,65 +301,7 @@ func (db *Database) GetPreloadedTransactions(chainID uint64, address common.Addr
return
}
defer rows.Close()
return query.Scan(rows)
}
func (db *Database) GetTransactionsLog(chainID uint64, address common.Address, transactionHash common.Hash) (*types.Log, error) {
l := &types.Log{}
err := db.client.QueryRow("SELECT log FROM transfers WHERE network_id = ? AND address = ? AND hash = ?",
chainID, address, transactionHash).
Scan(&JSONBlob{l})
if err == nil {
return l, nil
}
if err == sql.ErrNoRows {
return nil, nil
}
return nil, err
}
// saveHeaders stores a list of headers atomically.
func (db *Database) saveHeaders(chainID uint64, headers []*types.Header, address common.Address) (err error) {
var (
tx *sql.Tx
insert *sql.Stmt
)
tx, err = db.client.Begin()
if err != nil {
return
}
insert, err = tx.Prepare("INSERT INTO blocks(network_id, blk_number, blk_hash, address) VALUES (?, ?, ?, ?)")
if err != nil {
return
}
defer func() {
if err == nil {
err = tx.Commit()
} else {
_ = tx.Rollback()
}
}()
for _, h := range headers {
_, err = insert.Exec(chainID, (*bigint.SQLBigInt)(h.Number), h.Hash(), address)
if err != nil {
return
}
}
return
}
// getHeaderByNumber selects header using block number.
func (db *Database) getHeaderByNumber(chainID uint64, number *big.Int) (header *DBHeader, err error) {
header = &DBHeader{Hash: common.Hash{}, Number: new(big.Int)}
err = db.client.QueryRow("SELECT blk_hash, blk_number FROM blocks WHERE blk_number = ? AND network_id = ?", (*bigint.SQLBigInt)(number), chainID).Scan(&header.Hash, (*bigint.SQLBigInt)(header.Number))
if err == nil {
return header, nil
}
if err == sql.ErrNoRows {
return nil, nil
}
return nil, err
return query.PreloadedTransactionScan(rows)
}
// statementCreator allows passing either a transaction or a database handle to the consumer.
@ -391,6 +332,7 @@ func deleteHeaders(creator statementCreator, headers []*DBHeader) error {
return nil
}
// Only used by status-mobile
func (db *Database) InsertBlock(chainID uint64, account common.Address, blockNumber *big.Int, blockHash common.Hash) error {
var (
tx *sql.Tx
@ -429,8 +371,8 @@ func insertBlocksWithTransactions(chainID uint64, creator statementCreator, acco
}
insertTx, err := creator.Prepare(`INSERT OR IGNORE
INTO transfers (network_id, address, sender, hash, blk_number, blk_hash, type, timestamp, log, loaded, multi_transaction_id)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?)`)
INTO transfers (network_id, address, sender, hash, blk_number, blk_hash, type, timestamp, log, loaded)
VALUES (?, ?, ?, ?, ?, ?, ?, 0, ?, 0)`)
if err != nil {
return err
}
@ -440,25 +382,23 @@ func insertBlocksWithTransactions(chainID uint64, creator statementCreator, acco
if err != nil {
return err
}
if len(header.Erc20Transfers) > 0 {
for _, transfer := range header.Erc20Transfers {
res, err := updateTx.Exec(&JSONBlob{transfer.Log}, chainID, account, transfer.ID)
if err != nil {
return err
}
affected, err := res.RowsAffected()
if err != nil {
return err
}
if affected > 0 {
continue
}
for _, transaction := range header.PreloadedTransactions {
res, err := updateTx.Exec(&JSONBlob{transaction.Log}, chainID, account, transaction.ID)
if err != nil {
return err
}
affected, err := res.RowsAffected()
if err != nil {
return err
}
if affected > 0 {
continue
}
_, err = insertTx.Exec(chainID, account, account, transfer.ID, (*bigint.SQLBigInt)(header.Number), header.Hash, erc20Transfer, transfer.Timestamp, &JSONBlob{transfer.Log}, transfer.MultiTransactionID)
if err != nil {
log.Error("error saving erc20transfer", "err", err)
return err
}
_, err = insertTx.Exec(chainID, account, account, transaction.ID, (*bigint.SQLBigInt)(header.Number), header.Hash, erc20Transfer, &JSONBlob{transaction.Log})
if err != nil {
log.Error("error saving erc20transfer", "err", err)
return err
}
}
}
@ -466,14 +406,7 @@ func insertBlocksWithTransactions(chainID uint64, creator statementCreator, acco
}
func updateOrInsertTransfers(chainID uint64, creator statementCreator, transfers []Transfer) error {
update, err := creator.Prepare(`UPDATE transfers
SET tx = ?, sender = ?, receipt = ?, timestamp = ?, loaded = 1, base_gas_fee = ?
WHERE address =? AND hash = ?`)
if err != nil {
return err
}
insert, err := creator.Prepare(`INSERT OR IGNORE INTO transfers
insert, err := creator.Prepare(`INSERT OR REPLACE INTO transfers
(network_id, hash, blk_hash, blk_number, timestamp, address, tx, sender, receipt, log, type, loaded, base_gas_fee, multi_transaction_id)
VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?)`)
@ -481,19 +414,6 @@ func updateOrInsertTransfers(chainID uint64, creator statementCreator, transfers
return err
}
for _, t := range transfers {
res, err := update.Exec(&JSONBlob{t.Transaction}, t.From, &JSONBlob{t.Receipt}, t.Timestamp, t.BaseGasFees, t.Address, t.ID)
if err != nil {
return err
}
affected, err := res.RowsAffected()
if err != nil {
return err
}
if affected > 0 {
continue
}
_, err = insert.Exec(chainID, t.ID, t.BlockHash, (*bigint.SQLBigInt)(t.BlockNumber), t.Timestamp, t.Address, &JSONBlob{t.Transaction}, t.From, &JSONBlob{t.Receipt}, &JSONBlob{t.Log}, t.Type, t.BaseGasFees, t.MultiTransactionID)
if err != nil {
log.Error("can't save transfer", "b-hash", t.BlockHash, "b-n", t.BlockNumber, "a", t.Address, "h", t.ID)

View File

@ -21,28 +21,6 @@ func setupTestDB(t *testing.T) (*Database, *BlockDAO, func()) {
}
}
func TestDBGetHeaderByNumber(t *testing.T) {
db, _, stop := setupTestDB(t)
defer stop()
header := &types.Header{
Number: big.NewInt(10),
Difficulty: big.NewInt(1),
Time: 1,
}
require.NoError(t, db.saveHeaders(777, []*types.Header{header}, common.Address{1}))
rst, err := db.getHeaderByNumber(777, header.Number)
require.NoError(t, err)
require.Equal(t, header.Hash(), rst.Hash)
}
func TestDBGetHeaderByNumberNoRows(t *testing.T) {
db, _, stop := setupTestDB(t)
defer stop()
rst, err := db.getHeaderByNumber(777, big.NewInt(1))
require.NoError(t, err)
require.Nil(t, rst)
}
func TestDBProcessBlocks(t *testing.T) {
db, block, stop := setupTestDB(t)
defer stop()

View File

@ -16,31 +16,50 @@ import (
"github.com/status-im/status-go/rpc/chain"
)
// Type is the type of the asset that was transferred.
type Type string
type MultiTransactionIDType int64
const (
ethTransfer Type = "eth"
erc20Transfer Type = "erc20"
erc721Transfer Type = "erc721"
unknownTokenTransfer Type = "unknown"
erc20_721TransferEventSignature = "Transfer(address,address,uint256)"
erc20TransferEventIndexedParameters = 3 // signature, from, to
erc721TransferEventIndexedParameters = 4 // signature, from, to, tokenId
NoMultiTransactionID = MultiTransactionIDType(0)
)
func getEventSignatureHash(signature string) common.Hash {
return crypto.Keccak256Hash([]byte(signature))
}
func getLogSubTxID(log types.Log) common.Hash {
// Get unique ID by using TxHash and log index
index := [4]byte{}
binary.BigEndian.PutUint32(index[:], uint32(log.Index))
return crypto.Keccak256Hash(log.TxHash.Bytes(), index[:])
}
var (
zero = big.NewInt(0)
one = big.NewInt(1)
two = big.NewInt(2)
)
// Partial transaction info obtained by ERC20Downloader.
// A PreloadedTransaction represents a Transaction which contains one or more
// ERC20/ERC721 transfer events.
// To be converted into one or many Transfer objects post-indexing.
type PreloadedTransaction struct {
NetworkID uint64
Type Type `json:"type"`
ID common.Hash `json:"-"`
Address common.Address `json:"address"`
BlockNumber *big.Int `json:"blockNumber"`
BlockHash common.Hash `json:"blockhash"`
Loaded bool
// From is derived from tx signature in order to offload this computation from UI component.
From common.Address `json:"from"`
// Log that was used to generate preloaded transaction.
Log *types.Log `json:"log"`
BaseGasFees string
}
// Transfer stores information about transfer.
// A Transfer represents a plain ETH transfer or some token activity inside a Transaction
type Transfer struct {
Type Type `json:"type"`
ID common.Hash `json:"-"`
@ -71,24 +90,6 @@ type ETHDownloader struct {
var errLogsDownloaderStuck = errors.New("logs downloader stuck")
// GetTransfers checks if the balance was changed between two blocks.
// If so, it downloads the transactions that transfer ether in that block.
func (d *ETHDownloader) GetTransfers(ctx context.Context, header *DBHeader) (rst []Transfer, err error) {
// TODO(dshulyak) consider caching balance and reset it on reorg
if len(d.accounts) == 0 {
return nil, nil
}
blk, err := d.chainClient.BlockByHash(ctx, header.Hash)
if err != nil {
return nil, err
}
rst, err = d.getTransfersInBlock(ctx, blk, d.accounts)
if err != nil {
return nil, err
}
return rst, nil
}
func (d *ETHDownloader) GetTransfersByNumber(ctx context.Context, number *big.Int) ([]Transfer, error) {
blk, err := d.chainClient.BlockByNumber(ctx, number)
if err != nil {
@ -101,6 +102,7 @@ func (d *ETHDownloader) GetTransfersByNumber(ctx context.Context, number *big.In
return rst, err
}
// Only used by status-mobile
func getTransferByHash(ctx context.Context, client *chain.ClientWithFallback, signer types.Signer, address common.Address, hash common.Hash) (*Transfer, error) {
transaction, _, err := client.TransactionByHash(ctx, hash)
if err != nil {
@ -112,12 +114,8 @@ func getTransferByHash(ctx context.Context, client *chain.ClientWithFallback, si
return nil, err
}
transactionLog := getTokenLog(receipt.Logs)
transferType := ethTransfer
if transactionLog != nil {
transferType = erc20Transfer
}
eventType, transactionLog := GetFirstEvent(receipt.Logs)
transactionType := EventTypeToSubtransactionType(eventType)
from, err := types.Sender(signer, transaction)
@ -130,7 +128,8 @@ func getTransferByHash(ctx context.Context, client *chain.ClientWithFallback, si
return nil, err
}
transfer := &Transfer{Type: transferType,
transfer := &Transfer{
Type: transactionType,
ID: hash,
Address: address,
BlockNumber: receipt.BlockNumber,
@ -150,17 +149,19 @@ func (d *ETHDownloader) getTransfersInBlock(ctx context.Context, blk *types.Bloc
startTs := time.Now()
for _, address := range accounts {
preloadedTransfers, err := d.db.GetPreloadedTransactions(d.chainClient.ChainID, address, blk.Number())
// During block discovery, we should have populated the DB with 1 item per Transaction containing
// erc20/erc721 transfers
transactionsToLoad, err := d.db.GetTransactionsToLoad(d.chainClient.ChainID, address, blk.Number())
if err != nil {
return nil, err
}
for _, t := range preloadedTransfers {
transfer, err := d.transferFromLog(ctx, *t.Log, address, t.ID)
for _, t := range transactionsToLoad {
subtransactions, err := d.subTransactionsFromTransactionHash(ctx, t.Log.TxHash, address)
if err != nil {
log.Error("can't fetch erc20 transfer from log", "error", err)
return nil, err
}
rst = append(rst, transfer)
rst = append(rst, subtransactions...)
}
for _, tx := range blk.Transactions() {
@ -183,26 +184,29 @@ func (d *ETHDownloader) getTransfersInBlock(ctx context.Context, blk *types.Bloc
return nil, err
}
transactionLog := getTokenLog(receipt.Logs)
eventType, _ := GetFirstEvent(receipt.Logs)
baseGasFee, err := d.chainClient.GetBaseFeeFromBlock(blk.Number())
if err != nil {
return nil, err
}
if transactionLog == nil {
// If the transaction is not already some known transfer type, add it
// to the list as a plain eth transfer
if eventType == unknownEventType {
rst = append(rst, Transfer{
Type: ethTransfer,
ID: tx.Hash(),
Address: address,
BlockNumber: blk.Number(),
BlockHash: blk.Hash(),
Timestamp: blk.Time(),
Transaction: tx,
From: from,
Receipt: receipt,
Log: transactionLog,
BaseGasFees: baseGasFee})
Type: ethTransfer,
ID: tx.Hash(),
Address: address,
BlockNumber: blk.Number(),
BlockHash: receipt.BlockHash,
Timestamp: blk.Time(),
Transaction: tx,
From: from,
Receipt: receipt,
Log: nil,
BaseGasFees: baseGasFee,
MultiTransactionID: NoMultiTransactionID})
}
}
}
@ -214,7 +218,8 @@ func (d *ETHDownloader) getTransfersInBlock(ctx context.Context, blk *types.Bloc
// NewERC20TransfersDownloader returns new instance.
func NewERC20TransfersDownloader(client *chain.ClientWithFallback, accounts []common.Address, signer types.Signer) *ERC20TransfersDownloader {
signature := crypto.Keccak256Hash([]byte(erc20_721TransferEventSignature))
signature := getEventSignatureHash(erc20_721TransferEventSignature)
return &ERC20TransfersDownloader{
client: client,
accounts: accounts,
@ -253,119 +258,80 @@ func (d *ERC20TransfersDownloader) outboundTopics(address common.Address) [][]co
return [][]common.Hash{{d.signature}, {d.paddedAddress(address)}, {}}
}
func (d *ETHDownloader) transferFromLog(parent context.Context, ethlog types.Log, address common.Address, id common.Hash) (Transfer, error) {
func (d *ETHDownloader) subTransactionsFromTransactionHash(parent context.Context, txHash common.Hash, address common.Address) ([]Transfer, error) {
ctx, cancel := context.WithTimeout(parent, 3*time.Second)
tx, _, err := d.chainClient.TransactionByHash(ctx, ethlog.TxHash)
tx, _, err := d.chainClient.TransactionByHash(ctx, txHash)
cancel()
if err != nil {
return Transfer{}, err
return nil, err
}
from, err := types.Sender(d.signer, tx)
if err != nil {
return Transfer{}, err
return nil, err
}
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
receipt, err := d.chainClient.TransactionReceipt(ctx, ethlog.TxHash)
receipt, err := d.chainClient.TransactionReceipt(ctx, txHash)
cancel()
if err != nil {
return Transfer{}, err
return nil, err
}
baseGasFee, err := d.chainClient.GetBaseFeeFromBlock(new(big.Int).SetUint64(ethlog.BlockNumber))
baseGasFee, err := d.chainClient.GetBaseFeeFromBlock(receipt.BlockNumber)
if err != nil {
return Transfer{}, err
return nil, err
}
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
blk, err := d.chainClient.BlockByHash(ctx, ethlog.BlockHash)
blk, err := d.chainClient.BlockByHash(ctx, receipt.BlockHash)
cancel()
if err != nil {
return Transfer{}, err
}
return Transfer{
Address: address,
ID: id,
Type: erc20Transfer,
BlockNumber: new(big.Int).SetUint64(ethlog.BlockNumber),
BlockHash: ethlog.BlockHash,
Transaction: tx,
From: from,
Receipt: receipt,
Timestamp: blk.Time(),
Log: &ethlog,
BaseGasFees: baseGasFee,
}, nil
}
func (d *ERC20TransfersDownloader) transferFromLog(parent context.Context, ethlog types.Log, address common.Address) (Transfer, error) {
ctx, cancel := context.WithTimeout(parent, 3*time.Second)
tx, _, err := d.client.TransactionByHash(ctx, ethlog.TxHash)
cancel()
if err != nil {
return Transfer{}, err
}
from, err := types.Sender(d.signer, tx)
if err != nil {
return Transfer{}, err
}
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
receipt, err := d.client.TransactionReceipt(ctx, ethlog.TxHash)
cancel()
if err != nil {
return Transfer{}, err
return nil, err
}
baseGasFee, err := d.client.GetBaseFeeFromBlock(new(big.Int).SetUint64(ethlog.BlockNumber))
if err != nil {
return Transfer{}, err
}
rst := make([]Transfer, 0, len(receipt.Logs))
ctx, cancel = context.WithTimeout(parent, 3*time.Second)
blk, err := d.client.BlockByHash(ctx, ethlog.BlockHash)
cancel()
if err != nil {
return Transfer{}, err
}
index := [4]byte{}
binary.BigEndian.PutUint32(index[:], uint32(ethlog.Index))
id := crypto.Keccak256Hash(ethlog.TxHash.Bytes(), index[:])
return Transfer{
Address: address,
ID: id,
Type: erc20Transfer,
BlockNumber: new(big.Int).SetUint64(ethlog.BlockNumber),
BlockHash: ethlog.BlockHash,
Transaction: tx,
From: from,
Receipt: receipt,
Timestamp: blk.Time(),
Log: &ethlog,
BaseGasFees: baseGasFee,
}, nil
}
func (d *ERC20TransfersDownloader) transfersFromLogs(parent context.Context, logs []types.Log, address common.Address) ([]Transfer, error) {
concurrent := NewConcurrentDownloader(parent, NoThreadLimit)
for i := range logs {
l := logs[i]
if l.Removed {
continue
}
concurrent.Add(func(ctx context.Context) error {
transfer, err := d.transferFromLog(ctx, l, address)
if err != nil {
return err
for _, log := range receipt.Logs {
eventType := GetEventType(log)
// Only add ERC20/ERC721 transfers sent from or to the given account.
// Other event types are always added.
mustAppend := false
switch eventType {
case erc20TransferEventType:
from, to, _ := parseErc20TransferLog(log)
if from == address || to == address {
mustAppend = true
}
concurrent.Push(transfer)
return nil
})
case erc721TransferEventType:
from, to, _ := parseErc721TransferLog(log)
if from == address || to == address {
mustAppend = true
}
case uniswapV2SwapEventType:
mustAppend = true
}
if mustAppend {
transfer := Transfer{
Type: EventTypeToSubtransactionType(eventType),
ID: getLogSubTxID(*log),
Address: address,
BlockNumber: new(big.Int).SetUint64(log.BlockNumber),
BlockHash: log.BlockHash,
Loaded: true,
From: from,
Log: log,
BaseGasFees: baseGasFee,
Transaction: tx,
Receipt: receipt,
Timestamp: blk.Time(),
MultiTransactionID: NoMultiTransactionID,
}
rst = append(rst, transfer)
}
}
select {
case <-concurrent.WaitAsync():
case <-parent.Done():
return nil, errLogsDownloaderStuck
}
return concurrent.Get(), concurrent.Error()
return rst, nil
}
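// Illustrative sketch (not part of this commit): getLogSubTxID is assumed to preserve the
// previous inline derivation removed above, hashing the transaction hash together with the
// big-endian encoded log index so each log within a transaction gets a stable, unique sub-transaction ID.
func getLogSubTxIDSketch(ethlog types.Log) common.Hash {
	index := [4]byte{}
	binary.BigEndian.PutUint32(index[:], uint32(ethlog.Index))
	return crypto.Keccak256Hash(ethlog.TxHash.Bytes(), index[:])
}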
func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs []types.Log, address common.Address) ([]*DBHeader, error) {
@ -377,10 +343,7 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
continue
}
index := [4]byte{}
binary.BigEndian.PutUint32(index[:], uint32(l.Index))
id := crypto.Keccak256Hash(l.TxHash.Bytes(), index[:])
id := getLogSubTxID(l)
baseGasFee, err := d.client.GetBaseFeeFromBlock(new(big.Int).SetUint64(l.BlockNumber))
if err != nil {
return nil, err
@ -389,7 +352,7 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
header := &DBHeader{
Number: big.NewInt(int64(l.BlockNumber)),
Hash: l.BlockHash,
Erc20Transfers: []*Transfer{{
PreloadedTransactions: []*PreloadedTransaction{{
Address: address,
BlockNumber: big.NewInt(int64(l.BlockNumber)),
BlockHash: l.BlockHash,
@ -399,7 +362,9 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
Type: erc20Transfer,
Log: &l,
BaseGasFees: baseGasFee,
}}}
}},
Loaded: false,
}
concurrent.Add(func(ctx context.Context) error {
concurrent.PushHeader(header)
@ -414,38 +379,6 @@ func (d *ERC20TransfersDownloader) blocksFromLogs(parent context.Context, logs [
return concurrent.GetHeaders(), concurrent.Error()
}
// GetTransfers for erc20 uses the eth_getLogs RPC with the Transfer event signature and our account address.
func (d *ERC20TransfersDownloader) GetTransfers(ctx context.Context, header *DBHeader) ([]Transfer, error) {
hash := header.Hash
transfers := []Transfer{}
for _, address := range d.accounts {
outbound, err := d.client.FilterLogs(ctx, ethereum.FilterQuery{
BlockHash: &hash,
Topics: d.outboundTopics(address),
})
if err != nil {
return nil, err
}
inbound, err := d.client.FilterLogs(ctx, ethereum.FilterQuery{
BlockHash: &hash,
Topics: d.inboundTopics(address),
})
if err != nil {
return nil, err
}
logs := append(outbound, inbound...)
if len(logs) == 0 {
continue
}
rst, err := d.transfersFromLogs(ctx, logs, address)
if err != nil {
return nil, err
}
transfers = append(transfers, rst...)
}
return transfers, nil
}
// GetHeadersInRange returns headers of blocks containing matching transfers between the two given blocks.
// Getting logs for 100000 blocks took 1.144686979s, with 249 events in the result set.
func (d *ERC20TransfersDownloader) GetHeadersInRange(parent context.Context, from, to *big.Int) ([]*DBHeader, error) {
@ -483,23 +416,3 @@ func (d *ERC20TransfersDownloader) GetHeadersInRange(parent context.Context, fro
log.Debug("found erc20 transfers between two blocks", "from", from, "to", to, "headers", len(headers), "took", time.Since(start))
return headers, nil
}
func IsTokenTransfer(logs []*types.Log) bool {
signature := crypto.Keccak256Hash([]byte(erc20_721TransferEventSignature))
for _, l := range logs {
if len(l.Topics) > 0 && l.Topics[0] == signature {
return true
}
}
return false
}
func getTokenLog(logs []*types.Log) *types.Log {
signature := crypto.Keccak256Hash([]byte(erc20_721TransferEventSignature))
for _, l := range logs {
if len(l.Topics) > 0 && l.Topics[0] == signature {
return l
}
}
return nil
}

View File

@ -0,0 +1,179 @@
package transfer
import (
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/log"
)
// Type is the type of a transaction
type Type string
// EventType is the type of an event emitted in a transaction's logs
type EventType string
const (
// Transaction types
ethTransfer Type = "eth"
erc20Transfer Type = "erc20"
erc721Transfer Type = "erc721"
uniswapV2Swap Type = "uniswapV2Swap"
unknownTransaction Type = "unknown"
// Event types
erc20TransferEventType EventType = "erc20Event"
erc721TransferEventType EventType = "erc721Event"
uniswapV2SwapEventType EventType = "uniswapV2SwapEvent"
unknownEventType EventType = "unknownEvent"
erc20_721TransferEventSignature = "Transfer(address,address,uint256)"
erc20TransferEventIndexedParameters = 3 // signature, from, to
erc721TransferEventIndexedParameters = 4 // signature, from, to, tokenId
uniswapV2SwapEventSignature = "Swap(address,uint256,uint256,uint256,uint256,address)" // also used by SushiSwap
)
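// Illustrative sketch (not part of this commit): topic0 of an EVM log is the Keccak256 hash
// of the canonical event signature string, so getEventSignatureHash is assumed to be a thin
// wrapper around crypto.Keccak256Hash (import "github.com/ethereum/go-ethereum/crypto").
func eventSignatureHashSketch(signature string) common.Hash {
	return crypto.Keccak256Hash([]byte(signature))
}
// For example, eventSignatureHashSketch(erc20_721TransferEventSignature) yields the hash that
// GetEventType below compares against log.Topics[0].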
var (
// MaxUint256 is the maximum value that can be represented by a uint256.
MaxUint256 = new(big.Int).Sub(new(big.Int).Lsh(common.Big1, 256), common.Big1)
)
// GetEventType detects the event type of a given item in the events log
func GetEventType(log *types.Log) EventType {
erc20_721TransferEventSignatureHash := getEventSignatureHash(erc20_721TransferEventSignature)
uniswapV2SwapEventSignatureHash := getEventSignatureHash(uniswapV2SwapEventSignature)
if len(log.Topics) > 0 {
switch log.Topics[0] {
case erc20_721TransferEventSignatureHash:
switch len(log.Topics) {
case erc20TransferEventIndexedParameters:
return erc20TransferEventType
case erc721TransferEventIndexedParameters:
return erc721TransferEventType
}
case uniswapV2SwapEventSignatureHash:
return uniswapV2SwapEventType
}
}
return unknownEventType
}
func EventTypeToSubtransactionType(eventType EventType) Type {
switch eventType {
case erc20TransferEventType:
return erc20Transfer
case erc721TransferEventType:
return erc721Transfer
case uniswapV2SwapEventType:
return uniswapV2Swap
}
return unknownTransaction
}
func GetFirstEvent(logs []*types.Log) (EventType, *types.Log) {
for _, log := range logs {
eventType := GetEventType(log)
if eventType != unknownEventType {
return eventType, log
}
}
return unknownEventType, nil
}
func IsTokenTransfer(logs []*types.Log) bool {
eventType, _ := GetFirstEvent(logs)
return eventType == erc20TransferEventType
}
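// Illustrative usage sketch (not part of this commit): classifying a previously fetched
// receipt by the first recognized event in its logs.
func classifyReceiptSketch(receipt *types.Receipt) Type {
	eventType, _ := GetFirstEvent(receipt.Logs)
	return EventTypeToSubtransactionType(eventType)
}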
func parseErc20TransferLog(ethlog *types.Log) (from, to common.Address, amount *big.Int) {
amount = new(big.Int)
if len(ethlog.Topics) < 3 {
log.Warn("not enough topics for erc20 transfer", "topics", ethlog.Topics)
return
}
if len(ethlog.Topics[1]) != 32 {
log.Warn("second topic is not padded to 32 byte address", "topic", ethlog.Topics[1])
return
}
if len(ethlog.Topics[2]) != 32 {
log.Warn("third topic is not padded to 32 byte address", "topic", ethlog.Topics[2])
return
}
copy(from[:], ethlog.Topics[1][12:])
copy(to[:], ethlog.Topics[2][12:])
if len(ethlog.Data) != 32 {
log.Warn("data is not padded to 32 byts big int", "data", ethlog.Data)
return
}
amount.SetBytes(ethlog.Data)
return
}
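// Illustrative usage sketch (not part of this commit): an ERC20 Transfer log carries the from/to
// addresses as 32-byte left-padded topics and the amount as 32 bytes of data. The addresses and
// amount below are arbitrary placeholders; crypto is assumed to be imported from go-ethereum.
func exampleParseErc20TransferLog() (from, to common.Address, amount *big.Int) {
	sender := common.HexToAddress("0x1111111111111111111111111111111111111111")
	receiver := common.HexToAddress("0x2222222222222222222222222222222222222222")
	value := big.NewInt(1000000)
	l := &types.Log{
		Topics: []common.Hash{
			crypto.Keccak256Hash([]byte(erc20_721TransferEventSignature)),
			common.BytesToHash(sender.Bytes()), // left-pads the 20-byte address to 32 bytes
			common.BytesToHash(receiver.Bytes()),
		},
		Data: common.LeftPadBytes(value.Bytes(), 32),
	}
	return parseErc20TransferLog(l)
}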
func parseErc721TransferLog(ethlog *types.Log) (from, to common.Address, tokenID *big.Int) {
tokenID = new(big.Int)
if len(ethlog.Topics) < 4 {
log.Warn("not enough topics for erc721 transfer", "topics", ethlog.Topics)
return
}
if len(ethlog.Topics[1]) != 32 {
log.Warn("second topic is not padded to 32 byte address", "topic", ethlog.Topics[1])
return
}
if len(ethlog.Topics[2]) != 32 {
log.Warn("third topic is not padded to 32 byte address", "topic", ethlog.Topics[2])
return
}
if len(ethlog.Topics[3]) != 32 {
log.Warn("fourth topic is not 32 byte tokenId", "topic", ethlog.Topics[3])
return
}
copy(from[:], ethlog.Topics[1][12:])
copy(to[:], ethlog.Topics[2][12:])
tokenID.SetBytes(ethlog.Topics[3][:])
return
}
func parseUniswapV2Log(ethlog *types.Log) (pairAddress common.Address, from common.Address, to common.Address, amount0In *big.Int, amount1In *big.Int, amount0Out *big.Int, amount1Out *big.Int, err error) {
amount0In = new(big.Int)
amount1In = new(big.Int)
amount0Out = new(big.Int)
amount1Out = new(big.Int)
if len(ethlog.Topics) < 3 {
err = fmt.Errorf("not enough topics for uniswapV2 swap %s, %v", "topics", ethlog.Topics)
return
}
pairAddress = ethlog.Address
if len(ethlog.Topics[1]) != 32 {
err = fmt.Errorf("second topic is not padded to 32 byte address %s, %v", "topic", ethlog.Topics[1])
return
}
if len(ethlog.Topics[2]) != 32 {
err = fmt.Errorf("third topic is not padded to 32 byte address %s, %v", "topic", ethlog.Topics[2])
return
}
copy(from[:], ethlog.Topics[1][12:])
copy(to[:], ethlog.Topics[2][12:])
if len(ethlog.Data) != 32*4 {
err = fmt.Errorf("data is not padded to 4 * 32 bytes big int %s, %v", "data", ethlog.Data)
return
}
amount0In.SetBytes(ethlog.Data[0:32])
amount1In.SetBytes(ethlog.Data[32:64])
amount0Out.SetBytes(ethlog.Data[64:96])
amount1Out.SetBytes(ethlog.Data[96:128])
return
}
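// Illustrative sketch (not part of this commit): the UniswapV2 Swap event packs the four
// unindexed uint256 amounts into 4 * 32 bytes of data, in the order amount0In, amount1In,
// amount0Out, amount1Out, which is the layout parseUniswapV2Log above relies on.
func packUniswapV2SwapDataSketch(amount0In, amount1In, amount0Out, amount1Out *big.Int) []byte {
	data := make([]byte, 0, 4*32)
	for _, a := range []*big.Int{amount0In, amount1In, amount0Out, amount1Out} {
		data = append(data, common.LeftPadBytes(a.Bytes(), 32)...)
	}
	return data
}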

View File

@ -146,7 +146,7 @@ func (q *transfersQuery) Args() []interface{} {
return q.args
}
func (q *transfersQuery) Scan(rows *sql.Rows) (rst []Transfer, err error) {
func (q *transfersQuery) TransferScan(rows *sql.Rows) (rst []Transfer, err error) {
for rows.Next() {
transfer := Transfer{
BlockNumber: &big.Int{},
@ -163,5 +163,33 @@ func (q *transfersQuery) Scan(rows *sql.Rows) (rst []Transfer, err error) {
}
rst = append(rst, transfer)
}
return rst, nil
}
func (q *transfersQuery) PreloadedTransactionScan(rows *sql.Rows) (rst []PreloadedTransaction, err error) {
transfers, err := q.TransferScan(rows)
if err != nil {
return
}
rst = make([]PreloadedTransaction, 0, len(transfers))
for _, transfer := range transfers {
preloadedTransaction := PreloadedTransaction{
ID: transfer.ID,
Type: transfer.Type,
BlockHash: transfer.BlockHash,
BlockNumber: transfer.BlockNumber,
Address: transfer.Address,
From: transfer.From,
Log: transfer.Log,
NetworkID: transfer.NetworkID,
BaseGasFees: transfer.BaseGasFees,
}
rst = append(rst, preloadedTransaction)
}
return rst, nil
}

View File

@ -13,6 +13,7 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/wallet/async"
"github.com/status-im/status-go/services/wallet/token"
"github.com/status-im/status-go/services/wallet/walletevent"
)
@ -63,6 +64,7 @@ type OnDemandFetchStrategy struct {
group *async.Group
balanceCache *balanceCache
transactionManager *TransactionManager
tokenManager *token.Manager
chainClients map[uint64]*chain.ClientWithFallback
accounts []common.Address
}
@ -84,6 +86,7 @@ func (s *OnDemandFetchStrategy) newControlCommand(chainClient *chain.ClientWithF
feed: s.feed,
errorsCount: 0,
transactionManager: s.transactionManager,
tokenManager: s.tokenManager,
}
return ctl
@ -194,7 +197,7 @@ func (s *OnDemandFetchStrategy) getTransfersByAddress(ctx context.Context, chain
return nil, err
}
blocks, err := s.blockDAO.GetBlocksByAddress(chainID, address, numberOfBlocksCheckedPerIteration)
blocks, err := s.blockDAO.GetBlocksToLoadByAddress(chainID, address, numberOfBlocksCheckedPerIteration)
if err != nil {
return nil, err
}
@ -208,6 +211,7 @@ func (s *OnDemandFetchStrategy) getTransfersByAddress(ctx context.Context, chain
chainClient: chainClient,
transactionManager: s.transactionManager,
blocksLimit: numberOfBlocksCheckedPerIteration,
tokenManager: s.tokenManager,
}
err = txCommand.Command()(ctx)
@ -231,15 +235,17 @@ type Reactor struct {
blockDAO *BlockDAO
feed *event.Feed
transactionManager *TransactionManager
tokenManager *token.Manager
strategy HistoryFetcher
}
func NewReactor(db *Database, blockDAO *BlockDAO, feed *event.Feed, tm *TransactionManager) *Reactor {
func NewReactor(db *Database, blockDAO *BlockDAO, feed *event.Feed, tm *TransactionManager, tokenManager *token.Manager) *Reactor {
return &Reactor{
db: db,
blockDAO: blockDAO,
feed: feed,
transactionManager: tm,
tokenManager: tokenManager,
}
}
@ -274,6 +280,7 @@ func (r *Reactor) createFetchStrategy(chainClients map[uint64]*chain.ClientWithF
r.blockDAO,
r.feed,
r.transactionManager,
r.tokenManager,
chainClients,
accounts,
)
@ -284,6 +291,7 @@ func (r *Reactor) createFetchStrategy(chainClients map[uint64]*chain.ClientWithF
feed: r.feed,
blockDAO: r.blockDAO,
transactionManager: r.transactionManager,
tokenManager: r.tokenManager,
chainClients: chainClients,
accounts: accounts,
}

View File

@ -10,11 +10,13 @@ import (
"github.com/ethereum/go-ethereum/log"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/wallet/async"
"github.com/status-im/status-go/services/wallet/token"
"github.com/status-im/status-go/services/wallet/walletevent"
)
func NewSequentialFetchStrategy(db *Database, blockDAO *BlockDAO, feed *event.Feed,
transactionManager *TransactionManager,
tokenManager *token.Manager,
chainClients map[uint64]*chain.ClientWithFallback,
accounts []common.Address) *SequentialFetchStrategy {
@ -23,6 +25,7 @@ func NewSequentialFetchStrategy(db *Database, blockDAO *BlockDAO, feed *event.Fe
blockDAO: blockDAO,
feed: feed,
transactionManager: transactionManager,
tokenManager: tokenManager,
chainClients: chainClients,
accounts: accounts,
}
@ -35,6 +38,7 @@ type SequentialFetchStrategy struct {
mu sync.Mutex
group *async.Group
transactionManager *TransactionManager
tokenManager *token.Manager
chainClients map[uint64]*chain.ClientWithFallback
accounts []common.Address
}
@ -43,7 +47,7 @@ func (s *SequentialFetchStrategy) newCommand(chainClient *chain.ClientWithFallba
accounts []common.Address) async.Commander {
return newLoadBlocksAndTransfersCommand(accounts, s.db, s.blockDAO, chainClient, s.feed,
s.transactionManager)
s.transactionManager, s.tokenManager)
}
func (s *SequentialFetchStrategy) start() error {

View File

@ -0,0 +1,149 @@
package transfer
import (
"context"
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/core/types"
uniswapv2 "github.com/status-im/status-go/contracts/uniswapV2"
uniswapv3 "github.com/status-im/status-go/contracts/uniswapV3"
"github.com/status-im/status-go/rpc/chain"
"github.com/status-im/status-go/services/wallet/token"
)
func fetchUniswapV2PairInfo(ctx context.Context, client *chain.ClientWithFallback, pairAddress common.Address) (*common.Address, *common.Address, error) {
caller, err := uniswapv2.NewUniswapv2Caller(pairAddress, client)
if err != nil {
return nil, nil, err
}
token0Address, err := caller.Token0(&bind.CallOpts{
Context: ctx,
})
if err != nil {
return nil, nil, err
}
token1Address, err := caller.Token1(&bind.CallOpts{
Context: ctx,
})
if err != nil {
return nil, nil, err
}
return &token0Address, &token1Address, nil
}
func identifyUniswapV2Asset(tokenManager *token.Manager, chainID uint64, amount0 *big.Int, contractAddress0 common.Address, amount1 *big.Int, contractAddress1 common.Address) (token *token.Token, amount *big.Int, err error) {
// Either amount0 or amount1 should be 0
if amount1.Sign() == 0 && amount0.Sign() != 0 {
token = tokenManager.FindTokenByAddress(chainID, contractAddress0)
if token == nil {
err = fmt.Errorf("couldn't find symbol for token0 %v", contractAddress0)
return
}
amount = amount0
} else if amount0.Sign() == 0 && amount1.Sign() != 0 {
token = tokenManager.FindTokenByAddress(chainID, contractAddress1)
if token == nil {
err = fmt.Errorf("couldn't find symbol for token1 %v", contractAddress1)
return
}
amount = amount1
} else {
err = fmt.Errorf("couldn't identify token %v %v %v %v", contractAddress0, amount0, contractAddress1, amount1)
return
}
return
}
func fetchUniswapV2Info(ctx context.Context, client *chain.ClientWithFallback, tokenManager *token.Manager, log *types.Log) (fromAsset string, fromAmount *hexutil.Big, toAsset string, toAmount *hexutil.Big, err error) {
pairAddress, _, _, amount0In, amount1In, amount0Out, amount1Out, err := parseUniswapV2Log(log)
if err != nil {
return
}
token0ContractAddress, token1ContractAddress, err := fetchUniswapV2PairInfo(ctx, client, pairAddress)
if err != nil {
return
}
fromToken, fromAmountInt, err := identifyUniswapV2Asset(tokenManager, client.ChainID, amount0In, *token0ContractAddress, amount1In, *token1ContractAddress)
if err != nil {
// "Soft" error, allow to continue with unknown asset
fromAsset = ""
fromAmount = (*hexutil.Big)(big.NewInt(0))
} else {
fromAsset = fromToken.Symbol
fromAmount = (*hexutil.Big)(fromAmountInt)
}
toToken, toAmountInt, err := identifyUniswapV2Asset(tokenManager, client.ChainID, amount0Out, *token0ContractAddress, amount1Out, *token1ContractAddress)
if err != nil {
// "Soft" error, allow to continue with unknown asset
toAsset = ""
toAmount = (*hexutil.Big)(big.NewInt(0))
} else {
toAsset = toToken.Symbol
toAmount = (*hexutil.Big)(toAmountInt)
}
err = nil
return
}
func fetchUniswapInfo(ctx context.Context, client *chain.ClientWithFallback, tokenManager *token.Manager, log *types.Log, logType EventType) (fromAsset string, fromAmount *hexutil.Big, toAsset string, toAmount *hexutil.Big, err error) {
switch logType {
case uniswapV2SwapEventType:
return fetchUniswapV2Info(ctx, client, tokenManager, log)
}
err = fmt.Errorf("wrong log type %s", logType)
return
}
// Build a Swap multitransaction from a list containing one or several uniswapV2 subTxs.
// We only care about the first and last swap to identify the input/output tokens and amounts.
func buildUniswapSwapMultitransaction(ctx context.Context, client *chain.ClientWithFallback, tokenManager *token.Manager, transfer *Transfer) (*MultiTransaction, error) {
multiTransaction := MultiTransaction{
Type: MultiTransactionSwap,
FromAddress: transfer.Address,
ToAddress: transfer.Address,
}
var firstSwapLog, lastSwapLog *types.Log
var firstSwapLogType, lastSwapLogType EventType
for _, ethlog := range transfer.Receipt.Logs {
logType := GetEventType(ethlog)
switch logType {
case uniswapV2SwapEventType:
if firstSwapLog == nil {
firstSwapLog = ethlog
firstSwapLogType = logType
}
lastSwapLog = ethlog
lastSwapLogType = logType
}
}
var err error
multiTransaction.FromAsset, multiTransaction.FromAmount, multiTransaction.ToAsset, multiTransaction.ToAmount, err = fetchUniswapInfo(ctx, client, tokenManager, firstSwapLog, firstSwapLogType)
if err != nil {
return nil, err
}
if firstSwapLog != lastSwapLog {
_, _, multiTransaction.ToAsset, multiTransaction.ToAmount, err = fetchUniswapInfo(ctx, client, tokenManager, lastSwapLog, lastSwapLogType)
if err != nil {
return nil, err
}
}
return &multiTransaction, nil
}
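// Illustrative sketch (not part of this commit): for a multi-hop swap (e.g. TokenA -> WETH -> TokenB
// routed through two pairs) the receipt contains one Swap log per hop. Selecting the first and last
// Swap logs, as the loop above does, is enough to recover the overall input and output of the route.
func firstAndLastSwapLogsSketch(logs []*types.Log) (first, last *types.Log) {
	for _, l := range logs {
		if GetEventType(l) == uniswapV2SwapEventType {
			if first == nil {
				first = l
			}
			last = l
		}
	}
	return
}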

View File

@ -60,6 +60,7 @@ type MultiTransaction struct {
FromAsset string `json:"fromAsset"`
ToAsset string `json:"toAsset"`
FromAmount *hexutil.Big `json:"fromAmount"`
ToAmount *hexutil.Big `json:"toAmount"`
Type MultiTransactionType `json:"type"`
}
@ -261,11 +262,11 @@ func (tm *TransactionManager) Watch(ctx context.Context, transactionHash common.
return watchTxCommand.Command()(commandContext)
}
const multiTransactionColumns = "from_address, from_asset, from_amount, to_address, to_asset, type, timestamp"
const multiTransactionColumns = "from_address, from_asset, from_amount, to_address, to_asset, to_amount, type, timestamp"
func insertMultiTransaction(db *sql.DB, multiTransaction *MultiTransaction) (MultiTransactionIDType, error) {
insert, err := db.Prepare(fmt.Sprintf(`INSERT OR REPLACE INTO multi_transactions (%s)
VALUES(?, ?, ?, ?, ?, ?, ?)`, multiTransactionColumns))
VALUES(?, ?, ?, ?, ?, ?, ?, ?)`, multiTransactionColumns))
if err != nil {
return 0, err
}
@ -275,6 +276,7 @@ func insertMultiTransaction(db *sql.DB, multiTransaction *MultiTransaction) (Mul
multiTransaction.FromAmount.String(),
multiTransaction.ToAddress,
multiTransaction.ToAsset,
multiTransaction.ToAmount.String(),
multiTransaction.Type,
time.Now().Unix(),
)
@ -286,7 +288,11 @@ func insertMultiTransaction(db *sql.DB, multiTransaction *MultiTransaction) (Mul
return MultiTransactionIDType(multiTransactionID), err
}
func (tm *TransactionManager) CreateMultiTransaction(ctx context.Context, multiTransaction *MultiTransaction, data []*bridge.TransactionBridge, bridges map[string]bridge.Bridge, password string) (*MultiTransactionResult, error) {
func (tm *TransactionManager) InsertMultiTransaction(multiTransaction *MultiTransaction) (MultiTransactionIDType, error) {
return insertMultiTransaction(tm.db, multiTransaction)
}
func (tm *TransactionManager) CreateBridgeMultiTransaction(ctx context.Context, multiTransaction *MultiTransaction, data []*bridge.TransactionBridge, bridges map[string]bridge.Bridge, password string) (*MultiTransactionResult, error) {
selectedAccount, err := tm.getVerifiedWalletAccount(multiTransaction.FromAddress.Hex(), password)
if err != nil {
return nil, err
@ -356,6 +362,7 @@ func (tm *TransactionManager) GetMultiTransactions(ctx context.Context, ids []Mu
for rows.Next() {
multiTransaction := &MultiTransaction{}
var fromAmount string
var toAmount string
err := rows.Scan(
&multiTransaction.ID,
&multiTransaction.FromAddress,
@ -363,6 +370,7 @@ func (tm *TransactionManager) GetMultiTransactions(ctx context.Context, ids []Mu
&fromAmount,
&multiTransaction.ToAddress,
&multiTransaction.ToAsset,
&toAmount,
&multiTransaction.Type,
&multiTransaction.Timestamp,
)
@ -376,6 +384,12 @@ func (tm *TransactionManager) GetMultiTransactions(ctx context.Context, ids []Mu
return nil, errors.New("failed to convert fromAmount to big.Int: " + fromAmount)
}
multiTransaction.ToAmount = new(hexutil.Big)
_, ok = (*big.Int)(multiTransaction.ToAmount).SetString(toAmount, 0)
if !ok {
return nil, errors.New("failed to convert toAmount to big.Int: " + toAmount)
}
multiTransactions = append(multiTransactions, multiTransaction)
}

View File

@ -86,10 +86,12 @@ func TestMultiTransactions(t *testing.T) {
FromAsset: "fromAsset",
ToAsset: "toAsset",
FromAmount: (*hexutil.Big)(big.NewInt(123)),
ToAmount: (*hexutil.Big)(big.NewInt(234)),
Type: MultiTransactionBridge,
}
trx2 := trx1
trx2.FromAmount = (*hexutil.Big)(big.NewInt(456))
trx2.ToAmount = (*hexutil.Big)(big.NewInt(567))
var err error
ids := make([]MultiTransactionIDType, 2)

View File

@ -5,8 +5,6 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/log"
)
// View stores only fields used by a client and ensures that all relevant fields are
@ -40,9 +38,13 @@ type View struct {
}
func castToTransferViews(transfers []Transfer) []View {
views := make([]View, len(transfers))
for i := range transfers {
views[i] = CastToTransferView(transfers[i])
views := make([]View, 0, len(transfers))
for _, tx := range transfers {
switch tx.Type {
case ethTransfer, erc20Transfer, erc721Transfer:
view := CastToTransferView(tx)
views = append(views, view)
}
}
return views
}
@ -89,11 +91,11 @@ func CastToTransferView(t Transfer) View {
view.Contract = t.Receipt.ContractAddress
case erc20Transfer:
view.Contract = t.Log.Address
from, to, valueInt := parseErc20Log(t.Log)
from, to, valueInt := parseErc20TransferLog(t.Log)
view.From, view.To, value = from, to, (*hexutil.Big)(valueInt)
case erc721Transfer:
view.Contract = t.Log.Address
from, to, tokenIDInt := parseErc721Log(t.Log)
from, to, tokenIDInt := parseErc721TransferLog(t.Log)
view.From, view.To, tokenID = from, to, (*hexutil.Big)(tokenIDInt)
}
@ -105,68 +107,11 @@ func CastToTransferView(t Transfer) View {
}
func getFixedTransferType(tx Transfer) Type {
// erc721 transfers share signature with erc20 ones, so they are both (cached and new)
// categorized as erc20 by the Downloader. We fix this on the fly for the moment, until
// the Downloader gets refactored.
// erc721 transfers share signature with erc20 ones, so they both used to be categorized as erc20
// by the Downloader. We fix this here since they might be mis-categorized in the db.
if tx.Type == erc20Transfer {
switch len(tx.Log.Topics) {
case erc20TransferEventIndexedParameters:
// do nothing
case erc721TransferEventIndexedParameters:
return erc721Transfer
default:
return unknownTokenTransfer
}
eventType := GetEventType(tx.Log)
return EventTypeToSubtransactionType(eventType)
}
return tx.Type
}
func parseErc20Log(ethlog *types.Log) (from, to common.Address, amount *big.Int) {
amount = new(big.Int)
if len(ethlog.Topics) < 3 {
log.Warn("not enough topics for erc20 transfer", "topics", ethlog.Topics)
return
}
if len(ethlog.Topics[1]) != 32 {
log.Warn("second topic is not padded to 32 byte address", "topic", ethlog.Topics[1])
return
}
if len(ethlog.Topics[2]) != 32 {
log.Warn("third topic is not padded to 32 byte address", "topic", ethlog.Topics[2])
return
}
copy(from[:], ethlog.Topics[1][12:])
copy(to[:], ethlog.Topics[2][12:])
if len(ethlog.Data) != 32 {
log.Warn("data is not padded to 32 byts big int", "data", ethlog.Data)
return
}
amount.SetBytes(ethlog.Data)
return
}
func parseErc721Log(ethlog *types.Log) (from, to common.Address, tokenID *big.Int) {
tokenID = new(big.Int)
if len(ethlog.Topics) < 4 {
log.Warn("not enough topics for erc721 transfer", "topics", ethlog.Topics)
return
}
if len(ethlog.Topics[1]) != 32 {
log.Warn("second topic is not padded to 32 byte address", "topic", ethlog.Topics[1])
return
}
if len(ethlog.Topics[2]) != 32 {
log.Warn("third topic is not padded to 32 byte address", "topic", ethlog.Topics[2])
return
}
if len(ethlog.Topics[3]) != 32 {
log.Warn("fourth topic is not 32 byte tokenId", "topic", ethlog.Topics[3])
return
}
copy(from[:], ethlog.Topics[1][12:])
copy(to[:], ethlog.Topics[2][12:])
tokenID.SetBytes(ethlog.Topics[3][:])
return
}