feat: use media server for community collectible images

Author: Dario Gabriel Lipicar, 2023-12-15 17:29:39 -03:00 (committed by dlipicar)
Parent: e3ef8c649a
Commit: b3cc73aa7f
26 changed files with 389 additions and 128 deletions


@@ -207,7 +207,7 @@ func (n *StatusNode) StartMediaServerWithoutDB() error {
        }
    }
-   httpServer, err := server.NewMediaServer(nil, nil, n.multiaccountsDB)
+   httpServer, err := server.NewMediaServer(nil, nil, n.multiaccountsDB, nil)
    if err != nil {
        return err
    }
@@ -279,7 +279,7 @@ func (n *StatusNode) startWithDB(config *params.NodeConfig, accs *accounts.Manag
        }
    }
-   httpServer, err := server.NewMediaServer(n.appDB, n.downloader, n.multiaccountsDB)
+   httpServer, err := server.NewMediaServer(n.appDB, n.downloader, n.multiaccountsDB, n.walletDB)
    if err != nil {
        return err
    }


@@ -532,6 +532,7 @@ func (b *StatusNode) walletService(accountsDB *accounts.Database, appDB *sql.DB,
            b.stickersService(accountsDB),
            b.pendingTracker,
            walletFeed,
+           b.httpServer,
        )
    }
    return b.walletSrvc


@@ -458,7 +458,7 @@ func NewMessenger(
    if c.tokenManager != nil {
        managerOptions = append(managerOptions, communities.WithTokenManager(c.tokenManager))
    } else if c.rpcClient != nil {
-       tokenManager := token.NewTokenManager(c.walletDb, c.rpcClient, community.NewManager(database), c.rpcClient.NetworkManager, database)
+       tokenManager := token.NewTokenManager(c.walletDb, c.rpcClient, community.NewManager(database, c.httpServer), c.rpcClient.NetworkManager, database)
        managerOptions = append(managerOptions, communities.WithTokenManager(communities.NewDefaultTokenManager(tokenManager)))
    }


@@ -140,7 +140,7 @@ func (s *MessengerActivityCenterMessageSuite) TestReplyWithImage() {
    defer TearDownMessenger(&s.Suite, bob)
    // create an http server
-   mediaServer, err := server.NewMediaServer(nil, nil, nil)
+   mediaServer, err := server.NewMediaServer(nil, nil, nil, nil)
    s.Require().NoError(err)
    s.Require().NotNil(mediaServer)
    s.Require().NoError(mediaServer.Start())


@@ -515,7 +515,7 @@ func (s *MessengerDeleteMessageSuite) TestDeleteMessageReplyToImage() {
    ogMessage := sendResponse.Messages()[0]
    // create an http server
-   mediaServer, err := server.NewMediaServer(nil, nil, nil)
+   mediaServer, err := server.NewMediaServer(nil, nil, nil, nil)
    s.Require().NoError(err)
    s.Require().NotNil(mediaServer)
    s.Require().NoError(mediaServer.Start())
@@ -570,7 +570,7 @@ func (s *MessengerDeleteMessageSuite) TestDeleteMessageForMeReplyToImage() {
    ogMessage := sendResponse.Messages()[0]
    // create an http server
-   mediaServer, err := server.NewMediaServer(nil, nil, nil)
+   mediaServer, err := server.NewMediaServer(nil, nil, nil, nil)
    s.Require().NoError(err)
    s.Require().NotNil(mediaServer)
    s.Require().NoError(mediaServer.Start())


@@ -2292,7 +2292,7 @@ func (s *MessengerSuite) TestLastSentField() {
// }

func (s *MessengerSuite) TestSendMessageWithPreviews() {
-   httpServer, err := server.NewMediaServer(s.m.database, nil, nil)
+   httpServer, err := server.NewMediaServer(s.m.database, nil, nil, nil)
    s.Require().NoError(err)
    err = httpServer.SetPort(9876)
    s.NoError(err)


@@ -6,6 +6,7 @@ import (
    "errors"
    "image"
    "image/color"
+   "math/big"
    "net/http"
    "net/url"
    "os"
@@ -15,11 +16,14 @@ import (
    "go.uber.org/zap"

+   eth_common "github.com/ethereum/go-ethereum/common"
+
    "github.com/status-im/status-go/images"
    "github.com/status-im/status-go/ipfs"
    "github.com/status-im/status-go/multiaccounts"
    "github.com/status-im/status-go/protocol/identity/colorhash"
    "github.com/status-im/status-go/protocol/identity/ring"
+   "github.com/status-im/status-go/services/wallet/bigint"
)

const (
@@ -32,6 +36,11 @@ const (
    LinkPreviewThumbnailPath       = "/link-preview/thumbnail"
    StatusLinkPreviewThumbnailPath = "/status-link-preview/thumbnail"

+   walletBasePath              = "/wallet"
+   walletCommunityImagesPath   = walletBasePath + "/communityImages"
+   walletCollectionImagesPath  = walletBasePath + "/collectionImages"
+   walletCollectibleImagesPath = walletBasePath + "/collectibleImages"
+
    // Handler routes for pairing
    accountImagesPath   = "/accountImages"
    accountInitialsPath = "/accountInitials"
@@ -920,3 +929,164 @@ func handleQRCodeGeneration(multiaccountsDB *multiaccounts.Database, logger *zap
        }
    }
}
func handleWalletCommunityImages(db *sql.DB, logger *zap.Logger) http.HandlerFunc {
    if db == nil {
        return handleRequestDBMissing(logger)
    }

    return func(w http.ResponseWriter, r *http.Request) {
        params := r.URL.Query()
        if len(params["communityID"]) == 0 {
            logger.Error("no communityID")
            return
        }

        var image []byte
        err := db.QueryRow(`SELECT image_payload FROM community_data_cache WHERE id = ?`, params["communityID"][0]).Scan(&image)
        if err != nil {
            logger.Error("failed to find wallet community image", zap.Error(err))
            return
        }
        if len(image) == 0 {
            logger.Error("empty wallet community image")
            return
        }

        mime, err := images.GetProtobufImageMime(image)
        if err != nil {
            logger.Error("failed to get wallet community image mime", zap.Error(err))
        }

        w.Header().Set("Content-Type", mime)
        w.Header().Set("Cache-Control", "no-store")

        _, err = w.Write(image)
        if err != nil {
            logger.Error("failed to write wallet community image", zap.Error(err))
        }
    }
}

func handleWalletCollectionImages(db *sql.DB, logger *zap.Logger) http.HandlerFunc {
    if db == nil {
        return handleRequestDBMissing(logger)
    }

    return func(w http.ResponseWriter, r *http.Request) {
        params := r.URL.Query()
        if len(params["chainID"]) == 0 {
            logger.Error("no chainID")
            return
        }
        if len(params["contractAddress"]) == 0 {
            logger.Error("no contractAddress")
            return
        }

        chainID, err := strconv.ParseUint(params["chainID"][0], 10, 64)
        if err != nil {
            logger.Error("invalid chainID in wallet collectible image", zap.Error(err))
            return
        }
        contractAddress := eth_common.HexToAddress(params["contractAddress"][0])
        if len(contractAddress) == 0 {
            logger.Error("invalid contractAddress in wallet collectible image", zap.Error(err))
            return
        }

        var image []byte
        err = db.QueryRow(`SELECT image_payload FROM collection_data_cache WHERE chain_id = ? AND contract_address = ?`,
            chainID,
            contractAddress).Scan(&image)
        if err != nil {
            logger.Error("failed to find wallet collection image", zap.Error(err))
            return
        }
        if len(image) == 0 {
            logger.Error("empty wallet collection image")
            return
        }

        mime, err := images.GetProtobufImageMime(image)
        if err != nil {
            logger.Error("failed to get wallet collection image mime", zap.Error(err))
        }

        w.Header().Set("Content-Type", mime)
        w.Header().Set("Cache-Control", "no-store")

        _, err = w.Write(image)
        if err != nil {
            logger.Error("failed to write wallet collection image", zap.Error(err))
        }
    }
}

func handleWalletCollectibleImages(db *sql.DB, logger *zap.Logger) http.HandlerFunc {
    if db == nil {
        return handleRequestDBMissing(logger)
    }

    return func(w http.ResponseWriter, r *http.Request) {
        params := r.URL.Query()
        if len(params["chainID"]) == 0 {
            logger.Error("no chainID")
            return
        }
        if len(params["contractAddress"]) == 0 {
            logger.Error("no contractAddress")
            return
        }
        if len(params["tokenID"]) == 0 {
            logger.Error("no tokenID")
            return
        }

        chainID, err := strconv.ParseUint(params["chainID"][0], 10, 64)
        if err != nil {
            logger.Error("invalid chainID in wallet collectible image", zap.Error(err))
            return
        }
        contractAddress := eth_common.HexToAddress(params["contractAddress"][0])
        if len(contractAddress) == 0 {
            logger.Error("invalid contractAddress in wallet collectible image", zap.Error(err))
            return
        }
        tokenID, ok := big.NewInt(0).SetString(params["tokenID"][0], 10)
        if !ok {
            logger.Error("invalid tokenID in wallet collectible image", zap.Error(err))
            return
        }

        var image []byte
        err = db.QueryRow(`SELECT image_payload FROM collectible_data_cache WHERE chain_id = ? AND contract_address = ? AND token_id = ?`,
            chainID,
            contractAddress,
            (*bigint.SQLBigIntBytes)(tokenID)).Scan(&image)
        if err != nil {
            logger.Error("failed to find wallet collectible image", zap.Error(err))
            return
        }
        if len(image) == 0 {
            logger.Error("empty image")
            return
        }

        mime, err := images.GetProtobufImageMime(image)
        if err != nil {
            logger.Error("failed to get wallet collectible image mime", zap.Error(err))
        }

        w.Header().Set("Content-Type", mime)
        w.Header().Set("Cache-Control", "no-store")

        _, err = w.Write(image)
        if err != nil {
            logger.Error("failed to write wallet collectible image", zap.Error(err))
        }
    }
}
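The three handlers above serve raw image bytes straight from the wallet database. As a rough client-side illustration of the request they expect (not part of this commit; the base URL and TLS settings are placeholders, and certificate checks are skipped only because the media server generates its certificate locally via generateMediaTLSCert), fetching a collectible image could look like this:

package example

import (
    "crypto/tls"
    "fmt"
    "io"
    "net/http"
    "net/url"
)

// fetchCollectibleImage is a hypothetical sketch. It builds the
// /wallet/collectibleImages query exactly as handleWalletCollectibleImages
// expects it (chainID, contractAddress, tokenID) and returns the image bytes.
// baseURL stands in for whatever address the embedded media server listens on.
func fetchCollectibleImage(baseURL string, chainID uint64, contractAddress, tokenID string) ([]byte, error) {
    q := url.Values{
        "chainID":         {fmt.Sprintf("%d", chainID)},
        "contractAddress": {contractAddress},
        "tokenID":         {tokenID},
    }
    client := &http.Client{
        Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, // locally generated cert
    }
    resp, err := client.Get(baseURL + "/wallet/collectibleImages?" + q.Encode())
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status: %s", resp.Status)
    }
    return io.ReadAll(resp.Body)
}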


@@ -40,7 +40,7 @@ func (s *QROpsTestSuite) SetupTest() {
    s.SetupKeyComponents(s.T())
    s.SetupLoggerComponents()
-   mediaServer, err := NewMediaServer(nil, nil, nil)
+   mediaServer, err := NewMediaServer(nil, nil, nil, nil)
    s.Require().NoError(err)
    s.server = mediaServer


@@ -8,6 +8,7 @@ import (
    "github.com/status-im/status-go/logutils"
    "github.com/status-im/status-go/multiaccounts"
    "github.com/status-im/status-go/protocol/common"
+   "github.com/status-im/status-go/services/wallet/thirdparty"
    "github.com/status-im/status-go/signal"
)
@@ -17,10 +18,11 @@ type MediaServer struct {
    db              *sql.DB
    downloader      *ipfs.Downloader
    multiaccountsDB *multiaccounts.Database
+   walletDB        *sql.DB
}

// NewMediaServer returns a *MediaServer
-func NewMediaServer(db *sql.DB, downloader *ipfs.Downloader, multiaccountsDB *multiaccounts.Database) (*MediaServer, error) {
+func NewMediaServer(db *sql.DB, downloader *ipfs.Downloader, multiaccountsDB *multiaccounts.Database, walletDB *sql.DB) (*MediaServer, error) {
    err := generateMediaTLSCert()
    if err != nil {
        return nil, err
@@ -36,6 +38,7 @@ func NewMediaServer(db *sql.DB, downloader *ipfs.Downloader, multiaccountsDB *mu
        db:              db,
        downloader:      downloader,
        multiaccountsDB: multiaccountsDB,
+       walletDB:        walletDB,
    }

    s.SetHandlers(HandlerPatternMap{
        accountImagesPath: handleAccountImages(s.multiaccountsDB, s.logger),
@@ -49,6 +52,9 @@ func NewMediaServer(db *sql.DB, downloader *ipfs.Downloader, multiaccountsDB *mu
        ipfsPath:                       handleIPFS(s.downloader, s.logger),
        LinkPreviewThumbnailPath:       handleLinkPreviewThumbnail(s.db, s.logger),
        StatusLinkPreviewThumbnailPath: handleStatusLinkPreviewThumbnail(s.db, s.logger),
+       walletCommunityImagesPath:      handleWalletCommunityImages(s.walletDB, s.logger),
+       walletCollectionImagesPath:     handleWalletCollectionImages(s.walletDB, s.logger),
+       walletCollectibleImagesPath:    handleWalletCollectibleImages(s.walletDB, s.logger),
    })

    return s, nil
@@ -139,3 +145,36 @@ func (s *MediaServer) MakeContactImageURL(publicKey string, imageType string) st
    return u.String()
}
func (s *MediaServer) MakeWalletCommunityImagesURL(communityID string) string {
    u := s.MakeBaseURL()
    u.Path = walletCommunityImagesPath
    u.RawQuery = url.Values{
        "communityID": {communityID},
    }.Encode()

    return u.String()
}

func (s *MediaServer) MakeWalletCollectionImagesURL(contractID thirdparty.ContractID) string {
    u := s.MakeBaseURL()
    u.Path = walletCollectionImagesPath
    u.RawQuery = url.Values{
        "chainID":         {contractID.ChainID.String()},
        "contractAddress": {contractID.Address.Hex()},
    }.Encode()

    return u.String()
}

func (s *MediaServer) MakeWalletCollectibleImagesURL(collectibleID thirdparty.CollectibleUniqueID) string {
    u := s.MakeBaseURL()
    u.Path = walletCollectibleImagesPath
    u.RawQuery = url.Values{
        "chainID":         {collectibleID.ContractID.ChainID.String()},
        "contractAddress": {collectibleID.ContractID.Address.Hex()},
        "tokenID":         {collectibleID.TokenID.String()},
    }.Encode()

    return u.String()
}
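For reference, a minimal, self-contained sketch (not from the commit) of the URL shape MakeWalletCollectibleImagesURL builds; the host and port stand in for whatever MakeBaseURL returns at runtime, and the chain, contract and token values are made up:

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // Mirrors the construction in MakeWalletCollectibleImagesURL above.
    u := url.URL{Scheme: "https", Host: "localhost:52132"} // hypothetical media-server address
    u.Path = "/wallet/collectibleImages"
    u.RawQuery = url.Values{
        "chainID":         {"1"},
        "contractAddress": {"0x0000000000000000000000000000000000001234"},
        "tokenID":         {"42"},
    }.Encode()
    fmt.Println(u.String())
    // https://localhost:52132/wallet/collectibleImages?chainID=1&contractAddress=0x0000000000000000000000000000000000001234&tokenID=42
}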


@@ -51,7 +51,7 @@ func (s *ServerURLSuite) SetupTest() {
    s.SetupKeyComponents(s.T())
    s.SetupLoggerComponents()
-   mediaServer, err := NewMediaServer(nil, nil, nil)
+   mediaServer, err := NewMediaServer(nil, nil, nil, nil)
    s.Require().NoError(err)
    s.serverForQR = mediaServer


@@ -580,9 +580,11 @@ func (s *Service) FillCollectibleMetadata(collectible *thirdparty.FullCollectibl
        privilegesLevel = permissionTypeToPrivilegesLevel(permission.GetType())
    }

+   imagePayload, _ := images.GetPayloadFromURI(tokenMetadata.GetImage())
+
    collectible.CollectibleData.Name = tokenMetadata.GetName()
    collectible.CollectibleData.Description = tokenMetadata.GetDescription()
-   collectible.CollectibleData.ImageURL = tokenMetadata.GetImage()
+   collectible.CollectibleData.ImagePayload = imagePayload
    collectible.CollectibleData.Traits = getCollectibleCommunityTraits(communityToken)

    if collectible.CollectionData == nil {
@@ -592,7 +594,7 @@ func (s *Service) FillCollectibleMetadata(collectible *thirdparty.FullCollectibl
        }
    }
    collectible.CollectionData.Name = tokenMetadata.GetName()
-   collectible.CollectionData.ImageURL = tokenMetadata.GetImage()
+   collectible.CollectionData.ImagePayload = imagePayload

    collectible.CommunityInfo = &thirdparty.CollectibleCommunityInfo{
        PrivilegesLevel: privilegesLevel,
@@ -622,9 +624,9 @@ func (s *Service) FetchCommunityInfo(communityID string) (*thirdparty.CommunityI
    }

    communityInfo := &thirdparty.CommunityInfo{
-       CommunityName:  community.Name(),
-       CommunityColor: community.Color(),
-       CommunityImage: fetchCommunityImage(community),
+       CommunityName:         community.Name(),
+       CommunityColor:        community.Color(),
+       CommunityImagePayload: fetchCommunityImage(community),
    }

    return communityInfo, nil
@@ -737,7 +739,7 @@ func fetchCommunityCollectiblePermission(community *communities.Community, id th
    return nil
}

-func fetchCommunityImage(community *communities.Community) string {
+func fetchCommunityImage(community *communities.Community) []byte {
    imageTypes := []string{
        images.LargeDimName,
        images.SmallDimName,
@@ -747,14 +749,11 @@ func fetchCommunityImage(community *communities.Community) string {
    for _, imageType := range imageTypes {
        if pbImage, ok := communityImages[imageType]; ok {
-           imageBase64, err := images.GetPayloadDataURI(pbImage.Payload)
-           if err == nil {
-               return imageBase64
-           }
+           return pbImage.Payload
        }
    }

-   return ""
+   return nil
}

func boolToString(value bool) string {
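The service now caches the raw image payload instead of a base64 data-URI string. Purely to illustrate the difference, here is a hypothetical helper that extracts bytes from a base64 data URI; it is an assumption for illustration, not necessarily how images.GetPayloadFromURI works internally:

package example

import (
    "encoding/base64"
    "fmt"
    "strings"
)

// payloadFromDataURI decodes "data:image/jpeg;base64,<...>"-style URIs into raw
// bytes, the kind of payload the wallet DB now caches and the media server
// serves back with an appropriate Content-Type.
func payloadFromDataURI(uri string) ([]byte, error) {
    const marker = ";base64,"
    i := strings.Index(uri, marker)
    if i < 0 {
        return nil, fmt.Errorf("not a base64 data URI")
    }
    return base64.StdEncoding.DecodeString(uri[i+len(marker):])
}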


@@ -21,7 +21,7 @@ func NewCollectibleDataDB(sqlDb *sql.DB) *CollectibleDataDB {
    }
}

-const collectibleDataColumns = "chain_id, contract_address, token_id, provider, name, description, permalink, image_url, animation_url, animation_media_type, background_color, token_uri, community_id"
+const collectibleDataColumns = "chain_id, contract_address, token_id, provider, name, description, permalink, image_url, image_payload, animation_url, animation_media_type, background_color, token_uri, community_id"
const collectibleCommunityDataColumns = "community_privileges_level"
const collectibleTraitsColumns = "chain_id, contract_address, token_id, trait_type, trait_value, display_type, max_value"
const selectCollectibleTraitsColumns = "trait_type, trait_value, display_type, max_value"
@@ -108,7 +108,7 @@ func upsertCollectibleTraits(creator sqlite.StatementCreator, id thirdparty.Coll
func upsertCollectiblesData(creator sqlite.StatementCreator, collectibles []thirdparty.CollectibleData) error {
    insertCollectible, err := creator.Prepare(fmt.Sprintf(`INSERT OR REPLACE INTO collectible_data_cache (%s)
-       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, collectibleDataColumns))
+       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, collectibleDataColumns))
    if err != nil {
        return err
    }
@@ -123,6 +123,7 @@ func upsertCollectiblesData(creator sqlite.StatementCreator, collectibles []thir
            c.Description,
            c.Permalink,
            c.ImageURL,
+           c.ImagePayload,
            c.AnimationURL,
            c.AnimationMediaType,
            c.BackgroundColor,
@@ -180,6 +181,7 @@ func scanCollectiblesDataRow(row *sql.Row) (*thirdparty.CollectibleData, error)
        &c.Description,
        &c.Permalink,
        &c.ImageURL,
+       &c.ImagePayload,
        &c.AnimationURL,
        &c.AnimationMediaType,
        &c.BackgroundColor,


@@ -42,6 +42,7 @@ func generateTestCollectiblesData(count int) (result []thirdparty.CollectibleDat
            Description:        fmt.Sprintf("description-%d", i),
            Permalink:          fmt.Sprintf("permalink-%d", i),
            ImageURL:           fmt.Sprintf("imageurl-%d", i),
+           ImagePayload:       []byte(fmt.Sprintf("imagepayload-%d", i)),
            AnimationURL:       fmt.Sprintf("animationurl-%d", i),
            AnimationMediaType: fmt.Sprintf("animationmediatype-%d", i),
            Traits: []thirdparty.CollectibleTrait{


@@ -18,7 +18,7 @@ func NewCollectionDataDB(sqlDb *sql.DB) *CollectionDataDB {
    }
}

-const collectionDataColumns = "chain_id, contract_address, provider, name, slug, image_url, community_id"
+const collectionDataColumns = "chain_id, contract_address, provider, name, slug, image_url, image_payload, community_id"
const collectionTraitsColumns = "chain_id, contract_address, trait_type, min, max"
const selectCollectionTraitsColumns = "trait_type, min, max"
@@ -100,7 +100,7 @@ func upsertCollectionTraits(creator sqlite.StatementCreator, id thirdparty.Contr
func upsertCollectionsData(creator sqlite.StatementCreator, collections []thirdparty.CollectionData) error {
    insertCollection, err := creator.Prepare(fmt.Sprintf(`INSERT OR REPLACE INTO collection_data_cache (%s)
-       VALUES (?, ?, ?, ?, ?, ?, ?)`, collectionDataColumns))
+       VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, collectionDataColumns))
    if err != nil {
        return err
    }
@@ -113,6 +113,7 @@ func upsertCollectionsData(creator sqlite.StatementCreator, collections []thirdp
            c.Name,
            c.Slug,
            c.ImageURL,
+           c.ImagePayload,
            c.CommunityID,
        )
        if err != nil {
@@ -161,6 +162,7 @@ func scanCollectionsDataRow(row *sql.Row) (*thirdparty.CollectionData, error) {
        &c.Name,
        &c.Slug,
        &c.ImageURL,
+       &c.ImagePayload,
        &c.CommunityID,
    )
    if err != nil {


@@ -40,12 +40,13 @@ func generateTestCollectionsData(count int) (result []thirdparty.CollectionData)
                ChainID: w_common.ChainID(i),
                Address: common.BigToAddress(bigI),
            },
-           Provider:    fmt.Sprintf("provider-%d", i),
-           Name:        fmt.Sprintf("name-%d", i),
-           Slug:        fmt.Sprintf("slug-%d", i),
-           ImageURL:    fmt.Sprintf("imageurl-%d", i),
-           Traits:      traits,
-           CommunityID: fmt.Sprintf("community-%d", i),
+           Provider:     fmt.Sprintf("provider-%d", i),
+           Name:         fmt.Sprintf("name-%d", i),
+           Slug:         fmt.Sprintf("slug-%d", i),
+           ImageURL:     fmt.Sprintf("imageurl-%d", i),
+           ImagePayload: []byte(fmt.Sprintf("imagepayload-%d", i)),
+           Traits:       traits,
+           CommunityID:  fmt.Sprintf("community-%d", i),
        }
        result = append(result, newCollection)
    }


@@ -17,6 +17,7 @@
    "github.com/ethereum/go-ethereum/log"

    "github.com/status-im/status-go/contracts/community-tokens/collectibles"
    "github.com/status-im/status-go/rpc"
+   "github.com/status-im/status-go/server"
    "github.com/status-im/status-go/services/wallet/bigint"
    walletCommon "github.com/status-im/status-go/services/wallet/common"
    "github.com/status-im/status-go/services/wallet/community"
@@ -61,6 +62,8 @@ type Manager struct {
    collectionsDataDB *CollectionDataDB
    communityManager  *community.Manager

+   mediaServer *server.MediaServer
+
    statuses       map[string]*connection.Status
    statusNotifier *connection.StatusNotifier
}
@@ -73,6 +76,7 @@ func NewManager(
    accountOwnershipProviders []thirdparty.CollectibleAccountOwnershipProvider,
    collectibleDataProviders []thirdparty.CollectibleDataProvider,
    collectionDataProviders []thirdparty.CollectionDataProvider,
+   mediaServer *server.MediaServer,
    feed *event.Feed) *Manager {

    hystrix.ConfigureCommand(hystrixContractOwnershipClientName, hystrix.CommandConfig{
        Timeout: 10000,
@@ -135,6 +139,7 @@ func NewManager(
        collectiblesDataDB: NewCollectibleDataDB(db),
        collectionsDataDB:  NewCollectionDataDB(db),
        communityManager:   communityManager,
+       mediaServer:        mediaServer,
        statuses:           statuses,
        statusNotifier:     statusNotifier,
    }
@@ -670,6 +675,9 @@ func (o *Manager) getCacheFullCollectibleData(uniqueIDs []thirdparty.Collectible
                ID: id,
            }
        }
+       if o.mediaServer != nil && len(collectibleData.ImagePayload) > 0 {
+           collectibleData.ImageURL = o.mediaServer.MakeWalletCollectibleImagesURL(collectibleData.ID)
+       }

        collectionData, ok := collectionsData[id.ContractID.HashKey()]
        if !ok {
@@ -678,6 +686,9 @@ func (o *Manager) getCacheFullCollectibleData(uniqueIDs []thirdparty.Collectible
                ID: id.ContractID,
            }
        }
+       if o.mediaServer != nil && len(collectionData.ImagePayload) > 0 {
+           collectionData.ImageURL = o.mediaServer.MakeWalletCollectionImagesURL(collectionData.ID)
+       }

        communityInfo, err := o.collectiblesDataDB.GetCommunityInfo(id)
        if err != nil {


@@ -23,8 +23,8 @@ type InfoState struct {
    LastUpdateSuccesful bool
}

-const communityInfoColumns = "id, name, color, image"
-const selectCommunityInfoColumns = "name, color, image"
+const communityInfoColumns = "id, name, color, image, image_payload"
+const selectCommunityInfoColumns = "name, color, image, image_payload"
const communityInfoStateColumns = "id, last_update_timestamp, last_update_successful"
const selectCommunityInfoStateColumns = "last_update_timestamp, last_update_successful"
@@ -60,7 +60,7 @@ func (o *DataDB) SetCommunityInfo(id string, c *thirdparty.CommunityInfo) (err e
    if valid {
        setInfo, err := tx.Prepare(fmt.Sprintf(`INSERT OR REPLACE INTO community_data_cache (%s)
-           VALUES (?, ?, ?, ?)`, communityInfoColumns))
+           VALUES (?, ?, ?, ?, ?)`, communityInfoColumns))
        if err != nil {
            return err
        }
@@ -70,6 +70,7 @@ func (o *DataDB) SetCommunityInfo(id string, c *thirdparty.CommunityInfo) (err e
            c.CommunityName,
            c.CommunityColor,
            c.CommunityImage,
+           c.CommunityImagePayload,
        )
        if err != nil {
            return err
@@ -116,6 +117,7 @@ func (o *DataDB) GetCommunityInfo(id string) (*thirdparty.CommunityInfo, *InfoSt
        &info.CommunityName,
        &info.CommunityColor,
        &info.CommunityImage,
+       &info.CommunityImagePayload,
    )

    if err == sql.ErrNoRows {
if err == sql.ErrNoRows { if err == sql.ErrNoRows {


@@ -24,9 +24,10 @@ func generateTestCommunityInfo(count int) map[string]thirdparty.CommunityInfo {
    for i := 0; i < count; i++ {
        communityID := fmt.Sprintf("communityid-%d", i)
        newCommunity := thirdparty.CommunityInfo{
-           CommunityName:  fmt.Sprintf("communityname-%d", i),
-           CommunityColor: fmt.Sprintf("communitycolor-%d", i),
-           CommunityImage: fmt.Sprintf("communityimage-%d", i),
+           CommunityName:         fmt.Sprintf("communityname-%d", i),
+           CommunityColor:        fmt.Sprintf("communitycolor-%d", i),
+           CommunityImage:        fmt.Sprintf("communityimage-%d", i),
+           CommunityImagePayload: []byte(fmt.Sprintf("communityimagepayload-%d", i)),
        }
        result[communityID] = newCommunity
    }


@@ -6,6 +6,7 @@ import (
    "time"

    "github.com/ethereum/go-ethereum/log"

+   "github.com/status-im/status-go/server"
    "github.com/status-im/status-go/services/wallet/thirdparty"
)
@@ -14,11 +15,13 @@ const failedCommunityFetchRetryDelay = 1 * time.Hour
type Manager struct {
    db                    *DataDB
    communityInfoProvider thirdparty.CommunityInfoProvider
+   mediaServer           *server.MediaServer
}

-func NewManager(db *sql.DB) *Manager {
+func NewManager(db *sql.DB, mediaServer *server.MediaServer) *Manager {
    return &Manager{
-       db: NewDataDB(db),
+       db:          NewDataDB(db),
+       mediaServer: mediaServer,
    }
}
@@ -28,7 +31,14 @@ func (cm *Manager) SetCommunityInfoProvider(communityInfoProvider thirdparty.Com
}

func (cm *Manager) GetCommunityInfo(id string) (*thirdparty.CommunityInfo, *InfoState, error) {
-   return cm.db.GetCommunityInfo(id)
+   communityInfo, state, err := cm.db.GetCommunityInfo(id)
+   if err != nil {
+       return nil, nil, err
+   }
+   if cm.mediaServer != nil && communityInfo != nil && len(communityInfo.CommunityImagePayload) > 0 {
+       communityInfo.CommunityImage = cm.mediaServer.MakeWalletCommunityImagesURL(id)
+   }
+   return communityInfo, state, err
}

func (cm *Manager) GetCommunityID(tokenURI string) string {
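A hypothetical usage sketch of the effect of this change: when the wallet database holds a cached payload and a media server is attached, CommunityImage comes back as a local media-server URL instead of embedded image data. The function below only illustrates the call shape; the printed URL depends on the server's runtime port.

package example

import (
    "fmt"

    "github.com/status-im/status-go/services/wallet/community"
)

// printCommunityImage shows the shape of the result; the community ID passed in
// is up to the caller, and the URL host/port are whatever the media server uses.
func printCommunityImage(cm *community.Manager, id string) {
    info, _, err := cm.GetCommunityInfo(id)
    if err != nil || info == nil {
        return
    }
    fmt.Println(info.CommunityImage)
    // e.g. https://localhost:52132/wallet/communityImages?communityID=<id>
}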


@@ -25,7 +25,7 @@ func TestKeycardPairingsFile(t *testing.T) {
    db, err := helpers.SetupTestMemorySQLDB(walletdatabase.DbInitializer{})
    require.NoError(t, err)
-   service := NewService(db, accountsDb, appDB, &rpc.Client{NetworkManager: network.NewManager(db)}, nil, nil, nil, nil, &params.NodeConfig{}, nil, nil, nil, nil)
+   service := NewService(db, accountsDb, appDB, &rpc.Client{NetworkManager: network.NewManager(db)}, nil, nil, nil, nil, &params.NodeConfig{}, nil, nil, nil, nil, nil)
    data, err := service.KeycardPairings().GetPairingsJSONFileContent()
    require.NoError(t, err)


@@ -16,6 +16,7 @@ import (
    "github.com/status-im/status-go/multiaccounts/accounts"
    "github.com/status-im/status-go/params"
    "github.com/status-im/status-go/rpc"
+   "github.com/status-im/status-go/server"
    "github.com/status-im/status-go/services/ens"
    "github.com/status-im/status-go/services/stickers"
    "github.com/status-im/status-go/services/wallet/activity"
@@ -57,6 +58,7 @@ func NewService(
    stickers *stickers.Service,
    pendingTxManager *transactions.PendingTxTracker,
    feed *event.Feed,
+   mediaServer *server.MediaServer,
) *Service {
    cryptoOnRampManager := NewCryptoOnRampManager(&CryptoOnRampOptions{
        dataSourceType: DataSourceStatic,
@@ -97,7 +99,7 @@ func NewService(
        })
    })

-   communityManager := community.NewManager(db)
+   communityManager := community.NewManager(db, mediaServer)
    balanceCacher := balance.NewCacherWithTTL(5 * time.Minute)
    tokenManager := token.NewTokenManager(db, rpcClient, communityManager, rpcClient.NetworkManager, appDB)
    savedAddressesManager := &SavedAddressesManager{db: db}
@@ -142,7 +144,7 @@ func NewService(
        alchemyClient,
    }

-   collectiblesManager := collectibles.NewManager(db, rpcClient, communityManager, contractOwnershipProviders, accountOwnershipProviders, collectibleDataProviders, collectionDataProviders, feed)
+   collectiblesManager := collectibles.NewManager(db, rpcClient, communityManager, contractOwnershipProviders, accountOwnershipProviders, collectibleDataProviders, collectionDataProviders, mediaServer, feed)
    collectibles := collectibles.NewService(db, feed, accountsDB, accountFeed, settingsFeed, communityManager, rpcClient.NetworkManager, collectiblesManager)

    activity := activity.NewService(db, tokenManager, collectiblesManager, feed)


@@ -104,13 +104,14 @@ type CollectionTrait struct {

// Collection info
type CollectionData struct {
-   ID          ContractID                 `json:"id"`
-   CommunityID string                     `json:"community_id"`
-   Provider    string                     `json:"provider"`
-   Name        string                     `json:"name"`
-   Slug        string                     `json:"slug"`
-   ImageURL    string                     `json:"image_url"`
-   Traits      map[string]CollectionTrait `json:"traits"`
+   ID           ContractID                 `json:"id"`
+   CommunityID  string                     `json:"community_id"`
+   Provider     string                     `json:"provider"`
+   Name         string                     `json:"name"`
+   Slug         string                     `json:"slug"`
+   ImageURL     string                     `json:"image_url"`
+   ImagePayload []byte
+   Traits       map[string]CollectionTrait `json:"traits"`
}

type CollectibleTrait struct {
@@ -129,11 +130,12 @@ type CollectibleData struct {
    Description        string             `json:"description"`
    Permalink          string             `json:"permalink"`
    ImageURL           string             `json:"image_url"`
+   ImagePayload       []byte
    AnimationURL       string             `json:"animation_url"`
    AnimationMediaType string             `json:"animation_media_type"`
    Traits             []CollectibleTrait `json:"traits"`
    BackgroundColor    string             `json:"background_color"`
    TokenURI           string             `json:"token_uri"`
}

// Community-related collectible info. Present only for collectibles minted in a community.


@@ -2,9 +2,10 @@ package thirdparty

// Community-related info used by the wallet, cached in the wallet db.
type CommunityInfo struct {
-   CommunityName  string `json:"community_name"`
-   CommunityColor string `json:"community_color"`
-   CommunityImage string `json:"community_image"`
+   CommunityName         string `json:"community_name"`
+   CommunityColor        string `json:"community_color"`
+   CommunityImage        string `json:"community_image"`
+   CommunityImagePayload []byte
}

type CommunityInfoProvider interface {
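A side observation about the new payload fields (not something this commit configures): exported Go struct fields without a json tag are still marshaled under their field name, and encoding/json base64-encodes []byte values. A minimal sketch:

package main

import (
    "encoding/json"
    "fmt"
)

type example struct {
    ImageURL     string `json:"image_url"`
    ImagePayload []byte // untagged, like the fields added above
}

func main() {
    b, _ := json.Marshal(example{ImageURL: "https://example.org/x.png", ImagePayload: []byte{0x01, 0x02}})
    fmt.Println(string(b))
    // {"image_url":"https://example.org/x.png","ImagePayload":"AQI="}
}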


@@ -933,7 +933,7 @@ func TestFindBlocksCommand(t *testing.T) {
    }

    client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
    client.SetClient(tc.NetworkID(), tc)
-   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb), network.NewManager(appdb), appdb)
+   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb, nil), network.NewManager(appdb), appdb)
    tokenManager.SetTokens([]*token.Token{
        {
            Address: tokenTXXAddress,
@@ -1055,7 +1055,7 @@ func TestFetchTransfersForLoadedBlocks(t *testing.T) {
    client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
    client.SetClient(tc.NetworkID(), tc)
-   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb), network.NewManager(appdb), appdb)
+   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb, nil), network.NewManager(appdb), appdb)
    tokenManager.SetTokens([]*token.Token{
        {
@@ -1172,7 +1172,7 @@ func TestFetchNewBlocksCommand_findBlocksWithEthTransfers(t *testing.T) {
    client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
    client.SetClient(tc.NetworkID(), tc)
-   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb), network.NewManager(appdb), appdb)
+   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb, nil), network.NewManager(appdb), appdb)
    tokenManager.SetTokens([]*token.Token{
        {
@@ -1246,7 +1246,7 @@ func TestFetchNewBlocksCommand(t *testing.T) {
    client, _ := statusRpc.NewClient(nil, 1, params.UpstreamRPCConfig{Enabled: false, URL: ""}, []params.Network{}, db)
    client.SetClient(tc.NetworkID(), tc)
-   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb), network.NewManager(appdb), appdb)
+   tokenManager := token.NewTokenManager(db, client, community.NewManager(appdb, nil), network.NewManager(appdb), appdb)
    tokenManager.SetTokens([]*token.Token{
        {


@@ -14,6 +14,7 @@
// 1700414564_add_wallet_connect_pairings_table.up.sql (439B)
// 1701101493_add_token_blocks_range.up.sql (469B)
// 1702467441_wallet_connect_sessions_instead_of_pairings.up.sql (356B)
+// 1702577524_add_community_collections_and_collectibles_images_cache.up.sql (210B)
// doc.go (74B)

package migrations
@@ -24,7 +25,6 @@ import (
    "crypto/sha256"
    "fmt"
    "io"
-   "io/ioutil"
    "os"
    "path/filepath"
    "strings"
@@ -34,7 +34,7 @@ import (
func bindataRead(data []byte, name string) ([]byte, error) {
    gz, err := gzip.NewReader(bytes.NewBuffer(data))
    if err != nil {
-       return nil, fmt.Errorf("read %q: %v", name, err)
+       return nil, fmt.Errorf("read %q: %w", name, err)
    }

    var buf bytes.Buffer
@@ -42,7 +42,7 @@ func bindataRead(data []byte, name string) ([]byte, error) {
    clErr := gz.Close()

    if err != nil {
-       return nil, fmt.Errorf("read %q: %v", name, err)
+       return nil, fmt.Errorf("read %q: %w", name, err)
    }
    if clErr != nil {
        return nil, err
@@ -98,7 +98,7 @@ func _1691753758_initialUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1691753758_initial.up.sql", size: 5738, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1691753758_initial.up.sql", size: 5738, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6b, 0x25, 0x31, 0xc8, 0x27, 0x3, 0x6b, 0x9f, 0x15, 0x42, 0x2f, 0x85, 0xfb, 0xe3, 0x6, 0xea, 0xf7, 0x97, 0x12, 0x56, 0x3c, 0x9a, 0x5b, 0x1a, 0xca, 0xb1, 0x23, 0xfa, 0xcd, 0x57, 0x25, 0x5c}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6b, 0x25, 0x31, 0xc8, 0x27, 0x3, 0x6b, 0x9f, 0x15, 0x42, 0x2f, 0x85, 0xfb, 0xe3, 0x6, 0xea, 0xf7, 0x97, 0x12, 0x56, 0x3c, 0x9a, 0x5b, 0x1a, 0xca, 0xb1, 0x23, 0xfa, 0xcd, 0x57, 0x25, 0x5c}}
return a, nil return a, nil
} }
@@ -118,7 +118,7 @@ func _1692701329_add_collectibles_and_collections_data_cacheUpSql() (*asset, err
        return nil, err
    }
-   info := bindataFileInfo{name: "1692701329_add_collectibles_and_collections_data_cache.up.sql", size: 1808, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1692701329_add_collectibles_and_collections_data_cache.up.sql", size: 1808, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1, 0x51, 0xf4, 0x2b, 0x92, 0xde, 0x59, 0x65, 0xd8, 0x9b, 0x57, 0xe0, 0xfd, 0x7b, 0x12, 0xb, 0x29, 0x6e, 0x9d, 0xb5, 0x90, 0xe, 0xfa, 0x12, 0x97, 0xd, 0x61, 0x60, 0x7f, 0x32, 0x1d, 0xc3}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1, 0x51, 0xf4, 0x2b, 0x92, 0xde, 0x59, 0x65, 0xd8, 0x9b, 0x57, 0xe0, 0xfd, 0x7b, 0x12, 0xb, 0x29, 0x6e, 0x9d, 0xb5, 0x90, 0xe, 0xfa, 0x12, 0x97, 0xd, 0x61, 0x60, 0x7f, 0x32, 0x1d, 0xc3}}
return a, nil return a, nil
} }
@@ -138,7 +138,7 @@ func _1692701339_add_scope_to_pendingUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1692701339_add_scope_to_pending.up.sql", size: 576, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1692701339_add_scope_to_pending.up.sql", size: 576, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x36, 0x8a, 0x5e, 0xe2, 0x63, 0x15, 0x37, 0xba, 0x55, 0x18, 0xf3, 0xcc, 0xe0, 0x5, 0x84, 0xe1, 0x5b, 0xe8, 0x1, 0x32, 0x6b, 0x9f, 0x7d, 0x9f, 0xd9, 0x23, 0x6c, 0xa9, 0xb5, 0xdc, 0xf4, 0x93}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x36, 0x8a, 0x5e, 0xe2, 0x63, 0x15, 0x37, 0xba, 0x55, 0x18, 0xf3, 0xcc, 0xe0, 0x5, 0x84, 0xe1, 0x5b, 0xe8, 0x1, 0x32, 0x6b, 0x9f, 0x7d, 0x9f, 0xd9, 0x23, 0x6c, 0xa9, 0xb5, 0xdc, 0xf4, 0x93}}
return a, nil return a, nil
} }
@@ -158,7 +158,7 @@ func _1694540071_add_collectibles_ownership_update_timestampUpSql() (*asset, err
        return nil, err
    }
-   info := bindataFileInfo{name: "1694540071_add_collectibles_ownership_update_timestamp.up.sql", size: 349, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1694540071_add_collectibles_ownership_update_timestamp.up.sql", size: 349, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x7f, 0x45, 0xc7, 0xce, 0x79, 0x63, 0xbc, 0x6f, 0x83, 0x5f, 0xe2, 0x3, 0x56, 0xcc, 0x5, 0x2f, 0x85, 0xda, 0x7e, 0xea, 0xf5, 0xd2, 0xac, 0x19, 0xd4, 0xd8, 0x5e, 0xdd, 0xed, 0xe2, 0xa9, 0x97}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x7f, 0x45, 0xc7, 0xce, 0x79, 0x63, 0xbc, 0x6f, 0x83, 0x5f, 0xe2, 0x3, 0x56, 0xcc, 0x5, 0x2f, 0x85, 0xda, 0x7e, 0xea, 0xf5, 0xd2, 0xac, 0x19, 0xd4, 0xd8, 0x5e, 0xdd, 0xed, 0xe2, 0xa9, 0x97}}
return a, nil return a, nil
} }
@@ -178,7 +178,7 @@ func _1694692748_add_raw_balance_to_token_balancesUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1694692748_add_raw_balance_to_token_balances.up.sql", size: 165, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1694692748_add_raw_balance_to_token_balances.up.sql", size: 165, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd4, 0xe0, 0x5b, 0x42, 0xf0, 0x96, 0xa5, 0xf5, 0xed, 0xc0, 0x97, 0x88, 0xb0, 0x6d, 0xfe, 0x7d, 0x97, 0x2e, 0x17, 0xd2, 0x16, 0xbc, 0x2a, 0xf2, 0xcc, 0x67, 0x9e, 0xc5, 0x47, 0xf6, 0x69, 0x1}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd4, 0xe0, 0x5b, 0x42, 0xf0, 0x96, 0xa5, 0xf5, 0xed, 0xc0, 0x97, 0x88, 0xb0, 0x6d, 0xfe, 0x7d, 0x97, 0x2e, 0x17, 0xd2, 0x16, 0xbc, 0x2a, 0xf2, 0xcc, 0x67, 0x9e, 0xc5, 0x47, 0xf6, 0x69, 0x1}}
return a, nil return a, nil
} }
@@ -198,7 +198,7 @@ func _1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSq
        return nil, err
    }
-   info := bindataFileInfo{name: "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql", size: 275, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql", size: 275, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfa, 0x2, 0xa, 0x7f, 0x4b, 0xd1, 0x3, 0xd0, 0x3, 0x29, 0x84, 0x31, 0xed, 0x49, 0x4f, 0xb1, 0x2d, 0xd7, 0x80, 0x41, 0x5b, 0xfa, 0x6, 0xae, 0xb4, 0xf6, 0x6b, 0x49, 0xee, 0x57, 0x33, 0x76}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xfa, 0x2, 0xa, 0x7f, 0x4b, 0xd1, 0x3, 0xd0, 0x3, 0x29, 0x84, 0x31, 0xed, 0x49, 0x4f, 0xb1, 0x2d, 0xd7, 0x80, 0x41, 0x5b, 0xfa, 0x6, 0xae, 0xb4, 0xf6, 0x6b, 0x49, 0xee, 0x57, 0x33, 0x76}}
return a, nil return a, nil
} }
@@ -218,7 +218,7 @@ func _1695932536_balance_history_v2UpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1695932536_balance_history_v2.up.sql", size: 653, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1695932536_balance_history_v2.up.sql", size: 653, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x37, 0xf4, 0x14, 0x91, 0xf6, 0x5f, 0xc4, 0x9b, 0xb7, 0x83, 0x32, 0x72, 0xbe, 0x82, 0x42, 0x39, 0xa4, 0x3b, 0xc9, 0x78, 0x3d, 0xca, 0xd4, 0xbf, 0xfc, 0x7a, 0x33, 0x1e, 0xcd, 0x9e, 0xe4, 0x85}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x37, 0xf4, 0x14, 0x91, 0xf6, 0x5f, 0xc4, 0x9b, 0xb7, 0x83, 0x32, 0x72, 0xbe, 0x82, 0x42, 0x39, 0xa4, 0x3b, 0xc9, 0x78, 0x3d, 0xca, 0xd4, 0xbf, 0xfc, 0x7a, 0x33, 0x1e, 0xcd, 0x9e, 0xe4, 0x85}}
return a, nil return a, nil
} }
@@ -238,7 +238,7 @@ func _1696853635_input_dataUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1696853635_input_data.up.sql", size: 23140, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "1696853635_input_data.up.sql", size: 23140, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x89, 0x30, 0x33, 0x33, 0x55, 0xc5, 0x57, 0x2b, 0xaf, 0xef, 0x3d, 0x8d, 0x2a, 0xaa, 0x5c, 0x32, 0xd1, 0xf4, 0xd, 0x4a, 0xd0, 0x33, 0x4a, 0xe8, 0xf6, 0x8, 0x6b, 0x65, 0xcc, 0xba, 0xed, 0x42}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x89, 0x30, 0x33, 0x33, 0x55, 0xc5, 0x57, 0x2b, 0xaf, 0xef, 0x3d, 0x8d, 0x2a, 0xaa, 0x5c, 0x32, 0xd1, 0xf4, 0xd, 0x4a, 0xd0, 0x33, 0x4a, 0xe8, 0xf6, 0x8, 0x6b, 0x65, 0xcc, 0xba, 0xed, 0x42}}
return a, nil return a, nil
} }
@@ -258,7 +258,7 @@ func _1698117918_add_community_id_to_tokensUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1698117918_add_community_id_to_tokens.up.sql", size: 61, mode: os.FileMode(0644), modTime: time.Unix(1700489912, 0)}
+   info := bindataFileInfo{name: "1698117918_add_community_id_to_tokens.up.sql", size: 61, mode: os.FileMode(0644), modTime: time.Unix(1700584118, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb3, 0x82, 0xdb, 0xde, 0x3, 0x3, 0xc, 0x67, 0xf3, 0x54, 0xc4, 0xad, 0xd6, 0xce, 0x56, 0xfb, 0xc1, 0x87, 0xd7, 0xda, 0xab, 0xec, 0x1, 0xe1, 0x7d, 0xb3, 0x63, 0xd6, 0xe5, 0x5d, 0x1c, 0x15}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb3, 0x82, 0xdb, 0xde, 0x3, 0x3, 0xc, 0x67, 0xf3, 0x54, 0xc4, 0xad, 0xd6, 0xce, 0x56, 0xfb, 0xc1, 0x87, 0xd7, 0xda, 0xab, 0xec, 0x1, 0xe1, 0x7d, 0xb3, 0x63, 0xd6, 0xe5, 0x5d, 0x1c, 0x15}}
return a, nil return a, nil
} }
@@ -278,7 +278,7 @@ func _1698257443_add_community_metadata_to_wallet_dbUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1698257443_add_community_metadata_to_wallet_db.up.sql", size: 323, mode: os.FileMode(0644), modTime: time.Unix(1700489912, 0)}
+   info := bindataFileInfo{name: "1698257443_add_community_metadata_to_wallet_db.up.sql", size: 323, mode: os.FileMode(0644), modTime: time.Unix(1700584118, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x22, 0xd3, 0x4, 0x25, 0xfa, 0x23, 0x1, 0x48, 0x83, 0x26, 0x20, 0xf2, 0x3d, 0xbc, 0xc1, 0xa7, 0x7c, 0x27, 0x7c, 0x1d, 0x63, 0x3, 0xa, 0xd0, 0xce, 0x47, 0x86, 0xdc, 0xa1, 0x3c, 0x2, 0x1c}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x22, 0xd3, 0x4, 0x25, 0xfa, 0x23, 0x1, 0x48, 0x83, 0x26, 0x20, 0xf2, 0x3d, 0xbc, 0xc1, 0xa7, 0x7c, 0x27, 0x7c, 0x1d, 0x63, 0x3, 0xa, 0xd0, 0xce, 0x47, 0x86, 0xdc, 0xa1, 0x3c, 0x2, 0x1c}}
return a, nil return a, nil
} }
@@ -298,7 +298,7 @@ func _1699987075_add_timestamp_and_state_to_community_data_cacheUpSql() (*asset,
        return nil, err
    }
-   info := bindataFileInfo{name: "1699987075_add_timestamp_and_state_to_community_data_cache.up.sql", size: 865, mode: os.FileMode(0644), modTime: time.Unix(1701075679, 0)}
+   info := bindataFileInfo{name: "1699987075_add_timestamp_and_state_to_community_data_cache.up.sql", size: 865, mode: os.FileMode(0644), modTime: time.Unix(1700584118, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc3, 0xee, 0x37, 0xf9, 0x7f, 0x9e, 0xfe, 0x93, 0x66, 0x2b, 0xd, 0x57, 0xf4, 0x89, 0x6c, 0x51, 0xfd, 0x14, 0xe9, 0xcd, 0xab, 0x65, 0xe7, 0xa7, 0x83, 0x7e, 0xe0, 0x5c, 0x14, 0x49, 0xf3, 0xe5}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc3, 0xee, 0x37, 0xf9, 0x7f, 0x9e, 0xfe, 0x93, 0x66, 0x2b, 0xd, 0x57, 0xf4, 0x89, 0x6c, 0x51, 0xfd, 0x14, 0xe9, 0xcd, 0xab, 0x65, 0xe7, 0xa7, 0x83, 0x7e, 0xe0, 0x5c, 0x14, 0x49, 0xf3, 0xe5}}
return a, nil return a, nil
} }
@@ -318,7 +318,7 @@ func _1700414564_add_wallet_connect_pairings_tableUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1700414564_add_wallet_connect_pairings_table.up.sql", size: 439, mode: os.FileMode(0644), modTime: time.Unix(1702469123, 0)}
+   info := bindataFileInfo{name: "1700414564_add_wallet_connect_pairings_table.up.sql", size: 439, mode: os.FileMode(0644), modTime: time.Unix(1700845016, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa9, 0x77, 0x5e, 0x19, 0x62, 0x3c, 0x3a, 0x81, 0x16, 0xa0, 0x95, 0x35, 0x62, 0xab, 0x5e, 0x2b, 0xea, 0x11, 0x71, 0x11, 0xd0, 0x9, 0xab, 0x9c, 0xab, 0xf2, 0xdd, 0x5f, 0x88, 0x83, 0x9a, 0x93}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa9, 0x77, 0x5e, 0x19, 0x62, 0x3c, 0x3a, 0x81, 0x16, 0xa0, 0x95, 0x35, 0x62, 0xab, 0x5e, 0x2b, 0xea, 0x11, 0x71, 0x11, 0xd0, 0x9, 0xab, 0x9c, 0xab, 0xf2, 0xdd, 0x5f, 0x88, 0x83, 0x9a, 0x93}}
return a, nil return a, nil
} }
@@ -338,7 +338,7 @@ func _1701101493_add_token_blocks_rangeUpSql() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "1701101493_add_token_blocks_range.up.sql", size: 469, mode: os.FileMode(0644), modTime: time.Unix(1701936747, 0)}
+   info := bindataFileInfo{name: "1701101493_add_token_blocks_range.up.sql", size: 469, mode: os.FileMode(0644), modTime: time.Unix(1701793991, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe0, 0x37, 0xfb, 0x1a, 0x6c, 0x8c, 0xa8, 0x1e, 0xa2, 0xa5, 0x1f, 0x90, 0x73, 0x3e, 0x31, 0x5f, 0x48, 0x1e, 0x9a, 0x37, 0x27, 0x1c, 0xc, 0x67, 0x1, 0xcd, 0xec, 0x85, 0x4c, 0x1c, 0x26, 0x52}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe0, 0x37, 0xfb, 0x1a, 0x6c, 0x8c, 0xa8, 0x1e, 0xa2, 0xa5, 0x1f, 0x90, 0x73, 0x3e, 0x31, 0x5f, 0x48, 0x1e, 0x9a, 0x37, 0x27, 0x1c, 0xc, 0x67, 0x1, 0xcd, 0xec, 0x85, 0x4c, 0x1c, 0x26, 0x52}}
return a, nil return a, nil
} }
@@ -358,11 +358,31 @@ func _1702467441_wallet_connect_sessions_instead_of_pairingsUpSql() (*asset, err
        return nil, err
    }
-   info := bindataFileInfo{name: "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql", size: 356, mode: os.FileMode(0644), modTime: time.Unix(1702472443, 0)}
+   info := bindataFileInfo{name: "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql", size: 356, mode: os.FileMode(0644), modTime: time.Unix(1702580157, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x73, 0x5f, 0x0, 0x60, 0x6, 0x28, 0x76, 0x61, 0x39, 0xdc, 0xa1, 0x84, 0x80, 0x46, 0x8a, 0xe4, 0x42, 0xb5, 0x1f, 0x18, 0x14, 0x23, 0x46, 0xb9, 0x51, 0xf, 0x62, 0xac, 0xc, 0x7, 0x98, 0xe}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x73, 0x5f, 0x0, 0x60, 0x6, 0x28, 0x76, 0x61, 0x39, 0xdc, 0xa1, 0x84, 0x80, 0x46, 0x8a, 0xe4, 0x42, 0xb5, 0x1f, 0x18, 0x14, 0x23, 0x46, 0xb9, 0x51, 0xf, 0x62, 0xac, 0xc, 0x7, 0x98, 0xe}}
    return a, nil
}
var __1702577524_add_community_collections_and_collectibles_images_cacheUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xd2\xd5\x55\x70\x4c\x49\x51\x48\xce\xcf\x29\xcd\xcd\x2b\xe6\x72\xf4\x09\x71\x0d\x52\x08\x71\x74\xf2\x71\x55\x48\xce\xcf\xcd\x2d\xcd\xcb\x2c\xa9\x8c\x4f\x49\x2c\x49\x8c\x4f\x4e\x4c\xce\x48\x55\x70\x74\x71\x51\x70\xf6\xf7\x09\xf5\xf5\x53\xc8\xcc\x4d\x4c\x4f\x8d\x2f\x48\xac\xcc\xc9\x4f\x4c\x51\x70\xf2\xf1\x77\xb2\x46\xd3\x9f\x93\x93\x9a\x5c\x92\x99\x9f\x47\xa1\x01\x49\x39\xa9\x24\x98\x00\x08\x00\x00\xff\xff\x6f\x7a\x87\x63\xd2\x00\x00\x00")
func _1702577524_add_community_collections_and_collectibles_images_cacheUpSqlBytes() ([]byte, error) {
    return bindataRead(
        __1702577524_add_community_collections_and_collectibles_images_cacheUpSql,
        "1702577524_add_community_collections_and_collectibles_images_cache.up.sql",
    )
}

func _1702577524_add_community_collections_and_collectibles_images_cacheUpSql() (*asset, error) {
    bytes, err := _1702577524_add_community_collections_and_collectibles_images_cacheUpSqlBytes()
    if err != nil {
        return nil, err
    }

    info := bindataFileInfo{name: "1702577524_add_community_collections_and_collectibles_images_cache.up.sql", size: 210, mode: os.FileMode(0644), modTime: time.Unix(1702580163, 0)}
    a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x8e, 0x1b, 0x32, 0x2c, 0xfa, 0x11, 0x5e, 0x5e, 0x5d, 0xef, 0x92, 0xa0, 0x29, 0x52, 0xbf, 0x6e, 0xe3, 0x30, 0xe4, 0xdf, 0xdc, 0x5, 0xbe, 0xd1, 0xf8, 0x3e, 0xd9, 0x9b, 0xd6, 0x9b, 0x95, 0x96}}
    return a, nil
}
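The new migration is embedded above as a gzip blob, so its SQL is not readable here. Judging from the image_payload columns referenced throughout this commit, it presumably amounts to something close to the following reconstruction (an assumption, not a decompression of the blob):

package example

// assumedMigrationSQL is a hypothetical reconstruction of
// 1702577524_add_community_collections_and_collectibles_images_cache.up.sql.
const assumedMigrationSQL = `
ALTER TABLE community_data_cache ADD COLUMN image_payload BLOB;
ALTER TABLE collection_data_cache ADD COLUMN image_payload BLOB;
ALTER TABLE collectible_data_cache ADD COLUMN image_payload BLOB;
`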
var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\xc9\xb1\x0d\xc4\x20\x0c\x05\xd0\x9e\x29\xfe\x02\xd8\xfd\x6d\xe3\x4b\xac\x2f\x44\x82\x09\x78\x7f\xa5\x49\xfd\xa6\x1d\xdd\xe8\xd8\xcf\x55\x8a\x2a\xe3\x47\x1f\xbe\x2c\x1d\x8c\xfa\x6f\xe3\xb4\x34\xd4\xd9\x89\xbb\x71\x59\xb6\x18\x1b\x35\x20\xa2\x9f\x0a\x03\xa2\xe5\x0d\x00\x00\xff\xff\x60\xcd\x06\xbe\x4a\x00\x00\x00") var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\xc9\xb1\x0d\xc4\x20\x0c\x05\xd0\x9e\x29\xfe\x02\xd8\xfd\x6d\xe3\x4b\xac\x2f\x44\x82\x09\x78\x7f\xa5\x49\xfd\xa6\x1d\xdd\xe8\xd8\xcf\x55\x8a\x2a\xe3\x47\x1f\xbe\x2c\x1d\x8c\xfa\x6f\xe3\xb4\x34\xd4\xd9\x89\xbb\x71\x59\xb6\x18\x1b\x35\x20\xa2\x9f\x0a\x03\xa2\xe5\x0d\x00\x00\xff\xff\x60\xcd\x06\xbe\x4a\x00\x00\x00")
func docGoBytes() ([]byte, error) {
@@ -378,7 +398,7 @@ func docGo() (*asset, error) {
        return nil, err
    }
-   info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1700071475, 0)}
+   info := bindataFileInfo{name: "doc.go", size: 74, mode: os.FileMode(0644), modTime: time.Unix(1700230544, 0)}
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xde, 0x7c, 0x28, 0xcd, 0x47, 0xf2, 0xfa, 0x7c, 0x51, 0x2d, 0xd8, 0x38, 0xb, 0xb0, 0x34, 0x9d, 0x4c, 0x62, 0xa, 0x9e, 0x28, 0xc3, 0x31, 0x23, 0xd9, 0xbb, 0x89, 0x9f, 0xa0, 0x89, 0x1f, 0xe8}} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xde, 0x7c, 0x28, 0xcd, 0x47, 0xf2, 0xfa, 0x7c, 0x51, 0x2d, 0xd8, 0x38, 0xb, 0xb0, 0x34, 0x9d, 0x4c, 0x62, 0xa, 0x9e, 0x28, 0xc3, 0x31, 0x23, 0xd9, 0xbb, 0x89, 0x9f, 0xa0, 0x89, 0x1f, 0xe8}}
return a, nil return a, nil
} }
@ -474,46 +494,38 @@ func AssetNames() []string {
// _bindata is a table, holding each asset generator, mapped to its name. // _bindata is a table, holding each asset generator, mapped to its name.
var _bindata = map[string]func() (*asset, error){ var _bindata = map[string]func() (*asset, error){
"1691753758_initial.up.sql": _1691753758_initialUpSql, "1691753758_initial.up.sql": _1691753758_initialUpSql,
"1692701329_add_collectibles_and_collections_data_cache.up.sql": _1692701329_add_collectibles_and_collections_data_cacheUpSql,
"1692701329_add_collectibles_and_collections_data_cache.up.sql": _1692701329_add_collectibles_and_collections_data_cacheUpSql, "1692701339_add_scope_to_pending.up.sql": _1692701339_add_scope_to_pendingUpSql,
"1694540071_add_collectibles_ownership_update_timestamp.up.sql": _1694540071_add_collectibles_ownership_update_timestampUpSql,
"1692701339_add_scope_to_pending.up.sql": _1692701339_add_scope_to_pendingUpSql, "1694692748_add_raw_balance_to_token_balances.up.sql": _1694692748_add_raw_balance_to_token_balancesUpSql,
"1694540071_add_collectibles_ownership_update_timestamp.up.sql": _1694540071_add_collectibles_ownership_update_timestampUpSql,
"1694692748_add_raw_balance_to_token_balances.up.sql": _1694692748_add_raw_balance_to_token_balancesUpSql,
"1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": _1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql, "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": _1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql,
"1695932536_balance_history_v2.up.sql": _1695932536_balance_history_v2UpSql,
"1695932536_balance_history_v2.up.sql": _1695932536_balance_history_v2UpSql, "1696853635_input_data.up.sql": _1696853635_input_dataUpSql,
"1698117918_add_community_id_to_tokens.up.sql": _1698117918_add_community_id_to_tokensUpSql,
"1696853635_input_data.up.sql": _1696853635_input_dataUpSql, "1698257443_add_community_metadata_to_wallet_db.up.sql": _1698257443_add_community_metadata_to_wallet_dbUpSql,
"1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": _1699987075_add_timestamp_and_state_to_community_data_cacheUpSql,
"1698117918_add_community_id_to_tokens.up.sql": _1698117918_add_community_id_to_tokensUpSql, "1700414564_add_wallet_connect_pairings_table.up.sql": _1700414564_add_wallet_connect_pairings_tableUpSql,
"1701101493_add_token_blocks_range.up.sql": _1701101493_add_token_blocks_rangeUpSql,
"1698257443_add_community_metadata_to_wallet_db.up.sql": _1698257443_add_community_metadata_to_wallet_dbUpSql, "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": _1702467441_wallet_connect_sessions_instead_of_pairingsUpSql,
"1702577524_add_community_collections_and_collectibles_images_cache.up.sql": _1702577524_add_community_collections_and_collectibles_images_cacheUpSql,
"1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": _1699987075_add_timestamp_and_state_to_community_data_cacheUpSql,
"1700414564_add_wallet_connect_pairings_table.up.sql": _1700414564_add_wallet_connect_pairings_tableUpSql,
"1701101493_add_token_blocks_range.up.sql": _1701101493_add_token_blocks_rangeUpSql,
"1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": _1702467441_wallet_connect_sessions_instead_of_pairingsUpSql,
"doc.go": docGo, "doc.go": docGo,
} }
// AssetDebug is true if the assets were built with the debug flag enabled.
const AssetDebug = false
// AssetDir returns the file names below a certain // AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata. // directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the // For example if you run go-bindata on data/... and data contains the
// following hierarchy: // following hierarchy:
// data/
//   foo.txt
//   img/
//     a.png
//     b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}, // then AssetDir("data") would return []string{"foo.txt", "img"},
// AssetDir("data/img") would return []string{"a.png", "b.png"}, // AssetDir("data/img") would return []string{"a.png", "b.png"},
// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and // AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
@ -546,21 +558,22 @@ type bintree struct {
} }
var _bintree = &bintree{nil, map[string]*bintree{ var _bintree = &bintree{nil, map[string]*bintree{
"1691753758_initial.up.sql": &bintree{_1691753758_initialUpSql, map[string]*bintree{}}, "1691753758_initial.up.sql": {_1691753758_initialUpSql, map[string]*bintree{}},
"1692701329_add_collectibles_and_collections_data_cache.up.sql": &bintree{_1692701329_add_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}}, "1692701329_add_collectibles_and_collections_data_cache.up.sql": {_1692701329_add_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1692701339_add_scope_to_pending.up.sql": &bintree{_1692701339_add_scope_to_pendingUpSql, map[string]*bintree{}}, "1692701339_add_scope_to_pending.up.sql": {_1692701339_add_scope_to_pendingUpSql, map[string]*bintree{}},
"1694540071_add_collectibles_ownership_update_timestamp.up.sql": &bintree{_1694540071_add_collectibles_ownership_update_timestampUpSql, map[string]*bintree{}}, "1694540071_add_collectibles_ownership_update_timestamp.up.sql": {_1694540071_add_collectibles_ownership_update_timestampUpSql, map[string]*bintree{}},
"1694692748_add_raw_balance_to_token_balances.up.sql": &bintree{_1694692748_add_raw_balance_to_token_balancesUpSql, map[string]*bintree{}}, "1694692748_add_raw_balance_to_token_balances.up.sql": {_1694692748_add_raw_balance_to_token_balancesUpSql, map[string]*bintree{}},
"1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": &bintree{_1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}}, "1695133989_add_community_id_to_collectibles_and_collections_data_cache.up.sql": {_1695133989_add_community_id_to_collectibles_and_collections_data_cacheUpSql, map[string]*bintree{}},
"1695932536_balance_history_v2.up.sql": &bintree{_1695932536_balance_history_v2UpSql, map[string]*bintree{}}, "1695932536_balance_history_v2.up.sql": {_1695932536_balance_history_v2UpSql, map[string]*bintree{}},
"1696853635_input_data.up.sql": &bintree{_1696853635_input_dataUpSql, map[string]*bintree{}}, "1696853635_input_data.up.sql": {_1696853635_input_dataUpSql, map[string]*bintree{}},
"1698117918_add_community_id_to_tokens.up.sql": &bintree{_1698117918_add_community_id_to_tokensUpSql, map[string]*bintree{}}, "1698117918_add_community_id_to_tokens.up.sql": {_1698117918_add_community_id_to_tokensUpSql, map[string]*bintree{}},
"1698257443_add_community_metadata_to_wallet_db.up.sql": &bintree{_1698257443_add_community_metadata_to_wallet_dbUpSql, map[string]*bintree{}}, "1698257443_add_community_metadata_to_wallet_db.up.sql": {_1698257443_add_community_metadata_to_wallet_dbUpSql, map[string]*bintree{}},
"1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": &bintree{_1699987075_add_timestamp_and_state_to_community_data_cacheUpSql, map[string]*bintree{}}, "1699987075_add_timestamp_and_state_to_community_data_cache.up.sql": {_1699987075_add_timestamp_and_state_to_community_data_cacheUpSql, map[string]*bintree{}},
"1700414564_add_wallet_connect_pairings_table.up.sql": &bintree{_1700414564_add_wallet_connect_pairings_tableUpSql, map[string]*bintree{}}, "1700414564_add_wallet_connect_pairings_table.up.sql": {_1700414564_add_wallet_connect_pairings_tableUpSql, map[string]*bintree{}},
"1701101493_add_token_blocks_range.up.sql": &bintree{_1701101493_add_token_blocks_rangeUpSql, map[string]*bintree{}}, "1701101493_add_token_blocks_range.up.sql": {_1701101493_add_token_blocks_rangeUpSql, map[string]*bintree{}},
"1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": &bintree{_1702467441_wallet_connect_sessions_instead_of_pairingsUpSql, map[string]*bintree{}}, "1702467441_wallet_connect_sessions_instead_of_pairings.up.sql": {_1702467441_wallet_connect_sessions_instead_of_pairingsUpSql, map[string]*bintree{}},
"doc.go": &bintree{docGo, map[string]*bintree{}}, "1702577524_add_community_collections_and_collectibles_images_cache.up.sql": {_1702577524_add_community_collections_and_collectibles_images_cacheUpSql, map[string]*bintree{}},
"doc.go": {docGo, map[string]*bintree{}},
}} }}
// RestoreAsset restores an asset under the given directory. // RestoreAsset restores an asset under the given directory.
@ -577,7 +590,7 @@ func RestoreAsset(dir, name string) error {
if err != nil { if err != nil {
return err return err
} }
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) err = os.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil { if err != nil {
return err return err
} }
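As an aside, the regenerated file now calls os.WriteFile instead of the deprecated ioutil.WriteFile. A minimal sketch of using the generated RestoreAsset helper touched above; the function name and target directory are illustrative only:

// restoreNewMigration is a hypothetical snippet, not part of this commit.
// It writes the embedded migration back out as a plain .sql file under dir.
func restoreNewMigration(dir string) error {
	return RestoreAsset(dir, "1702577524_add_community_collections_and_collectibles_images_cache.up.sql")
}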

View File

@ -0,0 +1,4 @@
-- Add columns
ALTER TABLE community_data_cache ADD COLUMN image_payload BLOB;
ALTER TABLE collection_data_cache ADD COLUMN image_payload BLOB;
ALTER TABLE collectible_data_cache ADD COLUMN image_payload BLOB;
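The new image_payload columns are populated and served by the collectibles code elsewhere in this commit. Purely as an illustration, a sketch of persisting a fetched image blob with database/sql; the helper name and the WHERE-clause key columns (chain_id, contract_address, token_id) are placeholders, not the actual schema keys, which are defined in earlier migrations not shown in this diff:

// setCollectibleImagePayload is a hypothetical helper, not part of this commit.
// It stores a downloaded image blob in the new collectible_data_cache column.
func setCollectibleImagePayload(db *sql.DB, chainID uint64, contractAddress, tokenID string, payload []byte) error {
	_, err := db.Exec(
		`UPDATE collectible_data_cache SET image_payload = ? WHERE chain_id = ? AND contract_address = ? AND token_id = ?`,
		payload, chainID, contractAddress, tokenID,
	)
	return err
}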